Mirror of https://github.com/jc21/nginx-proxy-manager.git
Synced 2024-08-30 18:22:48 +00:00

Merge branch 'develop' into openidc
This commit is contained in commit 694d8a0f21

21  .github/workflows/stale.yml  vendored  Normal file
@ -0,0 +1,21 @@
name: 'Close stale issues and PRs'
on:
  schedule:
    - cron: '30 1 * * *'
  workflow_dispatch:

jobs:
  stale:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/stale@v9
        with:
          stale-issue-label: 'stale'
          stale-pr-label: 'stale'
          stale-issue-message: 'Issue is now considered stale. If you want to keep it open, please comment :+1:'
          stale-pr-message: 'PR is now considered stale. If you want to keep it open, please comment :+1:'
          close-issue-message: 'Issue was closed due to inactivity.'
          close-pr-message: 'PR was closed due to inactivity.'
          days-before-stale: 182
          days-before-close: 365
          operations-per-run: 50
4  .gitignore  vendored
@ -3,3 +3,7 @@
._*
.vscode
certbot-help.txt
test/node_modules
*/node_modules
docker/dev/dnsrouter-config.json.tmp
docker/dev/resolv.conf
263  Jenkinsfile  vendored
@ -1,3 +1,9 @@
|
||||
import groovy.transform.Field
|
||||
|
||||
@Field
|
||||
def shOutput = ""
|
||||
def buildxPushTags = ""
|
||||
|
||||
pipeline {
|
||||
agent {
|
||||
label 'docker-multiarch'
|
||||
@ -8,14 +14,12 @@ pipeline {
|
||||
ansiColor('xterm')
|
||||
}
|
||||
environment {
|
||||
IMAGE = "nginx-proxy-manager"
|
||||
IMAGE = 'nginx-proxy-manager'
|
||||
BUILD_VERSION = getVersion()
|
||||
MAJOR_VERSION = "2"
|
||||
BRANCH_LOWER = "${BRANCH_NAME.toLowerCase().replaceAll('/', '-')}"
|
||||
COMPOSE_PROJECT_NAME = "npm_${BRANCH_LOWER}_${BUILD_NUMBER}"
|
||||
COMPOSE_FILE = 'docker/docker-compose.ci.yml'
|
||||
MAJOR_VERSION = '2'
|
||||
BRANCH_LOWER = "${BRANCH_NAME.toLowerCase().replaceAll('\\\\', '-').replaceAll('/', '-').replaceAll('\\.', '-')}"
|
||||
BUILDX_NAME = "npm_${BRANCH_LOWER}_${BUILD_NUMBER}"
|
||||
COMPOSE_INTERACTIVE_NO_CLI = 1
|
||||
BUILDX_NAME = "${COMPOSE_PROJECT_NAME}"
|
||||
}
|
||||
stages {
|
||||
stage('Environment') {
|
||||
@ -26,7 +30,7 @@ pipeline {
|
||||
}
|
||||
steps {
|
||||
script {
|
||||
env.BUILDX_PUSH_TAGS = "-t docker.io/jc21/${IMAGE}:${BUILD_VERSION} -t docker.io/jc21/${IMAGE}:${MAJOR_VERSION} -t docker.io/jc21/${IMAGE}:latest"
|
||||
buildxPushTags = "-t docker.io/jc21/${IMAGE}:${BUILD_VERSION} -t docker.io/jc21/${IMAGE}:${MAJOR_VERSION} -t docker.io/jc21/${IMAGE}:latest"
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -39,7 +43,7 @@ pipeline {
|
||||
steps {
|
||||
script {
|
||||
// Defaults to the branch name, which applies to all branches AND PRs
|
||||
env.BUILDX_PUSH_TAGS = "-t docker.io/jc21/${IMAGE}:github-${BRANCH_LOWER}"
|
||||
buildxPushTags = "-t docker.io/jc21/${IMAGE}:github-${BRANCH_LOWER}"
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -54,105 +58,96 @@ pipeline {
|
||||
}
|
||||
}
|
||||
}
|
||||
stage('Frontend') {
|
||||
steps {
|
||||
sh './scripts/frontend-build'
|
||||
}
|
||||
}
|
||||
stage('Backend') {
|
||||
steps {
|
||||
echo 'Checking Syntax ...'
|
||||
// See: https://github.com/yarnpkg/yarn/issues/3254
|
||||
sh '''docker run --rm \\
|
||||
-v "$(pwd)/backend:/app" \\
|
||||
-v "$(pwd)/global:/app/global" \\
|
||||
-w /app \\
|
||||
node:latest \\
|
||||
sh -c "yarn install && yarn eslint . && rm -rf node_modules"
|
||||
'''
|
||||
|
||||
echo 'Docker Build ...'
|
||||
sh '''docker build --pull --no-cache --squash --compress \\
|
||||
-t "${IMAGE}:ci-${BUILD_NUMBER}" \\
|
||||
-f docker/Dockerfile \\
|
||||
--build-arg TARGETPLATFORM=linux/amd64 \\
|
||||
--build-arg BUILDPLATFORM=linux/amd64 \\
|
||||
--build-arg BUILD_VERSION="${BUILD_VERSION}" \\
|
||||
--build-arg BUILD_COMMIT="${BUILD_COMMIT}" \\
|
||||
--build-arg BUILD_DATE="$(date '+%Y-%m-%d %T %Z')" \\
|
||||
.
|
||||
'''
|
||||
}
|
||||
}
|
||||
stage('Integration Tests Sqlite') {
|
||||
steps {
|
||||
// Bring up a stack
|
||||
sh 'docker-compose up -d fullstack-sqlite'
|
||||
sh './scripts/wait-healthy $(docker-compose ps -q fullstack-sqlite) 120'
|
||||
|
||||
// Run tests
|
||||
sh 'rm -rf test/results'
|
||||
sh 'docker-compose up cypress-sqlite'
|
||||
// Get results
|
||||
sh 'docker cp -L "$(docker-compose ps -q cypress-sqlite):/test/results" test/'
|
||||
}
|
||||
post {
|
||||
always {
|
||||
// Dumps to analyze later
|
||||
sh 'mkdir -p debug'
|
||||
sh 'docker-compose logs fullstack-sqlite | gzip > debug/docker_fullstack_sqlite.log.gz'
|
||||
sh 'docker-compose logs db | gzip > debug/docker_db.log.gz'
|
||||
// Cypress videos and screenshot artifacts
|
||||
dir(path: 'test/results') {
|
||||
archiveArtifacts allowEmptyArchive: true, artifacts: '**/*', excludes: '**/*.xml'
|
||||
stage('Builds') {
|
||||
parallel {
|
||||
stage('Project') {
|
||||
steps {
|
||||
script {
|
||||
// Frontend and Backend
|
||||
def shStatusCode = sh(label: 'Checking and Building', returnStatus: true, script: '''
|
||||
set -e
|
||||
./scripts/ci/frontend-build > ${WORKSPACE}/tmp-sh-build 2>&1
|
||||
./scripts/ci/test-and-build > ${WORKSPACE}/tmp-sh-build 2>&1
|
||||
''')
|
||||
shOutput = readFile "${env.WORKSPACE}/tmp-sh-build"
|
||||
if (shStatusCode != 0) {
|
||||
error "${shOutput}"
|
||||
}
|
||||
}
|
||||
}
|
||||
post {
|
||||
always {
|
||||
sh 'rm -f ${WORKSPACE}/tmp-sh-build'
|
||||
}
|
||||
failure {
|
||||
npmGithubPrComment("CI Error:\n\n```\n${shOutput}\n```", true)
|
||||
}
|
||||
}
|
||||
}
|
||||
stage('Docs') {
|
||||
steps {
|
||||
dir(path: 'docs') {
|
||||
sh 'yarn install'
|
||||
sh 'yarn build'
|
||||
}
|
||||
}
|
||||
junit 'test/results/junit/*'
|
||||
}
|
||||
}
|
||||
}
|
||||
stage('Integration Tests Mysql') {
|
||||
steps {
|
||||
// Bring up a stack
|
||||
sh 'docker-compose up -d fullstack-mysql'
|
||||
sh './scripts/wait-healthy $(docker-compose ps -q fullstack-mysql) 120'
|
||||
|
||||
// Run tests
|
||||
sh 'rm -rf test/results'
|
||||
sh 'docker-compose up cypress-mysql'
|
||||
// Get results
|
||||
sh 'docker cp -L "$(docker-compose ps -q cypress-mysql):/test/results" test/'
|
||||
stage('Test Sqlite') {
|
||||
environment {
|
||||
COMPOSE_PROJECT_NAME = "npm_${BRANCH_LOWER}_${BUILD_NUMBER}_sqlite"
|
||||
COMPOSE_FILE = 'docker/docker-compose.ci.yml:docker/docker-compose.ci.sqlite.yml'
|
||||
}
|
||||
post {
|
||||
always {
|
||||
// Dumps to analyze later
|
||||
sh 'mkdir -p debug'
|
||||
sh 'docker-compose logs fullstack-mysql | gzip > debug/docker_fullstack_mysql.log.gz'
|
||||
sh 'docker-compose logs db | gzip > debug/docker_db.log.gz'
|
||||
// Cypress videos and screenshot artifacts
|
||||
dir(path: 'test/results') {
|
||||
archiveArtifacts allowEmptyArchive: true, artifacts: '**/*', excludes: '**/*.xml'
|
||||
}
|
||||
junit 'test/results/junit/*'
|
||||
}
|
||||
}
|
||||
}
|
||||
stage('Docs') {
|
||||
when {
|
||||
not {
|
||||
equals expected: 'UNSTABLE', actual: currentBuild.result
|
||||
}
|
||||
}
|
||||
steps {
|
||||
dir(path: 'docs') {
|
||||
sh 'yarn install'
|
||||
sh 'yarn build'
|
||||
sh 'rm -rf ./test/results/junit/*'
|
||||
sh './scripts/ci/fulltest-cypress'
|
||||
}
|
||||
post {
|
||||
always {
|
||||
// Dumps to analyze later
|
||||
sh 'mkdir -p debug/sqlite'
|
||||
sh 'docker logs $(docker-compose ps --all -q fullstack) > debug/sqlite/docker_fullstack.log 2>&1'
|
||||
sh 'docker logs $(docker-compose ps --all -q stepca) > debug/sqlite/docker_stepca.log 2>&1'
|
||||
sh 'docker logs $(docker-compose ps --all -q pdns) > debug/sqlite/docker_pdns.log 2>&1'
|
||||
sh 'docker logs $(docker-compose ps --all -q pdns-db) > debug/sqlite/docker_pdns-db.log 2>&1'
|
||||
sh 'docker logs $(docker-compose ps --all -q dnsrouter) > debug/sqlite/docker_dnsrouter.log 2>&1'
|
||||
junit 'test/results/junit/*'
|
||||
sh 'docker-compose down --remove-orphans --volumes -t 30 || true'
|
||||
}
|
||||
|
||||
dir(path: 'docs/.vuepress/dist') {
|
||||
sh 'tar -czf ../../docs.tgz *'
|
||||
}
|
||||
}
|
||||
stage('Test Mysql') {
|
||||
environment {
|
||||
COMPOSE_PROJECT_NAME = "npm_${BRANCH_LOWER}_${BUILD_NUMBER}_mysql"
|
||||
COMPOSE_FILE = 'docker/docker-compose.ci.yml:docker/docker-compose.ci.mysql.yml'
|
||||
}
|
||||
when {
|
||||
not {
|
||||
equals expected: 'UNSTABLE', actual: currentBuild.result
|
||||
}
|
||||
}
|
||||
steps {
|
||||
sh 'rm -rf ./test/results/junit/*'
|
||||
sh './scripts/ci/fulltest-cypress'
|
||||
}
|
||||
post {
|
||||
always {
|
||||
// Dumps to analyze later
|
||||
sh 'mkdir -p debug/mysql'
|
||||
sh 'docker logs $(docker-compose ps --all -q fullstack) > debug/mysql/docker_fullstack.log 2>&1'
|
||||
sh 'docker logs $(docker-compose ps --all -q stepca) > debug/mysql/docker_stepca.log 2>&1'
|
||||
sh 'docker logs $(docker-compose ps --all -q pdns) > debug/mysql/docker_pdns.log 2>&1'
|
||||
sh 'docker logs $(docker-compose ps --all -q pdns-db) > debug/mysql/docker_pdns-db.log 2>&1'
|
||||
sh 'docker logs $(docker-compose ps --all -q dnsrouter) > debug/mysql/docker_dnsrouter.log 2>&1'
|
||||
junit 'test/results/junit/*'
|
||||
sh 'docker-compose down --remove-orphans --volumes -t 30 || true'
|
||||
}
|
||||
|
||||
archiveArtifacts(artifacts: 'docs/docs.tgz', allowEmptyArchive: false)
|
||||
}
|
||||
}
|
||||
stage('MultiArch Build') {
|
||||
@ -163,78 +158,60 @@ pipeline {
|
||||
}
|
||||
steps {
|
||||
withCredentials([usernamePassword(credentialsId: 'jc21-dockerhub', passwordVariable: 'dpass', usernameVariable: 'duser')]) {
|
||||
// Docker Login
|
||||
sh "docker login -u '${duser}' -p '${dpass}'"
|
||||
// Buildx with push from cache
|
||||
sh "./scripts/buildx --push ${BUILDX_PUSH_TAGS}"
|
||||
sh 'docker login -u "${duser}" -p "${dpass}"'
|
||||
sh "./scripts/buildx --push ${buildxPushTags}"
|
||||
}
|
||||
}
|
||||
}
|
||||
stage('Docs Deploy') {
|
||||
when {
|
||||
allOf {
|
||||
branch 'master'
|
||||
not {
|
||||
equals expected: 'UNSTABLE', actual: currentBuild.result
|
||||
stage('Docs / Comment') {
|
||||
parallel {
|
||||
stage('Docs Job') {
|
||||
when {
|
||||
allOf {
|
||||
branch pattern: "^(develop|master)\$", comparator: "REGEXP"
|
||||
not {
|
||||
equals expected: 'UNSTABLE', actual: currentBuild.result
|
||||
}
|
||||
}
|
||||
}
|
||||
steps {
|
||||
build wait: false, job: 'nginx-proxy-manager-docs', parameters: [string(name: 'docs_branch', value: "$BRANCH_NAME")]
|
||||
}
|
||||
}
|
||||
}
|
||||
steps {
|
||||
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'npm-s3-docs', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
|
||||
sh """docker run --rm \\
|
||||
--name \${COMPOSE_PROJECT_NAME}-docs-upload \\
|
||||
-e S3_BUCKET=jc21-npm-site \\
|
||||
-e AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID \\
|
||||
-e AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY \\
|
||||
-v \$(pwd):/app \\
|
||||
-w /app \\
|
||||
jc21/ci-tools \\
|
||||
scripts/docs-upload /app/docs/.vuepress/dist/
|
||||
"""
|
||||
|
||||
sh """docker run --rm \\
|
||||
--name \${COMPOSE_PROJECT_NAME}-docs-invalidate \\
|
||||
-e AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID \\
|
||||
-e AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY \\
|
||||
jc21/ci-tools \\
|
||||
aws cloudfront create-invalidation --distribution-id EN1G6DEWZUTDT --paths '/*'
|
||||
"""
|
||||
}
|
||||
}
|
||||
}
|
||||
stage('PR Comment') {
|
||||
when {
|
||||
allOf {
|
||||
changeRequest()
|
||||
not {
|
||||
equals expected: 'UNSTABLE', actual: currentBuild.result
|
||||
stage('PR Comment') {
|
||||
when {
|
||||
allOf {
|
||||
changeRequest()
|
||||
not {
|
||||
equals expected: 'UNSTABLE', actual: currentBuild.result
|
||||
}
|
||||
}
|
||||
}
|
||||
steps {
|
||||
script {
|
||||
npmGithubPrComment("Docker Image for build ${BUILD_NUMBER} is available on [DockerHub](https://cloud.docker.com/repository/docker/jc21/${IMAGE}) as `jc21/${IMAGE}:github-${BRANCH_LOWER}`\n\n**Note:** ensure you backup your NPM instance before testing this PR image! Especially if this PR contains database changes.", true)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
steps {
|
||||
script {
|
||||
def comment = pullRequest.comment("This is an automated message from CI:\n\nDocker Image for build ${BUILD_NUMBER} is available on [DockerHub](https://cloud.docker.com/repository/docker/jc21/${IMAGE}) as `jc21/${IMAGE}:github-${BRANCH_LOWER}`\n\n**Note:** ensure you backup your NPM instance before testing this PR image! Especially if this PR contains database changes.")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
post {
|
||||
always {
|
||||
sh 'docker-compose down --rmi all --remove-orphans --volumes -t 30'
|
||||
sh 'echo Reverting ownership'
|
||||
sh 'docker run --rm -v $(pwd):/data jc21/ci-tools chown -R $(id -u):$(id -g) /data'
|
||||
sh 'docker run --rm -v "$(pwd):/data" jc21/ci-tools chown -R "$(id -u):$(id -g)" /data'
|
||||
}
|
||||
success {
|
||||
juxtapose event: 'success'
|
||||
sh 'figlet "SUCCESS"'
|
||||
}
|
||||
failure {
|
||||
archiveArtifacts(artifacts: 'debug/**.*', allowEmptyArchive: true)
|
||||
archiveArtifacts(artifacts: 'debug/**/*.*', allowEmptyArchive: true)
|
||||
juxtapose event: 'failure'
|
||||
sh 'figlet "FAILURE"'
|
||||
}
|
||||
unstable {
|
||||
archiveArtifacts(artifacts: 'debug/**.*', allowEmptyArchive: true)
|
||||
archiveArtifacts(artifacts: 'debug/**/*.*', allowEmptyArchive: true)
|
||||
juxtapose event: 'unstable'
|
||||
sh 'figlet "UNSTABLE"'
|
||||
}
|
||||
|
427  README.md
@ -1,22 +1,13 @@
|
||||
<p align="center">
|
||||
<img src="https://nginxproxymanager.com/github.png">
|
||||
<br><br>
|
||||
<img src="https://img.shields.io/badge/version-2.9.8-green.svg?style=for-the-badge">
|
||||
<img src="https://img.shields.io/badge/version-2.11.3-green.svg?style=for-the-badge">
|
||||
<a href="https://hub.docker.com/repository/docker/jc21/nginx-proxy-manager">
|
||||
<img src="https://img.shields.io/docker/stars/jc21/nginx-proxy-manager.svg?style=for-the-badge">
|
||||
</a>
|
||||
<a href="https://hub.docker.com/repository/docker/jc21/nginx-proxy-manager">
|
||||
<img src="https://img.shields.io/docker/pulls/jc21/nginx-proxy-manager.svg?style=for-the-badge">
|
||||
</a>
|
||||
<a href="https://ci.nginxproxymanager.com/blue/organizations/jenkins/nginx-proxy-manager/branches/">
|
||||
<img src="https://img.shields.io/jenkins/build?jobUrl=https%3A%2F%2Fci.nginxproxymanager.com%2Fjob%2Fnginx-proxy-manager%2Fjob%2Fmaster&style=for-the-badge">
|
||||
</a>
|
||||
<a href="https://gitter.im/nginx-proxy-manager/community">
|
||||
<img alt="Gitter" src="https://img.shields.io/gitter/room/nginx-proxy-manager/community?style=for-the-badge">
|
||||
</a>
|
||||
<a href="https://reddit.com/r/nginxproxymanager">
|
||||
<img alt="Reddit" src="https://img.shields.io/reddit/subreddit-subscribers/nginxproxymanager?label=Reddit%20Community&style=for-the-badge">
|
||||
</a>
|
||||
</p>
|
||||
|
||||
This project comes as a pre-built docker image that enables you to easily forward to your websites
|
||||
@ -28,7 +19,7 @@ running at home or otherwise, including free SSL, without having to know too muc
|
||||
|
||||
## Project Goal
|
||||
|
||||
I created this project to fill a personal need to provide users with a easy way to accomplish reverse
|
||||
I created this project to fill a personal need to provide users with an easy way to accomplish reverse
|
||||
proxying hosts with SSL termination and it had to be so easy that a monkey could do it. This goal hasn't changed.
|
||||
While there might be advanced options they are optional and the project should be as simple as possible
|
||||
so that the barrier for entry here is low.
|
||||
@ -65,40 +56,29 @@ I won't go in to too much detail here but here are the basics for someone new to
|
||||
2. Create a docker-compose.yml file similar to this:
|
||||
|
||||
```yml
|
||||
version: '3'
|
||||
services:
|
||||
app:
|
||||
image: 'jc21/nginx-proxy-manager:latest'
|
||||
image: 'docker.io/jc21/nginx-proxy-manager:latest'
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- '80:80'
|
||||
- '81:81'
|
||||
- '443:443'
|
||||
environment:
|
||||
DB_MYSQL_HOST: "db"
|
||||
DB_MYSQL_PORT: 3306
|
||||
DB_MYSQL_USER: "npm"
|
||||
DB_MYSQL_PASSWORD: "npm"
|
||||
DB_MYSQL_NAME: "npm"
|
||||
volumes:
|
||||
- ./data:/data
|
||||
- ./letsencrypt:/etc/letsencrypt
|
||||
db:
|
||||
image: 'jc21/mariadb-aria:latest'
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
MYSQL_ROOT_PASSWORD: 'npm'
|
||||
MYSQL_DATABASE: 'npm'
|
||||
MYSQL_USER: 'npm'
|
||||
MYSQL_PASSWORD: 'npm'
|
||||
volumes:
|
||||
- ./data/mysql:/var/lib/mysql
|
||||
```
|
||||
|
||||
3. Bring up your stack
|
||||
This is the bare minimum configuration required. See the [documentation](https://nginxproxymanager.com/setup/) for more.
|
||||
|
||||
3. Bring up your stack by running
|
||||
|
||||
```bash
|
||||
docker-compose up -d
|
||||
|
||||
# If using docker-compose-plugin
|
||||
docker compose up -d
|
||||
|
||||
```
|
||||
|
||||
4. Log in to the Admin UI
|
||||
@ -117,373 +97,24 @@ Password: changeme
|
||||
Immediately after logging in with this default user you will be asked to modify your details and change your password.
|
||||
|
||||
|
||||
## Contributors
|
||||
## Contributing
|
||||
|
||||
Special thanks to the following contributors:
|
||||
All are welcome to create pull requests for this project, against the `develop` branch. Official releases are created from the `master` branch.
|
||||
|
||||
<!-- prettier-ignore-start -->
|
||||
<!-- markdownlint-disable -->
|
||||
<table>
|
||||
<tr>
|
||||
<td align="center">
|
||||
<a href="https://github.com/Subv">
|
||||
<img src="https://avatars1.githubusercontent.com/u/357072?s=460&u=d8adcdc91d749ae53e177973ed9b6bb6c4c894a3&v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Sebastian Valle</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/Indemnity83">
|
||||
<img src="https://avatars3.githubusercontent.com/u/35218?s=460&u=7082004ff35138157c868d7d9c683ccebfce5968&v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Kyle Klaus</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/theraw">
|
||||
<img src="https://avatars1.githubusercontent.com/u/32969774?s=460&u=6b359971e15685fb0359e6a8c065a399b40dc228&v=4" width="80" alt=""/>
|
||||
<br /><sub><b>ƬHE ЯAW</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/spalger">
|
||||
<img src="https://avatars2.githubusercontent.com/u/1329312?s=400&u=565223e38f1c052afb4c5dcca3fcf1c63ba17ae7&v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Spencer</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/Xantios">
|
||||
<img src="https://avatars3.githubusercontent.com/u/1507836?s=460&v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Xantios Krugor</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/dpanesso">
|
||||
<img src="https://avatars2.githubusercontent.com/u/2687121?s=460&v=4" width="80" alt=""/>
|
||||
<br /><sub><b>David Panesso</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/IronTooch">
|
||||
<img src="https://avatars3.githubusercontent.com/u/27360514?s=460&u=69bf854a6647c55725f62ecb8d39249c6c0b2602&v=4" width="80" alt=""/>
|
||||
<br /><sub><b>IronTooch</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center">
|
||||
<a href="https://github.com/damianog">
|
||||
<img src="https://avatars1.githubusercontent.com/u/2786682?s=460&u=76c6136fae797abb76b951cd8a246dcaecaf21af&v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Damiano</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/tfmm">
|
||||
<img src="https://avatars3.githubusercontent.com/u/6880538?s=460&u=ce0160821cc4aa802df8395200f2d4956a5bc541&v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Russ</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/margaale">
|
||||
<img src="https://avatars3.githubusercontent.com/u/20794934?s=460&v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Marcelo Castagna</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/Steven-Harris">
|
||||
<img src="https://avatars2.githubusercontent.com/u/7720242?s=460&v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Steven Harris</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/jlesage">
|
||||
<img src="https://avatars0.githubusercontent.com/u/1791123?s=460&v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Jocelyn Le Sage</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/cmer">
|
||||
<img src="https://avatars0.githubusercontent.com/u/412?s=460&u=67dd8b2e3661bfd6f68ec1eaa5b9821bd8a321cd&v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Carl Mercier</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/the1ts">
|
||||
<img src="https://avatars1.githubusercontent.com/u/84956?s=460&v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Paul Mansfield</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center">
|
||||
<a href="https://github.com/OhHeyAlan">
|
||||
<img src="https://avatars0.githubusercontent.com/u/11955126?s=460&u=fbaa5a1a4f73ef8960132c703349bfd037fe2630&v=4" width="80" alt=""/>
|
||||
<br /><sub><b>OhHeyAlan</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/dogmatic69">
|
||||
<img src="https://avatars2.githubusercontent.com/u/94674?s=460&u=ca7647de53145c6283b6373ade5dc94ba99347db&v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Carl Sutton</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/tg44">
|
||||
<img src="https://avatars0.githubusercontent.com/u/31839?s=460&u=ad32f4cadfef5e5fb09cdfa4b7b7b36a99ba6811&v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Gergő Törcsvári</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/vrenjith">
|
||||
<img src="https://avatars3.githubusercontent.com/u/2093241?s=460&u=96ce93a9bebabdd0a60a2dc96cd093a41d5edaba&v=4" width="80" alt=""/>
|
||||
<br /><sub><b>vrenjith</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/duhruh">
|
||||
<img src="https://avatars2.githubusercontent.com/u/1133969?s=460&u=c0691e6131ec6d516416c1c6fcedb5034f877bbe&v=4" width="80" alt=""/>
|
||||
<br /><sub><b>David Rivera</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/jipjan">
|
||||
<img src="https://avatars2.githubusercontent.com/u/1384618?s=460&v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Jaap-Jan de Wit</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/jmwebslave">
|
||||
<img src="https://avatars2.githubusercontent.com/u/6118262?s=460&u=7db409c47135b1e141c366bbb03ed9fae6ac2638&v=4" width="80" alt=""/>
|
||||
<br /><sub><b>James Morgan</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center">
|
||||
<a href="https://github.com/chaptergy">
|
||||
<img src="https://avatars2.githubusercontent.com/u/26956711?s=460&u=7d9adebabb6b4e7af7cb05d98d751087a372304b&v=4" width="80" alt=""/>
|
||||
<br /><sub><b>chaptergy</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/Philip-Mooney">
|
||||
<img src="https://avatars0.githubusercontent.com/u/48624631?s=460&v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Philip Mooney</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/WaterCalm">
|
||||
<img src="https://avatars1.githubusercontent.com/u/23502129?s=400&v=4" width="80" alt=""/>
|
||||
<br /><sub><b>WaterCalm</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/lebrou34">
|
||||
<img src="https://avatars1.githubusercontent.com/u/16373103?s=460&v=4" width="80" alt=""/>
|
||||
<br /><sub><b>lebrou34</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/lightglitch">
|
||||
<img src="https://avatars0.githubusercontent.com/u/196953?s=460&v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Mário Franco</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/klutchell">
|
||||
<img src="https://avatars3.githubusercontent.com/u/20458272?s=460&v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Kyle Harding</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/ahgraber">
|
||||
<img src="https://avatars.githubusercontent.com/u/24922003?s=460&u=8376c9f00af9b6057ba4d2fb03b4f1b20a75277f&v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Alex Graber</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center">
|
||||
<a href="https://github.com/MooBaloo">
|
||||
<img src="https://avatars.githubusercontent.com/u/9493496?s=460&v=4" width="80" alt=""/>
|
||||
<br /><sub><b>MooBaloo</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/Shuro">
|
||||
<img src="https://avatars.githubusercontent.com/u/944030?s=460&v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Shuro</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/lorisbergeron">
|
||||
<img src="https://avatars.githubusercontent.com/u/51918567?s=460&u=778e4ff284b7d7304450f98421c99f79298371fb&v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Loris Bergeron</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/hepelayo">
|
||||
<img src="https://avatars.githubusercontent.com/u/8243119?v=4" width="80" alt=""/>
|
||||
<br /><sub><b>hepelayo</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/jonasled">
|
||||
<img src="https://avatars.githubusercontent.com/u/46790650?v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Jonas Leder</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/stegmannb">
|
||||
<img src="https://avatars.githubusercontent.com/u/12850482?v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Bastian Stegmann</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/Stealthii">
|
||||
<img src="https://avatars.githubusercontent.com/u/998920?v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Stealthii</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center">
|
||||
<a href="https://github.com/thegamingninja">
|
||||
<img src="https://avatars.githubusercontent.com/u/8020534?v=4" width="80" alt=""/>
|
||||
<br /><sub><b>THEGamingninja</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/italobb">
|
||||
<img src="https://avatars.githubusercontent.com/u/1801687?v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Italo Borssatto</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/GurjinderSingh">
|
||||
<img src="https://avatars.githubusercontent.com/u/3470709?v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Gurjinder Singh</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/phantomski77">
|
||||
<img src="https://avatars.githubusercontent.com/u/69464125?v=4" width="80" alt=""/>
|
||||
<br /><sub><b>David Dosoudil</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/ijaron">
|
||||
<img src="https://avatars.githubusercontent.com/u/5156472?v=4" width="80" alt=""/>
|
||||
<br /><sub><b>ijaron</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/nielscil">
|
||||
<img src="https://avatars.githubusercontent.com/u/9073152?v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Niels Bouma</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/ogarai">
|
||||
<img src="https://avatars.githubusercontent.com/u/2949572?v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Orko Garai</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center">
|
||||
<a href="https://github.com/baruffaldi">
|
||||
<img src="https://avatars.githubusercontent.com/u/36949?v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Filippo Baruffaldi</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/bikram990">
|
||||
<img src="https://avatars.githubusercontent.com/u/6782131?v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Bikramjeet Singh</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/razvanstoica89">
|
||||
<img src="https://avatars.githubusercontent.com/u/28236583?v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Razvan Stoica</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/psharma04">
|
||||
<img src="https://avatars.githubusercontent.com/u/22587474?v=4" width="80" alt=""/>
|
||||
<br /><sub><b>RBXII3</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/demize">
|
||||
<img src="https://avatars.githubusercontent.com/u/264914?v=4" width="80" alt=""/>
|
||||
<br /><sub><b>demize</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/PUP-Loki">
|
||||
<img src="https://avatars.githubusercontent.com/u/75944209?v=4" width="80" alt=""/>
|
||||
<br /><sub><b>PUP-Loki</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/DSorlov">
|
||||
<img src="https://avatars.githubusercontent.com/u/8133650?v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Daniel Sörlöv</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center">
|
||||
<a href="https://github.com/Theyooo">
|
||||
<img src="https://avatars.githubusercontent.com/u/58510131?v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Theyooo</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/mrdink">
|
||||
<img src="https://avatars.githubusercontent.com/u/514751?v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Justin Peacock</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/ChrisTracy">
|
||||
<img src="https://avatars.githubusercontent.com/u/58871574?v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Chris Tracy</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/Fuechslein">
|
||||
<img src="https://avatars.githubusercontent.com/u/15112818?v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Fuechslein</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/nightah">
|
||||
<img src="https://avatars.githubusercontent.com/u/3339418?v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Amir Zarrinkafsh</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/gabbe">
|
||||
<img src="https://avatars.githubusercontent.com/u/156397?v=4" width="80" alt=""/>
|
||||
<br /><sub><b>gabbe</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/bmbvenom">
|
||||
<img src="https://avatars.githubusercontent.com/u/20530371?v=4" width="80" alt=""/>
|
||||
<br /><sub><b>bmbvenom</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center">
|
||||
<a href="https://github.com/FMeinicke">
|
||||
<img src="https://avatars.githubusercontent.com/u/42121639?v=4" width="80" alt=""/>
|
||||
<br /><sub><b>Florian Meinicke</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
<!-- markdownlint-enable -->
|
||||
<!-- prettier-ignore-end -->
|
||||
CI is used in this project. All PR's must pass before being considered. After passing,
|
||||
docker builds for PR's are available on dockerhub for manual verifications.
|
||||
|
||||
Documentation within the `develop` branch is available for preview at
|
||||
[https://develop.nginxproxymanager.com](https://develop.nginxproxymanager.com)
|
||||
|
||||
|
||||
### Contributors
|
||||
|
||||
Special thanks to [all of our contributors](https://github.com/NginxProxyManager/nginx-proxy-manager/graphs/contributors).
|
||||
|
||||
|
||||
## Getting Support
|
||||
|
||||
1. [Found a bug?](https://github.com/NginxProxyManager/nginx-proxy-manager/issues)
|
||||
2. [Discussions](https://github.com/NginxProxyManager/nginx-proxy-manager/discussions)
|
||||
3. [Reddit](https://reddit.com/r/nginxproxymanager)
|
||||
|
@ -2,6 +2,7 @@ const express = require('express');
|
||||
const bodyParser = require('body-parser');
|
||||
const fileUpload = require('express-fileupload');
|
||||
const compression = require('compression');
|
||||
const config = require('./lib/config');
|
||||
const log = require('./logger').express;
|
||||
|
||||
/**
|
||||
@ -24,7 +25,7 @@ app.enable('trust proxy', ['loopback', 'linklocal', 'uniquelocal']);
|
||||
app.enable('strict routing');
|
||||
|
||||
// pretty print JSON when not live
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
if (config.debug()) {
|
||||
app.set('json spaces', 2);
|
||||
}
|
||||
|
||||
@ -40,13 +41,12 @@ app.use(function (req, res, next) {
|
||||
}
|
||||
|
||||
res.set({
|
||||
'Strict-Transport-Security': 'includeSubDomains; max-age=631138519; preload',
|
||||
'X-XSS-Protection': '1; mode=block',
|
||||
'X-Content-Type-Options': 'nosniff',
|
||||
'X-Frame-Options': x_frame_options,
|
||||
'Cache-Control': 'no-cache, no-store, max-age=0, must-revalidate',
|
||||
Pragma: 'no-cache',
|
||||
Expires: 0
|
||||
'X-XSS-Protection': '1; mode=block',
|
||||
'X-Content-Type-Options': 'nosniff',
|
||||
'X-Frame-Options': x_frame_options,
|
||||
'Cache-Control': 'no-cache, no-store, max-age=0, must-revalidate',
|
||||
Pragma: 'no-cache',
|
||||
Expires: 0
|
||||
});
|
||||
next();
|
||||
});
|
||||
@ -66,7 +66,7 @@ app.use(function (err, req, res, next) {
|
||||
}
|
||||
};
|
||||
|
||||
if (process.env.NODE_ENV === 'development' || (req.baseUrl + req.path).includes('nginx/certificates')) {
|
||||
if (config.debug() || (req.baseUrl + req.path).includes('nginx/certificates')) {
|
||||
payload.debug = {
|
||||
stack: typeof err.stack !== 'undefined' && err.stack ? err.stack.split('\n') : null,
|
||||
previous: err.previous
|
||||
@ -75,7 +75,7 @@ app.use(function (err, req, res, next) {
|
||||
|
||||
// Not every error is worth logging - but this is good for now until it gets annoying.
|
||||
if (typeof err.stack !== 'undefined' && err.stack) {
|
||||
if (process.env.NODE_ENV === 'development') {
|
||||
if (config.debug()) {
|
||||
log.debug(err.stack);
|
||||
} else if (typeof err.public == 'undefined' || !err.public) {
|
||||
log.warn(err.message);
|
||||
|
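The app.js changes above swap direct `process.env.NODE_ENV` checks for `config.debug()` from the new `./lib/config` module. That module is not shown in this diff; the sketch below only illustrates the pattern of hiding environment checks behind one helper. The names `debug`, `has` and `get` come from calls visible in this commit; everything else is a guess.

```js
// Hypothetical shape of backend/lib/config -- not taken from this commit.
const config = require('config'); // the node-config package the old code required directly

module.exports = {
	// One place to decide whether debug behaviour is on,
	// instead of scattering NODE_ENV checks through the codebase.
	debug: () => process.env.DEBUG === 'true',
	has:   (key) => config.has(key),
	get:   (key) => config.get(key),
};
```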
@ -1,33 +1,27 @@
|
||||
const config = require('config');
|
||||
const config = require('./lib/config');
|
||||
|
||||
if (!config.has('database')) {
|
||||
throw new Error('Database config does not exist! Please read the instructions: https://github.com/jc21/nginx-proxy-manager/blob/master/doc/INSTALL.md');
|
||||
throw new Error('Database config does not exist! Please read the instructions: https://nginxproxymanager.com/setup/');
|
||||
}
|
||||
|
||||
function generateDbConfig() {
|
||||
if (config.database.engine === 'knex-native') {
|
||||
return config.database.knex;
|
||||
} else
|
||||
return {
|
||||
client: config.database.engine,
|
||||
connection: {
|
||||
host: config.database.host,
|
||||
user: config.database.user,
|
||||
password: config.database.password,
|
||||
database: config.database.name,
|
||||
port: config.database.port
|
||||
},
|
||||
migrations: {
|
||||
tableName: 'migrations'
|
||||
}
|
||||
};
|
||||
const cfg = config.get('database');
|
||||
if (cfg.engine === 'knex-native') {
|
||||
return cfg.knex;
|
||||
}
|
||||
return {
|
||||
client: cfg.engine,
|
||||
connection: {
|
||||
host: cfg.host,
|
||||
user: cfg.user,
|
||||
password: cfg.password,
|
||||
database: cfg.name,
|
||||
port: cfg.port
|
||||
},
|
||||
migrations: {
|
||||
tableName: 'migrations'
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
let data = generateDbConfig();
|
||||
|
||||
if (typeof config.database.version !== 'undefined') {
|
||||
data.version = config.database.version;
|
||||
}
|
||||
|
||||
module.exports = require('knex')(data);
|
||||
module.exports = require('knex')(generateDbConfig());
|
||||
|
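After the rewrite above, `backend/db.js` simply exports the knex instance produced by `generateDbConfig()`. A minimal usage sketch, assuming a `user` table exists; the table and column names are illustrative, not part of this diff.

```js
// Consumers obtain the shared, already-configured knex instance via require:
const db = require('./db');

// Example query -- works the same whether the engine is mysql or knex-native/sqlite,
// because the connection details were resolved once in generateDbConfig().
async function countUsers () {
	const rows = await db('user').count('id as total'); // 'user' table name is an assumption
	return rows[0].total;
}
```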
@ -40,6 +40,210 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"/nginx/proxy-hosts": {
|
||||
"get": {
|
||||
"operationId": "getProxyHosts",
|
||||
"summary": "Get all proxy hosts",
|
||||
"tags": ["Proxy Hosts"],
|
||||
"security": [
|
||||
{
|
||||
"BearerAuth": ["users"]
|
||||
}
|
||||
],
|
||||
"parameters": [
|
||||
{
|
||||
"in": "query",
|
||||
"name": "expand",
|
||||
"description": "Expansions",
|
||||
"schema": {
|
||||
"type": "string",
|
||||
"enum": ["access_list", "owner", "certificate"]
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "200 response",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"examples": {
|
||||
"default": {
|
||||
"value": [
|
||||
{
|
||||
"id": 1,
|
||||
"created_on": "2023-03-30T01:12:23.000Z",
|
||||
"modified_on": "2023-03-30T02:15:40.000Z",
|
||||
"owner_user_id": 1,
|
||||
"domain_names": ["aasdasdad"],
|
||||
"forward_host": "asdasd",
|
||||
"forward_port": 80,
|
||||
"access_list_id": 0,
|
||||
"certificate_id": 0,
|
||||
"ssl_forced": 0,
|
||||
"caching_enabled": 0,
|
||||
"block_exploits": 0,
|
||||
"advanced_config": "sdfsdfsdf",
|
||||
"meta": {
|
||||
"letsencrypt_agree": false,
|
||||
"dns_challenge": false,
|
||||
"nginx_online": false,
|
||||
"nginx_err": "Command failed: /usr/sbin/nginx -t -g \"error_log off;\"\nnginx: [emerg] unknown directive \"sdfsdfsdf\" in /data/nginx/proxy_host/1.conf:37\nnginx: configuration file /etc/nginx/nginx.conf test failed\n"
|
||||
},
|
||||
"allow_websocket_upgrade": 0,
|
||||
"http2_support": 0,
|
||||
"forward_scheme": "http",
|
||||
"enabled": 1,
|
||||
"locations": [],
|
||||
"hsts_enabled": 0,
|
||||
"hsts_subdomains": 0,
|
||||
"owner": {
|
||||
"id": 1,
|
||||
"created_on": "2023-03-30T01:11:50.000Z",
|
||||
"modified_on": "2023-03-30T01:11:50.000Z",
|
||||
"is_deleted": 0,
|
||||
"is_disabled": 0,
|
||||
"email": "admin@example.com",
|
||||
"name": "Administrator",
|
||||
"nickname": "Admin",
|
||||
"avatar": "",
|
||||
"roles": ["admin"]
|
||||
},
|
||||
"access_list": null,
|
||||
"certificate": null
|
||||
},
|
||||
{
|
||||
"id": 2,
|
||||
"created_on": "2023-03-30T02:11:49.000Z",
|
||||
"modified_on": "2023-03-30T02:11:49.000Z",
|
||||
"owner_user_id": 1,
|
||||
"domain_names": ["test.example.com"],
|
||||
"forward_host": "1.1.1.1",
|
||||
"forward_port": 80,
|
||||
"access_list_id": 0,
|
||||
"certificate_id": 0,
|
||||
"ssl_forced": 0,
|
||||
"caching_enabled": 0,
|
||||
"block_exploits": 0,
|
||||
"advanced_config": "",
|
||||
"meta": {
|
||||
"letsencrypt_agree": false,
|
||||
"dns_challenge": false,
|
||||
"nginx_online": true,
|
||||
"nginx_err": null
|
||||
},
|
||||
"allow_websocket_upgrade": 0,
|
||||
"http2_support": 0,
|
||||
"forward_scheme": "http",
|
||||
"enabled": 1,
|
||||
"locations": [],
|
||||
"hsts_enabled": 0,
|
||||
"hsts_subdomains": 0,
|
||||
"owner": {
|
||||
"id": 1,
|
||||
"created_on": "2023-03-30T01:11:50.000Z",
|
||||
"modified_on": "2023-03-30T01:11:50.000Z",
|
||||
"is_deleted": 0,
|
||||
"is_disabled": 0,
|
||||
"email": "admin@example.com",
|
||||
"name": "Administrator",
|
||||
"nickname": "Admin",
|
||||
"avatar": "",
|
||||
"roles": ["admin"]
|
||||
},
|
||||
"access_list": null,
|
||||
"certificate": null
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/ProxyHostsList"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"post": {
|
||||
"operationId": "createProxyHost",
|
||||
"summary": "Create a Proxy Host",
|
||||
"tags": ["Proxy Hosts"],
|
||||
"security": [
|
||||
{
|
||||
"BearerAuth": ["users"]
|
||||
}
|
||||
],
|
||||
"parameters": [
|
||||
{
|
||||
"in": "body",
|
||||
"name": "proxyhost",
|
||||
"description": "Proxy Host Payload",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/ProxyHostObject"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"201": {
|
||||
"description": "201 response",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"examples": {
|
||||
"default": {
|
||||
"value": {
|
||||
"id": 3,
|
||||
"created_on": "2023-03-30T02:31:27.000Z",
|
||||
"modified_on": "2023-03-30T02:31:27.000Z",
|
||||
"owner_user_id": 1,
|
||||
"domain_names": ["test2.example.com"],
|
||||
"forward_host": "1.1.1.1",
|
||||
"forward_port": 80,
|
||||
"access_list_id": 0,
|
||||
"certificate_id": 0,
|
||||
"ssl_forced": 0,
|
||||
"caching_enabled": 0,
|
||||
"block_exploits": 0,
|
||||
"advanced_config": "",
|
||||
"meta": {
|
||||
"letsencrypt_agree": false,
|
||||
"dns_challenge": false
|
||||
},
|
||||
"allow_websocket_upgrade": 0,
|
||||
"http2_support": 0,
|
||||
"forward_scheme": "http",
|
||||
"enabled": 1,
|
||||
"locations": [],
|
||||
"hsts_enabled": 0,
|
||||
"hsts_subdomains": 0,
|
||||
"certificate": null,
|
||||
"owner": {
|
||||
"id": 1,
|
||||
"created_on": "2023-03-30T01:11:50.000Z",
|
||||
"modified_on": "2023-03-30T01:11:50.000Z",
|
||||
"is_deleted": 0,
|
||||
"is_disabled": 0,
|
||||
"email": "admin@example.com",
|
||||
"name": "Administrator",
|
||||
"nickname": "Admin",
|
||||
"avatar": "",
|
||||
"roles": ["admin"]
|
||||
},
|
||||
"access_list": null,
|
||||
"use_default_location": true,
|
||||
"ipv6": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/ProxyHostObject"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/schema": {
|
||||
"get": {
|
||||
"operationId": "schema",
|
||||
@ -55,14 +259,10 @@
|
||||
"get": {
|
||||
"operationId": "refreshToken",
|
||||
"summary": "Refresh your access token",
|
||||
"tags": [
|
||||
"Tokens"
|
||||
],
|
||||
"tags": ["Tokens"],
|
||||
"security": [
|
||||
{
|
||||
"BearerAuth": [
|
||||
"tokens"
|
||||
]
|
||||
"BearerAuth": ["tokens"]
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
@ -104,19 +304,14 @@
|
||||
"scope": {
|
||||
"minLength": 1,
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"user"
|
||||
]
|
||||
"enum": ["user"]
|
||||
},
|
||||
"secret": {
|
||||
"minLength": 1,
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"identity",
|
||||
"secret"
|
||||
],
|
||||
"required": ["identity", "secret"],
|
||||
"type": "object"
|
||||
}
|
||||
}
|
||||
@ -144,23 +339,17 @@
|
||||
}
|
||||
},
|
||||
"summary": "Request a new access token from credentials",
|
||||
"tags": [
|
||||
"Tokens"
|
||||
]
|
||||
"tags": ["Tokens"]
|
||||
}
|
||||
},
|
||||
"/settings": {
|
||||
"get": {
|
||||
"operationId": "getSettings",
|
||||
"summary": "Get all settings",
|
||||
"tags": [
|
||||
"Settings"
|
||||
],
|
||||
"tags": ["Settings"],
|
||||
"security": [
|
||||
{
|
||||
"BearerAuth": [
|
||||
"settings"
|
||||
]
|
||||
"BearerAuth": ["settings"]
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
@ -194,14 +383,10 @@
|
||||
"get": {
|
||||
"operationId": "getSetting",
|
||||
"summary": "Get a setting",
|
||||
"tags": [
|
||||
"Settings"
|
||||
],
|
||||
"tags": ["Settings"],
|
||||
"security": [
|
||||
{
|
||||
"BearerAuth": [
|
||||
"settings"
|
||||
]
|
||||
"BearerAuth": ["settings"]
|
||||
}
|
||||
],
|
||||
"parameters": [
|
||||
@ -244,14 +429,10 @@
|
||||
"put": {
|
||||
"operationId": "updateSetting",
|
||||
"summary": "Update a setting",
|
||||
"tags": [
|
||||
"Settings"
|
||||
],
|
||||
"tags": ["Settings"],
|
||||
"security": [
|
||||
{
|
||||
"BearerAuth": [
|
||||
"settings"
|
||||
]
|
||||
"BearerAuth": ["settings"]
|
||||
}
|
||||
],
|
||||
"parameters": [
|
||||
@ -305,14 +486,10 @@
|
||||
"get": {
|
||||
"operationId": "getUsers",
|
||||
"summary": "Get all users",
|
||||
"tags": [
|
||||
"Users"
|
||||
],
|
||||
"tags": ["Users"],
|
||||
"security": [
|
||||
{
|
||||
"BearerAuth": [
|
||||
"users"
|
||||
]
|
||||
"BearerAuth": ["users"]
|
||||
}
|
||||
],
|
||||
"parameters": [
|
||||
@ -322,9 +499,7 @@
|
||||
"description": "Expansions",
|
||||
"schema": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"permissions"
|
||||
]
|
||||
"enum": ["permissions"]
|
||||
}
|
||||
}
|
||||
],
|
||||
@ -345,9 +520,7 @@
|
||||
"name": "Jamie Curnow",
|
||||
"nickname": "James",
|
||||
"avatar": "//www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?default=mm",
|
||||
"roles": [
|
||||
"admin"
|
||||
]
|
||||
"roles": ["admin"]
|
||||
}
|
||||
]
|
||||
},
|
||||
@ -362,9 +535,7 @@
|
||||
"name": "Jamie Curnow",
|
||||
"nickname": "James",
|
||||
"avatar": "//www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?default=mm",
|
||||
"roles": [
|
||||
"admin"
|
||||
],
|
||||
"roles": ["admin"],
|
||||
"permissions": {
|
||||
"visibility": "all",
|
||||
"proxy_hosts": "manage",
|
||||
@ -389,14 +560,10 @@
|
||||
"post": {
|
||||
"operationId": "createUser",
|
||||
"summary": "Create a User",
|
||||
"tags": [
|
||||
"Users"
|
||||
],
|
||||
"tags": ["Users"],
|
||||
"security": [
|
||||
{
|
||||
"BearerAuth": [
|
||||
"users"
|
||||
]
|
||||
"BearerAuth": ["users"]
|
||||
}
|
||||
],
|
||||
"parameters": [
|
||||
@ -426,9 +593,7 @@
|
||||
"name": "Jamie Curnow",
|
||||
"nickname": "James",
|
||||
"avatar": "//www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?default=mm",
|
||||
"roles": [
|
||||
"admin"
|
||||
],
|
||||
"roles": ["admin"],
|
||||
"permissions": {
|
||||
"visibility": "all",
|
||||
"proxy_hosts": "manage",
|
||||
@ -454,14 +619,10 @@
|
||||
"get": {
|
||||
"operationId": "getUser",
|
||||
"summary": "Get a user",
|
||||
"tags": [
|
||||
"Users"
|
||||
],
|
||||
"tags": ["Users"],
|
||||
"security": [
|
||||
{
|
||||
"BearerAuth": [
|
||||
"users"
|
||||
]
|
||||
"BearerAuth": ["users"]
|
||||
}
|
||||
],
|
||||
"parameters": [
|
||||
@ -501,9 +662,7 @@
|
||||
"name": "Jamie Curnow",
|
||||
"nickname": "James",
|
||||
"avatar": "//www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?default=mm",
|
||||
"roles": [
|
||||
"admin"
|
||||
]
|
||||
"roles": ["admin"]
|
||||
}
|
||||
}
|
||||
},
|
||||
@ -518,14 +677,10 @@
|
||||
"put": {
|
||||
"operationId": "updateUser",
|
||||
"summary": "Update a User",
|
||||
"tags": [
|
||||
"Users"
|
||||
],
|
||||
"tags": ["Users"],
|
||||
"security": [
|
||||
{
|
||||
"BearerAuth": [
|
||||
"users"
|
||||
]
|
||||
"BearerAuth": ["users"]
|
||||
}
|
||||
],
|
||||
"parameters": [
|
||||
@ -574,9 +729,7 @@
|
||||
"name": "Jamie Curnow",
|
||||
"nickname": "James",
|
||||
"avatar": "//www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?default=mm",
|
||||
"roles": [
|
||||
"admin"
|
||||
]
|
||||
"roles": ["admin"]
|
||||
}
|
||||
}
|
||||
},
|
||||
@ -591,14 +744,10 @@
|
||||
"delete": {
|
||||
"operationId": "deleteUser",
|
||||
"summary": "Delete a User",
|
||||
"tags": [
|
||||
"Users"
|
||||
],
|
||||
"tags": ["Users"],
|
||||
"security": [
|
||||
{
|
||||
"BearerAuth": [
|
||||
"users"
|
||||
]
|
||||
"BearerAuth": ["users"]
|
||||
}
|
||||
],
|
||||
"parameters": [
|
||||
@ -637,14 +786,10 @@
|
||||
"put": {
|
||||
"operationId": "updateUserAuth",
|
||||
"summary": "Update a User's Authentication",
|
||||
"tags": [
|
||||
"Users"
|
||||
],
|
||||
"tags": ["Users"],
|
||||
"security": [
|
||||
{
|
||||
"BearerAuth": [
|
||||
"users"
|
||||
]
|
||||
"BearerAuth": ["users"]
|
||||
}
|
||||
],
|
||||
"parameters": [
|
||||
@ -700,14 +845,10 @@
|
||||
"put": {
|
||||
"operationId": "updateUserPermissions",
|
||||
"summary": "Update a User's Permissions",
|
||||
"tags": [
|
||||
"Users"
|
||||
],
|
||||
"tags": ["Users"],
|
||||
"security": [
|
||||
{
|
||||
"BearerAuth": [
|
||||
"users"
|
||||
]
|
||||
"BearerAuth": ["users"]
|
||||
}
|
||||
],
|
||||
"parameters": [
|
||||
@ -755,14 +896,10 @@
|
||||
"put": {
|
||||
"operationId": "loginAsUser",
|
||||
"summary": "Login as this user",
|
||||
"tags": [
|
||||
"Users"
|
||||
],
|
||||
"tags": ["Users"],
|
||||
"security": [
|
||||
{
|
||||
"BearerAuth": [
|
||||
"users"
|
||||
]
|
||||
"BearerAuth": ["users"]
|
||||
}
|
||||
],
|
||||
"parameters": [
|
||||
@ -797,9 +934,7 @@
|
||||
"name": "Jamie Curnow",
|
||||
"nickname": "James",
|
||||
"avatar": "//www.gravatar.com/avatar/3c8d73f45fd8763f827b964c76e6032a?default=mm",
|
||||
"roles": [
|
||||
"admin"
|
||||
]
|
||||
"roles": ["admin"]
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -807,11 +942,7 @@
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"description": "Login object",
|
||||
"required": [
|
||||
"expires",
|
||||
"token",
|
||||
"user"
|
||||
],
|
||||
"required": ["expires", "token", "user"],
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"expires": {
|
||||
@ -840,14 +971,10 @@
|
||||
"get": {
|
||||
"operationId": "reportsHosts",
|
||||
"summary": "Report on Host Statistics",
|
||||
"tags": [
|
||||
"Reports"
|
||||
],
|
||||
"tags": ["Reports"],
|
||||
"security": [
|
||||
{
|
||||
"BearerAuth": [
|
||||
"reports"
|
||||
]
|
||||
"BearerAuth": ["reports"]
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
@ -878,14 +1005,10 @@
|
||||
"get": {
|
||||
"operationId": "getAuditLog",
|
||||
"summary": "Get Audit Log",
|
||||
"tags": [
|
||||
"Audit Log"
|
||||
],
|
||||
"tags": ["Audit Log"],
|
||||
"security": [
|
||||
{
|
||||
"BearerAuth": [
|
||||
"audit-log"
|
||||
]
|
||||
"BearerAuth": ["audit-log"]
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
@ -925,10 +1048,7 @@
|
||||
"type": "object",
|
||||
"description": "Health object",
|
||||
"additionalProperties": false,
|
||||
"required": [
|
||||
"status",
|
||||
"version"
|
||||
],
|
||||
"required": ["status", "version"],
|
||||
"properties": {
|
||||
"status": {
|
||||
"type": "string",
|
||||
@ -944,11 +1064,7 @@
|
||||
"revision": 0
|
||||
},
|
||||
"additionalProperties": false,
|
||||
"required": [
|
||||
"major",
|
||||
"minor",
|
||||
"revision"
|
||||
],
|
||||
"required": ["major", "minor", "revision"],
|
||||
"properties": {
|
||||
"major": {
|
||||
"type": "integer",
|
||||
@ -969,10 +1085,7 @@
|
||||
"TokenObject": {
|
||||
"type": "object",
|
||||
"description": "Token object",
|
||||
"required": [
|
||||
"expires",
|
||||
"token"
|
||||
],
|
||||
"required": ["expires", "token"],
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"expires": {
|
||||
@ -988,16 +1101,147 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"ProxyHostObject": {
|
||||
"type": "object",
|
||||
"description": "Proxy Host object",
|
||||
"required": [
|
||||
"id",
|
||||
"created_on",
|
||||
"modified_on",
|
||||
"owner_user_id",
|
||||
"domain_names",
|
||||
"forward_host",
|
||||
"forward_port",
|
||||
"access_list_id",
|
||||
"certificate_id",
|
||||
"ssl_forced",
|
||||
"caching_enabled",
|
||||
"block_exploits",
|
||||
"advanced_config",
|
||||
"meta",
|
||||
"allow_websocket_upgrade",
|
||||
"http2_support",
|
||||
"forward_scheme",
|
||||
"enabled",
|
||||
"locations",
|
||||
"hsts_enabled",
|
||||
"hsts_subdomains",
|
||||
"certificate",
|
||||
"use_default_location",
|
||||
"ipv6"
|
||||
],
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "integer",
|
||||
"description": "Proxy Host ID",
|
||||
"minimum": 1,
|
||||
"example": 1
|
||||
},
|
||||
"created_on": {
|
||||
"type": "string",
|
||||
"description": "Created Date",
|
||||
"example": "2020-01-30T09:36:08.000Z"
|
||||
},
|
||||
"modified_on": {
|
||||
"type": "string",
|
||||
"description": "Modified Date",
|
||||
"example": "2020-01-30T09:41:04.000Z"
|
||||
},
|
||||
"owner_user_id": {
|
||||
"type": "integer",
|
||||
"minimum": 1,
|
||||
"example": 1
|
||||
},
|
||||
"domain_names": {
|
||||
"type": "array",
|
||||
"minItems": 1,
|
||||
"items": {
|
||||
"type": "string",
|
||||
"minLength": 1
|
||||
}
|
||||
},
|
||||
"forward_host": {
|
||||
"type": "string",
|
||||
"minLength": 1
|
||||
},
|
||||
"forward_port": {
|
||||
"type": "integer",
|
||||
"minimum": 1
|
||||
},
|
||||
"access_list_id": {
|
||||
"type": "integer"
|
||||
},
|
||||
"certificate_id": {
|
||||
"type": "integer"
|
||||
},
|
||||
"ssl_forced": {
|
||||
"type": "integer"
|
||||
},
|
||||
"caching_enabled": {
|
||||
"type": "integer"
|
||||
},
|
||||
"block_exploits": {
|
||||
"type": "integer"
|
||||
},
|
||||
"advanced_config": {
|
||||
"type": "string"
|
||||
},
|
||||
"meta": {
|
||||
"type": "object"
|
||||
},
|
||||
"allow_websocket_upgrade": {
|
||||
"type": "integer"
|
||||
},
|
||||
"http2_support": {
|
||||
"type": "integer"
|
||||
},
|
||||
"forward_scheme": {
|
||||
"type": "string"
|
||||
},
|
||||
"enabled": {
|
||||
"type": "integer"
|
||||
},
|
||||
"locations": {
|
||||
"type": "array"
|
||||
},
|
||||
"hsts_enabled": {
|
||||
"type": "integer"
|
||||
},
|
||||
"hsts_subdomains": {
|
||||
"type": "integer"
|
||||
},
|
||||
"certificate": {
|
||||
"type": "object",
|
||||
"nullable": true
|
||||
},
|
||||
"owner": {
|
||||
"type": "object",
|
||||
"nullable": true
|
||||
},
|
||||
"access_list": {
|
||||
"type": "object",
|
||||
"nullable": true
|
||||
},
|
||||
"use_default_location": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"ipv6": {
|
||||
"type": "boolean"
|
||||
}
|
||||
}
|
||||
},
|
||||
"ProxyHostsList": {
|
||||
"type": "array",
|
||||
"description": "Proxyn Hosts list",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/ProxyHostObject"
|
||||
}
|
||||
},
|
||||
"SettingObject": {
|
||||
"type": "object",
|
||||
"description": "Setting object",
|
||||
"required": [
|
||||
"id",
|
||||
"name",
|
||||
"description",
|
||||
"value",
|
||||
"meta"
|
||||
],
|
||||
"required": ["id", "name", "description", "value", "meta"],
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"id": {
|
||||
@ -1057,17 +1301,7 @@
|
||||
"UserObject": {
|
||||
"type": "object",
|
||||
"description": "User object",
|
||||
"required": [
|
||||
"id",
|
||||
"created_on",
|
||||
"modified_on",
|
||||
"is_disabled",
|
||||
"email",
|
||||
"name",
|
||||
"nickname",
|
||||
"avatar",
|
||||
"roles"
|
||||
],
|
||||
"required": ["id", "created_on", "modified_on", "is_disabled", "email", "name", "nickname", "avatar", "roles"],
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"id": {
|
||||
@ -1117,9 +1351,7 @@
|
||||
},
|
||||
"roles": {
|
||||
"description": "Roles applied",
|
||||
"example": [
|
||||
"admin"
|
||||
],
|
||||
"example": ["admin"],
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
@ -1137,10 +1369,7 @@
|
||||
"AuthObject": {
|
||||
"type": "object",
|
||||
"description": "Authentication Object",
|
||||
"required": [
|
||||
"type",
|
||||
"secret"
|
||||
],
|
||||
"required": ["type", "secret"],
|
||||
"properties": {
|
||||
"type": {
|
||||
"type": "string",
|
||||
@ -1167,64 +1396,37 @@
|
||||
"visibility": {
|
||||
"type": "string",
|
||||
"description": "Visibility Type",
|
||||
"enum": [
|
||||
"all",
|
||||
"user"
|
||||
]
|
||||
"enum": ["all", "user"]
|
||||
},
|
||||
"access_lists": {
|
||||
"type": "string",
|
||||
"description": "Access Lists Permissions",
|
||||
"enum": [
|
||||
"hidden",
|
||||
"view",
|
||||
"manage"
|
||||
]
|
||||
"enum": ["hidden", "view", "manage"]
|
||||
},
|
||||
"dead_hosts": {
|
||||
"type": "string",
|
||||
"description": "404 Hosts Permissions",
|
||||
"enum": [
|
||||
"hidden",
|
||||
"view",
|
||||
"manage"
|
||||
]
|
||||
"enum": ["hidden", "view", "manage"]
|
||||
},
|
||||
"proxy_hosts": {
|
||||
"type": "string",
|
||||
"description": "Proxy Hosts Permissions",
|
||||
"enum": [
|
||||
"hidden",
|
||||
"view",
|
||||
"manage"
|
||||
]
|
||||
"enum": ["hidden", "view", "manage"]
|
||||
},
|
||||
"redirection_hosts": {
|
||||
"type": "string",
|
||||
"description": "Redirection Permissions",
|
||||
"enum": [
|
||||
"hidden",
|
||||
"view",
|
||||
"manage"
|
||||
]
|
||||
"enum": ["hidden", "view", "manage"]
|
||||
},
|
||||
"streams": {
|
||||
"type": "string",
|
||||
"description": "Streams Permissions",
|
||||
"enum": [
|
||||
"hidden",
|
||||
"view",
|
||||
"manage"
|
||||
]
|
||||
"enum": ["hidden", "view", "manage"]
|
||||
},
|
||||
"certificates": {
|
||||
"type": "string",
|
||||
"description": "Certificates Permissions",
|
||||
"enum": [
|
||||
"hidden",
|
||||
"view",
|
||||
"manage"
|
||||
]
|
||||
"enum": ["hidden", "view", "manage"]
|
||||
}
|
||||
}
|
||||
},
|
||||
|
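The swagger additions above document the new `getProxyHosts` and `createProxyHost` operations. As a rough client-side illustration of the documented GET: only the `/nginx/proxy-hosts` path, the `expand` query parameter and Bearer auth come from the schema; the `/api` prefix, the token source and a runtime with global `fetch` (Node 18+ or a browser) are assumptions.

```js
// Hypothetical client call for the documented "getProxyHosts" operation.
async function getProxyHosts (baseUrl, token) {
	const res = await fetch(`${baseUrl}/api/nginx/proxy-hosts?expand=owner`, {
		headers: {Authorization: `Bearer ${token}`},
	});
	if (!res.ok) {
		throw new Error(`getProxyHosts failed: ${res.status}`);
	}
	return res.json(); // resolves to an array matching the ProxyHostsList schema
}
```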
@ -3,9 +3,6 @@
const logger = require('./logger').global;

async function appStart () {
// Create config file db settings if environment variables have been set
await createDbConfigFromEnvironment();

const migrate = require('./migrate');
const setup = require('./setup');
const app = require('./app');
@ -42,89 +39,6 @@ async function appStart () {
});
}

async function createDbConfigFromEnvironment() {
return new Promise((resolve, reject) => {
const envMysqlHost = process.env.DB_MYSQL_HOST || null;
const envMysqlPort = process.env.DB_MYSQL_PORT || null;
const envMysqlUser = process.env.DB_MYSQL_USER || null;
const envMysqlName = process.env.DB_MYSQL_NAME || null;
const envSqliteFile = process.env.DB_SQLITE_FILE || null;

if ((envMysqlHost && envMysqlPort && envMysqlUser && envMysqlName) || envSqliteFile) {
const fs = require('fs');
const filename = (process.env.NODE_CONFIG_DIR || './config') + '/' + (process.env.NODE_ENV || 'default') + '.json';
let configData = {};

try {
configData = require(filename);
} catch (err) {
// do nothing
}

if (configData.database && configData.database.engine && !configData.database.fromEnv) {
logger.info('Manual db configuration already exists, skipping config creation from environment variables');
resolve();
return;
}

if (envMysqlHost && envMysqlPort && envMysqlUser && envMysqlName) {
const newConfig = {
fromEnv: true,
engine: 'mysql',
host: envMysqlHost,
port: envMysqlPort,
user: envMysqlUser,
password: process.env.DB_MYSQL_PASSWORD,
name: envMysqlName,
};

if (JSON.stringify(configData.database) === JSON.stringify(newConfig)) {
// Config is unchanged, skip overwrite
resolve();
return;
}

logger.info('Generating MySQL db configuration from environment variables');
configData.database = newConfig;

} else {
const newConfig = {
fromEnv: true,
engine: 'knex-native',
knex: {
client: 'sqlite3',
connection: {
filename: envSqliteFile
},
useNullAsDefault: true
}
};
if (JSON.stringify(configData.database) === JSON.stringify(newConfig)) {
// Config is unchanged, skip overwrite
resolve();
return;
}

logger.info('Generating Sqlite db configuration from environment variables');
configData.database = newConfig;
}

// Write config
fs.writeFile(filename, JSON.stringify(configData, null, 2), (err) => {
if (err) {
logger.error('Could not write db config to config file: ' + filename);
reject(err);
} else {
logger.info('Wrote db configuration to config file: ' + filename);
resolve();
}
});
} else {
resolve();
}
});
}

try {
appStart();
} catch (err) {
|
||||
|
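For reference, a minimal sketch of the JSON this removed helper wrote to (NODE_CONFIG_DIR || './config')/(NODE_ENV || 'default') + '.json' when the DB_MYSQL_* variables were present; the host, port, user, password and name values below are illustrative assumptions, not project defaults:

{
"database": {
"fromEnv": true,
"engine": "mysql",
"host": "db",
"port": "3306",
"user": "npm",
"password": "secret",
"name": "npm"
}
}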
@ -3,13 +3,13 @@ const fs = require('fs');
|
||||
const batchflow = require('batchflow');
|
||||
const logger = require('../logger').access;
|
||||
const error = require('../lib/error');
|
||||
const utils = require('../lib/utils');
|
||||
const accessListModel = require('../models/access_list');
|
||||
const accessListAuthModel = require('../models/access_list_auth');
|
||||
const accessListClientModel = require('../models/access_list_client');
|
||||
const proxyHostModel = require('../models/proxy_host');
|
||||
const internalAuditLog = require('./audit-log');
|
||||
const internalNginx = require('./nginx');
|
||||
const utils = require('../lib/utils');
|
||||
|
||||
function omissions () {
|
||||
return ['is_deleted'];
|
||||
@ -27,13 +27,13 @@ const internalAccessList = {
|
||||
.then((/*access_data*/) => {
|
||||
return accessListModel
|
||||
.query()
|
||||
.omit(omissions())
|
||||
.insertAndFetch({
|
||||
name: data.name,
|
||||
satisfy_any: data.satisfy_any,
|
||||
pass_auth: data.pass_auth,
|
||||
owner_user_id: access.token.getUserId(1)
|
||||
});
|
||||
})
|
||||
.then(utils.omitRow(omissions()));
|
||||
})
|
||||
.then((row) => {
|
||||
data.id = row.id;
|
||||
@ -204,7 +204,6 @@ const internalAccessList = {
|
||||
});
|
||||
}
|
||||
})
|
||||
.then(internalNginx.reload)
|
||||
.then(() => {
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
@ -218,7 +217,7 @@ const internalAccessList = {
|
||||
// re-fetch with expansions
|
||||
return internalAccessList.get(access, {
|
||||
id: data.id,
|
||||
expand: ['owner', 'items', 'clients', 'proxy_hosts.access_list.[clients,items]']
|
||||
expand: ['owner', 'items', 'clients', 'proxy_hosts.[certificate,access_list.[clients,items]]']
|
||||
}, true /* <- skip masking */);
|
||||
})
|
||||
.then((row) => {
|
||||
@ -227,7 +226,7 @@ const internalAccessList = {
|
||||
if (row.proxy_host_count) {
|
||||
return internalNginx.bulkGenerateConfigs('proxy_host', row.proxy_hosts);
|
||||
}
|
||||
})
|
||||
}).then(internalNginx.reload)
|
||||
.then(() => {
|
||||
return internalAccessList.maskItems(row);
|
||||
});
|
||||
@ -256,35 +255,31 @@ const internalAccessList = {
|
||||
.joinRaw('LEFT JOIN `proxy_host` ON `proxy_host`.`access_list_id` = `access_list`.`id` AND `proxy_host`.`is_deleted` = 0')
|
||||
.where('access_list.is_deleted', 0)
|
||||
.andWhere('access_list.id', data.id)
|
||||
.allowEager('[owner,items,clients,proxy_hosts.[*, access_list.[clients,items]]]')
|
||||
.omit(['access_list.is_deleted'])
|
||||
.allowGraph('[owner,items,clients,proxy_hosts.[certificate,access_list.[clients,items]]]')
|
||||
.first();
|
||||
|
||||
if (access_data.permission_visibility !== 'all') {
|
||||
query.andWhere('access_list.owner_user_id', access.token.getUserId(1));
|
||||
}
|
||||
|
||||
// Custom omissions
|
||||
if (typeof data.omit !== 'undefined' && data.omit !== null) {
|
||||
query.omit(data.omit);
|
||||
}
|
||||
|
||||
if (typeof data.expand !== 'undefined' && data.expand !== null) {
|
||||
query.eager('[' + data.expand.join(', ') + ']');
|
||||
query.withGraphFetched('[' + data.expand.join(', ') + ']');
|
||||
}
|
||||
|
||||
return query;
|
||||
return query.then(utils.omitRow(omissions()));
|
||||
})
|
||||
.then((row) => {
|
||||
if (row) {
|
||||
if (!skip_masking && typeof row.items !== 'undefined' && row.items) {
|
||||
row = internalAccessList.maskItems(row);
|
||||
}
|
||||
|
||||
return _.omit(row, omissions());
|
||||
} else {
|
||||
if (!row) {
|
||||
throw new error.ItemNotFoundError(data.id);
|
||||
}
|
||||
if (!skip_masking && typeof row.items !== 'undefined' && row.items) {
|
||||
row = internalAccessList.maskItems(row);
|
||||
}
|
||||
// Custom omissions
|
||||
if (typeof data.omit !== 'undefined' && data.omit !== null) {
|
||||
row = _.omit(row, data.omit);
|
||||
}
|
||||
return row;
|
||||
});
|
||||
},
|
||||
|
||||
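The pattern in the hunk above repeats throughout this diff: Objection's deprecated .eager()/.allowEager()/.omit() query modifiers are replaced by .withGraphFetched()/.allowGraph(), and column omission moves out of the query builder into a post-fetch helper. A rough sketch of what utils.omitRow and utils.omitRows are assumed to look like (the real helpers live in backend/lib/utils.js and are not shown in this diff):

const _ = require('lodash');

// omitRow(['is_deleted']) returns a function that can be dropped into a .then() chain
const omitRow = (omissions) => (row) => _.omit(row, omissions);

// omitRows applies the same omission to every row of a result set
const omitRows = (omissions) => (rows) => rows.map((row) => _.omit(row, omissions));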
@ -381,8 +376,7 @@ const internalAccessList = {
|
||||
.joinRaw('LEFT JOIN `proxy_host` ON `proxy_host`.`access_list_id` = `access_list`.`id` AND `proxy_host`.`is_deleted` = 0')
|
||||
.where('access_list.is_deleted', 0)
|
||||
.groupBy('access_list.id')
|
||||
.omit(['access_list.is_deleted'])
|
||||
.allowEager('[owner,items,clients]')
|
||||
.allowGraph('[owner,items,clients]')
|
||||
.orderBy('access_list.name', 'ASC');
|
||||
|
||||
if (access_data.permission_visibility !== 'all') {
|
||||
@ -397,10 +391,10 @@ const internalAccessList = {
|
||||
}
|
||||
|
||||
if (typeof expand !== 'undefined' && expand !== null) {
|
||||
query.eager('[' + expand.join(', ') + ']');
|
||||
query.withGraphFetched('[' + expand.join(', ') + ']');
|
||||
}
|
||||
|
||||
return query;
|
||||
return query.then(utils.omitRows(omissions()));
|
||||
})
|
||||
.then((rows) => {
|
||||
if (rows) {
|
||||
@ -507,7 +501,7 @@ const internalAccessList = {
|
||||
if (typeof item.password !== 'undefined' && item.password.length) {
|
||||
logger.info('Adding: ' + item.username);
|
||||
|
||||
utils.exec('/usr/bin/htpasswd -b "' + htpasswd_file + '" "' + item.username + '" "' + item.password + '"')
|
||||
utils.execFile('/usr/bin/htpasswd', ['-b', htpasswd_file, item.username, item.password])
|
||||
.then((/*result*/) => {
|
||||
next();
|
||||
})
|
||||
|
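The switch from utils.exec to utils.execFile above is more than cosmetic: exec passes a single string through a shell, so a username or password containing quotes or semicolons could break out of the command, while execFile hands each argument directly to the binary. A small illustration using Node's child_process directly (utils is assumed to wrap this in promises; values are placeholders):

const { execFile } = require('child_process');

// Illustrative values only
const file = '/data/access/1';
const username = 'alice';
const password = 'p"ss;word';

// Argument array: no shell involved, so the quote and semicolon above are harmless
execFile('/usr/bin/htpasswd', ['-b', file, username, password], (err, stdout) => {
if (err) { console.error(err); return; }
console.log(stdout);
});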
@ -19,7 +19,7 @@ const internalAuditLog = {
|
||||
.orderBy('created_on', 'DESC')
|
||||
.orderBy('id', 'DESC')
|
||||
.limit(100)
|
||||
.allowEager('[user]');
|
||||
.allowGraph('[user]');
|
||||
|
||||
// Query is used for searching
|
||||
if (typeof search_query === 'string') {
|
||||
@ -29,7 +29,7 @@ const internalAuditLog = {
|
||||
}
|
||||
|
||||
if (typeof expand !== 'undefined' && expand !== null) {
|
||||
query.eager('[' + expand.join(', ') + ']');
|
||||
query.withGraphFetched('[' + expand.join(', ') + ']');
|
||||
}
|
||||
|
||||
return query;
|
||||
|
@ -1,20 +1,26 @@
const _ = require('lodash');
const fs = require('fs');
const tempWrite = require('temp-write');
const moment = require('moment');
const logger = require('../logger').ssl;
const error = require('../lib/error');
const utils = require('../lib/utils');
const certificateModel = require('../models/certificate');
const dnsPlugins = require('../global/certbot-dns-plugins');
const internalAuditLog = require('./audit-log');
const internalNginx = require('./nginx');
const internalHost = require('./host');
const letsencryptStaging = process.env.NODE_ENV !== 'production';
const _ = require('lodash');
const fs = require('fs');
const https = require('https');
const tempWrite = require('temp-write');
const moment = require('moment');
const logger = require('../logger').ssl;
const config = require('../lib/config');
const error = require('../lib/error');
const utils = require('../lib/utils');
const certificateModel = require('../models/certificate');
const tokenModel = require('../models/token');
const dnsPlugins = require('../global/certbot-dns-plugins.json');
const internalAuditLog = require('./audit-log');
const internalNginx = require('./nginx');
const internalHost = require('./host');
const certbot = require('../lib/certbot');
const archiver = require('archiver');
const path = require('path');
const { isArray } = require('lodash');

const letsencryptStaging = config.useLetsencryptStaging();
const letsencryptConfig = '/etc/letsencrypt.ini';
const certbotCommand = 'certbot';
const archiver = require('archiver');
const path = require('path');

function omissions() {
return ['is_deleted'];
|
||||
@ -22,10 +28,11 @@ function omissions() {
|
||||
|
||||
const internalCertificate = {
|
||||
|
||||
allowedSslFiles: ['certificate', 'certificate_key', 'intermediate_certificate'],
|
||||
intervalTimeout: 1000 * 60 * 60, // 1 hour
|
||||
interval: null,
|
||||
intervalProcessing: false,
|
||||
allowedSslFiles: ['certificate', 'certificate_key', 'intermediate_certificate'],
|
||||
intervalTimeout: 1000 * 60 * 60, // 1 hour
|
||||
interval: null,
|
||||
intervalProcessing: false,
|
||||
renewBeforeExpirationBy: [30, 'days'],
|
||||
|
||||
initTimer: () => {
|
||||
logger.info('Let\'s Encrypt Renewal Timer initialized');
|
||||
@ -40,60 +47,51 @@ const internalCertificate = {
|
||||
processExpiringHosts: () => {
|
||||
if (!internalCertificate.intervalProcessing) {
|
||||
internalCertificate.intervalProcessing = true;
|
||||
logger.info('Renewing SSL certs close to expiry...');
|
||||
logger.info('Renewing SSL certs expiring within ' + internalCertificate.renewBeforeExpirationBy[0] + ' ' + internalCertificate.renewBeforeExpirationBy[1] + ' ...');
|
||||
|
||||
const cmd = certbotCommand + ' renew --non-interactive --quiet ' +
|
||||
'--config "' + letsencryptConfig + '" ' +
|
||||
'--preferred-challenges "dns,http" ' +
|
||||
'--disable-hook-validation ' +
|
||||
(letsencryptStaging ? '--staging' : '');
|
||||
const expirationThreshold = moment().add(internalCertificate.renewBeforeExpirationBy[0], internalCertificate.renewBeforeExpirationBy[1]).format('YYYY-MM-DD HH:mm:ss');
|
||||
|
||||
return utils.exec(cmd)
|
||||
.then((result) => {
|
||||
if (result) {
|
||||
logger.info('Renew Result: ' + result);
|
||||
// Fetch all the letsencrypt certs from the db that will expire within the configured threshold
|
||||
certificateModel
|
||||
.query()
|
||||
.where('is_deleted', 0)
|
||||
.andWhere('provider', 'letsencrypt')
|
||||
.andWhere('expires_on', '<', expirationThreshold)
|
||||
.then((certificates) => {
|
||||
if (!certificates || !certificates.length) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return internalNginx.reload()
|
||||
.then(() => {
|
||||
logger.info('Renew Complete');
|
||||
return result;
|
||||
});
|
||||
})
|
||||
.then(() => {
|
||||
// Now go and fetch all the letsencrypt certs from the db and query the files and update expiry times
|
||||
return certificateModel
|
||||
.query()
|
||||
.where('is_deleted', 0)
|
||||
.andWhere('provider', 'letsencrypt')
|
||||
.then((certificates) => {
|
||||
if (certificates && certificates.length) {
|
||||
let promises = [];
|
||||
|
||||
certificates.map(function (certificate) {
|
||||
promises.push(
|
||||
internalCertificate.getCertificateInfoFromFile('/etc/letsencrypt/live/npm-' + certificate.id + '/fullchain.pem')
|
||||
.then((cert_info) => {
|
||||
return certificateModel
|
||||
.query()
|
||||
.where('id', certificate.id)
|
||||
.andWhere('provider', 'letsencrypt')
|
||||
.patch({
|
||||
expires_on: moment(cert_info.dates.to, 'X').format('YYYY-MM-DD HH:mm:ss')
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
// Don't want to stop the train here, just log the error
|
||||
logger.error(err.message);
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
return Promise.all(promises);
|
||||
}
|
||||
});
|
||||
/**
|
||||
* Renews must be run sequentially or we'll get an error 'Another
|
||||
* instance of Certbot is already running.'
|
||||
*/
|
||||
let sequence = Promise.resolve();
|
||||
|
||||
certificates.forEach(function (certificate) {
|
||||
sequence = sequence.then(() =>
|
||||
internalCertificate
|
||||
.renew(
|
||||
{
|
||||
can: () =>
|
||||
Promise.resolve({
|
||||
permission_visibility: 'all',
|
||||
}),
|
||||
token: new tokenModel(),
|
||||
},
|
||||
{ id: certificate.id },
|
||||
)
|
||||
.catch((err) => {
|
||||
// Don't want to stop the train here, just log the error
|
||||
logger.error(err.message);
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
return sequence;
|
||||
})
|
||||
.then(() => {
|
||||
logger.info('Completed SSL cert renew process');
|
||||
internalCertificate.intervalProcessing = false;
|
||||
})
|
||||
.catch((err) => {
|
||||
@ -114,13 +112,13 @@ const internalCertificate = {
|
||||
data.owner_user_id = access.token.getUserId(1);
|
||||
|
||||
if (data.provider === 'letsencrypt') {
|
||||
data.nice_name = data.domain_names.sort().join(', ');
|
||||
data.nice_name = data.domain_names.join(', ');
|
||||
}
|
||||
|
||||
return certificateModel
|
||||
.query()
|
||||
.omit(omissions())
|
||||
.insertAndFetch(data);
|
||||
.insertAndFetch(data)
|
||||
.then(utils.omitRow(omissions()));
|
||||
})
|
||||
.then((certificate) => {
|
||||
if (certificate.provider === 'letsencrypt') {
|
||||
@ -169,6 +167,7 @@ const internalCertificate = {
|
||||
// 3. Generate the LE config
|
||||
return internalNginx.generateLetsEncryptRequestConfig(certificate)
|
||||
.then(internalNginx.reload)
|
||||
.then(async() => await new Promise((r) => setTimeout(r, 5000)))
|
||||
.then(() => {
|
||||
// 4. Request cert
|
||||
return internalCertificate.requestLetsEncryptSsl(certificate);
|
||||
@ -266,8 +265,8 @@ const internalCertificate = {
|
||||
|
||||
return certificateModel
|
||||
.query()
|
||||
.omit(omissions())
|
||||
.patchAndFetchById(row.id, data)
|
||||
.then(utils.omitRow(omissions()))
|
||||
.then((saved_row) => {
|
||||
saved_row.meta = internalCertificate.cleanMeta(saved_row.meta);
|
||||
data.meta = internalCertificate.cleanMeta(data.meta);
|
||||
@ -285,7 +284,7 @@ const internalCertificate = {
|
||||
meta: _.omit(data, ['expires_on']) // this prevents json circular reference because expires_on might be raw
|
||||
})
|
||||
.then(() => {
|
||||
return _.omit(saved_row, omissions());
|
||||
return saved_row;
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -310,30 +309,28 @@ const internalCertificate = {
|
||||
.query()
|
||||
.where('is_deleted', 0)
|
||||
.andWhere('id', data.id)
|
||||
.allowEager('[owner]')
|
||||
.allowGraph('[owner]')
|
||||
.first();
|
||||
|
||||
if (access_data.permission_visibility !== 'all') {
|
||||
query.andWhere('owner_user_id', access.token.getUserId(1));
|
||||
}
|
||||
|
||||
// Custom omissions
|
||||
if (typeof data.omit !== 'undefined' && data.omit !== null) {
|
||||
query.omit(data.omit);
|
||||
}
|
||||
|
||||
if (typeof data.expand !== 'undefined' && data.expand !== null) {
|
||||
query.eager('[' + data.expand.join(', ') + ']');
|
||||
query.withGraphFetched('[' + data.expand.join(', ') + ']');
|
||||
}
|
||||
|
||||
return query;
|
||||
return query.then(utils.omitRow(omissions()));
|
||||
})
|
||||
.then((row) => {
|
||||
if (row) {
|
||||
return _.omit(row, omissions());
|
||||
} else {
|
||||
if (!row) {
|
||||
throw new error.ItemNotFoundError(data.id);
|
||||
}
|
||||
// Custom omissions
|
||||
if (typeof data.omit !== 'undefined' && data.omit !== null) {
|
||||
row = _.omit(row, data.omit);
|
||||
}
|
||||
return row;
|
||||
});
|
||||
},
|
||||
|
||||
@ -463,8 +460,7 @@ const internalCertificate = {
|
||||
.query()
|
||||
.where('is_deleted', 0)
|
||||
.groupBy('id')
|
||||
.omit(['is_deleted'])
|
||||
.allowEager('[owner]')
|
||||
.allowGraph('[owner]')
|
||||
.orderBy('nice_name', 'ASC');
|
||||
|
||||
if (access_data.permission_visibility !== 'all') {
|
||||
@ -474,15 +470,15 @@ const internalCertificate = {
|
||||
// Query is used for searching
|
||||
if (typeof search_query === 'string') {
|
||||
query.where(function () {
|
||||
this.where('name', 'like', '%' + search_query + '%');
|
||||
this.where('nice_name', 'like', '%' + search_query + '%');
|
||||
});
|
||||
}
|
||||
|
||||
if (typeof expand !== 'undefined' && expand !== null) {
|
||||
query.eager('[' + expand.join(', ') + ']');
|
||||
query.withGraphFetched('[' + expand.join(', ') + ']');
|
||||
}
|
||||
|
||||
return query;
|
||||
return query.then(utils.omitRows(omissions()));
|
||||
});
|
||||
},
|
||||
|
||||
@ -659,7 +655,6 @@ const internalCertificate = {
|
||||
meta: _.clone(row.meta) // Prevent the update method from changing this value that we'll use later
|
||||
})
|
||||
.then((certificate) => {
|
||||
console.log('ROWMETA:', row.meta);
|
||||
certificate.meta = row.meta;
|
||||
return internalCertificate.writeCustomCert(certificate);
|
||||
});
|
||||
@ -832,8 +827,10 @@ const internalCertificate = {
|
||||
requestLetsEncryptSsl: (certificate) => {
|
||||
logger.info('Requesting Let\'sEncrypt certificates for Cert #' + certificate.id + ': ' + certificate.domain_names.join(', '));
|
||||
|
||||
const cmd = certbotCommand + ' certonly --non-interactive ' +
|
||||
const cmd = certbotCommand + ' certonly ' +
|
||||
'--config "' + letsencryptConfig + '" ' +
|
||||
'--work-dir "/tmp/letsencrypt-lib" ' +
|
||||
'--logs-dir "/tmp/letsencrypt-log" ' +
|
||||
'--cert-name "npm-' + certificate.id + '" ' +
|
||||
'--agree-tos ' +
|
||||
'--authenticator webroot ' +
|
||||
@ -853,41 +850,40 @@ const internalCertificate = {
|
||||
|
||||
/**
|
||||
* @param {Object} certificate the certificate row
|
||||
* @param {String} dns_provider the dns provider name (key used in `certbot-dns-plugins.js`)
|
||||
* @param {String} dns_provider the dns provider name (key used in `certbot-dns-plugins.json`)
|
||||
* @param {String | null} credentials the content of this providers credentials file
|
||||
* @param {String} propagation_seconds the cloudflare api token
|
||||
* @param {String} propagation_seconds
|
||||
* @returns {Promise}
|
||||
*/
|
||||
requestLetsEncryptSslWithDnsChallenge: (certificate) => {
|
||||
const dns_plugin = dnsPlugins[certificate.meta.dns_provider];
|
||||
|
||||
if (!dns_plugin) {
|
||||
throw Error(`Unknown DNS provider '${certificate.meta.dns_provider}'`);
|
||||
}
|
||||
|
||||
logger.info(`Requesting Let'sEncrypt certificates via ${dns_plugin.display_name} for Cert #${certificate.id}: ${certificate.domain_names.join(', ')}`);
|
||||
requestLetsEncryptSslWithDnsChallenge: async (certificate) => {
|
||||
await certbot.installPlugin(certificate.meta.dns_provider);
|
||||
const dnsPlugin = dnsPlugins[certificate.meta.dns_provider];
|
||||
logger.info(`Requesting Let'sEncrypt certificates via ${dnsPlugin.name} for Cert #${certificate.id}: ${certificate.domain_names.join(', ')}`);
|
||||
|
||||
const credentialsLocation = '/etc/letsencrypt/credentials/credentials-' + certificate.id;
|
||||
const credentialsCmd = 'mkdir -p /etc/letsencrypt/credentials 2> /dev/null; echo \'' + certificate.meta.dns_provider_credentials.replace('\'', '\\\'') + '\' > \'' + credentialsLocation + '\' && chmod 600 \'' + credentialsLocation + '\'';
|
||||
const prepareCmd = 'pip install ' + dns_plugin.package_name + '==' + dns_plugin.package_version + ' ' + dns_plugin.dependencies;
|
||||
fs.mkdirSync('/etc/letsencrypt/credentials', { recursive: true });
|
||||
fs.writeFileSync(credentialsLocation, certificate.meta.dns_provider_credentials, {mode: 0o600});
|
||||
|
||||
// Whether the plugin has a --<name>-credentials argument
|
||||
const hasConfigArg = certificate.meta.dns_provider !== 'route53';
|
||||
|
||||
let mainCmd = certbotCommand + ' certonly --non-interactive ' +
|
||||
let mainCmd = certbotCommand + ' certonly ' +
|
||||
'--config "' + letsencryptConfig + '" ' +
|
||||
'--work-dir "/tmp/letsencrypt-lib" ' +
|
||||
'--logs-dir "/tmp/letsencrypt-log" ' +
|
||||
'--cert-name "npm-' + certificate.id + '" ' +
|
||||
'--agree-tos ' +
|
||||
'--email "' + certificate.meta.letsencrypt_email + '" ' +
|
||||
'--domains "' + certificate.domain_names.join(',') + '" ' +
|
||||
'--authenticator ' + dns_plugin.full_plugin_name + ' ' +
|
||||
'--authenticator ' + dnsPlugin.full_plugin_name + ' ' +
|
||||
(
|
||||
hasConfigArg
|
||||
? '--' + dns_plugin.full_plugin_name + '-credentials "' + credentialsLocation + '"'
|
||||
? '--' + dnsPlugin.full_plugin_name + '-credentials "' + credentialsLocation + '"'
|
||||
: ''
|
||||
) +
|
||||
(
|
||||
certificate.meta.propagation_seconds !== undefined
|
||||
? ' --' + dns_plugin.full_plugin_name + '-propagation-seconds ' + certificate.meta.propagation_seconds
|
||||
? ' --' + dnsPlugin.full_plugin_name + '-propagation-seconds ' + certificate.meta.propagation_seconds
|
||||
: ''
|
||||
) +
|
||||
(letsencryptStaging ? ' --staging' : '');
|
||||
@ -897,24 +893,21 @@ const internalCertificate = {
|
||||
mainCmd = 'AWS_CONFIG_FILE=\'' + credentialsLocation + '\' ' + mainCmd;
|
||||
}
|
||||
|
||||
logger.info('Command:', `${credentialsCmd} && ${prepareCmd} && ${mainCmd}`);
|
||||
if (certificate.meta.dns_provider === 'duckdns') {
|
||||
mainCmd = mainCmd + ' --dns-duckdns-no-txt-restore';
|
||||
}
|
||||
|
||||
return utils.exec(credentialsCmd)
|
||||
.then(() => {
|
||||
return utils.exec(prepareCmd)
|
||||
.then(() => {
|
||||
return utils.exec(mainCmd)
|
||||
.then(async (result) => {
|
||||
logger.info(result);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
}).catch(async (err) => {
|
||||
// Don't fail if file does not exist
|
||||
const delete_credentialsCmd = `rm -f '${credentialsLocation}' || true`;
|
||||
await utils.exec(delete_credentialsCmd);
|
||||
throw err;
|
||||
});
|
||||
logger.info('Command:', mainCmd);
|
||||
|
||||
try {
|
||||
const result = await utils.exec(mainCmd);
|
||||
logger.info(result);
|
||||
return result;
|
||||
} catch (err) {
|
||||
// Don't fail if file does not exist, so no need for action in the callback
|
||||
fs.unlink(credentialsLocation, () => {});
|
||||
throw err;
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
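Putting the pieces above together, the assembled mainCmd for a DNS challenge ends up looking roughly like the line below. The provider (cloudflare), email, domains and certificate id are illustrative assumptions; the plugin name comes from certbot-dns-plugins.json, the credentials path from the certificate id, and --staging is appended only when the staging config is enabled:

certbot certonly --config "/etc/letsencrypt.ini" --work-dir "/tmp/letsencrypt-lib" --logs-dir "/tmp/letsencrypt-log" --cert-name "npm-7" --agree-tos --email "admin@example.com" --domains "example.com,www.example.com" --authenticator dns-cloudflare --dns-cloudflare-credentials "/etc/letsencrypt/credentials/credentials-7" --dns-cloudflare-propagation-seconds 30 --staging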
@ -969,10 +962,13 @@ const internalCertificate = {
|
||||
renewLetsEncryptSsl: (certificate) => {
|
||||
logger.info('Renewing Let\'sEncrypt certificates for Cert #' + certificate.id + ': ' + certificate.domain_names.join(', '));
|
||||
|
||||
const cmd = certbotCommand + ' renew --force-renewal --non-interactive ' +
|
||||
const cmd = certbotCommand + ' renew --force-renewal ' +
|
||||
'--config "' + letsencryptConfig + '" ' +
|
||||
'--work-dir "/tmp/letsencrypt-lib" ' +
|
||||
'--logs-dir "/tmp/letsencrypt-log" ' +
|
||||
'--cert-name "npm-' + certificate.id + '" ' +
|
||||
'--preferred-challenges "dns,http" ' +
|
||||
'--no-random-sleep-on-renew ' +
|
||||
'--disable-hook-validation ' +
|
||||
(letsencryptStaging ? '--staging' : '');
|
||||
|
||||
@ -990,17 +986,21 @@ const internalCertificate = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
renewLetsEncryptSslWithDnsChallenge: (certificate) => {
|
||||
const dns_plugin = dnsPlugins[certificate.meta.dns_provider];
|
||||
const dnsPlugin = dnsPlugins[certificate.meta.dns_provider];
|
||||
|
||||
if (!dns_plugin) {
|
||||
if (!dnsPlugin) {
|
||||
throw Error(`Unknown DNS provider '${certificate.meta.dns_provider}'`);
|
||||
}
|
||||
|
||||
logger.info(`Renewing Let'sEncrypt certificates via ${dns_plugin.display_name} for Cert #${certificate.id}: ${certificate.domain_names.join(', ')}`);
|
||||
logger.info(`Renewing Let'sEncrypt certificates via ${dnsPlugin.name} for Cert #${certificate.id}: ${certificate.domain_names.join(', ')}`);
|
||||
|
||||
let mainCmd = certbotCommand + ' renew --non-interactive ' +
|
||||
let mainCmd = certbotCommand + ' renew --force-renewal ' +
|
||||
'--config "' + letsencryptConfig + '" ' +
|
||||
'--work-dir "/tmp/letsencrypt-lib" ' +
|
||||
'--logs-dir "/tmp/letsencrypt-log" ' +
|
||||
'--cert-name "npm-' + certificate.id + '" ' +
|
||||
'--disable-hook-validation' +
|
||||
'--disable-hook-validation ' +
|
||||
'--no-random-sleep-on-renew ' +
|
||||
(letsencryptStaging ? ' --staging' : '');
|
||||
|
||||
// Prepend the path to the credentials file as an environment variable
|
||||
@ -1026,7 +1026,10 @@ const internalCertificate = {
|
||||
revokeLetsEncryptSsl: (certificate, throw_errors) => {
|
||||
logger.info('Revoking Let\'sEncrypt certificates for Cert #' + certificate.id + ': ' + certificate.domain_names.join(', '));
|
||||
|
||||
const mainCmd = certbotCommand + ' revoke --non-interactive ' +
|
||||
const mainCmd = certbotCommand + ' revoke ' +
|
||||
'--config "' + letsencryptConfig + '" ' +
|
||||
'--work-dir "/tmp/letsencrypt-lib" ' +
|
||||
'--logs-dir "/tmp/letsencrypt-log" ' +
|
||||
'--cert-path "/etc/letsencrypt/live/npm-' + certificate.id + '/fullchain.pem" ' +
|
||||
'--delete-after-revoke ' +
|
||||
(letsencryptStaging ? '--staging' : '');
|
||||
@ -1119,6 +1122,108 @@ const internalCertificate = {
|
||||
} else {
|
||||
return Promise.resolve();
|
||||
}
|
||||
},
|
||||
|
||||
testHttpsChallenge: async (access, domains) => {
|
||||
await access.can('certificates:list');
|
||||
|
||||
if (!isArray(domains)) {
|
||||
throw new error.InternalValidationError('Domains must be an array of strings');
|
||||
}
|
||||
if (domains.length === 0) {
|
||||
throw new error.InternalValidationError('No domains provided');
|
||||
}
|
||||
|
||||
// Create a test challenge file
|
||||
const testChallengeDir = '/data/letsencrypt-acme-challenge/.well-known/acme-challenge';
|
||||
const testChallengeFile = testChallengeDir + '/test-challenge';
|
||||
fs.mkdirSync(testChallengeDir, {recursive: true});
|
||||
fs.writeFileSync(testChallengeFile, 'Success', {encoding: 'utf8'});
|
||||
|
||||
async function performTestForDomain (domain) {
|
||||
logger.info('Testing http challenge for ' + domain);
|
||||
const url = `http://${domain}/.well-known/acme-challenge/test-challenge`;
|
||||
const formBody = `method=G&url=${encodeURI(url)}&bodytype=T&requestbody=&headername=User-Agent&headervalue=None&locationid=1&ch=false&cc=false`;
|
||||
const options = {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'User-Agent': 'Mozilla/5.0',
|
||||
'Content-Type': 'application/x-www-form-urlencoded',
|
||||
'Content-Length': Buffer.byteLength(formBody)
|
||||
}
|
||||
};
|
||||
|
||||
const result = await new Promise((resolve) => {
|
||||
|
||||
const req = https.request('https://www.site24x7.com/tools/restapi-tester', options, function (res) {
|
||||
let responseBody = '';
|
||||
|
||||
res.on('data', (chunk) => responseBody = responseBody + chunk);
|
||||
res.on('end', function () {
|
||||
try {
|
||||
const parsedBody = JSON.parse(responseBody + '');
|
||||
if (res.statusCode !== 200) {
|
||||
logger.warn(`Failed to test HTTP challenge for domain ${domain} because HTTP status code ${res.statusCode} was returned: ${parsedBody.message}`);
|
||||
resolve(undefined);
|
||||
} else {
|
||||
resolve(parsedBody);
|
||||
}
|
||||
} catch (err) {
|
||||
if (res.statusCode !== 200) {
|
||||
logger.warn(`Failed to test HTTP challenge for domain ${domain} because HTTP status code ${res.statusCode} was returned`);
|
||||
} else {
|
||||
logger.warn(`Failed to test HTTP challenge for domain ${domain} because response failed to be parsed: ${err.message}`);
|
||||
}
|
||||
resolve(undefined);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// Make sure to write the request body.
|
||||
req.write(formBody);
|
||||
req.end();
|
||||
req.on('error', function (e) { logger.warn(`Failed to test HTTP challenge for domain ${domain}`, e);
|
||||
resolve(undefined); });
|
||||
});
|
||||
|
||||
if (!result) {
|
||||
// Some error occurred while trying to get the data
|
||||
return 'failed';
|
||||
} else if (result.error) {
|
||||
logger.info(`HTTP challenge test failed for domain ${domain} because error was returned: ${result.error.msg}`);
|
||||
return `other:${result.error.msg}`;
|
||||
} else if (`${result.responsecode}` === '200' && result.htmlresponse === 'Success') {
|
||||
// Server exists and has responded with the correct data
|
||||
return 'ok';
|
||||
} else if (`${result.responsecode}` === '200') {
|
||||
// Server exists but has responded with wrong data
|
||||
logger.info(`HTTP challenge test failed for domain ${domain} because of invalid returned data:`, result.htmlresponse);
|
||||
return 'wrong-data';
|
||||
} else if (`${result.responsecode}` === '404') {
|
||||
// Server exists but responded with a 404
|
||||
logger.info(`HTTP challenge test failed for domain ${domain} because code 404 was returned`);
|
||||
return '404';
|
||||
} else if (`${result.responsecode}` === '0' || (typeof result.reason === 'string' && result.reason.toLowerCase() === 'host unavailable')) {
|
||||
// Server does not exist at domain
|
||||
logger.info(`HTTP challenge test failed for domain ${domain} the host was not found`);
|
||||
return 'no-host';
|
||||
} else {
|
||||
// Other errors
|
||||
logger.info(`HTTP challenge test failed for domain ${domain} because code ${result.responsecode} was returned`);
|
||||
return `other:${result.responsecode}`;
|
||||
}
|
||||
}
|
||||
|
||||
const results = {};
|
||||
|
||||
for (const domain of domains){
|
||||
results[domain] = await performTestForDomain(domain);
|
||||
}
|
||||
|
||||
// Remove the test challenge file
|
||||
fs.unlinkSync(testChallengeFile);
|
||||
|
||||
return results;
|
||||
}
|
||||
};
|
||||
|
||||
|
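The value resolved by testHttpsChallenge is a plain object keyed by domain, with one of the status strings produced above ('ok', 'wrong-data', '404', 'no-host', 'failed' or 'other:...'). An illustrative shape (domains are placeholders):

{
"example.com": "ok",
"blog.example.com": "404",
"missing.example.net": "no-host",
"broken.example.org": "other:503"
}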
@ -1,5 +1,6 @@
|
||||
const _ = require('lodash');
|
||||
const error = require('../lib/error');
|
||||
const utils = require('../lib/utils');
|
||||
const deadHostModel = require('../models/dead_host');
|
||||
const internalHost = require('./host');
|
||||
const internalNginx = require('./nginx');
|
||||
@ -49,8 +50,8 @@ const internalDeadHost = {
|
||||
|
||||
return deadHostModel
|
||||
.query()
|
||||
.omit(omissions())
|
||||
.insertAndFetch(data);
|
||||
.insertAndFetch(data)
|
||||
.then(utils.omitRow(omissions()));
|
||||
})
|
||||
.then((row) => {
|
||||
if (create_certificate) {
|
||||
@ -218,31 +219,28 @@ const internalDeadHost = {
|
||||
.query()
|
||||
.where('is_deleted', 0)
|
||||
.andWhere('id', data.id)
|
||||
.allowEager('[owner,certificate]')
|
||||
.allowGraph('[owner,certificate]')
|
||||
.first();
|
||||
|
||||
if (access_data.permission_visibility !== 'all') {
|
||||
query.andWhere('owner_user_id', access.token.getUserId(1));
|
||||
}
|
||||
|
||||
// Custom omissions
|
||||
if (typeof data.omit !== 'undefined' && data.omit !== null) {
|
||||
query.omit(data.omit);
|
||||
}
|
||||
|
||||
if (typeof data.expand !== 'undefined' && data.expand !== null) {
|
||||
query.eager('[' + data.expand.join(', ') + ']');
|
||||
query.withGraphFetched('[' + data.expand.join(', ') + ']');
|
||||
}
|
||||
|
||||
return query;
|
||||
return query.then(utils.omitRow(omissions()));
|
||||
})
|
||||
.then((row) => {
|
||||
if (row) {
|
||||
row = internalHost.cleanRowCertificateMeta(row);
|
||||
return _.omit(row, omissions());
|
||||
} else {
|
||||
if (!row) {
|
||||
throw new error.ItemNotFoundError(data.id);
|
||||
}
|
||||
// Custom omissions
|
||||
if (typeof data.omit !== 'undefined' && data.omit !== null) {
|
||||
row = _.omit(row, data.omit);
|
||||
}
|
||||
return row;
|
||||
});
|
||||
},
|
||||
|
||||
@ -404,8 +402,7 @@ const internalDeadHost = {
|
||||
.query()
|
||||
.where('is_deleted', 0)
|
||||
.groupBy('id')
|
||||
.omit(['is_deleted'])
|
||||
.allowEager('[owner,certificate]')
|
||||
.allowGraph('[owner,certificate]')
|
||||
.orderBy('domain_names', 'ASC');
|
||||
|
||||
if (access_data.permission_visibility !== 'all') {
|
||||
@ -420,10 +417,10 @@ const internalDeadHost = {
|
||||
}
|
||||
|
||||
if (typeof expand !== 'undefined' && expand !== null) {
|
||||
query.eager('[' + expand.join(', ') + ']');
|
||||
query.withGraphFetched('[' + expand.join(', ') + ']');
|
||||
}
|
||||
|
||||
return query;
|
||||
return query.then(utils.omitRows(omissions()));
|
||||
})
|
||||
.then((rows) => {
|
||||
if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) {
|
||||
|
@ -2,13 +2,16 @@ const https = require('https');
|
||||
const fs = require('fs');
|
||||
const logger = require('../logger').ip_ranges;
|
||||
const error = require('../lib/error');
|
||||
const utils = require('../lib/utils');
|
||||
const internalNginx = require('./nginx');
|
||||
const { Liquid } = require('liquidjs');
|
||||
|
||||
const CLOUDFRONT_URL = 'https://ip-ranges.amazonaws.com/ip-ranges.json';
|
||||
const CLOUDFARE_V4_URL = 'https://www.cloudflare.com/ips-v4';
|
||||
const CLOUDFARE_V6_URL = 'https://www.cloudflare.com/ips-v6';
|
||||
|
||||
const regIpV4 = /^(\d+\.?){4}\/\d+/;
|
||||
const regIpV6 = /^(([\da-fA-F]+)?:)+\/\d+/;
|
||||
|
||||
const internalIpRanges = {
|
||||
|
||||
interval_timeout: 1000 * 60 * 60 * 6, // 6 hours
|
||||
@ -74,14 +77,14 @@ const internalIpRanges = {
|
||||
return internalIpRanges.fetchUrl(CLOUDFARE_V4_URL);
|
||||
})
|
||||
.then((cloudfare_data) => {
|
||||
let items = cloudfare_data.split('\n');
|
||||
let items = cloudfare_data.split('\n').filter((line) => regIpV4.test(line));
|
||||
ip_ranges = [... ip_ranges, ... items];
|
||||
})
|
||||
.then(() => {
|
||||
return internalIpRanges.fetchUrl(CLOUDFARE_V6_URL);
|
||||
})
|
||||
.then((cloudfare_data) => {
|
||||
let items = cloudfare_data.split('\n');
|
||||
let items = cloudfare_data.split('\n').filter((line) => regIpV6.test(line));
|
||||
ip_ranges = [... ip_ranges, ... items];
|
||||
})
|
||||
.then(() => {
|
||||
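The added .filter() calls matter because the Cloudflare lists are plain text and may contain blank or non-CIDR lines; only entries matching the IPv4/IPv6 CIDR patterns defined above are kept. A quick illustration (the sample lines are made up):

const regIpV4 = /^(\d+\.?){4}\/\d+/;
['173.245.48.0/20', '', '# comment'].filter((line) => regIpV4.test(line));
// -> ['173.245.48.0/20']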
@ -116,10 +119,7 @@ const internalIpRanges = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
generateConfig: (ip_ranges) => {
|
||||
let renderEngine = new Liquid({
|
||||
root: __dirname + '/../templates/'
|
||||
});
|
||||
|
||||
const renderEngine = utils.getRenderEngine();
|
||||
return new Promise((resolve, reject) => {
|
||||
let template = null;
|
||||
let filename = '/etc/nginx/conf.d/include/ip_ranges.conf';
|
||||
|
@ -1,10 +1,9 @@
|
||||
const _ = require('lodash');
|
||||
const fs = require('fs');
|
||||
const logger = require('../logger').nginx;
|
||||
const utils = require('../lib/utils');
|
||||
const error = require('../lib/error');
|
||||
const { Liquid } = require('liquidjs');
|
||||
const debug_mode = process.env.NODE_ENV !== 'production' || !!process.env.DEBUG;
|
||||
const _ = require('lodash');
|
||||
const fs = require('fs');
|
||||
const logger = require('../logger').nginx;
|
||||
const config = require('../lib/config');
|
||||
const utils = require('../lib/utils');
|
||||
const error = require('../lib/error');
|
||||
|
||||
const internalNginx = {
|
||||
|
||||
@ -29,7 +28,9 @@ const internalNginx = {
|
||||
.then(() => {
|
||||
// Nginx is OK
|
||||
// We're deleting this config regardless.
|
||||
return internalNginx.deleteConfig(host_type, host); // Don't throw errors, as the file may not exist at all
|
||||
// Don't throw errors, as the file may not exist at all
|
||||
// Delete the .err file too
|
||||
return internalNginx.deleteConfig(host_type, host, false, true);
|
||||
})
|
||||
.then(() => {
|
||||
return internalNginx.generateConfig(host_type, host);
|
||||
@ -64,7 +65,7 @@ const internalNginx = {
|
||||
}
|
||||
});
|
||||
|
||||
if (debug_mode) {
|
||||
if (config.debug()) {
|
||||
logger.error('Nginx test failed:', valid_lines.join('\n'));
|
||||
}
|
||||
|
||||
@ -80,6 +81,9 @@ const internalNginx = {
|
||||
.patch({
|
||||
meta: combined_meta
|
||||
})
|
||||
.then(() => {
|
||||
internalNginx.renameConfigAsError(host_type, host);
|
||||
})
|
||||
.then(() => {
|
||||
return internalNginx.deleteConfig(host_type, host, true);
|
||||
});
|
||||
@ -97,7 +101,7 @@ const internalNginx = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
test: () => {
|
||||
if (debug_mode) {
|
||||
if (config.debug()) {
|
||||
logger.info('Testing Nginx configuration');
|
||||
}
|
||||
|
||||
@ -121,13 +125,10 @@ const internalNginx = {
|
||||
* @returns {String}
|
||||
*/
|
||||
getConfigName: (host_type, host_id) => {
|
||||
host_type = host_type.replace(new RegExp('-', 'g'), '_');
|
||||
|
||||
if (host_type === 'default') {
|
||||
return '/data/nginx/default_host/site.conf';
|
||||
}
|
||||
|
||||
return '/data/nginx/' + host_type + '/' + host_id + '.conf';
|
||||
return '/data/nginx/' + internalNginx.getFileFriendlyHostType(host_type) + '/' + host_id + '.conf';
|
||||
},
|
||||
|
||||
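With the helper above, hyphenated host types map onto the on-disk layout like so (ids are illustrative):

internalNginx.getConfigName('proxy_host', 1); // '/data/nginx/proxy_host/1.conf'
internalNginx.getConfigName('redirection-host', 2); // '/data/nginx/redirection_host/2.conf'
internalNginx.getConfigName('default', 0); // '/data/nginx/default_host/site.conf'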
/**
|
||||
@ -136,8 +137,6 @@ const internalNginx = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
renderLocations: (host) => {
|
||||
|
||||
//logger.info('host = ' + JSON.stringify(host, null, 2));
|
||||
return new Promise((resolve, reject) => {
|
||||
let template;
|
||||
|
||||
@ -148,9 +147,7 @@ const internalNginx = {
|
||||
return;
|
||||
}
|
||||
|
||||
let renderer = new Liquid({
|
||||
root: __dirname + '/../templates/'
|
||||
});
|
||||
const renderEngine = utils.getRenderEngine();
|
||||
let renderedLocations = '';
|
||||
|
||||
const locationRendering = async () => {
|
||||
@ -168,10 +165,8 @@ const internalNginx = {
|
||||
locationCopy.forward_path = `/${splitted.join('/')}`;
|
||||
}
|
||||
|
||||
//logger.info('locationCopy = ' + JSON.stringify(locationCopy, null, 2));
|
||||
|
||||
// eslint-disable-next-line
|
||||
renderedLocations += await renderer.parseAndRender(template, locationCopy);
|
||||
renderedLocations += await renderEngine.parseAndRender(template, locationCopy);
|
||||
}
|
||||
|
||||
};
|
||||
@ -187,24 +182,20 @@ const internalNginx = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
generateConfig: (host_type, host) => {
|
||||
host_type = host_type.replace(new RegExp('-', 'g'), '_');
|
||||
const nice_host_type = internalNginx.getFileFriendlyHostType(host_type);
|
||||
|
||||
if (debug_mode) {
|
||||
logger.info('Generating ' + host_type + ' Config:', host);
|
||||
if (config.debug()) {
|
||||
logger.info('Generating ' + nice_host_type + ' Config:', JSON.stringify(host, null, 2));
|
||||
}
|
||||
|
||||
// logger.info('host = ' + JSON.stringify(host, null, 2));
|
||||
|
||||
let renderEngine = new Liquid({
|
||||
root: __dirname + '/../templates/'
|
||||
});
|
||||
const renderEngine = utils.getRenderEngine();
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
let template = null;
|
||||
let filename = internalNginx.getConfigName(host_type, host.id);
|
||||
let filename = internalNginx.getConfigName(nice_host_type, host.id);
|
||||
|
||||
try {
|
||||
template = fs.readFileSync(__dirname + '/../templates/' + host_type + '.conf', {encoding: 'utf8'});
|
||||
template = fs.readFileSync(__dirname + '/../templates/' + nice_host_type + '.conf', {encoding: 'utf8'});
|
||||
} catch (err) {
|
||||
reject(new error.ConfigurationError(err.message));
|
||||
return;
|
||||
@ -214,7 +205,7 @@ const internalNginx = {
|
||||
let origLocations;
|
||||
|
||||
// Manipulate the data a bit before sending it to the template
|
||||
if (host_type !== 'default') {
|
||||
if (nice_host_type !== 'default') {
|
||||
host.use_default_location = true;
|
||||
if (typeof host.advanced_config !== 'undefined' && host.advanced_config) {
|
||||
host.use_default_location = !internalNginx.advancedConfigHasDefaultLocation(host.advanced_config);
|
||||
@ -248,7 +239,7 @@ const internalNginx = {
|
||||
.then((config_text) => {
|
||||
fs.writeFileSync(filename, config_text, {encoding: 'utf8'});
|
||||
|
||||
if (debug_mode) {
|
||||
if (config.debug()) {
|
||||
logger.success('Wrote config:', filename, config_text);
|
||||
}
|
||||
|
||||
@ -258,7 +249,7 @@ const internalNginx = {
|
||||
resolve(true);
|
||||
})
|
||||
.catch((err) => {
|
||||
if (debug_mode) {
|
||||
if (config.debug()) {
|
||||
logger.warn('Could not write ' + filename + ':', err.message);
|
||||
}
|
||||
|
||||
@ -277,13 +268,11 @@ const internalNginx = {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
generateLetsEncryptRequestConfig: (certificate) => {
|
||||
if (debug_mode) {
|
||||
if (config.debug()) {
|
||||
logger.info('Generating LetsEncrypt Request Config:', certificate);
|
||||
}
|
||||
|
||||
let renderEngine = new Liquid({
|
||||
root: __dirname + '/../templates/'
|
||||
});
|
||||
const renderEngine = utils.getRenderEngine();
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
let template = null;
|
||||
@ -303,14 +292,14 @@ const internalNginx = {
|
||||
.then((config_text) => {
|
||||
fs.writeFileSync(filename, config_text, {encoding: 'utf8'});
|
||||
|
||||
if (debug_mode) {
|
||||
if (config.debug()) {
|
||||
logger.success('Wrote config:', filename, config_text);
|
||||
}
|
||||
|
||||
resolve(true);
|
||||
})
|
||||
.catch((err) => {
|
||||
if (debug_mode) {
|
||||
if (config.debug()) {
|
||||
logger.warn('Could not write ' + filename + ':', err.message);
|
||||
}
|
||||
|
||||
@ -319,33 +308,39 @@ const internalNginx = {
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
* A simple wrapper around unlinkSync that writes to the logger
|
||||
*
|
||||
* @param {String} filename
|
||||
*/
|
||||
deleteFile: (filename) => {
|
||||
logger.debug('Deleting file: ' + filename);
|
||||
try {
|
||||
fs.unlinkSync(filename);
|
||||
} catch (err) {
|
||||
logger.debug('Could not delete file:', JSON.stringify(err, null, 2));
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {String} host_type
|
||||
* @returns String
|
||||
*/
|
||||
getFileFriendlyHostType: (host_type) => {
|
||||
return host_type.replace(new RegExp('-', 'g'), '_');
|
||||
},
|
||||
|
||||
/**
|
||||
* This removes the temporary nginx config file generated by `generateLetsEncryptRequestConfig`
|
||||
*
|
||||
* @param {Object} certificate
|
||||
* @param {Boolean} [throw_errors]
|
||||
* @returns {Promise}
|
||||
*/
|
||||
deleteLetsEncryptRequestConfig: (certificate, throw_errors) => {
|
||||
return new Promise((resolve, reject) => {
|
||||
try {
|
||||
let config_file = '/data/nginx/temp/letsencrypt_' + certificate.id + '.conf';
|
||||
|
||||
if (debug_mode) {
|
||||
logger.warn('Deleting nginx config: ' + config_file);
|
||||
}
|
||||
|
||||
fs.unlinkSync(config_file);
|
||||
} catch (err) {
|
||||
if (debug_mode) {
|
||||
logger.warn('Could not delete config:', err.message);
|
||||
}
|
||||
|
||||
if (throw_errors) {
|
||||
reject(err);
|
||||
}
|
||||
}
|
||||
|
||||
deleteLetsEncryptRequestConfig: (certificate) => {
|
||||
const config_file = '/data/nginx/temp/letsencrypt_' + certificate.id + '.conf';
|
||||
return new Promise((resolve/*, reject*/) => {
|
||||
internalNginx.deleteFile(config_file);
|
||||
resolve();
|
||||
});
|
||||
},
|
||||
@ -353,35 +348,42 @@ const internalNginx = {
|
||||
/**
|
||||
* @param {String} host_type
|
||||
* @param {Object} [host]
|
||||
* @param {Boolean} [throw_errors]
|
||||
* @param {Boolean} [delete_err_file]
|
||||
* @returns {Promise}
|
||||
*/
|
||||
deleteConfig: (host_type, host, throw_errors) => {
|
||||
host_type = host_type.replace(new RegExp('-', 'g'), '_');
|
||||
deleteConfig: (host_type, host, delete_err_file) => {
|
||||
const config_file = internalNginx.getConfigName(internalNginx.getFileFriendlyHostType(host_type), typeof host === 'undefined' ? 0 : host.id);
|
||||
const config_file_err = config_file + '.err';
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
try {
|
||||
let config_file = internalNginx.getConfigName(host_type, typeof host === 'undefined' ? 0 : host.id);
|
||||
|
||||
if (debug_mode) {
|
||||
logger.warn('Deleting nginx config: ' + config_file);
|
||||
}
|
||||
|
||||
fs.unlinkSync(config_file);
|
||||
} catch (err) {
|
||||
if (debug_mode) {
|
||||
logger.warn('Could not delete config:', err.message);
|
||||
}
|
||||
|
||||
if (throw_errors) {
|
||||
reject(err);
|
||||
}
|
||||
return new Promise((resolve/*, reject*/) => {
|
||||
internalNginx.deleteFile(config_file);
|
||||
if (delete_err_file) {
|
||||
internalNginx.deleteFile(config_file_err);
|
||||
}
|
||||
|
||||
resolve();
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
* @param {String} host_type
|
||||
* @param {Object} [host]
|
||||
* @returns {Promise}
|
||||
*/
|
||||
renameConfigAsError: (host_type, host) => {
|
||||
const config_file = internalNginx.getConfigName(internalNginx.getFileFriendlyHostType(host_type), typeof host === 'undefined' ? 0 : host.id);
|
||||
const config_file_err = config_file + '.err';
|
||||
|
||||
return new Promise((resolve/*, reject*/) => {
|
||||
fs.unlink(config_file, () => {
|
||||
// ignore result, continue
|
||||
fs.rename(config_file, config_file_err, () => {
|
||||
// also ignore result, as this is a debugging informative file anyway
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
* @param {String} host_type
|
||||
* @param {Array} hosts
|
||||
@ -399,13 +401,12 @@ const internalNginx = {
|
||||
/**
|
||||
* @param {String} host_type
|
||||
* @param {Array} hosts
|
||||
* @param {Boolean} [throw_errors]
|
||||
* @returns {Promise}
|
||||
*/
|
||||
bulkDeleteConfigs: (host_type, hosts, throw_errors) => {
|
||||
bulkDeleteConfigs: (host_type, hosts) => {
|
||||
let promises = [];
|
||||
hosts.map(function (host) {
|
||||
promises.push(internalNginx.deleteConfig(host_type, host, throw_errors));
|
||||
promises.push(internalNginx.deleteConfig(host_type, host, true));
|
||||
});
|
||||
|
||||
return Promise.all(promises);
|
||||
@ -415,8 +416,8 @@ const internalNginx = {
|
||||
* @param {string} config
|
||||
* @returns {boolean}
|
||||
*/
|
||||
advancedConfigHasDefaultLocation: function (config) {
|
||||
return !!config.match(/^(?:.*;)?\s*?location\s*?\/\s*?{/im);
|
||||
advancedConfigHasDefaultLocation: function (cfg) {
|
||||
return !!cfg.match(/^(?:.*;)?\s*?location\s*?\/\s*?{/im);
|
||||
},
|
||||
|
||||
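In practice the regex above treats an advanced config as already defining the default location whenever a location / { block starts a line or follows a terminated directive. A few illustrative inputs:

internalNginx.advancedConfigHasDefaultLocation('location / { return 301 https://example.com; }'); // true
internalNginx.advancedConfigHasDefaultLocation('proxy_buffering off; location / {'); // true
internalNginx.advancedConfigHasDefaultLocation('location /api/ { proxy_pass http://127.0.0.1:81; }'); // false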
/**
|
||||
|
@ -1,5 +1,6 @@
|
||||
const _ = require('lodash');
|
||||
const error = require('../lib/error');
|
||||
const utils = require('../lib/utils');
|
||||
const proxyHostModel = require('../models/proxy_host');
|
||||
const internalHost = require('./host');
|
||||
const internalNginx = require('./nginx');
|
||||
@ -49,8 +50,8 @@ const internalProxyHost = {
|
||||
|
||||
return proxyHostModel
|
||||
.query()
|
||||
.omit(omissions())
|
||||
.insertAndFetch(data);
|
||||
.insertAndFetch(data)
|
||||
.then(utils.omitRow(omissions()));
|
||||
})
|
||||
.then((row) => {
|
||||
if (create_certificate) {
|
||||
@ -170,6 +171,7 @@ const internalProxyHost = {
|
||||
.query()
|
||||
.where({id: data.id})
|
||||
.patch(data)
|
||||
.then(utils.omitRow(omissions()))
|
||||
.then((saved_row) => {
|
||||
// Add to audit log
|
||||
return internalAuditLog.add(access, {
|
||||
@ -179,7 +181,7 @@ const internalProxyHost = {
|
||||
meta: data
|
||||
})
|
||||
.then(() => {
|
||||
return _.omit(saved_row, omissions());
|
||||
return saved_row;
|
||||
});
|
||||
});
|
||||
})
|
||||
@ -223,31 +225,29 @@ const internalProxyHost = {
|
||||
.query()
|
||||
.where('is_deleted', 0)
|
||||
.andWhere('id', data.id)
|
||||
.allowEager('[owner,access_list,access_list.[clients,items],certificate]')
|
||||
.allowGraph('[owner,access_list.[clients,items],certificate]')
|
||||
.first();
|
||||
|
||||
if (access_data.permission_visibility !== 'all') {
|
||||
query.andWhere('owner_user_id', access.token.getUserId(1));
|
||||
}
|
||||
|
||||
// Custom omissions
|
||||
if (typeof data.omit !== 'undefined' && data.omit !== null) {
|
||||
query.omit(data.omit);
|
||||
}
|
||||
|
||||
if (typeof data.expand !== 'undefined' && data.expand !== null) {
|
||||
query.eager('[' + data.expand.join(', ') + ']');
|
||||
query.withGraphFetched('[' + data.expand.join(', ') + ']');
|
||||
}
|
||||
|
||||
return query;
|
||||
return query.then(utils.omitRow(omissions()));
|
||||
})
|
||||
.then((row) => {
|
||||
if (row) {
|
||||
row = internalHost.cleanRowCertificateMeta(row);
|
||||
return _.omit(row, omissions());
|
||||
} else {
|
||||
if (!row) {
|
||||
throw new error.ItemNotFoundError(data.id);
|
||||
}
|
||||
row = internalHost.cleanRowCertificateMeta(row);
|
||||
// Custom omissions
|
||||
if (typeof data.omit !== 'undefined' && data.omit !== null) {
|
||||
row = _.omit(row, data.omit);
|
||||
}
|
||||
return row;
|
||||
});
|
||||
},
|
||||
|
||||
@ -409,8 +409,7 @@ const internalProxyHost = {
|
||||
.query()
|
||||
.where('is_deleted', 0)
|
||||
.groupBy('id')
|
||||
.omit(['is_deleted'])
|
||||
.allowEager('[owner,access_list,certificate]')
|
||||
.allowGraph('[owner,access_list,certificate]')
|
||||
.orderBy('domain_names', 'ASC');
|
||||
|
||||
if (access_data.permission_visibility !== 'all') {
|
||||
@ -425,10 +424,10 @@ const internalProxyHost = {
|
||||
}
|
||||
|
||||
if (typeof expand !== 'undefined' && expand !== null) {
|
||||
query.eager('[' + expand.join(', ') + ']');
|
||||
query.withGraphFetched('[' + expand.join(', ') + ']');
|
||||
}
|
||||
|
||||
return query;
|
||||
return query.then(utils.omitRows(omissions()));
|
||||
})
|
||||
.then((rows) => {
|
||||
if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) {
|
||||
|
@ -1,5 +1,6 @@
|
||||
const _ = require('lodash');
|
||||
const error = require('../lib/error');
|
||||
const utils = require('../lib/utils');
|
||||
const redirectionHostModel = require('../models/redirection_host');
|
||||
const internalHost = require('./host');
|
||||
const internalNginx = require('./nginx');
|
||||
@ -49,8 +50,8 @@ const internalRedirectionHost = {
|
||||
|
||||
return redirectionHostModel
|
||||
.query()
|
||||
.omit(omissions())
|
||||
.insertAndFetch(data);
|
||||
.insertAndFetch(data)
|
||||
.then(utils.omitRow(omissions()));
|
||||
})
|
||||
.then((row) => {
|
||||
if (create_certificate) {
|
||||
@ -65,9 +66,8 @@ const internalRedirectionHost = {
|
||||
.then(() => {
|
||||
return row;
|
||||
});
|
||||
} else {
|
||||
return row;
|
||||
}
|
||||
return row;
|
||||
})
|
||||
.then((row) => {
|
||||
// re-fetch with cert
|
||||
@ -218,31 +218,29 @@ const internalRedirectionHost = {
|
||||
.query()
|
||||
.where('is_deleted', 0)
|
||||
.andWhere('id', data.id)
|
||||
.allowEager('[owner,certificate]')
|
||||
.allowGraph('[owner,certificate]')
|
||||
.first();
|
||||
|
||||
if (access_data.permission_visibility !== 'all') {
|
||||
query.andWhere('owner_user_id', access.token.getUserId(1));
|
||||
}
|
||||
|
||||
// Custom omissions
|
||||
if (typeof data.omit !== 'undefined' && data.omit !== null) {
|
||||
query.omit(data.omit);
|
||||
}
|
||||
|
||||
if (typeof data.expand !== 'undefined' && data.expand !== null) {
|
||||
query.eager('[' + data.expand.join(', ') + ']');
|
||||
query.withGraphFetched('[' + data.expand.join(', ') + ']');
|
||||
}
|
||||
|
||||
return query;
|
||||
return query.then(utils.omitRow(omissions()));
|
||||
})
|
||||
.then((row) => {
|
||||
if (row) {
|
||||
row = internalHost.cleanRowCertificateMeta(row);
|
||||
return _.omit(row, omissions());
|
||||
} else {
|
||||
if (!row) {
|
||||
throw new error.ItemNotFoundError(data.id);
|
||||
}
|
||||
row = internalHost.cleanRowCertificateMeta(row);
|
||||
// Custom omissions
|
||||
if (typeof data.omit !== 'undefined' && data.omit !== null) {
|
||||
row = _.omit(row, data.omit);
|
||||
}
|
||||
return row;
|
||||
});
|
||||
},
|
||||
|
||||
@ -404,8 +402,7 @@ const internalRedirectionHost = {
|
||||
.query()
|
||||
.where('is_deleted', 0)
|
||||
.groupBy('id')
|
||||
.omit(['is_deleted'])
|
||||
.allowEager('[owner,certificate]')
|
||||
.allowGraph('[owner,certificate]')
|
||||
.orderBy('domain_names', 'ASC');
|
||||
|
||||
if (access_data.permission_visibility !== 'all') {
|
||||
@ -420,10 +417,10 @@ const internalRedirectionHost = {
|
||||
}
|
||||
|
||||
if (typeof expand !== 'undefined' && expand !== null) {
|
||||
query.eager('[' + expand.join(', ') + ']');
|
||||
query.withGraphFetched('[' + expand.join(', ') + ']');
|
||||
}
|
||||
|
||||
return query;
|
||||
return query.then(utils.omitRows(omissions()));
|
||||
})
|
||||
.then((rows) => {
|
||||
if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) {
|
||||
|
@ -1,5 +1,6 @@
const _ = require('lodash');
const error = require('../lib/error');
const utils = require('../lib/utils');
const streamModel = require('../models/stream');
const internalNginx = require('./nginx');
const internalAuditLog = require('./audit-log');
@ -27,8 +28,8 @@ const internalStream = {

return streamModel
.query()
.omit(omissions())
.insertAndFetch(data);
.insertAndFetch(data)
.then(utils.omitRow(omissions()));
})
.then((row) => {
// Configure nginx
@ -71,8 +72,8 @@ const internalStream = {

return streamModel
.query()
.omit(omissions())
.patchAndFetchById(row.id, data)
.then(utils.omitRow(omissions()))
.then((saved_row) => {
return internalNginx.configure(streamModel, 'stream', saved_row)
.then(() => {
@ -88,7 +89,7 @@ const internalStream = {
meta: data
})
.then(() => {
return _.omit(saved_row, omissions());
return saved_row;
});
});
});
@ -113,30 +114,28 @@ const internalStream = {
.query()
.where('is_deleted', 0)
.andWhere('id', data.id)
.allowEager('[owner]')
.allowGraph('[owner]')
.first();

if (access_data.permission_visibility !== 'all') {
query.andWhere('owner_user_id', access.token.getUserId(1));
}

// Custom omissions
if (typeof data.omit !== 'undefined' && data.omit !== null) {
query.omit(data.omit);
}

if (typeof data.expand !== 'undefined' && data.expand !== null) {
query.eager('[' + data.expand.join(', ') + ']');
query.withGraphFetched('[' + data.expand.join(', ') + ']');
}

return query;
return query.then(utils.omitRow(omissions()));
})
.then((row) => {
if (row) {
return _.omit(row, omissions());
} else {
if (!row) {
throw new error.ItemNotFoundError(data.id);
}
// Custom omissions
if (typeof data.omit !== 'undefined' && data.omit !== null) {
row = _.omit(row, data.omit);
}
return row;
});
},

@ -298,8 +297,7 @@ const internalStream = {
.query()
.where('is_deleted', 0)
.groupBy('id')
.omit(['is_deleted'])
.allowEager('[owner]')
.allowGraph('[owner]')
.orderBy('incoming_port', 'ASC');

if (access_data.permission_visibility !== 'all') {
@ -314,10 +312,10 @@ const internalStream = {
}

if (typeof expand !== 'undefined' && expand !== null) {
query.eager('[' + expand.join(', ') + ']');
query.withGraphFetched('[' + expand.join(', ') + ']');
}

return query;
return query.then(utils.omitRows(omissions()));
});
},

@ -24,7 +24,7 @@ module.exports = {

return userModel
.query()
.where('email', data.identity)
.where('email', data.identity.toLowerCase().trim())
.andWhere('is_deleted', 0)
.andWhere('is_disabled', 0)
.first()
@ -1,5 +1,6 @@
const _ = require('lodash');
const error = require('../lib/error');
const utils = require('../lib/utils');
const userModel = require('../models/user');
const userPermissionModel = require('../models/user_permission');
const authModel = require('../models/auth');
@ -35,8 +36,8 @@ const internalUser = {

return userModel
.query()
.omit(omissions())
.insertAndFetch(data);
.insertAndFetch(data)
.then(utils.omitRow(omissions()));
})
.then((user) => {
if (auth) {
@ -140,11 +141,8 @@ const internalUser = {

return userModel
.query()
.omit(omissions())
.patchAndFetchById(user.id, data)
.then((saved_user) => {
return _.omit(saved_user, omissions());
});
.then(utils.omitRow(omissions()));
})
.then(() => {
return internalUser.get(access, {id: data.id});
@ -186,26 +184,24 @@ const internalUser = {
.query()
.where('is_deleted', 0)
.andWhere('id', data.id)
.allowEager('[permissions]')
.allowGraph('[permissions]')
.first();

// Custom omissions
if (typeof data.omit !== 'undefined' && data.omit !== null) {
query.omit(data.omit);
}

if (typeof data.expand !== 'undefined' && data.expand !== null) {
query.eager('[' + data.expand.join(', ') + ']');
query.withGraphFetched('[' + data.expand.join(', ') + ']');
}

return query;
return query.then(utils.omitRow(omissions()));
})
.then((row) => {
if (row) {
return _.omit(row, omissions());
} else {
if (!row) {
throw new error.ItemNotFoundError(data.id);
}
// Custom omissions
if (typeof data.omit !== 'undefined' && data.omit !== null) {
row = _.omit(row, data.omit);
}
return row;
});
},

@ -322,8 +318,7 @@ const internalUser = {
.query()
.where('is_deleted', 0)
.groupBy('id')
.omit(['is_deleted'])
.allowEager('[permissions]')
.allowGraph('[permissions]')
.orderBy('name', 'ASC');

// Query is used for searching
@ -335,10 +330,10 @@ const internalUser = {
}

if (typeof expand !== 'undefined' && expand !== null) {
query.eager('[' + expand.join(', ') + ']');
query.withGraphFetched('[' + expand.join(', ') + ']');
}

return query;
return query.then(utils.omitRows(omissions()));
});
},

@ -55,8 +55,8 @@ module.exports = function (token_string) {
.where('id', token_data.attrs.id)
.andWhere('is_deleted', 0)
.andWhere('is_disabled', 0)
.allowEager('[permissions]')
.eager('[permissions]')
.allowGraph('[permissions]')
.withGraphFetched('[permissions]')
.first()
.then((user) => {
if (user) {
78
backend/lib/certbot.js
Normal file
78
backend/lib/certbot.js
Normal file
@ -0,0 +1,78 @@
const dnsPlugins = require('../global/certbot-dns-plugins.json');
const utils = require('./utils');
const error = require('./error');
const logger = require('../logger').certbot;
const batchflow = require('batchflow');

const CERTBOT_VERSION_REPLACEMENT = '$(certbot --version | grep -Eo \'[0-9](\\.[0-9]+)+\')';

const certbot = {

/**
* @param {array} pluginKeys
*/
installPlugins: async function (pluginKeys) {
let hasErrors = false;

return new Promise((resolve, reject) => {
if (pluginKeys.length === 0) {
resolve();
return;
}

batchflow(pluginKeys).sequential()
.each((i, pluginKey, next) => {
certbot.installPlugin(pluginKey)
.then(() => {
next();
})
.catch((err) => {
hasErrors = true;
next(err);
});
})
.error((err) => {
logger.error(err.message);
})
.end(() => {
if (hasErrors) {
reject(new error.CommandError('Some plugins failed to install. Please check the logs above', 1));
} else {
resolve();
}
});
});
},

/**
* Installs a cerbot plugin given the key for the object from
* ../global/certbot-dns-plugins.json
*
* @param {string} pluginKey
* @returns {Object}
*/
installPlugin: async function (pluginKey) {
if (typeof dnsPlugins[pluginKey] === 'undefined') {
// throw Error(`Certbot plugin ${pluginKey} not found`);
throw new error.ItemNotFoundError(pluginKey);
}

const plugin = dnsPlugins[pluginKey];
logger.start(`Installing ${pluginKey}...`);

plugin.version = plugin.version.replace(/{{certbot-version}}/g, CERTBOT_VERSION_REPLACEMENT);
plugin.dependencies = plugin.dependencies.replace(/{{certbot-version}}/g, CERTBOT_VERSION_REPLACEMENT);

const cmd = '. /opt/certbot/bin/activate && pip install --no-cache-dir ' + plugin.dependencies + ' ' + plugin.package_name + plugin.version + ' ' + ' && deactivate';
return utils.exec(cmd)
.then((result) => {
logger.complete(`Installed ${pluginKey}`);
return result;
})
.catch((err) => {
throw err;
});
},
};

module.exports = certbot;
184
backend/lib/config.js
Normal file
184
backend/lib/config.js
Normal file
@ -0,0 +1,184 @@
const fs = require('fs');
const NodeRSA = require('node-rsa');
const logger = require('../logger').global;

const keysFile = '/data/keys.json';

let instance = null;

// 1. Load from config file first (not recommended anymore)
// 2. Use config env variables next
const configure = () => {
const filename = (process.env.NODE_CONFIG_DIR || './config') + '/' + (process.env.NODE_ENV || 'default') + '.json';
if (fs.existsSync(filename)) {
let configData;
try {
configData = require(filename);
} catch (err) {
// do nothing
}

if (configData && configData.database) {
logger.info(`Using configuration from file: ${filename}`);
instance = configData;
instance.keys = getKeys();
return;
}
}

const envMysqlHost = process.env.DB_MYSQL_HOST || null;
const envMysqlUser = process.env.DB_MYSQL_USER || null;
const envMysqlName = process.env.DB_MYSQL_NAME || null;
if (envMysqlHost && envMysqlUser && envMysqlName) {
// we have enough mysql creds to go with mysql
logger.info('Using MySQL configuration');
instance = {
database: {
engine: 'mysql',
host: envMysqlHost,
port: process.env.DB_MYSQL_PORT || 3306,
user: envMysqlUser,
password: process.env.DB_MYSQL_PASSWORD,
name: envMysqlName,
},
keys: getKeys(),
};
return;
}

const envSqliteFile = process.env.DB_SQLITE_FILE || '/data/database.sqlite';
logger.info(`Using Sqlite: ${envSqliteFile}`);
instance = {
database: {
engine: 'knex-native',
knex: {
client: 'sqlite3',
connection: {
filename: envSqliteFile
},
useNullAsDefault: true
}
},
keys: getKeys(),
};
};

const getKeys = () => {
// Get keys from file
if (!fs.existsSync(keysFile)) {
generateKeys();
} else if (process.env.DEBUG) {
logger.info('Keys file exists OK');
}
try {
return require(keysFile);
} catch (err) {
logger.error('Could not read JWT key pair from config file: ' + keysFile, err);
process.exit(1);
}
};

const generateKeys = () => {
logger.info('Creating a new JWT key pair...');
// Now create the keys and save them in the config.
const key = new NodeRSA({ b: 2048 });
key.generateKeyPair();

const keys = {
key: key.exportKey('private').toString(),
pub: key.exportKey('public').toString(),
};

// Write keys config
try {
fs.writeFileSync(keysFile, JSON.stringify(keys, null, 2));
} catch (err) {
logger.error('Could not write JWT key pair to config file: ' + keysFile + ': ' + err.message);
process.exit(1);
}
logger.info('Wrote JWT key pair to config file: ' + keysFile);
};

module.exports = {

/**
*
* @param {string} key ie: 'database' or 'database.engine'
* @returns {boolean}
*/
has: function(key) {
instance === null && configure();
const keys = key.split('.');
let level = instance;
let has = true;
keys.forEach((keyItem) =>{
if (typeof level[keyItem] === 'undefined') {
has = false;
} else {
level = level[keyItem];
}
});

return has;
},

/**
* Gets a specific key from the top level
*
* @param {string} key
* @returns {*}
*/
get: function (key) {
instance === null && configure();
if (key && typeof instance[key] !== 'undefined') {
return instance[key];
}
return instance;
},

/**
* Is this a sqlite configuration?
*
* @returns {boolean}
*/
isSqlite: function () {
instance === null && configure();
return instance.database.knex && instance.database.knex.client === 'sqlite3';
},

/**
* Are we running in debug mdoe?
*
* @returns {boolean}
*/
debug: function () {
return !!process.env.DEBUG;
},

/**
* Returns a public key
*
* @returns {string}
*/
getPublicKey: function () {
instance === null && configure();
return instance.keys.pub;
},

/**
* Returns a private key
*
* @returns {string}
*/
getPrivateKey: function () {
instance === null && configure();
return instance.keys.key;
},

/**
* @returns {boolean}
*/
useLetsencryptStaging: function () {
return !!process.env.LE_STAGING;
}
};
@ -82,7 +82,16 @@ module.exports = {
this.message = message;
this.public = false;
this.status = 400;
}
},

CommandError: function (stdErr, code, previous) {
Error.captureStackTrace(this, this.constructor);
this.name = this.constructor.name;
this.previous = previous;
this.message = stdErr;
this.code = code;
this.public = false;
},
};

_.forEach(module.exports, function (error) {
@ -1,14 +1,41 @@
const exec = require('child_process').exec;
const _ = require('lodash');
const exec = require('child_process').exec;
const execFile = require('child_process').execFile;
const { Liquid } = require('liquidjs');
const logger = require('../logger').global;
const error = require('./error');

module.exports = {

exec: async function(cmd, options = {}) {
logger.debug('CMD:', cmd);

const { stdout, stderr } = await new Promise((resolve, reject) => {
const child = exec(cmd, options, (isError, stdout, stderr) => {
if (isError) {
reject(new error.CommandError(stderr, isError));
} else {
resolve({ stdout, stderr });
}
});

child.on('error', (e) => {
reject(new error.CommandError(stderr, 1, e));
});
});
return stdout;
},

/**
* @param {String} cmd
* @param {Array} args
* @returns {Promise}
*/
exec: function (cmd) {
execFile: function (cmd, args) {
// logger.debug('CMD: ' + cmd + ' ' + (args ? args.join(' ') : ''));

return new Promise((resolve, reject) => {
exec(cmd, function (err, stdout, /*stderr*/) {
execFile(cmd, args, function (err, stdout, /*stderr*/) {
if (err && typeof err === 'object') {
reject(err);
} else {
@ -16,5 +43,64 @@ module.exports = {
}
});
});
},

/**
* Used in objection query builder
*
* @param {Array} omissions
* @returns {Function}
*/
omitRow: function (omissions) {
/**
* @param {Object} row
* @returns {Object}
*/
return (row) => {
return _.omit(row, omissions);
};
},

/**
* Used in objection query builder
*
* @param {Array} omissions
* @returns {Function}
*/
omitRows: function (omissions) {
/**
* @param {Array} rows
* @returns {Object}
*/
return (rows) => {
rows.forEach((row, idx) => {
rows[idx] = _.omit(row, omissions);
});
return rows;
};
},

/**
* @returns {Object} Liquid render engine
*/
getRenderEngine: function () {
const renderEngine = new Liquid({
root: __dirname + '/../templates/'
});

/**
* nginxAccessRule expects the object given to have 2 properties:
*
* directive string
* address string
*/
renderEngine.registerFilter('nginxAccessRule', (v) => {
if (typeof v.directive !== 'undefined' && typeof v.address !== 'undefined' && v.directive && v.address) {
return `${v.directive} ${v.address};`;
}
return '';
});

return renderEngine;
}
};
@ -5,7 +5,7 @@ const definitions = require('../../schema/definitions.json');
RegExp.prototype.toJSON = RegExp.prototype.toString;

const ajv = require('ajv')({
verbose: true, //process.env.NODE_ENV === 'development',
verbose: true,
allErrors: true,
format: 'full', // strict regexes for format checks
coerceTypes: true,
@ -7,6 +7,7 @@ module.exports = {
access: new Signale({scope: 'Access '}),
nginx: new Signale({scope: 'Nginx '}),
ssl: new Signale({scope: 'SSL '}),
certbot: new Signale({scope: 'Certbot '}),
import: new Signale({scope: 'Importer '}),
setup: new Signale({scope: 'Setup '}),
ip_ranges: new Signale({scope: 'IP Ranges'})
50
backend/migrations/20211108145214_regenerate_default_host.js
Normal file
50
backend/migrations/20211108145214_regenerate_default_host.js
Normal file
@ -0,0 +1,50 @@
const migrate_name = 'stream_domain';
const logger = require('../logger').migrate;
const internalNginx = require('../internal/nginx');

async function regenerateDefaultHost(knex) {
const row = await knex('setting').select('*').where('id', 'default-site').first();

if (!row) {
return Promise.resolve();
}

return internalNginx.deleteConfig('default')
.then(() => {
return internalNginx.generateConfig('default', row);
})
.then(() => {
return internalNginx.test();
})
.then(() => {
return internalNginx.reload();
});
}

/**
* Migrate
*
* @see http://knexjs.org/#Schema
*
* @param {Object} knex
* @param {Promise} Promise
* @returns {Promise}
*/
exports.up = function (knex) {
logger.info('[' + migrate_name + '] Migrating Up...');

return regenerateDefaultHost(knex);
};

/**
* Undo Migrate
*
* @param {Object} knex
* @param {Promise} Promise
* @returns {Promise}
*/
exports.down = function (knex) {
logger.info('[' + migrate_name + '] Migrating Down...');

return regenerateDefaultHost(knex);
};
@ -50,7 +50,6 @@ class AccessList extends Model {
},
modify: function (qb) {
qb.where('user.is_deleted', 0);
qb.omit(['id', 'created_on', 'modified_on', 'is_deleted', 'email', 'roles']);
}
},
items: {
@ -59,9 +58,6 @@ class AccessList extends Model {
join: {
from: 'access_list.id',
to: 'access_list_auth.access_list_id'
},
modify: function (qb) {
qb.omit(['id', 'created_on', 'modified_on', 'access_list_id', 'meta']);
}
},
clients: {
@ -70,9 +66,6 @@ class AccessList extends Model {
join: {
from: 'access_list.id',
to: 'access_list_client.access_list_id'
},
modify: function (qb) {
qb.omit(['id', 'created_on', 'modified_on', 'access_list_id', 'meta']);
}
},
proxy_hosts: {
@ -84,19 +77,10 @@ class AccessList extends Model {
},
modify: function (qb) {
qb.where('proxy_host.is_deleted', 0);
qb.omit(['is_deleted', 'meta']);
}
}
};
}

get satisfy() {
return this.satisfy_any ? 'satisfy any' : 'satisfy all';
}

get passauth() {
return this.pass_auth ? '' : 'proxy_set_header Authorization "";';
}
}

module.exports = AccessList;
@ -45,7 +45,6 @@ class AccessListAuth extends Model {
},
modify: function (qb) {
qb.where('access_list.is_deleted', 0);
qb.omit(['created_on', 'modified_on', 'is_deleted', 'access_list_id']);
}
}
};
@ -45,15 +45,10 @@ class AccessListClient extends Model {
},
modify: function (qb) {
qb.where('access_list.is_deleted', 0);
qb.omit(['created_on', 'modified_on', 'is_deleted', 'access_list_id']);
}
}
};
}

get rule() {
return `${this.directive} ${this.address}`;
}
}

module.exports = AccessListClient;
@ -43,9 +43,6 @@ class AuditLog extends Model {
join: {
from: 'audit_log.user_id',
to: 'user.id'
},
modify: function (qb) {
qb.omit(['id', 'created_on', 'modified_on', 'roles']);
}
}
};
@ -74,9 +74,6 @@ class Auth extends Model {
},
filter: {
is_deleted: 0
},
modify: function (qb) {
qb.omit(['is_deleted']);
}
}
};
@ -63,7 +63,6 @@ class Certificate extends Model {
},
modify: function (qb) {
qb.where('user.is_deleted', 0);
qb.omit(['id', 'created_on', 'modified_on', 'is_deleted', 'email', 'roles']);
}
}
};
@ -59,7 +59,6 @@ class DeadHost extends Model {
},
modify: function (qb) {
qb.where('user.is_deleted', 0);
qb.omit(['id', 'created_on', 'modified_on', 'is_deleted', 'email', 'roles']);
}
},
certificate: {
@ -71,7 +70,6 @@ class DeadHost extends Model {
},
modify: function (qb) {
qb.where('certificate.is_deleted', 0);
qb.omit(['id', 'created_on', 'modified_on', 'is_deleted']);
}
}
};
@ -1,13 +1,13 @@
const db = require('../db');
const config = require('config');
const config = require('../lib/config');
const Model = require('objection').Model;

Model.knex(db);

module.exports = function () {
if (config.database.knex && config.database.knex.client === 'sqlite3') {
return Model.raw('datetime(\'now\',\'localtime\')');
} else {
return Model.raw('NOW()');
if (config.isSqlite()) {
// eslint-disable-next-line
return Model.raw("datetime('now','localtime')");
}
return Model.raw('NOW()');
};
@ -76,7 +76,6 @@ class ProxyHost extends Model {
},
modify: function (qb) {
qb.where('user.is_deleted', 0);
qb.omit(['id', 'created_on', 'modified_on', 'is_deleted', 'email', 'roles']);
}
},
access_list: {
@ -88,7 +87,6 @@ class ProxyHost extends Model {
},
modify: function (qb) {
qb.where('access_list.is_deleted', 0);
qb.omit(['id', 'created_on', 'modified_on', 'is_deleted']);
}
},
certificate: {
@ -100,7 +98,6 @@ class ProxyHost extends Model {
},
modify: function (qb) {
qb.where('certificate.is_deleted', 0);
qb.omit(['id', 'created_on', 'modified_on', 'is_deleted']);
}
}
};
@ -1,3 +1,4 @@

// Objection Docs:
// http://vincit.github.io/objection.js/

@ -59,7 +60,6 @@ class RedirectionHost extends Model {
},
modify: function (qb) {
qb.where('user.is_deleted', 0);
qb.omit(['id', 'created_on', 'modified_on', 'is_deleted', 'email', 'roles']);
}
},
certificate: {
@ -71,7 +71,6 @@ class RedirectionHost extends Model {
},
modify: function (qb) {
qb.where('certificate.is_deleted', 0);
qb.omit(['id', 'created_on', 'modified_on', 'is_deleted']);
}
}
};
@ -46,7 +46,6 @@ class Stream extends Model {
},
modify: function (qb) {
qb.where('user.is_deleted', 0);
qb.omit(['id', 'created_on', 'modified_on', 'is_deleted', 'email', 'roles']);
}
}
};
@ -6,44 +6,36 @@
const _ = require('lodash');
const jwt = require('jsonwebtoken');
const crypto = require('crypto');
const config = require('../lib/config');
const error = require('../lib/error');
const logger = require('../logger').global;
const ALGO = 'RS256';

let public_key = null;
let private_key = null;

function checkJWTKeyPair() {
if (!public_key || !private_key) {
let config = require('config');
public_key = config.get('jwt.pub');
private_key = config.get('jwt.key');
}
}

module.exports = function () {

let token_data = {};

let self = {
const self = {
/**
* @param {Object} payload
* @returns {Promise}
*/
create: (payload) => {
if (!config.getPrivateKey()) {
logger.error('Private key is empty!');
}
// sign with RSA SHA256
let options = {
const options = {
algorithm: ALGO,
expiresIn: payload.expiresIn || '1d'
};

payload.jti = crypto.randomBytes(12)
.toString('base64')
.substr(-8);

checkJWTKeyPair();
.substring(-8);

return new Promise((resolve, reject) => {
jwt.sign(payload, private_key, options, (err, token) => {
jwt.sign(payload, config.getPrivateKey(), options, (err, token) => {
if (err) {
reject(err);
} else {
@ -62,13 +54,15 @@ module.exports = function () {
* @returns {Promise}
*/
load: function (token) {
if (!config.getPublicKey()) {
logger.error('Public key is empty!');
}
return new Promise((resolve, reject) => {
checkJWTKeyPair();
try {
if (!token || token === null || token === 'null') {
reject(new error.AuthError('Empty token'));
} else {
jwt.verify(token, public_key, {ignoreExpiration: false, algorithms: [ALGO]}, (err, result) => {
jwt.verify(token, config.getPublicKey(), {ignoreExpiration: false, algorithms: [ALGO]}, (err, result) => {
if (err) {

if (err.name === 'TokenExpiredError') {
@ -83,8 +77,6 @@ module.exports = function () {
// Hack: some tokens out in the wild have a scope of 'all' instead of 'user'.
// For 30 days at least, we need to replace 'all' with user.
if ((typeof token_data.scope !== 'undefined' && _.indexOf(token_data.scope, 'all') !== -1)) {
//console.log('Warning! Replacing "all" scope with "user"');

token_data.scope = ['user'];
}

@ -134,7 +126,7 @@ module.exports = function () {
* @returns {Integer}
*/
getUserId: (default_value) => {
let attrs = self.get('attrs');
const attrs = self.get('attrs');
if (attrs && typeof attrs.id !== 'undefined' && attrs.id) {
return attrs.id;
}
@ -43,9 +43,6 @@ class User extends Model {
join: {
from: 'user.id',
to: 'user_permission.user_id'
},
modify: function (qb) {
qb.omit(['id', 'created_on', 'modified_on', 'user_id']);
}
}
};
@ -10,29 +10,22 @@
"bcrypt": "^5.0.0",
"body-parser": "^1.19.0",
"compression": "^1.7.4",
"config": "^3.3.1",
"diskdb": "^0.1.17",
"express": "^4.17.1",
"express": "^4.19.2",
"express-fileupload": "^1.1.9",
"gravatar": "^1.8.0",
"html-entities": "^1.2.1",
"json-schema-ref-parser": "^8.0.0",
"jsonwebtoken": "^8.5.1",
"knex": "^0.20.13",
"liquidjs": "^9.11.10",
"jsonwebtoken": "^9.0.0",
"knex": "2.4.2",
"liquidjs": "10.6.1",
"lodash": "^4.17.21",
"moment": "^2.24.0",
"moment": "^2.29.4",
"mysql": "^2.18.1",
"node-rsa": "^1.0.8",
"nodemon": "^2.0.2",
"objection": "^2.1.3",
"objection": "3.0.1",
"path": "^0.12.7",
"pg": "^7.12.1",
"restler": "^3.4.0",
"signale": "^1.4.0",
"sqlite3": "^4.1.1",
"temp-write": "^4.0.0",
"unix-timestamp": "^0.2.0"
"signale": "1.4.0",
"sqlite3": "5.1.6",
"temp-write": "^4.0.0"
},
"signale": {
"displayDate": true,
@ -41,8 +34,9 @@
"author": "Jamie Curnow <jc@jc21.com>",
"license": "MIT",
"devDependencies": {
"eslint": "^6.8.0",
"eslint": "^8.36.0",
"eslint-plugin-align-assignments": "^1.1.2",
"nodemon": "^2.0.2",
"prettier": "^2.0.4"
}
}
@ -68,6 +68,32 @@ router
.catch(next);
});

/**
* Test HTTP challenge for domains
*
* /api/nginx/certificates/test-http
*/
router
.route('/test-http')
.options((req, res) => {
res.sendStatus(204);
})
.all(jwtdecode())

/**
* GET /api/nginx/certificates/test-http
*
* Test HTTP challenge for domains
*/
.get((req, res, next) => {
internalCertificate.testHttpsChallenge(res.locals.access, JSON.parse(req.query.domains))
.then((result) => {
res.status(200)
.send(result);
})
.catch(next);
});

/**
* Specific certificate
*
@ -209,7 +235,6 @@ router
.catch(next);
});

/**
* Download LE Certs
*
@ -153,7 +153,7 @@
"example": "john@example.com",
"format": "email",
"type": "string",
"minLength": 8,
"minLength": 6,
"maxLength": 100
},
"password": {
@ -172,7 +172,7 @@
"description": "Domain Names separated by a comma",
"example": "*.jc21.com,blog.jc21.com",
"type": "array",
"maxItems": 15,
"maxItems": 100,
"uniqueItems": true,
"items": {
"type": "string",
@ -157,6 +157,17 @@
"targetSchema": {
"type": "boolean"
}
},
{
"title": "Test HTTP Challenge",
"description": "Tests whether the HTTP challenge should work",
"href": "/nginx/certificates/{definitions.identity.example}/test-http",
"access": "private",
"method": "GET",
"rel": "info",
"http_header": {
"$ref": "../examples.json#/definitions/auth_header"
}
}
]
}
49
backend/scripts/install-certbot-plugins
Executable file
49
backend/scripts/install-certbot-plugins
Executable file
@ -0,0 +1,49 @@
#!/usr/bin/node

// Usage:
// Install all plugins defined in `certbot-dns-plugins.json`:
// ./install-certbot-plugins
// Install one or more specific plugins:
// ./install-certbot-plugins route53 cloudflare
//
// Usage with a running docker container:
// docker exec npm_core /command/s6-setuidgid 1000:1000 bash -c "/app/scripts/install-certbot-plugins"
//

const dnsPlugins = require('../global/certbot-dns-plugins.json');
const certbot = require('../lib/certbot');
const logger = require('../logger').certbot;
const batchflow = require('batchflow');

let hasErrors = false;
let failingPlugins = [];

let pluginKeys = Object.keys(dnsPlugins);
if (process.argv.length > 2) {
pluginKeys = process.argv.slice(2);
}

batchflow(pluginKeys).sequential()
.each((i, pluginKey, next) => {
certbot.installPlugin(pluginKey)
.then(() => {
next();
})
.catch((err) => {
hasErrors = true;
failingPlugins.push(pluginKey);
next(err);
});
})
.error((err) => {
logger.error(err.message);
})
.end(() => {
if (hasErrors) {
logger.error('Some plugins failed to install. Please check the logs above. Failing plugins: ' + '\n - ' + failingPlugins.join('\n - '));
process.exit(1);
} else {
logger.complete('Plugins installed successfully');
process.exit(0);
}
});
114
backend/setup.js
114
backend/setup.js
@ -1,6 +1,4 @@
const fs = require('fs');
const NodeRSA = require('node-rsa');
const config = require('config');
const config = require('./lib/config');
const logger = require('./logger').setup;
const certificateModel = require('./models/certificate');
const userModel = require('./models/user');
@ -8,64 +6,7 @@ const userPermissionModel = require('./models/user_permission');
const utils = require('./lib/utils');
const authModel = require('./models/auth');
const settingModel = require('./models/setting');
const dns_plugins = require('./global/certbot-dns-plugins');
const debug_mode = process.env.NODE_ENV !== 'production' || !!process.env.DEBUG;

/**
* Creates a new JWT RSA Keypair if not alread set on the config
*
* @returns {Promise}
*/
const setupJwt = () => {
return new Promise((resolve, reject) => {
// Now go and check if the jwt gpg keys have been created and if not, create them
if (!config.has('jwt') || !config.has('jwt.key') || !config.has('jwt.pub')) {
logger.info('Creating a new JWT key pair...');

// jwt keys are not configured properly
const filename = config.util.getEnv('NODE_CONFIG_DIR') + '/' + (config.util.getEnv('NODE_ENV') || 'default') + '.json';
let config_data = {};

try {
config_data = require(filename);
} catch (err) {
// do nothing
if (debug_mode) {
logger.debug(filename + ' config file could not be required');
}
}

// Now create the keys and save them in the config.
let key = new NodeRSA({ b: 2048 });
key.generateKeyPair();

config_data.jwt = {
key: key.exportKey('private').toString(),
pub: key.exportKey('public').toString(),
};

// Write config
fs.writeFile(filename, JSON.stringify(config_data, null, 2), (err) => {
if (err) {
logger.error('Could not write JWT key pair to config file: ' + filename);
reject(err);
} else {
logger.info('Wrote JWT key pair to config file: ' + filename);
delete require.cache[require.resolve('config')];
resolve();
}
});
} else {
// JWT key pair exists
if (debug_mode) {
logger.debug('JWT Keypair already exists');
}

resolve();
}
});
};

const certbot = require('./lib/certbot');
/**
* Creates a default admin users if one doesn't already exist in the database
*
@ -80,11 +21,14 @@ const setupDefaultUser = () => {
.then((row) => {
if (!row.count) {
// Create a new user and set password
logger.info('Creating a new user: admin@example.com with password: changeme');
let email = process.env.INITIAL_ADMIN_EMAIL || 'admin@example.com';
let password = process.env.INITIAL_ADMIN_PASSWORD || 'changeme';

logger.info('Creating a new user: ' + email + ' with password: ' + password);

let data = {
is_deleted: 0,
email: 'admin@example.com',
email: email,
name: 'Administrator',
nickname: 'Admin',
avatar: '',
@ -100,7 +44,7 @@ const setupDefaultUser = () => {
.insert({
user_id: user.id,
type: 'password',
secret: 'changeme',
secret: password,
meta: {},
})
.then(() => {
@ -119,8 +63,8 @@ const setupDefaultUser = () => {
.then(() => {
logger.info('Initial admin setup completed');
});
} else if (debug_mode) {
logger.debug('Admin user setup not required');
} else if (config.debug()) {
logger.info('Admin user setup not required');
}
});
};
@ -151,8 +95,8 @@ const setupDefaultSettings = () => {
logger.info('Default settings added');
});
}
if (debug_mode) {
logger.debug('Default setting setup not required');
if (config.debug()) {
logger.info('Default setting setup not required');
}
});
};
@ -174,29 +118,28 @@ const setupCertbotPlugins = () => {

certificates.map(function (certificate) {
if (certificate.meta && certificate.meta.dns_challenge === true) {
const dns_plugin = dns_plugins[certificate.meta.dns_provider];
const packages_to_install = `${dns_plugin.package_name}==${dns_plugin.package_version} ${dns_plugin.dependencies}`;

if (plugins.indexOf(packages_to_install) === -1) plugins.push(packages_to_install);
if (plugins.indexOf(certificate.meta.dns_provider) === -1) {
plugins.push(certificate.meta.dns_provider);
}

// Make sure credentials file exists
const credentials_loc = '/etc/letsencrypt/credentials/credentials-' + certificate.id;
const credentials_cmd = '[ -f \'' + credentials_loc + '\' ] || { mkdir -p /etc/letsencrypt/credentials 2> /dev/null; echo \'' + certificate.meta.dns_provider_credentials.replace('\'', '\\\'') + '\' > \'' + credentials_loc + '\' && chmod 600 \'' + credentials_loc + '\'; }';
// Escape single quotes and backslashes
const escapedCredentials = certificate.meta.dns_provider_credentials.replaceAll('\'', '\\\'').replaceAll('\\', '\\\\');
const credentials_cmd = '[ -f \'' + credentials_loc + '\' ] || { mkdir -p /etc/letsencrypt/credentials 2> /dev/null; echo \'' + escapedCredentials + '\' > \'' + credentials_loc + '\' && chmod 600 \'' + credentials_loc + '\'; }';
promises.push(utils.exec(credentials_cmd));
}
});

if (plugins.length) {
const install_cmd = 'pip install ' + plugins.join(' ');
promises.push(utils.exec(install_cmd));
}

if (promises.length) {
return Promise.all(promises)
.then(() => {
logger.info('Added Certbot plugins ' + plugins.join(', '));
});
}
return certbot.installPlugins(plugins)
.then(() => {
if (promises.length) {
return Promise.all(promises)
.then(() => {
logger.info('Added Certbot plugins ' + plugins.join(', '));
});
}
});
}
});
};
@ -223,8 +166,7 @@ const setupLogrotation = () => {
};

module.exports = function () {
return setupJwt()
.then(setupDefaultUser)
return setupDefaultUser()
.then(setupDefaultSettings)
.then(setupCertbotPlugins)
.then(setupLogrotation);
25
backend/templates/_access.conf
Normal file
25
backend/templates/_access.conf
Normal file
@ -0,0 +1,25 @@
{% if access_list_id > 0 %}
{% if access_list.items.length > 0 %}
# Authorization
auth_basic "Authorization required";
auth_basic_user_file /data/access/{{ access_list_id }};

{% if access_list.pass_auth == 0 %}
proxy_set_header Authorization "";
{% endif %}

{% endif %}

# Access Rules: {{ access_list.clients | size }} total
{% for client in access_list.clients %}
{{client | nginxAccessRule}}
{% endfor %}
deny all;

# Access checks must...
{% if access_list.satisfy_any == 1 %}
satisfy any;
{% else %}
satisfy all;
{% endif %}
{% endif %}
@ -2,7 +2,7 @@
{% if ssl_forced == 1 or ssl_forced == true %}
{% if hsts_enabled == 1 or hsts_enabled == true %}
# HSTS (ngx_http_headers_module is required) (63072000 seconds = 2 years)
add_header Strict-Transport-Security "max-age=63072000;{% if hsts_subdomains == 1 or hsts_subdomains == true -%} includeSubDomains;{% endif %} preload" always;
add_header Strict-Transport-Security $hsts_header always;
{% endif %}
{% endif %}
{% endif %}
3
backend/templates/_hsts_map.conf
Normal file
3
backend/templates/_hsts_map.conf
Normal file
@ -0,0 +1,3 @@
map $scheme $hsts_header {
https "max-age=63072000;{% if hsts_subdomains == 1 or hsts_subdomains == true -%} includeSubDomains;{% endif %} preload";
}
@ -5,9 +5,9 @@
#listen [::]:80;
{% endif %}
{% if certificate -%}
listen 443 ssl{% if http2_support %} http2{% endif %};
listen 443 ssl{% if http2_support == 1 or http2_support == true %} http2{% endif %};
{% if ipv6 -%}
listen [::]:443 ssl{% if http2_support %} http2{% endif %};
listen [::]:443 ssl{% if http2_support == 1 or http2_support == true %} http2{% endif %};
{% else -%}
#listen [::]:443;
{% endif %}
@ -1,36 +1,16 @@
location {{ path }} {
set $upstream {{ forward_scheme }}://{{ forward_host }}:{{ forward_port }}{{ forward_path }};
{{ advanced_config }}

proxy_set_header Host $host;
proxy_set_header X-Forwarded-Scheme $scheme;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-For $remote_addr;
proxy_set_header X-Real-IP $remote_addr;
proxy_pass $upstream;

{% if access_list_id > 0 %}
{% if access_list.items.length > 0 %}
# Authorization
auth_basic "Authorization required";
auth_basic_user_file /data/access/{{ access_list_id }};

{{ access_list.passauth }}
{% endif %}

# Access Rules
{% for client in access_list.clients %}
{{- client.rule -}};
{% endfor %}deny all;

# Access checks must...
{% if access_list.satisfy %}
{{ access_list.satisfy }};
{% endif %}

{% endif %}
proxy_pass {{ forward_scheme }}://{{ forward_host }}:{{ forward_port }}{{ forward_path }};

{% include "_access.conf" %}
{% include "_assets.conf" %}
{% include "_exploits.conf" %}

{% include "_forced_ssl.conf" %}
{% include "_hsts.conf" %}

@ -39,8 +19,5 @@
proxy_set_header Connection $http_connection;
proxy_http_version 1.1;
{% endif %}

{{ advanced_config }}
}

@ -1,6 +1,9 @@
{% include "_header_comment.conf" %}

{% if enabled %}

{% include "_hsts_map.conf" %}

server {
{% include "_listen.conf" %}
{% include "_certificates.conf" %}
@ -7,9 +7,9 @@
server {
listen 80 default;
{% if ipv6 -%}
listen [::]:80;
listen [::]:80 default;
{% else -%}
#listen [::]:80;
#listen [::]:80 default;
{% endif %}
server_name default-host.localhost;
access_log /data/logs/default-host_access.log combined;
@ -24,6 +24,12 @@ server {
}
{% endif %}

{%- if value == "444" %}
location / {
return 444;
}
{% endif %}

{%- if value == "redirect" %}
location / {
return 301 {{ meta.redirect }};
@ -1,6 +1,9 @@
{% include "_header_comment.conf" %}

{% if enabled %}

{% include "_hsts_map.conf" %}

server {
set $forward_scheme {{ forward_scheme }};
set $server "{{ forward_host }}";
@ -52,6 +55,7 @@ proxy_http_version 1.1;
{% endif %}

{% include "_openid_connect.conf" %}
{% include "_access.conf" %}
{% include "_hsts.conf" %}

{% if allow_websocket_upgrade == 1 or allow_websocket_upgrade == true %}
@ -1,6 +1,9 @@
{% include "_header_comment.conf" %}

{% if enabled %}

{% include "_hsts_map.conf" %}

server {
{% include "_listen.conf" %}
{% include "_certificates.conf" %}
2977
backend/yarn.lock
2977
backend/yarn.lock
File diff suppressed because it is too large
Load Diff
@ -3,20 +3,25 @@

# This file assumes that the frontend has been built using ./scripts/frontend-build

FROM nginxproxymanager/nginx-full:node
FROM nginxproxymanager/nginx-full:certbot-node

ARG TARGETPLATFORM
ARG BUILD_VERSION
ARG BUILD_COMMIT
ARG BUILD_DATE

# See: https://github.com/just-containers/s6-overlay/blob/master/README.md
ENV SUPPRESS_NO_CONFIG_WARNING=1 \
S6_FIX_ATTRS_HIDDEN=1 \
S6_BEHAVIOUR_IF_STAGE2_FAILS=1 \
S6_CMD_WAIT_FOR_SERVICES_MAXTIME=0 \
S6_FIX_ATTRS_HIDDEN=1 \
S6_KILL_FINISH_MAXTIME=10000 \
S6_VERBOSITY=1 \
NODE_ENV=production \
NPM_BUILD_VERSION="${BUILD_VERSION}" \
NPM_BUILD_COMMIT="${BUILD_COMMIT}" \
NPM_BUILD_DATE="${BUILD_DATE}"
NPM_BUILD_DATE="${BUILD_DATE}" \
NODE_OPTIONS="--openssl-legacy-provider"

RUN echo "fs.file-max = 65535" > /etc/sysctl.conf \
&& apt-get update \
@ -25,7 +30,7 @@ RUN echo "fs.file-max = 65535" > /etc/sysctl.conf \
&& rm -rf /var/lib/apt/lists/*

# s6 overlay
COPY scripts/install-s6 /tmp/install-s6
COPY docker/scripts/install-s6 /tmp/install-s6
RUN /tmp/install-s6 "${TARGETPLATFORM}" && rm -f /tmp/install-s6

EXPOSE 80 81 443
@ -35,18 +40,17 @@ COPY frontend/dist /app/frontend
COPY global /app/global

WORKDIR /app
RUN yarn install
RUN yarn install \
&& yarn cache clean

# add late to limit cache-busting by modifications
COPY docker/rootfs /

# Remove frontend service not required for prod, dev nginx config as well
RUN rm -rf /etc/services.d/frontend /etc/nginx/conf.d/dev.conf
RUN rm -rf /etc/s6-overlay/s6-rc.d/user/contents.d/frontend /etc/nginx/conf.d/dev.conf \
&& chmod 644 /etc/logrotate.d/nginx-proxy-manager

# Change permission of logrotate config file
RUN chmod 644 /etc/logrotate.d/nginx-proxy-manager

VOLUME [ "/data", "/etc/letsencrypt" ]
VOLUME [ "/data" ]
ENTRYPOINT [ "/init" ]

LABEL org.label-schema.schema-version="1.0" \
@ -1,13 +1,18 @@
FROM nginxproxymanager/nginx-full:node
FROM nginxproxymanager/nginx-full:certbot-node
LABEL maintainer="Jamie Curnow <jc@jc21.com>"

ENV S6_LOGGING=0 \
SUPPRESS_NO_CONFIG_WARNING=1 \
S6_FIX_ATTRS_HIDDEN=1
# See: https://github.com/just-containers/s6-overlay/blob/master/README.md
ENV SUPPRESS_NO_CONFIG_WARNING=1 \
S6_BEHAVIOUR_IF_STAGE2_FAILS=1 \
S6_CMD_WAIT_FOR_SERVICES_MAXTIME=0 \
S6_FIX_ATTRS_HIDDEN=1 \
S6_KILL_FINISH_MAXTIME=10000 \
S6_VERBOSITY=2 \
NODE_OPTIONS="--openssl-legacy-provider"

RUN echo "fs.file-max = 65535" > /etc/sysctl.conf \
&& apt-get update \
&& apt-get install -y certbot jq python3-pip logrotate \
&& apt-get install -y jq python3-pip logrotate \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*

@ -21,9 +26,8 @@ RUN rm -f /etc/nginx/conf.d/production.conf
RUN chmod 644 /etc/logrotate.d/nginx-proxy-manager

# s6 overlay
RUN curl -L -o /tmp/s6-overlay-amd64.tar.gz "https://github.com/just-containers/s6-overlay/releases/download/v1.22.1.0/s6-overlay-amd64.tar.gz" \
&& tar -xzf /tmp/s6-overlay-amd64.tar.gz -C /
COPY scripts/install-s6 /tmp/install-s6
RUN /tmp/install-s6 "${TARGETPLATFORM}" && rm -f /tmp/install-s6

EXPOSE 80 81 443
ENTRYPOINT [ "/init" ]

28
docker/dev/dnsrouter-config.json
Normal file
28
docker/dev/dnsrouter-config.json
Normal file
@ -0,0 +1,28 @@
{
"log": {
"format": "nice",
"level": "debug"
},
"servers": [
{
"host": "0.0.0.0",
"port": 53,
"upstreams": [
{
"regex": "website[0-9]+.example\\.com",
"upstream": "127.0.0.11"
},
{
"regex": ".*\\.example\\.com",
"upstream": "1.1.1.1"
},
{
"regex": "local",
"nxdomain": true
}
],
"internal": null,
"default_upstream": "127.0.0.11"
}
]
}
7
docker/dev/letsencrypt.ini
Normal file
7
docker/dev/letsencrypt.ini
Normal file
@ -0,0 +1,7 @@
text = True
non-interactive = True
webroot-path = /data/letsencrypt-acme-challenge
key-type = ecdsa
elliptic-curve = secp384r1
preferred-chain = ISRG Root X1
server =
255
docker/dev/pdns-db.sql
Normal file
255
docker/dev/pdns-db.sql
Normal file
@ -0,0 +1,255 @@
/*

How this was generated:
1. bring up an empty pdns stack
2. use api to create a zone ...

curl -X POST \
'http://npm.dev:8081/api/v1/servers/localhost/zones' \
--header 'X-API-Key: npm' \
--header 'Content-Type: application/json' \
--data-raw '{
"name": "example.com.",
"kind": "Native",
"masters": [],
"nameservers": [
"ns1.pdns.",
"ns2.pdns."
]
}'

3. Dump sql:

docker exec -ti npm.pdns.db mysqldump -u pdns -p pdns

*/

----------------------------------------------------------------------

/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
/*!40103 SET TIME_ZONE='+00:00' */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;

--
-- Table structure for table `comments`
--

DROP TABLE IF EXISTS `comments`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `comments` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`domain_id` int(11) NOT NULL,
`name` varchar(255) NOT NULL,
`type` varchar(10) NOT NULL,
`modified_at` int(11) NOT NULL,
`account` varchar(40) CHARACTER SET utf8mb3 DEFAULT NULL,
`comment` text CHARACTER SET utf8mb3 NOT NULL,
PRIMARY KEY (`id`),
KEY `comments_name_type_idx` (`name`,`type`),
KEY `comments_order_idx` (`domain_id`,`modified_at`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `comments`
--

LOCK TABLES `comments` WRITE;
/*!40000 ALTER TABLE `comments` DISABLE KEYS */;
/*!40000 ALTER TABLE `comments` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `cryptokeys`
--

DROP TABLE IF EXISTS `cryptokeys`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `cryptokeys` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`domain_id` int(11) NOT NULL,
`flags` int(11) NOT NULL,
`active` tinyint(1) DEFAULT NULL,
`published` tinyint(1) DEFAULT 1,
`content` text DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `domainidindex` (`domain_id`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `cryptokeys`
--

LOCK TABLES `cryptokeys` WRITE;
/*!40000 ALTER TABLE `cryptokeys` DISABLE KEYS */;
/*!40000 ALTER TABLE `cryptokeys` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `domainmetadata`
--

DROP TABLE IF EXISTS `domainmetadata`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `domainmetadata` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`domain_id` int(11) NOT NULL,
`kind` varchar(32) DEFAULT NULL,
`content` text DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `domainmetadata_idx` (`domain_id`,`kind`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `domainmetadata`
--

LOCK TABLES `domainmetadata` WRITE;
/*!40000 ALTER TABLE `domainmetadata` DISABLE KEYS */;
INSERT INTO `domainmetadata` VALUES
(1,1,'SOA-EDIT-API','DEFAULT');
/*!40000 ALTER TABLE `domainmetadata` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `domains`
--

DROP TABLE IF EXISTS `domains`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `domains` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`name` varchar(255) NOT NULL,
`master` varchar(128) DEFAULT NULL,
`last_check` int(11) DEFAULT NULL,
`type` varchar(8) NOT NULL,
`notified_serial` int(10) unsigned DEFAULT NULL,
`account` varchar(40) CHARACTER SET utf8mb3 DEFAULT NULL,
`options` varchar(64000) DEFAULT NULL,
`catalog` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `name_index` (`name`),
KEY `catalog_idx` (`catalog`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `domains`
--

LOCK TABLES `domains` WRITE;
/*!40000 ALTER TABLE `domains` DISABLE KEYS */;
INSERT INTO `domains` VALUES
(1,'example.com','',NULL,'NATIVE',NULL,'',NULL,NULL);
/*!40000 ALTER TABLE `domains` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `records`
--

DROP TABLE IF EXISTS `records`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `records` (
`id` bigint(20) NOT NULL AUTO_INCREMENT,
`domain_id` int(11) DEFAULT NULL,
`name` varchar(255) DEFAULT NULL,
`type` varchar(10) DEFAULT NULL,
`content` varchar(64000) DEFAULT NULL,
`ttl` int(11) DEFAULT NULL,
`prio` int(11) DEFAULT NULL,
`disabled` tinyint(1) DEFAULT 0,
`ordername` varchar(255) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
`auth` tinyint(1) DEFAULT 1,
PRIMARY KEY (`id`),
KEY `nametype_index` (`name`,`type`),
KEY `domain_id` (`domain_id`),
KEY `ordername` (`ordername`)
) ENGINE=InnoDB AUTO_INCREMENT=4 DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `records`
--

LOCK TABLES `records` WRITE;
/*!40000 ALTER TABLE `records` DISABLE KEYS */;
INSERT INTO `records` VALUES
(1,1,'example.com','NS','ns1.pdns',1500,0,0,NULL,1),
(2,1,'example.com','NS','ns2.pdns',1500,0,0,NULL,1),
(3,1,'example.com','SOA','a.misconfigured.dns.server.invalid hostmaster.example.com 2023030501 10800 3600 604800 3600',1500,0,0,NULL,1);
/*!40000 ALTER TABLE `records` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `supermasters`
--

DROP TABLE IF EXISTS `supermasters`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `supermasters` (
`ip` varchar(64) NOT NULL,
`nameserver` varchar(255) NOT NULL,
`account` varchar(40) CHARACTER SET utf8mb3 NOT NULL,
PRIMARY KEY (`ip`,`nameserver`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `supermasters`
--

LOCK TABLES `supermasters` WRITE;
/*!40000 ALTER TABLE `supermasters` DISABLE KEYS */;
/*!40000 ALTER TABLE `supermasters` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `tsigkeys`
--

DROP TABLE IF EXISTS `tsigkeys`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `tsigkeys` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`name` varchar(255) DEFAULT NULL,
`algorithm` varchar(50) DEFAULT NULL,
`secret` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `namealgoindex` (`name`,`algorithm`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `tsigkeys`
--

LOCK TABLES `tsigkeys` WRITE;
/*!40000 ALTER TABLE `tsigkeys` DISABLE KEYS */;
/*!40000 ALTER TABLE `tsigkeys` ENABLE KEYS */;
UNLOCK TABLES;
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;

/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
12
docker/dev/pebble-config.json
Normal file
@ -0,0 +1,12 @@
{
  "pebble": {
    "listenAddress": "0.0.0.0:443",
    "managementListenAddress": "0.0.0.0:15000",
    "certificate": "test/certs/localhost/cert.pem",
    "privateKey": "test/certs/localhost/key.pem",
    "httpPort": 80,
    "tlsPort": 443,
    "ocspResponderURL": "",
    "externalAccountBindingRequired": false
  }
}
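Pebble is the ACME test server used for certificate tests; with this config it answers ACME requests over TLS on 443 and exposes a management endpoint on 15000. A hedged smoke test from another container on the same network (the `pebble` hostname and the self-signed certificate, hence -k, are assumptions; /dir and /roots/0 are Pebble's documented paths):

# Sketch: fetch the ACME directory and the Pebble root certificate.
curl -sk https://pebble/dir
curl -sk https://pebble:15000/roots/0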
27
docker/docker-compose.ci.mysql.yml
Normal file
@ -0,0 +1,27 @@
# WARNING: This is a CI docker-compose file used for building and testing of the entire app, it should not be used for production.
services:

  fullstack:
    environment:
      DB_MYSQL_HOST: 'db-mysql'
      DB_MYSQL_PORT: '3306'
      DB_MYSQL_USER: 'npm'
      DB_MYSQL_PASSWORD: 'npmpass'
      DB_MYSQL_NAME: 'npm'
    depends_on:
      - db-mysql

  db-mysql:
    image: jc21/mariadb-aria
    environment:
      MYSQL_ROOT_PASSWORD: 'npm'
      MYSQL_DATABASE: 'npm'
      MYSQL_USER: 'npm'
      MYSQL_PASSWORD: 'npmpass'
    volumes:
      - mysql_vol:/var/lib/mysql
    networks:
      - fulltest

volumes:
  mysql_vol:
9
docker/docker-compose.ci.sqlite.yml
Normal file
@ -0,0 +1,9 @@
# WARNING: This is a CI docker-compose file used for building and testing of the entire app, it should not be used for production.
services:

  fullstack:
    environment:
      DB_SQLITE_FILE: '/data/mydb.sqlite'
      PUID: 1000
      PGID: 1000
      DISABLE_IPV6: 'true'
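These two overlays only carry the database-specific settings; they are meant to be layered on top of the base CI compose file that follows. A hedged example of combining them with multiple -f flags (the exact CI invocation is not shown in this diff):

# Sketch: bring up the CI stack against MariaDB; swap in docker-compose.ci.sqlite.yml for the sqlite variant.
docker compose \
    -f docker/docker-compose.ci.yml \
    -f docker/docker-compose.ci.mysql.yml \
    up --build -d fullstack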
@ -1,80 +1,110 @@
# WARNING: This is a CI docker-compose file used for building and testing of the entire app, it should not be used for production.
version: "3"
# WARNING: This is a CI docker-compose file used for building
# and testing of the entire app, it should not be used for production.
# This is a base compose file, it should be extended with a
# docker-compose.ci.*.yml file
services:

  fullstack-mysql:
    image: ${IMAGE}:ci-${BUILD_NUMBER}
  fullstack:
    image: "${IMAGE}:${BRANCH_LOWER}-ci-${BUILD_NUMBER}"
    environment:
      NODE_ENV: "development"
      DEBUG: 'true'
      FORCE_COLOR: 1
      DB_MYSQL_HOST: "db"
      DB_MYSQL_PORT: 3306
      DB_MYSQL_USER: "npm"
      DB_MYSQL_PASSWORD: "npm"
      DB_MYSQL_NAME: "npm"
    volumes:
      - npm_data:/data
    expose:
      - 81
      - 80
      - 443
      - 'npm_data_ci:/data'
      - 'npm_le_ci:/etc/letsencrypt'
      - './dev/letsencrypt.ini:/etc/letsencrypt.ini:ro'
      - './dev/resolv.conf:/etc/resolv.conf:ro'
      - '/etc/localtime:/etc/localtime:ro'
    healthcheck:
      test: ["CMD", "/usr/bin/check-health"]
      interval: 10s
      timeout: 3s
    networks:
      fulltest:
        aliases:
          - website1.example.com
          - website2.example.com
          - website3.example.com

  stepca:
    image: jc21/testca
    volumes:
      - './dev/resolv.conf:/etc/resolv.conf:ro'
      - '/etc/localtime:/etc/localtime:ro'
    networks:
      fulltest:
        aliases:
          - ca.internal

  pdns:
    image: pschiffe/pdns-mysql
    volumes:
      - '/etc/localtime:/etc/localtime:ro'
    environment:
      PDNS_master: 'yes'
      PDNS_api: 'yes'
      PDNS_api_key: 'npm'
      PDNS_webserver: 'yes'
      PDNS_webserver_address: '0.0.0.0'
      PDNS_webserver_password: 'npm'
      PDNS_webserver-allow-from: '127.0.0.0/8,192.0.0.0/8,10.0.0.0/8,172.0.0.0/8'
      PDNS_version_string: 'anonymous'
      PDNS_default_ttl: 1500
      PDNS_allow_axfr_ips: '127.0.0.0/8,192.0.0.0/8,10.0.0.0/8,172.0.0.0/8'
      PDNS_gmysql_host: pdns-db
      PDNS_gmysql_port: 3306
      PDNS_gmysql_user: pdns
      PDNS_gmysql_password: pdns
      PDNS_gmysql_dbname: pdns
    depends_on:
      - db
    healthcheck:
      test: ["CMD", "/bin/check-health"]
      interval: 10s
      timeout: 3s
      - pdns-db
    networks:
      fulltest:
        aliases:
          - ns1.pdns
          - ns2.pdns

  fullstack-sqlite:
    image: ${IMAGE}:ci-${BUILD_NUMBER}
  pdns-db:
    image: mariadb
    environment:
      NODE_ENV: "development"
      FORCE_COLOR: 1
      DB_SQLITE_FILE: "/data/database.sqlite"
      MYSQL_ROOT_PASSWORD: 'pdns'
      MYSQL_DATABASE: 'pdns'
      MYSQL_USER: 'pdns'
      MYSQL_PASSWORD: 'pdns'
    volumes:
      - npm_data:/data
    expose:
      - 81
      - 80
      - 443
    healthcheck:
      test: ["CMD", "/bin/check-health"]
      interval: 10s
      timeout: 3s
      - 'pdns_mysql_vol:/var/lib/mysql'
      - '/etc/localtime:/etc/localtime:ro'
      - './dev/pdns-db.sql:/docker-entrypoint-initdb.d/01_init.sql:ro'
    networks:
      - fulltest

  db:
    image: jc21/mariadb-aria
    environment:
      MYSQL_ROOT_PASSWORD: "npm"
      MYSQL_DATABASE: "npm"
      MYSQL_USER: "npm"
      MYSQL_PASSWORD: "npm"
  dnsrouter:
    image: jc21/dnsrouter
    volumes:
      - db_data:/var/lib/mysql
      - ./dev/dnsrouter-config.json.tmp:/dnsrouter-config.json:ro
    networks:
      - fulltest

  cypress-mysql:
    image: ${IMAGE}-cypress:ci-${BUILD_NUMBER}
  cypress:
    image: "${IMAGE}-cypress:ci-${BUILD_NUMBER}"
    build:
      context: ../test/
      dockerfile: cypress/Dockerfile
      context: ../
      dockerfile: test/cypress/Dockerfile
    environment:
      CYPRESS_baseUrl: "http://fullstack-mysql:81"
      CYPRESS_baseUrl: 'http://fullstack:81'
    volumes:
      - cypress-logs:/results
    command: cypress run --browser chrome --config-file=${CYPRESS_CONFIG:-cypress/config/ci.json}

  cypress-sqlite:
    image: ${IMAGE}-cypress:ci-${BUILD_NUMBER}
    build:
      context: ../test/
      dockerfile: cypress/Dockerfile
    environment:
      CYPRESS_baseUrl: "http://fullstack-sqlite:81"
    volumes:
      - cypress-logs:/results
    command: cypress run --browser chrome --config-file=${CYPRESS_CONFIG:-cypress/config/ci.json}
      - 'cypress_logs:/results'
      - './dev/resolv.conf:/etc/resolv.conf:ro'
    command: cypress run --browser chrome --config-file=cypress/config/ci.js
    networks:
      - fulltest

volumes:
  cypress-logs:
  npm_data:
  db_data:
  cypress_logs:
  npm_data_ci:
  npm_le_ci:
  pdns_mysql_vol:

networks:
  fulltest:
    name: "npm-${BRANCH_LOWER}-ci-${BUILD_NUMBER}"
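The pdns service enables the PowerDNS webserver and API with key 'npm'; the API port is not set here, so the PowerDNS default of 8081 is an assumption. From a container attached to the fulltest network a zone listing could be checked roughly like this:

# Sketch: query the PowerDNS API (port 8081 assumed, key from the compose environment).
curl -s -H 'X-API-Key: npm' http://pdns:8081/api/v1/servers/localhost/zones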
@ -1,6 +1,6 @@
# WARNING: This is a DEVELOPMENT docker-compose file, it should not be used for production.
version: "3.5"
services:

  npm:
    image: nginxproxymanager:dev
    container_name: npm_core
@ -14,14 +14,19 @@ services:
    networks:
      - nginx_proxy_manager
    environment:
      NODE_ENV: "development"
      PUID: 1000
      PGID: 1000
      FORCE_COLOR: 1
      DEVELOPMENT: "true"
      DB_MYSQL_HOST: "db"
      DB_MYSQL_PORT: 3306
      DB_MYSQL_USER: "npm"
      DB_MYSQL_PASSWORD: "npm"
      DB_MYSQL_NAME: "npm"
      # specifically for dev:
      DEBUG: 'true'
      DEVELOPMENT: 'true'
      LE_STAGING: 'true'
      # db:
      DB_MYSQL_HOST: 'db'
      DB_MYSQL_PORT: '3306'
      DB_MYSQL_USER: 'npm'
      DB_MYSQL_PASSWORD: 'npm'
      DB_MYSQL_NAME: 'npm'
      # DB_SQLITE_FILE: "/data/database.sqlite"
      # DISABLE_IPV6: "true"
    volumes:
@ -37,29 +42,18 @@ services:
  db:
    image: jc21/mariadb-aria
    container_name: npm_db
    ports:
      - 33306:3306
    networks:
      - nginx_proxy_manager
    environment:
      MYSQL_ROOT_PASSWORD: "npm"
      MYSQL_DATABASE: "npm"
      MYSQL_USER: "npm"
      MYSQL_PASSWORD: "npm"
      MYSQL_ROOT_PASSWORD: 'npm'
      MYSQL_DATABASE: 'npm'
      MYSQL_USER: 'npm'
      MYSQL_PASSWORD: 'npm'
    volumes:
      - db_data:/var/lib/mysql

  swagger:
    image: "swaggerapi/swagger-ui:latest"
    container_name: npm_swagger
    ports:
      - 3001:80
    networks:
      - nginx_proxy_manager
    environment:
      URL: "http://127.0.0.1:3081/api/schema"
      PORT: "80"
    depends_on:
      - npm

volumes:
  npm_data:
    name: npm_core_data
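The swagger container just points Swagger UI at the backend's schema; the URL above implies the dev API is reachable on host port 3081, although that port mapping is not visible in this hunk, so treat it as an assumption. A quick check of both endpoints after bringing the dev stack up:

# Sketch: confirm the schema endpoint and the Swagger UI on the host.
curl -s http://127.0.0.1:3081/api/schema | head
curl -sI http://127.0.0.1:3001/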
@ -1,46 +0,0 @@
#!/bin/bash

# This command reads the `DISABLE_IPV6` env var and will either enable
# or disable ipv6 in all nginx configs based on this setting.

# Lowercase
DISABLE_IPV6=$(echo "${DISABLE_IPV6:-}" | tr '[:upper:]' '[:lower:]')

CYAN='\E[1;36m'
BLUE='\E[1;34m'
YELLOW='\E[1;33m'
RED='\E[1;31m'
RESET='\E[0m'

FOLDER=$1
if [ "$FOLDER" == "" ]; then
    echo -e "${RED}❯ $0 requires a absolute folder path as the first argument!${RESET}"
    echo -e "${YELLOW}  ie: $0 /data/nginx${RESET}"
    exit 1
fi

FILES=$(find "$FOLDER" -type f -name "*.conf")
if [ "$DISABLE_IPV6" == "true" ] || [ "$DISABLE_IPV6" == "on" ] || [ "$DISABLE_IPV6" == "1" ] || [ "$DISABLE_IPV6" == "yes" ]; then
    # IPV6 is disabled
    echo "Disabling IPV6 in hosts"
    echo -e "${BLUE}❯ ${CYAN}Disabling IPV6 in hosts: ${YELLOW}${FOLDER}${RESET}"

    # Iterate over configs and run the regex
    for FILE in $FILES
    do
        echo -e "  ${BLUE}❯ ${YELLOW}${FILE}${RESET}"
        sed -E -i 's/^([^#]*)listen \[::\]/\1#listen [::]/g' "$FILE"
    done

else
    # IPV6 is enabled
    echo -e "${BLUE}❯ ${CYAN}Enabling IPV6 in hosts: ${YELLOW}${FOLDER}${RESET}"

    # Iterate over configs and run the regex
    for FILE in $FILES
    do
        echo -e "  ${BLUE}❯ ${YELLOW}${FILE}${RESET}"
        sed -E -i 's/^(\s*)#listen \[::\]/\1listen [::]/g' "$FILE"
    done

fi
2
docker/rootfs/etc/cont-finish.d/.gitignore
vendored
@ -1,2 +0,0 @@
*
!.gitignore
3
docker/rootfs/etc/cont-init.d/.gitignore
vendored
@ -1,3 +0,0 @@
*
!.gitignore
!*.sh
@ -1,7 +0,0 @@
#!/usr/bin/with-contenv bash
set -e

mkdir -p /data/logs
echo "Changing ownership of /data/logs to $(id -u):$(id -g)"
chown -R "$(id -u):$(id -g)" /data/logs

@ -1,29 +0,0 @@
#!/usr/bin/with-contenv bash
# ref: https://github.com/linuxserver/docker-baseimage-alpine/blob/master/root/etc/cont-init.d/01-envfile

# in s6, environmental variables are written as text files for s6 to monitor
# seach through full-path filenames for files ending in "__FILE"
for FILENAME in $(find /var/run/s6/container_environment/ | grep "__FILE$"); do
    echo "[secret-init] Evaluating ${FILENAME##*/} ..."

    # set SECRETFILE to the contents of the full-path textfile
    SECRETFILE=$(cat ${FILENAME})
    # SECRETFILE=${FILENAME}
    # echo "[secret-init] Set SECRETFILE to ${SECRETFILE}" # DEBUG - rm for prod!

    # if SECRETFILE exists / is not null
    if [[ -f ${SECRETFILE} ]]; then
        # strip the appended "__FILE" from environmental variable name ...
        STRIPFILE=$(echo ${FILENAME} | sed "s/__FILE//g")
        # echo "[secret-init] Set STRIPFILE to ${STRIPFILE}" # DEBUG - rm for prod!

        # ... and set value to contents of secretfile
        # since s6 uses text files, this is effectively "export ..."
        printf $(cat ${SECRETFILE}) > ${STRIPFILE}
        # echo "[secret-init] Set ${STRIPFILE##*/} to $(cat ${STRIPFILE})" # DEBUG - rm for prod!"
        echo "[secret-init] Success! ${STRIPFILE##*/} set from ${FILENAME##*/}"

    else
        echo "[secret-init] cannot find secret in ${FILENAME}"
    fi
done
2
docker/rootfs/etc/fix-attrs.d/.gitignore
vendored
@ -1,2 +0,0 @@
*
!.gitignore
@ -3,3 +3,4 @@ non-interactive = True
webroot-path = /data/letsencrypt-acme-challenge
key-type = ecdsa
elliptic-curve = secp384r1
preferred-chain = ISRG Root X1
@ -1,5 +1,6 @@
/data/logs/*_access.log /data/logs/*/access.log {
  create 0644 root root
  su npm npm
  create 0644
  weekly
  rotate 4
  missingok
@ -12,7 +13,8 @@
}

/data/logs/*_error.log /data/logs/*/error.log {
  create 0644 root root
  su npm npm
  create 0644
  weekly
  rotate 10
  missingok
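The rotation now runs as the npm user via `su npm npm` and no longer forces root ownership on the created files. A dry run inside the container is a cheap way to sanity-check the config (logrotate's -d flag only simulates; the container name is an assumption taken from the dev compose file):

# Sketch: simulate a rotation without touching any log files.
docker exec npm_core logrotate -d /etc/logrotate.d/nginx-proxy-manager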
@ -30,11 +30,10 @@ server {
  set $port "443";

  server_name localhost;
  access_log /data/logs/fallback-access.log standard;
  access_log /data/logs/fallback_access.log standard;
  error_log /dev/null crit;
  ssl_certificate /data/nginx/dummycert.pem;
  ssl_certificate_key /data/nginx/dummykey.pem;
  include conf.d/include/ssl-ciphers.conf;
  ssl_reject_handshake on;

  return 444;
}
@ -1,4 +1,4 @@
location ~* ^.*\.(css|js|jpe?g|gif|png|woff|eot|ttf|svg|ico|css\.map|js\.map)$ {
location ~* ^.*\.(css|js|jpe?g|gif|png|webp|woff|eot|ttf|svg|ico|css\.map|js\.map)$ {
  if_modified_since off;

  # use the public cache
@ -1,3 +1,10 @@
set $test "";
if ($scheme = "http") {
  set $test "H";
}
if ($request_uri = /.well-known/acme-challenge/test-challenge) {
  set $test "${test}T";
}
if ($test = H) {
  return 301 https://$host$request_uri;
}
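The added block builds a small flag: "H" when the request arrived over plain HTTP and "T" when it targets the dedicated test-challenge path, and only issues the 301 when the flag is exactly "H". Ordinary HTTP traffic is therefore pushed to HTTPS while the certbot test-challenge probe (flag "HT") is served without a redirect. A rough way to observe both cases, with example.com standing in for a proxied host:

# Sketch: the first request should answer 301, the test-challenge probe should not.
curl -sI http://example.com/ | head -n 1
curl -sI http://example.com/.well-known/acme-challenge/test-challenge | head -n 1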
4
docker/rootfs/etc/nginx/conf.d/include/log.conf
Normal file
@ -0,0 +1,4 @@
log_format proxy '[$time_local] $upstream_cache_status $upstream_status $status - $request_method $scheme $host "$request_uri" [Client $remote_addr] [Length $body_bytes_sent] [Gzip $gzip_ratio] [Sent-to $server] "$http_user_agent" "$http_referer"';
log_format standard '[$time_local] $status - $request_method $scheme $host "$request_uri" [Client $remote_addr] [Length $body_bytes_sent] [Gzip $gzip_ratio] "$http_user_agent" "$http_referer"';

access_log /data/logs/fallback_access.log proxy;
@ -2,7 +2,7 @@ add_header X-Served-By $host;
proxy_set_header Host $host;
proxy_set_header X-Forwarded-Scheme $scheme;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-For $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Real-IP $remote_addr;
proxy_pass $forward_scheme://$server:$port;
proxy_pass $forward_scheme://$server:$port$request_uri;

@ -1,7 +1,7 @@
# run nginx in foreground
daemon off;

user root;
pid /run/nginx/nginx.pid;
user npm;

# Set number of worker processes automatically based on number of CPU cores.
worker_processes auto;
@ -14,8 +14,11 @@ error_log /data/logs/fallback_error.log warn;
# Includes files with directives to load dynamic modules.
include /etc/nginx/modules/*.conf;

# Custom
include /data/nginx/custom/root_top[.]conf;

events {
  worker_connections 1024;
  include /data/nginx/custom/events[.]conf;
}

http {
@ -58,6 +61,8 @@ http {

  access_log /data/logs/fallback_access.log proxy;

  include /etc/nginx/conf.d/include/log.conf;

  # Dynamically generated resolvers file
  include /etc/nginx/conf.d/include/resolvers.conf;

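The `[.]` in those include paths makes the glob optional, so nginx starts cleanly when no custom file exists but picks one up when it does. A hedged example of dropping in a custom events override (the host path stands in for wherever the /data volume lives, and the container name comes from the dev compose file):

# Sketch: add a custom events{} snippet and reload nginx to apply it.
echo 'worker_connections 2048;' > /your/data/mount/nginx/custom/events.conf
docker exec npm_core nginx -s reload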
21
docker/rootfs/etc/s6-overlay/s6-rc.d/backend/run
Executable file
@ -0,0 +1,21 @@
#!/command/with-contenv bash
# shellcheck shell=bash

set -e

. /usr/bin/common.sh

cd /app || exit 1

log_info 'Starting backend ...'

if [ "${DEVELOPMENT:-}" = 'true' ]; then
    s6-setuidgid "$PUID:$PGID" yarn install
    exec s6-setuidgid "$PUID:$PGID" bash -c "export HOME=$NPMHOME;node --max_old_space_size=250 --abort_on_uncaught_exception node_modules/nodemon/bin/nodemon.js"
else
    while :
    do
        s6-setuidgid "$PUID:$PGID" bash -c "export HOME=$NPMHOME;node --abort_on_uncaught_exception --max_old_space_size=250 index.js"
        sleep 1
    done
fi
1
docker/rootfs/etc/s6-overlay/s6-rc.d/backend/type
Normal file
@ -0,0 +1 @@
longrun
21
docker/rootfs/etc/s6-overlay/s6-rc.d/frontend/run
Executable file
@ -0,0 +1,21 @@
#!/command/with-contenv bash
# shellcheck shell=bash

set -e

# This service is DEVELOPMENT only.

if [ "$DEVELOPMENT" = 'true' ]; then
    . /usr/bin/common.sh
    cd /app/frontend || exit 1
    HOME=$NPMHOME
    export HOME
    mkdir -p /app/frontend/dist
    chown -R "$PUID:$PGID" /app/frontend/dist

    log_info 'Starting frontend ...'
    s6-setuidgid "$PUID:$PGID" yarn install
    exec s6-setuidgid "$PUID:$PGID" yarn watch
else
    exit 0
fi
1
docker/rootfs/etc/s6-overlay/s6-rc.d/frontend/type
Normal file
@ -0,0 +1 @@
longrun
9
docker/rootfs/etc/s6-overlay/s6-rc.d/nginx/run
Executable file
@ -0,0 +1,9 @@
#!/command/with-contenv bash
# shellcheck shell=bash

set -e

. /usr/bin/common.sh

log_info 'Starting nginx ...'
exec s6-setuidgid "$PUID:$PGID" nginx
1
docker/rootfs/etc/s6-overlay/s6-rc.d/nginx/type
Normal file
@ -0,0 +1 @@
longrun
22
docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/00-all.sh
Executable file
@ -0,0 +1,22 @@
#!/command/with-contenv bash
# shellcheck shell=bash

set -e

. /usr/bin/common.sh

if [ "$(id -u)" != "0" ]; then
    log_fatal "This docker container must be run as root, do not specify a user.\nYou can specify PUID and PGID env vars to run processes as that user and group after initialization."
fi

if [ "$DEBUG" = "true" ]; then
    set -x
fi

. /etc/s6-overlay/s6-rc.d/prepare/10-usergroup.sh
. /etc/s6-overlay/s6-rc.d/prepare/20-paths.sh
. /etc/s6-overlay/s6-rc.d/prepare/30-ownership.sh
. /etc/s6-overlay/s6-rc.d/prepare/40-dynamic.sh
. /etc/s6-overlay/s6-rc.d/prepare/50-ipv6.sh
. /etc/s6-overlay/s6-rc.d/prepare/60-secrets.sh
. /etc/s6-overlay/s6-rc.d/prepare/90-banner.sh
40
docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/10-usergroup.sh
Executable file
@ -0,0 +1,40 @@
#!/command/with-contenv bash
# shellcheck shell=bash

set -e

log_info "Configuring $NPMUSER user ..."

if id -u "$NPMUSER" 2>/dev/null; then
    # user already exists
    usermod -u "$PUID" "$NPMUSER"
else
    # Add user
    useradd -o -u "$PUID" -U -d "$NPMHOME" -s /bin/false "$NPMUSER"
fi

log_info "Configuring $NPMGROUP group ..."
if [ "$(get_group_id "$NPMGROUP")" = '' ]; then
    # Add group. This will not set the id properly if it's already taken
    groupadd -f -g "$PGID" "$NPMGROUP"
else
    groupmod -o -g "$PGID" "$NPMGROUP"
fi

# Set the group ID and check it
groupmod -o -g "$PGID" "$NPMGROUP"
if [ "$(get_group_id "$NPMGROUP")" != "$PGID" ]; then
    echo "ERROR: Unable to set group id properly"
    exit 1
fi

# Set the group against the user and check it
usermod -G "$PGID" "$NPMGROUP"
if [ "$(id -g "$NPMUSER")" != "$PGID" ] ; then
    echo "ERROR: Unable to set group against the user properly"
    exit 1
fi

# Home for user
mkdir -p "$NPMHOME"
chown -R "$PUID:$PGID" "$NPMHOME"
41
docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/20-paths.sh
Executable file
@ -0,0 +1,41 @@
#!/command/with-contenv bash
# shellcheck shell=bash

set -e

log_info 'Checking paths ...'

# Ensure /data is mounted
if [ ! -d '/data' ]; then
    log_fatal '/data is not mounted! Check your docker configuration.'
fi
# Ensure /etc/letsencrypt is mounted
if [ ! -d '/etc/letsencrypt' ]; then
    log_fatal '/etc/letsencrypt is not mounted! Check your docker configuration.'
fi

# Create required folders
mkdir -p \
    /data/nginx \
    /data/custom_ssl \
    /data/logs \
    /data/access \
    /data/nginx/default_host \
    /data/nginx/default_www \
    /data/nginx/proxy_host \
    /data/nginx/redirection_host \
    /data/nginx/stream \
    /data/nginx/dead_host \
    /data/nginx/temp \
    /data/letsencrypt-acme-challenge \
    /run/nginx \
    /tmp/nginx/body \
    /var/log/nginx \
    /var/lib/nginx/cache/public \
    /var/lib/nginx/cache/private \
    /var/cache/nginx/proxy_temp

touch /var/log/nginx/error.log || true
chmod 777 /var/log/nginx/error.log || true
chmod -R 777 /var/cache/nginx || true
chmod 644 /etc/logrotate.d/nginx-proxy-manager
28
docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/30-ownership.sh
Executable file
@ -0,0 +1,28 @@
#!/command/with-contenv bash
# shellcheck shell=bash

set -e

log_info 'Setting ownership ...'

# root
chown root /tmp/nginx

# npm user and group
chown -R "$PUID:$PGID" /data
chown -R "$PUID:$PGID" /etc/letsencrypt
chown -R "$PUID:$PGID" /run/nginx
chown -R "$PUID:$PGID" /tmp/nginx
chown -R "$PUID:$PGID" /var/cache/nginx
chown -R "$PUID:$PGID" /var/lib/logrotate
chown -R "$PUID:$PGID" /var/lib/nginx
chown -R "$PUID:$PGID" /var/log/nginx

# Don't chown entire /etc/nginx folder as this causes crashes on some systems
chown -R "$PUID:$PGID" /etc/nginx/nginx
chown -R "$PUID:$PGID" /etc/nginx/nginx.conf
chown -R "$PUID:$PGID" /etc/nginx/conf.d

# Prevents errors when installing python certbot plugins when non-root
chown "$PUID:$PGID" /opt/certbot /opt/certbot/bin
find /opt/certbot/lib/python*/site-packages -not -user "$PUID" -execdir chown "$PUID:$PGID" {} \+
17
docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/40-dynamic.sh
Executable file
@ -0,0 +1,17 @@
#!/command/with-contenv bash
# shellcheck shell=bash

set -e

log_info 'Dynamic resolvers ...'

DISABLE_IPV6=$(echo "${DISABLE_IPV6:-}" | tr '[:upper:]' '[:lower:]')

# Dynamically generate resolvers file, if resolver is IPv6, enclose in `[]`
# thanks @tfmm
if [ "$DISABLE_IPV6" == "true" ] || [ "$DISABLE_IPV6" == "on" ] || [ "$DISABLE_IPV6" == "1" ] || [ "$DISABLE_IPV6" == "yes" ];
then
    echo resolver "$(awk 'BEGIN{ORS=" "} $1=="nameserver" { sub(/%.*$/,"",$2); print ($2 ~ ":")? "["$2"]": $2}' /etc/resolv.conf) ipv6=off valid=10s;" > /etc/nginx/conf.d/include/resolvers.conf
else
    echo resolver "$(awk 'BEGIN{ORS=" "} $1=="nameserver" { sub(/%.*$/,"",$2); print ($2 ~ ":")? "["$2"]": $2}' /etc/resolv.conf) valid=10s;" > /etc/nginx/conf.d/include/resolvers.conf
fi
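The awk one-liner collects every nameserver from /etc/resolv.conf, strips any %zone suffix and wraps IPv6 addresses in brackets, producing a single nginx `resolver ...;` line. A self-contained sketch of the same expression against a made-up resolv.conf:

# Sketch: reproduce the generated resolvers line for sample nameservers (addresses are made up).
printf 'nameserver 127.0.0.11\nnameserver fd00::1%%eth0\n' > /tmp/resolv.sample
echo resolver "$(awk 'BEGIN{ORS=" "} $1=="nameserver" { sub(/%.*$/,"",$2); print ($2 ~ ":")? "["$2"]": $2}' /tmp/resolv.sample) valid=10s;"
# prints something like: resolver 127.0.0.11 [fd00::1]  valid=10s;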
39
docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/50-ipv6.sh
Executable file
@ -0,0 +1,39 @@
#!/command/with-contenv bash
# shellcheck shell=bash

# This command reads the `DISABLE_IPV6` env var and will either enable
# or disable ipv6 in all nginx configs based on this setting.

set -e

log_info 'IPv6 ...'

# Lowercase
DISABLE_IPV6=$(echo "${DISABLE_IPV6:-}" | tr '[:upper:]' '[:lower:]')

process_folder () {
    FILES=$(find "$1" -type f -name "*.conf")
    SED_REGEX=

    if [ "$DISABLE_IPV6" == "true" ] || [ "$DISABLE_IPV6" == "on" ] || [ "$DISABLE_IPV6" == "1" ] || [ "$DISABLE_IPV6" == "yes" ]; then
        # IPV6 is disabled
        echo "Disabling IPV6 in hosts in: $1"
        SED_REGEX='s/^([^#]*)listen \[::\]/\1#listen [::]/g'
    else
        # IPV6 is enabled
        echo "Enabling IPV6 in hosts in: $1"
        SED_REGEX='s/^(\s*)#listen \[::\]/\1listen [::]/g'
    fi

    for FILE in $FILES
    do
        echo "- ${FILE}"
        echo "$(sed -E "$SED_REGEX" "$FILE")" > $FILE
    done

    # ensure the files are still owned by the npm user
    chown -R "$PUID:$PGID" "$1"
}

process_folder /etc/nginx/conf.d
process_folder /data/nginx
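The two regexes simply comment out or re-enable every `listen [::]` directive in the generated configs, depending on DISABLE_IPV6. A self-contained sketch of the disable case on a throwaway file:

# Sketch: show the DISABLE_IPV6=true transformation on a dummy config.
printf 'listen 80;\nlisten [::]:80;\n' > /tmp/demo.conf
sed -E 's/^([^#]*)listen \[::\]/\1#listen [::]/g' /tmp/demo.conf
# prints:
#   listen 80;
#   #listen [::]:80;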
30
docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/60-secrets.sh
Executable file
@ -0,0 +1,30 @@
#!/command/with-contenv bash
# shellcheck shell=bash

set -e

# in s6, environmental variables are written as text files for s6 to monitor
# search through full-path filenames for files ending in "__FILE"
log_info 'Docker secrets ...'

for FILENAME in $(find /var/run/s6/container_environment/ | grep "__FILE$"); do
    echo "[secret-init] Evaluating ${FILENAME##*/} ..."

    # set SECRETFILE to the contents of the full-path textfile
    SECRETFILE=$(cat "${FILENAME}")
    # if SECRETFILE exists / is not null
    if [[ -f "${SECRETFILE}" ]]; then
        # strip the appended "__FILE" from environmental variable name ...
        STRIPFILE=$(echo "${FILENAME}" | sed "s/__FILE//g")
        # echo "[secret-init] Set STRIPFILE to ${STRIPFILE}" # DEBUG - rm for prod!

        # ... and set value to contents of secretfile
        # since s6 uses text files, this is effectively "export ..."
        printf $(cat "${SECRETFILE}") > "${STRIPFILE}"
        # echo "[secret-init] Set ${STRIPFILE##*/} to $(cat ${STRIPFILE})" # DEBUG - rm for prod!"
        echo "Success: ${STRIPFILE##*/} set from ${FILENAME##*/}"

    else
        echo "Cannot find secret in ${FILENAME}"
    fi
done
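This replaces the old cont-init envfile hook but keeps the same convention: any environment variable ending in __FILE is resolved to the contents of the file it points to, which is how Docker secrets get wired in. A hedged example of the intended usage (the secret path and variable are illustrative; the image name comes from this repo's published tag):

# Sketch: supply the MySQL password via a mounted secret file instead of a plain env var.
docker run -d \
    -e DB_MYSQL_PASSWORD__FILE=/run/secrets/db_password \
    -v /path/on/host/db_password:/run/secrets/db_password:ro \
    jc21/nginx-proxy-manager:latest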
18
docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/90-banner.sh
Executable file
@ -0,0 +1,18 @@
#!/command/with-contenv bash
# shellcheck shell=bash

set -e
set +x

echo "
-------------------------------------
 _   _ ____  __  __
| \ | |  _ \|  \/  |
|  \| | |_) | |\/| |
| |\  |  __/| |  | |
|_| \_|_|   |_|  |_|
-------------------------------------
User:  $NPMUSER PUID:$PUID ID:$(id -u "$NPMUSER") GROUP:$(id -g "$NPMUSER")
Group: $NPMGROUP PGID:$PGID ID:$(get_group_id "$NPMGROUP")
-------------------------------------
"
1
docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/type
Normal file
@ -0,0 +1 @@
oneshot
Some files were not shown because too many files have changed in this diff.