diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 00000000..2ff6c7e6
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,8 @@
+# Ignore everything
+*
+
+# Only allow the following for docker build:
+!backend/
+!docker/
+!scripts/
+!test/
diff --git a/.gitignore b/.gitignore
index 08462849..7c00febf 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,25 @@
+.idea
+.env
.DS_Store
-.idea
._*
+*.code-workspace
+vendor
+bin/*
+backend/config.json
+backend/embed/assets
+test/node_modules
+*/node_modules
+docs/.vuepress/dist
+frontend/build
+frontend/yarn-error.log
+frontend/.npmrc
+frontend/src/locale/lang
+test/cypress/fixtures/example.json
.vscode
-certbot-help.txt
+docker-build
+data
+dist
+backend/embed/acme.sh
+docker/dev/resolv.conf
+docker/dev/dnsrouter-config.json.tmp
+
diff --git a/.version b/.version
index b0e185b7..5efd7ac5 100644
--- a/.version
+++ b/.version
@@ -1 +1 @@
-2.9.18
+3.0.0a
diff --git a/DEV-README.md b/DEV-README.md
new file mode 100644
index 00000000..df593536
--- /dev/null
+++ b/DEV-README.md
@@ -0,0 +1,93 @@
+# Development
+
+```bash
+git clone nginxproxymanager
+cd nginxproxymanager
+./scripts/start-dev
+# wait a minute or 2 for the package to build after container start
+curl http://127.0.0.1:3081/api/
+```
+
+## Using Local Test Certificate Authorities
+
+It's handy to use these instead of hitting production or staging acme servers
+when testing lots of stuff.
+
+Firstly create your first user using the api:
+
+```bash
+curl --request POST \
+ --url http://127.0.0.1:3081/api/users \
+ --header 'Content-Type: application/json' \
+ --data '{
+ "name": "Bobby Tables",
+ "nickname": "Bobby",
+ "email": "you@example.com",
+ "roles": ["admin"],
+ "is_disabled": false,
+ "auth": {
+ "type": "password",
+ "secret": "changeme"
+ }
+}'
+```
+
+Then log in with those credentials to get your JWT token and set
+that as an environment variable:
+
+```bash
+NPM_TOKEN=$(curl --request POST \
+ --url http://127.0.0.1:3081/api/tokens \
+ --header 'Content-Type: application/json' \
+ --data '{
+ "type": "password",
+ "identity": "you@example.com",
+ "secret": "changeme"
+}' | jq -r '.result.token')
+```
+
+Then choose one or both of the following CA's to set up.
+
+### SmallStep Acme CA
+
+[StepCA](https://github.com/smallstep/certificates) is SmallStep's test CA server.
+
+- ✅ HTTP Validation
+- ✅ DNS Validation
+\
+Create a Certificate Authority that points to the Step CA:
+
+```bash
+curl --request POST \
+ --url http://127.0.0.1:3081/api/certificate-authorities \
+ --header "Authorization: Bearer ${NPM_TOKEN}" \
+ --header 'Content-Type: application/json' \
+ --data '{
+ "name": "Step CA",
+ "acmesh_server": "https://ca.internal/acme/acme/directory",
+ "ca_bundle": "/etc/ssl/certs/NginxProxyManager.crt",
+ "max_domains": 2
+}'
+```
+
+### Pebble Test Acme CA
+
+[Pebble](https://github.com/letsencrypt/pebble) is Let's Encrypt's own test CA server.
+
+- ✅ HTTP Validation
+- ❌ DNS Validation
+
+Create a Certificate Authority that points to the Pebble CA:
+
+```bash
+curl --request POST \
+ --url http://127.0.0.1:3081/api/certificate-authorities \
+ --header "Authorization: Bearer ${NPM_TOKEN}" \
+ --header 'Content-Type: application/json' \
+ --data '{
+ "name": "Pebble CA",
+ "acmesh_server": "https://pebble/dir",
+ "ca_bundle": "/etc/ssl/certs/pebble.minica.pem",
+ "max_domains": 2
+}'
+```
diff --git a/Jenkinsfile b/Jenkinsfile
index 1b744692..0f335d2b 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -8,14 +8,18 @@ pipeline {
ansiColor('xterm')
}
environment {
- IMAGE = "nginx-proxy-manager"
+ DOCKER_ORG = 'jc21'
+ IMAGE = 'nginx-proxy-manager'
BUILD_VERSION = getVersion()
- MAJOR_VERSION = "2"
+ BUILD_COMMIT = getCommit()
+ MAJOR_VERSION = '3'
BRANCH_LOWER = "${BRANCH_NAME.toLowerCase().replaceAll('/', '-')}"
COMPOSE_PROJECT_NAME = "npm_${BRANCH_LOWER}_${BUILD_NUMBER}"
COMPOSE_FILE = 'docker/docker-compose.ci.yml'
COMPOSE_INTERACTIVE_NO_CLI = 1
BUILDX_NAME = "${COMPOSE_PROJECT_NAME}"
+ DOCS_BUCKET = 'jc21-npm-site-next' // TODO: change to prod when official
+ DOCS_CDN = 'E2Z0128EHS0Q23' // TODO: same
}
stages {
stage('Environment') {
@@ -26,7 +30,9 @@ pipeline {
}
steps {
script {
- env.BUILDX_PUSH_TAGS = "-t docker.io/jc21/${IMAGE}:${BUILD_VERSION} -t docker.io/jc21/${IMAGE}:${MAJOR_VERSION} -t docker.io/jc21/${IMAGE}:latest"
+ env.BUILDX_PUSH_TAGS = "-t docker.io/${DOCKER_ORG}/${IMAGE}:${BUILD_VERSION} -t docker.io/${DOCKER_ORG}/${IMAGE}:${MAJOR_VERSION} -t docker.io/${DOCKER_ORG}/${IMAGE}:latest"
+ echo 'Building on Master is disabled!'
+ sh 'exit 1'
}
}
}
@@ -39,100 +45,76 @@ pipeline {
steps {
script {
// Defaults to the Branch name, which is applies to all branches AND pr's
- env.BUILDX_PUSH_TAGS = "-t docker.io/jc21/${IMAGE}:github-${BRANCH_LOWER}"
+ env.BUILDX_PUSH_TAGS = "-t docker.io/${DOCKER_ORG}/${IMAGE}:v3-${BRANCH_LOWER}"
}
}
}
- stage('Versions') {
- steps {
- sh 'cat frontend/package.json | jq --arg BUILD_VERSION "${BUILD_VERSION}" \'.version = $BUILD_VERSION\' | sponge frontend/package.json'
- sh 'echo -e "\\E[1;36mFrontend Version is:\\E[1;33m $(cat frontend/package.json | jq -r .version)\\E[0m"'
- sh 'cat backend/package.json | jq --arg BUILD_VERSION "${BUILD_VERSION}" \'.version = $BUILD_VERSION\' | sponge backend/package.json'
- sh 'echo -e "\\E[1;36mBackend Version is:\\E[1;33m $(cat backend/package.json | jq -r .version)\\E[0m"'
- sh 'sed -i -E "s/(version-)[0-9]+\\.[0-9]+\\.[0-9]+(-green)/\\1${BUILD_VERSION}\\2/" README.md'
- }
- }
}
}
stage('Frontend') {
steps {
- sh './scripts/frontend-build'
+ sh './scripts/ci/build-frontend'
}
+ /*
+ post {
+ always {
+ junit 'frontend/eslint.xml'
+ junit 'frontend/junit.xml'
+ }
+ }
+ */
}
stage('Backend') {
steps {
- echo 'Checking Syntax ...'
- sh 'docker pull nginxproxymanager/nginx-full:certbot-node'
- // See: https://github.com/yarnpkg/yarn/issues/3254
- sh '''docker run --rm \\
- -v "$(pwd)/backend:/app" \\
- -v "$(pwd)/global:/app/global" \\
- -w /app \\
- nginxproxymanager/nginx-full:certbot-node \\
- sh -c "yarn install && yarn eslint . && rm -rf node_modules"
- '''
-
- echo 'Docker Build ...'
- sh '''docker build --pull --no-cache --squash --compress \\
- -t "${IMAGE}:ci-${BUILD_NUMBER}" \\
- -f docker/Dockerfile \\
- --build-arg TARGETPLATFORM=linux/amd64 \\
- --build-arg BUILDPLATFORM=linux/amd64 \\
- --build-arg BUILD_VERSION="${BUILD_VERSION}" \\
- --build-arg BUILD_COMMIT="${BUILD_COMMIT}" \\
- --build-arg BUILD_DATE="$(date '+%Y-%m-%d %T %Z')" \\
- .
- '''
- }
- }
- stage('Integration Tests Sqlite') {
- steps {
- // Bring up a stack
- sh 'docker-compose up -d fullstack-sqlite'
- sh './scripts/wait-healthy $(docker-compose ps -q fullstack-sqlite) 120'
-
- // Run tests
- sh 'rm -rf test/results'
- sh 'docker-compose up cypress-sqlite'
- // Get results
- sh 'docker cp -L "$(docker-compose ps -q cypress-sqlite):/test/results" test/'
+ withCredentials([string(credentialsId: 'npm-sentry-dsn', variable: 'SENTRY_DSN')]) {
+ withCredentials([usernamePassword(credentialsId: 'oss-index-token', passwordVariable: 'NANCY_TOKEN', usernameVariable: 'NANCY_USER')]) {
+ sh './scripts/ci/test-backend'
+ }
+ sh './scripts/ci/build-backend'
+ sh '''docker build --pull --no-cache \\
+ -t "${IMAGE}:${BRANCH_LOWER}-ci-${BUILD_NUMBER}" \\
+ -f docker/Dockerfile \\
+ --build-arg BUILD_COMMIT="${BUILD_COMMIT}" \\
+ --build-arg BUILD_DATE="$(date '+%Y-%m-%d %T %Z')" \\
+ --build-arg BUILD_VERSION="${BUILD_VERSION}" \\
+ .
+ '''
+ }
}
post {
- always {
- // Dumps to analyze later
- sh 'mkdir -p debug'
- sh 'docker-compose logs fullstack-sqlite | gzip > debug/docker_fullstack_sqlite.log.gz'
- sh 'docker-compose logs db | gzip > debug/docker_db.log.gz'
- // Cypress videos and screenshot artifacts
- dir(path: 'test/results') {
- archiveArtifacts allowEmptyArchive: true, artifacts: '**/*', excludes: '**/*.xml'
- }
- junit 'test/results/junit/*'
+ success {
+ archiveArtifacts allowEmptyArchive: false, artifacts: 'bin/*'
}
}
}
- stage('Integration Tests Mysql') {
+ stage('Test') {
+ when {
+ not {
+ equals expected: 'UNSTABLE', actual: currentBuild.result
+ }
+ }
steps {
- // Bring up a stack
- sh 'docker-compose up -d fullstack-mysql'
- sh './scripts/wait-healthy $(docker-compose ps -q fullstack-mysql) 120'
-
- // Run tests
- sh 'rm -rf test/results'
- sh 'docker-compose up cypress-mysql'
- // Get results
- sh 'docker cp -L "$(docker-compose ps -q cypress-mysql):/test/results" test/'
+ // Docker image check
+ /*
+ sh '''docker run --rm \
+ -v /var/run/docker.sock:/var/run/docker.sock \
+ -v "$(pwd)/docker:/app" \
+ -e CI=true \
+ wagoodman/dive:latest --ci-config /app/.dive-ci \
+ "${IMAGE}:${BRANCH_LOWER}-ci-${BUILD_NUMBER}"
+ '''
+ */
+ sh './scripts/ci/fulltest-cypress'
}
post {
always {
// Dumps to analyze later
sh 'mkdir -p debug'
- sh 'docker-compose logs fullstack-mysql | gzip > debug/docker_fullstack_mysql.log.gz'
- sh 'docker-compose logs db | gzip > debug/docker_db.log.gz'
- // Cypress videos and screenshot artifacts
- dir(path: 'test/results') {
- archiveArtifacts allowEmptyArchive: true, artifacts: '**/*', excludes: '**/*.xml'
- }
+ sh 'docker-compose logs fullstack > debug/docker_fullstack.log'
+ sh 'docker-compose logs stepca > debug/docker_stepca.log'
+ sh 'docker-compose logs pdns > debug/docker_pdns.log'
+ sh 'docker-compose logs pdns-db > debug/docker_pdns-db.log'
+ sh 'docker-compose logs dnsrouter > debug/docker_dnsrouter.log'
junit 'test/results/junit/*'
}
}
@@ -149,11 +131,14 @@ pipeline {
sh 'yarn build'
}
+ // API Docs:
+ sh 'docker-compose exec -T fullstack curl -s --output /temp-docs/api-schema.json "http://fullstack:81/api/schema"'
+ sh 'mkdir -p "docs/.vuepress/dist/api"'
+ sh 'mv docs/api-schema.json docs/.vuepress/dist/api/'
+
dir(path: 'docs/.vuepress/dist') {
sh 'tar -czf ../../docs.tgz *'
}
-
- archiveArtifacts(artifacts: 'docs/docs.tgz', allowEmptyArchive: false)
}
}
stage('MultiArch Build') {
@@ -163,11 +148,12 @@ pipeline {
}
}
steps {
- withCredentials([usernamePassword(credentialsId: 'jc21-dockerhub', passwordVariable: 'dpass', usernameVariable: 'duser')]) {
- // Docker Login
- sh "docker login -u '${duser}' -p '${dpass}'"
- // Buildx with push from cache
- sh "./scripts/buildx --push ${BUILDX_PUSH_TAGS}"
+ withCredentials([string(credentialsId: 'npm-sentry-dsn', variable: 'SENTRY_DSN')]) {
+ withCredentials([usernamePassword(credentialsId: 'jc21-dockerhub', passwordVariable: 'dpass', usernameVariable: 'duser')]) {
+ sh 'docker login -u "${duser}" -p "${dpass}"'
+ sh './scripts/buildx --push ${BUILDX_PUSH_TAGS}'
+ // sh './scripts/buildx -o type=local,dest=docker-build'
+ }
}
}
}
@@ -184,7 +170,7 @@ pipeline {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'npm-s3-docs', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
sh """docker run --rm \\
--name \${COMPOSE_PROJECT_NAME}-docs-upload \\
- -e S3_BUCKET=jc21-npm-site \\
+ -e S3_BUCKET=$DOCS_BUCKET \\
-e AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID \\
-e AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY \\
-v \$(pwd):/app \\
@@ -198,7 +184,7 @@ pipeline {
-e AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID \\
-e AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY \\
jc21/ci-tools \\
- aws cloudfront create-invalidation --distribution-id EN1G6DEWZUTDT --paths '/*'
+ aws cloudfront create-invalidation --distribution-id $DOCS_CDN --paths '/*'
"""
}
}
@@ -214,7 +200,7 @@ pipeline {
}
steps {
script {
- def comment = pullRequest.comment("This is an automated message from CI:\n\nDocker Image for build ${BUILD_NUMBER} is available on [DockerHub](https://cloud.docker.com/repository/docker/jc21/${IMAGE}) as `jc21/${IMAGE}:github-${BRANCH_LOWER}`\n\n**Note:** ensure you backup your NPM instance before testing this PR image! Especially if this PR contains database changes.")
+ def comment = pullRequest.comment("This is an automated message from CI:\n\nDocker Image for build ${BUILD_NUMBER} is available on [DockerHub](https://cloud.docker.com/repository/docker/${DOCKER_ORG}/${IMAGE}) as `${DOCKER_ORG}/${IMAGE}:v3-${BRANCH_LOWER}`\n\n**Note:** ensure you backup your NPM instance before testing this PR image! Especially if this PR contains database changes.")
}
}
}
@@ -222,19 +208,26 @@ pipeline {
post {
always {
sh 'docker-compose down --rmi all --remove-orphans --volumes -t 30'
- sh 'echo Reverting ownership'
- sh 'docker run --rm -v $(pwd):/data jc21/ci-tools chown -R $(id -u):$(id -g) /data'
+ sh './scripts/ci/build-cleanup'
+ echo 'Reverting ownership'
+ sh 'docker run --rm -v $(pwd):/data jc21/gotools:latest chown -R "$(id -u):$(id -g)" /data'
}
success {
juxtapose event: 'success'
sh 'figlet "SUCCESS"'
}
failure {
+ dir(path: 'test') {
+ archiveArtifacts allowEmptyArchive: true, artifacts: 'results/**/*', excludes: '**/*.xml'
+ }
archiveArtifacts(artifacts: 'debug/**.*', allowEmptyArchive: true)
juxtapose event: 'failure'
sh 'figlet "FAILURE"'
}
unstable {
+ dir(path: 'test') {
+ archiveArtifacts allowEmptyArchive: true, artifacts: 'results/**/*', excludes: '**/*.xml'
+ }
archiveArtifacts(artifacts: 'debug/**.*', allowEmptyArchive: true)
juxtapose event: 'unstable'
sh 'figlet "UNSTABLE"'
diff --git a/README.md b/README.md
index a97d3ba8..775bc244 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
-
+
@@ -52,7 +52,8 @@ I won't go in to too much detail here but here are the basics for someone new to
3. Configure your domain name details to point to your home, either with a static ip or a service like DuckDNS or [Amazon Route53](https://github.com/jc21/route53-ddns)
4. Use the Nginx Proxy Manager as your gateway to forward to your other web based services
-## Quick Setup
+
+## Quickest Setup
1. Install Docker and Docker-Compose
@@ -65,7 +66,7 @@ I won't go in to too much detail here but here are the basics for someone new to
version: '3'
services:
app:
- image: 'jc21/nginx-proxy-manager:latest'
+ image: 'jc21/nginx-proxy-manager:v3-develop'
restart: unless-stopped
ports:
- '80:80'
@@ -73,7 +74,6 @@ services:
- '443:443'
volumes:
- ./data:/data
- - ./letsencrypt:/etc/letsencrypt
```
3. Bring up your stack by running
@@ -97,436 +97,6 @@ Password: changeme
Immediately after logging in with this default user you will be asked to modify your details and change your password.
+## Become a Contributor
-## Contributors
-
-Special thanks to the following contributors:
-
-
-
-
-
-
+A guide to setting up your own development environment [is found here](DEV-README.md).
diff --git a/backend/.editorconfig b/backend/.editorconfig
new file mode 100644
index 00000000..8b96428f
--- /dev/null
+++ b/backend/.editorconfig
@@ -0,0 +1,8 @@
+root = true
+
+[*]
+indent_style = tab
+indent_size = 4
+charset = utf-8
+trim_trailing_whitespace = true
+insert_final_newline = false
diff --git a/backend/.eslintrc.json b/backend/.eslintrc.json
deleted file mode 100644
index 6d6172a4..00000000
--- a/backend/.eslintrc.json
+++ /dev/null
@@ -1,73 +0,0 @@
-{
- "env": {
- "node": true,
- "es6": true
- },
- "extends": [
- "eslint:recommended"
- ],
- "globals": {
- "Atomics": "readonly",
- "SharedArrayBuffer": "readonly"
- },
- "parserOptions": {
- "ecmaVersion": 2018,
- "sourceType": "module"
- },
- "plugins": [
- "align-assignments"
- ],
- "rules": {
- "arrow-parens": [
- "error",
- "always"
- ],
- "indent": [
- "error",
- "tab"
- ],
- "linebreak-style": [
- "error",
- "unix"
- ],
- "quotes": [
- "error",
- "single"
- ],
- "semi": [
- "error",
- "always"
- ],
- "key-spacing": [
- "error",
- {
- "align": "value"
- }
- ],
- "comma-spacing": [
- "error",
- {
- "before": false,
- "after": true
- }
- ],
- "func-call-spacing": [
- "error",
- "never"
- ],
- "keyword-spacing": [
- "error",
- {
- "before": true
- }
- ],
- "no-irregular-whitespace": "error",
- "no-unused-expressions": 0,
- "align-assignments/align-assignments": [
- 2,
- {
- "requiresOnly": false
- }
- ]
- }
-}
\ No newline at end of file
diff --git a/backend/.gitignore b/backend/.gitignore
deleted file mode 100644
index 149080b9..00000000
--- a/backend/.gitignore
+++ /dev/null
@@ -1,8 +0,0 @@
-config/development.json
-data/*
-yarn-error.log
-tmp
-certbot.log
-node_modules
-core.*
-
diff --git a/backend/.golangci.yml b/backend/.golangci.yml
new file mode 100644
index 00000000..f61dc69d
--- /dev/null
+++ b/backend/.golangci.yml
@@ -0,0 +1,92 @@
+linters:
+ enable:
+ # Prevents against memory leaks in production caused by not closing file handle
+ - bodyclose
+ # Detects unused declarations in a go package
+ - deadcode
+ # Detects cloned code. DRY is good programming practice. Can cause issues with testing code where
+ # simplicity is preferred over duplication. Disabled for test code.
+ #- dupl
+ # Detects unchecked errors in go programs. These unchecked errors can be critical bugs in some cases.
+ - errcheck
+ # Simplifies go code.
+ - gosimple
+ # Reports suspicious constructs, maintained by goteam. e.g. Printf unused params not caught
+ # at compile time.
+ - govet
+ # Detect security issues with Go code. Use of secrets in code or obsolete security algorithms.
+ # Its heuristic methods are used in finding problems. If issues with rules are found,
+ # particular rules can be disabled as required.
+ # Could possibly cause issues with testing. Disabled for test code.
+ - gosec
+ # Detect repeated strings that could be replaced by a constant
+ - goconst
+ # Misc linters missing from other projects. Grouped into 3 categories diagnostics, style
+ # and performance
+ - gocritic
+ # Limits code cyclomatic complexity
+ - gocyclo
+ # Detects if code needs to be gofmt'd
+ - gofmt
+ # Detects unused go package imports
+ - goimports
+ # Detects style mistakes not correctness. Golint is meant to carry out the
+ # stylistic conventions put forth in Effective Go and CodeReviewComments.
+ # golint has false positives and false negatives and can be tweaked.
+ - golint
+ # Detects ineffectual assignments in code
+ - ineffassign
+ # Detect commonly misspelled english words in comments
+ - misspell
+ # Detect naked returns on non-trivial functions, and conform with Go CodeReviewComments
+ - nakedret
+ # Detect slice allocations that can be preallocated
+ - prealloc
+ # Misc collection of static analysis tools
+ - staticcheck
+ # Detects unused struct fields
+ - structcheck
+ # Parses and typechecks the code like the go compiler
+ - typecheck
+ # Detects unused constants, variables, functions and types
+ - unused
+ # Detects unused global variables and constants
+ - varcheck
+ # Remove unnecessary type conversions
+ - unconvert
+ # Remove unnecessary(unused) function parameters
+ - unparam
+linters-settings:
+ goconst:
+ # minimal length of string constant
+ # default: 3
+ min-len: 2
+ # minimum number of occurrences of string constant
+ # default: 3
+ min-occurrences: 2
+ misspell:
+ locale: UK
+ ignore-words:
+ - color
+issues:
+ # Maximum count of issues with the same text. Set to 0 to disable. Default is 3.
+ # We have chosen an arbitrary value that works based on practical usage.
+ max-same: 20
+ # See cmdline flag documentation for more info about default excludes --exclude-use-default
+ # Nothing is excluded by default
+ exclude-use-default: false
+ # Excluding configuration per-path, per-linter, per-text and per-source
+ exclude-rules:
+ # Exclude some linters from running on tests files. # TODO: Add examples why this is good
+
+ - path: _test\.go
+ linters:
+ # Tests should be simple? Add example why this is good?
+ - gocyclo
+ # Error checking adds verbosity and complexity for minimal value
+ - errcheck
+ # Table test encourage duplication in defining the table tests.
+ - dupl
+ # Hard coded example tokens, SQL injection and other bad practices may
+ # want to be tested
+ - gosec
diff --git a/backend/.nancy-ignore b/backend/.nancy-ignore
new file mode 100644
index 00000000..5736e87a
--- /dev/null
+++ b/backend/.nancy-ignore
@@ -0,0 +1,22 @@
+# If you need to ignore any of nancy's warnings add them
+# here with a reference to the package/version that
+# triggers them and the rationale for ignoring it.
+
+# pkg:golang/github.com/coreos/etcd@3.3.10
+# etcd before versions 3.3.23 and 3.4.10 does not perform any password length validation
+CVE-2020-15115
+
+# pkg:golang/github.com/coreos/etcd@3.3.10
+# In etcd before versions 3.4.10 and 3.3.23, gateway TLS authentication is only applied to endpoints detected in DNS SRV records
+CVE-2020-15136
+
+# pkg:golang/github.com/coreos/etcd@3.3.10
+# In etcd before versions 3.3.23 and 3.4.10, the etcd gateway is a simple TCP proxy to allow for basic service discovery and access
+CVE-2020-15114
+
+# pkg:golang/github.com/gorilla/websocket@1.4.0
+# Integer Overflow or Wraparound
+CWE-190
+
+# jwt-go before 4.0.0-preview1 allows attackers to bypass intended access restrict...
+CVE-2020-26160
diff --git a/backend/.vscode/settings.json b/backend/.vscode/settings.json
deleted file mode 100644
index 4e540ab3..00000000
--- a/backend/.vscode/settings.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
- "editor.insertSpaces": false,
- "editor.formatOnSave": true,
- "files.trimTrailingWhitespace": true,
- "editor.codeActionsOnSave": {
- "source.fixAll.eslint": true
- }
-}
\ No newline at end of file
diff --git a/backend/README.md b/backend/README.md
new file mode 100644
index 00000000..912f0069
--- /dev/null
+++ b/backend/README.md
@@ -0,0 +1,6 @@
+# Backend
+
+## Guides and materials
+
+- [Nginx Proxy Protocol](https://docs.nginx.com/nginx/admin-guide/load-balancer/using-proxy-protocol/)
+-
diff --git a/backend/Taskfile.yml b/backend/Taskfile.yml
new file mode 100644
index 00000000..81a0bcd0
--- /dev/null
+++ b/backend/Taskfile.yml
@@ -0,0 +1,69 @@
+version: "2"
+
+tasks:
+ default:
+ cmds:
+ - task: run
+
+ run:
+ desc: Build and run
+ sources:
+ - internal/**/*.go
+ - cmd/**/*.go
+ - ../frontend/src/locale/src/*.json
+ cmds:
+ - task: locale
+ - task: build
+ - cmd: echo -e "==> Running..."
+ silent: true
+ - cmd: ../dist/bin/server
+ ignore_error: true
+ silent: true
+ env:
+ LOG_LEVEL: debug
+
+ build:
+ desc: Build the server
+ cmds:
+ - cmd: echo -e "==> Building..."
+ silent: true
+ - cmd: rm -f dist/bin/*
+ silent: true
+ - cmd: go build -ldflags="-X main.commit={{.GIT_COMMIT}} -X main.version={{.VERSION}}" -o ../dist/bin/server ./cmd/server/main.go
+ silent: true
+ - task: lint
+ vars:
+ GIT_COMMIT:
+ sh: git log -n 1 --format=%h
+ VERSION:
+ sh: cat ../.version
+ env:
+ GO111MODULE: on
+ CGO_ENABLED: 1
+
+ lint:
+ desc: Linting
+ cmds:
+ - cmd: echo -e "==> Linting..."
+ silent: true
+ - cmd: bash scripts/lint.sh
+ silent: true
+
+ test:
+ desc: Testing
+ cmds:
+ - cmd: echo -e "==> Testing..."
+ silent: true
+ - cmd: bash scripts/test.sh
+ silent: true
+
+ locale:
+ desc: Locale
+ dir: /app/frontend
+ cmds:
+ - cmd: yarn locale-compile
+ silent: true
+ ignore_error: true
+ - cmd: chown -R "$PUID:$PGID" src/locale/lang
+ silent: true
+ ignore_error: true
diff --git a/backend/app.js b/backend/app.js
deleted file mode 100644
index ca6d6fba..00000000
--- a/backend/app.js
+++ /dev/null
@@ -1,89 +0,0 @@
-const express = require('express');
-const bodyParser = require('body-parser');
-const fileUpload = require('express-fileupload');
-const compression = require('compression');
-const log = require('./logger').express;
-
-/**
- * App
- */
-const app = express();
-app.use(fileUpload());
-app.use(bodyParser.json());
-app.use(bodyParser.urlencoded({extended: true}));
-
-// Gzip
-app.use(compression());
-
-/**
- * General Logging, BEFORE routes
- */
-
-app.disable('x-powered-by');
-app.enable('trust proxy', ['loopback', 'linklocal', 'uniquelocal']);
-app.enable('strict routing');
-
-// pretty print JSON when not live
-if (process.env.NODE_ENV !== 'production') {
- app.set('json spaces', 2);
-}
-
-// CORS for everything
-app.use(require('./lib/express/cors'));
-
-// General security/cache related headers + server header
-app.use(function (req, res, next) {
- let x_frame_options = 'DENY';
-
- if (typeof process.env.X_FRAME_OPTIONS !== 'undefined' && process.env.X_FRAME_OPTIONS) {
- x_frame_options = process.env.X_FRAME_OPTIONS;
- }
-
- res.set({
- 'X-XSS-Protection': '1; mode=block',
- 'X-Content-Type-Options': 'nosniff',
- 'X-Frame-Options': x_frame_options,
- 'Cache-Control': 'no-cache, no-store, max-age=0, must-revalidate',
- Pragma: 'no-cache',
- Expires: 0
- });
- next();
-});
-
-app.use(require('./lib/express/jwt')());
-app.use('/', require('./routes/api/main'));
-
-// production error handler
-// no stacktraces leaked to user
-// eslint-disable-next-line
-app.use(function (err, req, res, next) {
-
- let payload = {
- error: {
- code: err.status,
- message: err.public ? err.message : 'Internal Error'
- }
- };
-
- if (process.env.NODE_ENV === 'development' || (req.baseUrl + req.path).includes('nginx/certificates')) {
- payload.debug = {
- stack: typeof err.stack !== 'undefined' && err.stack ? err.stack.split('\n') : null,
- previous: err.previous
- };
- }
-
- // Not every error is worth logging - but this is good for now until it gets annoying.
- if (typeof err.stack !== 'undefined' && err.stack) {
- if (process.env.NODE_ENV === 'development' || process.env.DEBUG) {
- log.debug(err.stack);
- } else if (typeof err.public == 'undefined' || !err.public) {
- log.warn(err.message);
- }
- }
-
- res
- .status(err.status || 500)
- .send(payload);
-});
-
-module.exports = app;
diff --git a/backend/cmd/server/main.go b/backend/cmd/server/main.go
new file mode 100644
index 00000000..23e9bdc4
--- /dev/null
+++ b/backend/cmd/server/main.go
@@ -0,0 +1,47 @@
+package main
+
+import (
+ "os"
+ "os/signal"
+ "syscall"
+
+ "npm/internal/api"
+ "npm/internal/config"
+ "npm/internal/database"
+ "npm/internal/entity/setting"
+ "npm/internal/logger"
+ "npm/internal/state"
+ "npm/internal/worker"
+)
+
+var commit string
+var version string
+var sentryDSN string
+
+func main() {
+ config.InitArgs(&version, &commit)
+ config.Init(&version, &commit, &sentryDSN)
+ appstate := state.NewState()
+
+ database.Migrate(func() {
+ setting.ApplySettings()
+ database.CheckSetup()
+ go worker.StartCertificateWorker(appstate)
+
+ api.StartServer()
+ irqchan := make(chan os.Signal, 1)
+ signal.Notify(irqchan, syscall.SIGINT, syscall.SIGTERM)
+
+ for irq := range irqchan {
+ if irq == syscall.SIGINT || irq == syscall.SIGTERM {
+ logger.Info("Got ", irq, " shutting server down ...")
+ // Close db
+ err := database.GetInstance().Close()
+ if err != nil {
+ logger.Error("DatabaseCloseError", err)
+ }
+ break
+ }
+ }
+ })
+}
diff --git a/backend/config/README.md b/backend/config/README.md
deleted file mode 100644
index 26268a11..00000000
--- a/backend/config/README.md
+++ /dev/null
@@ -1,2 +0,0 @@
-These files are use in development and are not deployed as part of the final product.
-
\ No newline at end of file
diff --git a/backend/config/default.json b/backend/config/default.json
deleted file mode 100644
index 64ab577c..00000000
--- a/backend/config/default.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- "database": {
- "engine": "mysql",
- "host": "db",
- "name": "npm",
- "user": "npm",
- "password": "npm",
- "port": 3306
- }
-}
diff --git a/backend/config/sqlite-test-db.json b/backend/config/sqlite-test-db.json
deleted file mode 100644
index ad548865..00000000
--- a/backend/config/sqlite-test-db.json
+++ /dev/null
@@ -1,26 +0,0 @@
-{
- "database": {
- "engine": "knex-native",
- "knex": {
- "client": "sqlite3",
- "connection": {
- "filename": "/app/config/mydb.sqlite"
- },
- "pool": {
- "min": 0,
- "max": 1,
- "createTimeoutMillis": 3000,
- "acquireTimeoutMillis": 30000,
- "idleTimeoutMillis": 30000,
- "reapIntervalMillis": 1000,
- "createRetryIntervalMillis": 100,
- "propagateCreateError": false
- },
- "migrations": {
- "tableName": "migrations",
- "stub": "src/backend/lib/migrate_template.js",
- "directory": "src/backend/migrations"
- }
- }
- }
-}
diff --git a/backend/db.js b/backend/db.js
deleted file mode 100644
index ce5338f0..00000000
--- a/backend/db.js
+++ /dev/null
@@ -1,33 +0,0 @@
-const config = require('config');
-
-if (!config.has('database')) {
- throw new Error('Database config does not exist! Please read the instructions: https://github.com/jc21/nginx-proxy-manager/blob/master/doc/INSTALL.md');
-}
-
-function generateDbConfig() {
- if (config.database.engine === 'knex-native') {
- return config.database.knex;
- } else
- return {
- client: config.database.engine,
- connection: {
- host: config.database.host,
- user: config.database.user,
- password: config.database.password,
- database: config.database.name,
- port: config.database.port
- },
- migrations: {
- tableName: 'migrations'
- }
- };
-}
-
-
-let data = generateDbConfig();
-
-if (typeof config.database.version !== 'undefined') {
- data.version = config.database.version;
-}
-
-module.exports = require('knex')(data);
diff --git a/backend/doc/api.swagger.json b/backend/doc/api.swagger.json
deleted file mode 100644
index 06c02564..00000000
--- a/backend/doc/api.swagger.json
+++ /dev/null
@@ -1,1254 +0,0 @@
-{
- "openapi": "3.0.0",
- "info": {
- "title": "Nginx Proxy Manager API",
- "version": "2.x.x"
- },
- "servers": [
- {
- "url": "http://127.0.0.1:81/api"
- }
- ],
- "paths": {
- "/": {
- "get": {
- "operationId": "health",
- "summary": "Returns the API health status",
- "responses": {
- "200": {
- "description": "200 response",
- "content": {
- "application/json": {
- "examples": {
- "default": {
- "value": {
- "status": "OK",
- "version": {
- "major": 2,
- "minor": 1,
- "revision": 0
- }
- }
- }
- },
- "schema": {
- "$ref": "#/components/schemas/HealthObject"
- }
- }
- }
- }
- }
- }
- },
- "/schema": {
- "get": {
- "operationId": "schema",
- "responses": {
- "200": {
- "description": "200 response"
- }
- },
- "summary": "Returns this swagger API schema"
- }
- },
- "/tokens": {
- "get": {
- "operationId": "refreshToken",
- "summary": "Refresh your access token",
- "tags": [
- "Tokens"
- ],
- "security": [
- {
- "BearerAuth": [
- "tokens"
- ]
- }
- ],
- "responses": {
- "200": {
- "description": "200 response",
- "content": {
- "application/json": {
- "examples": {
- "default": {
- "value": {
- "expires": 1566540510,
- "token": "eyJhbGciOiJSUzUxMiIsInR5cCI6IkpXVCJ9.ey...xaHKYr3Kk6MvkUjcC4"
- }
- }
- },
- "schema": {
- "$ref": "#/components/schemas/TokenObject"
- }
- }
- }
- }
- }
- },
- "post": {
- "operationId": "requestToken",
- "parameters": [
- {
- "description": "Credentials Payload",
- "in": "body",
- "name": "credentials",
- "required": true,
- "schema": {
- "additionalProperties": false,
- "properties": {
- "identity": {
- "minLength": 1,
- "type": "string"
- },
- "scope": {
- "minLength": 1,
- "type": "string",
- "enum": [
- "user"
- ]
- },
- "secret": {
- "minLength": 1,
- "type": "string"
- }
- },
- "required": [
- "identity",
- "secret"
- ],
- "type": "object"
- }
- }
- ],
- "responses": {
- "200": {
- "content": {
- "application/json": {
- "examples": {
- "default": {
- "value": {
- "result": {
- "expires": 1566540510,
- "token": "eyJhbGciOiJSUzUxMiIsInR5cCI6IkpXVCJ9.ey...xaHKYr3Kk6MvkUjcC4"
- }
- }
- }
- },
- "schema": {
- "$ref": "#/components/schemas/TokenObject"
- }
- }
- },
- "description": "200 response"
- }
- },
- "summary": "Request a new access token from credentials",
- "tags": [
- "Tokens"
- ]
- }
- },
- "/settings": {
- "get": {
- "operationId": "getSettings",
- "summary": "Get all settings",
- "tags": [
- "Settings"
- ],
- "security": [
- {
- "BearerAuth": [
- "settings"
- ]
- }
- ],
- "responses": {
- "200": {
- "description": "200 response",
- "content": {
- "application/json": {
- "examples": {
- "default": {
- "value": [
- {
- "id": "default-site",
- "name": "Default Site",
- "description": "What to show when Nginx is hit with an unknown Host",
- "value": "congratulations",
- "meta": {}
- }
- ]
- }
- },
- "schema": {
- "$ref": "#/components/schemas/SettingsList"
- }
- }
- }
- }
- }
- }
- },
- "/settings/{settingID}": {
- "get": {
- "operationId": "getSetting",
- "summary": "Get a setting",
- "tags": [
- "Settings"
- ],
- "security": [
- {
- "BearerAuth": [
- "settings"
- ]
- }
- ],
- "parameters": [
- {
- "in": "path",
- "name": "settingID",
- "schema": {
- "type": "string",
- "minLength": 1
- },
- "required": true,
- "description": "Setting ID",
- "example": "default-site"
- }
- ],
- "responses": {
- "200": {
- "description": "200 response",
- "content": {
- "application/json": {
- "examples": {
- "default": {
- "value": {
- "id": "default-site",
- "name": "Default Site",
- "description": "What to show when Nginx is hit with an unknown Host",
- "value": "congratulations",
- "meta": {}
- }
- }
- },
- "schema": {
- "$ref": "#/components/schemas/SettingObject"
- }
- }
- }
- }
- }
- },
- "put": {
- "operationId": "updateSetting",
- "summary": "Update a setting",
- "tags": [
- "Settings"
- ],
- "security": [
- {
- "BearerAuth": [
- "settings"
- ]
- }
- ],
- "parameters": [
- {
- "in": "path",
- "name": "settingID",
- "schema": {
- "type": "string",
- "minLength": 1
- },
- "required": true,
- "description": "Setting ID",
- "example": "default-site"
- },
- {
- "in": "body",
- "name": "setting",
- "description": "Setting Payload",
- "required": true,
- "schema": {
- "$ref": "#/components/schemas/SettingObject"
- }
- }
- ],
- "responses": {
- "200": {
- "description": "200 response",
- "content": {
- "application/json": {
- "examples": {
- "default": {
- "value": {
- "id": "default-site",
- "name": "Default Site",
- "description": "What to show when Nginx is hit with an unknown Host",
- "value": "congratulations",
- "meta": {}
- }
- }
- },
- "schema": {
- "$ref": "#/components/schemas/SettingObject"
- }
- }
- }
- }
- }
- }
- },
- "/users": {
- "get": {
- "operationId": "getUsers",
- "summary": "Get all users",
- "tags": [
- "Users"
- ],
- "security": [
- {
- "BearerAuth": [
- "users"
- ]
- }
- ],
- "parameters": [
- {
- "in": "query",
- "name": "expand",
- "description": "Expansions",
- "schema": {
- "type": "string",
- "enum": [
- "permissions"
- ]
- }
- }
- ],
- "responses": {
- "200": {
- "description": "200 response",
- "content": {
- "application/json": {
- "examples": {
- "default": {
- "value": [
- {
- "id": 1,
- "created_on": "2020-01-30T09:36:08.000Z",
- "modified_on": "2020-01-30T09:41:04.000Z",
- "is_disabled": 0,
- "email": "jc@jc21.com",
- "name": "Jamie Curnow",
- "nickname": "James",
- "avatar": "//www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?default=mm",
- "roles": [
- "admin"
- ]
- }
- ]
- },
- "withPermissions": {
- "value": [
- {
- "id": 1,
- "created_on": "2020-01-30T09:36:08.000Z",
- "modified_on": "2020-01-30T09:41:04.000Z",
- "is_disabled": 0,
- "email": "jc@jc21.com",
- "name": "Jamie Curnow",
- "nickname": "James",
- "avatar": "//www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?default=mm",
- "roles": [
- "admin"
- ],
- "permissions": {
- "visibility": "all",
- "proxy_hosts": "manage",
- "redirection_hosts": "manage",
- "dead_hosts": "manage",
- "streams": "manage",
- "access_lists": "manage",
- "certificates": "manage"
- }
- }
- ]
- }
- },
- "schema": {
- "$ref": "#/components/schemas/UsersList"
- }
- }
- }
- }
- }
- },
- "post": {
- "operationId": "createUser",
- "summary": "Create a User",
- "tags": [
- "Users"
- ],
- "security": [
- {
- "BearerAuth": [
- "users"
- ]
- }
- ],
- "parameters": [
- {
- "in": "body",
- "name": "user",
- "description": "User Payload",
- "required": true,
- "schema": {
- "$ref": "#/components/schemas/UserObject"
- }
- }
- ],
- "responses": {
- "201": {
- "description": "201 response",
- "content": {
- "application/json": {
- "examples": {
- "default": {
- "value": {
- "id": 2,
- "created_on": "2020-01-30T09:36:08.000Z",
- "modified_on": "2020-01-30T09:41:04.000Z",
- "is_disabled": 0,
- "email": "jc@jc21.com",
- "name": "Jamie Curnow",
- "nickname": "James",
- "avatar": "//www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?default=mm",
- "roles": [
- "admin"
- ],
- "permissions": {
- "visibility": "all",
- "proxy_hosts": "manage",
- "redirection_hosts": "manage",
- "dead_hosts": "manage",
- "streams": "manage",
- "access_lists": "manage",
- "certificates": "manage"
- }
- }
- }
- },
- "schema": {
- "$ref": "#/components/schemas/UserObject"
- }
- }
- }
- }
- }
- }
- },
- "/users/{userID}": {
- "get": {
- "operationId": "getUser",
- "summary": "Get a user",
- "tags": [
- "Users"
- ],
- "security": [
- {
- "BearerAuth": [
- "users"
- ]
- }
- ],
- "parameters": [
- {
- "in": "path",
- "name": "userID",
- "schema": {
- "oneOf": [
- {
- "type": "string",
- "pattern": "^me$"
- },
- {
- "type": "integer",
- "minimum": 1
- }
- ]
- },
- "required": true,
- "description": "User ID or 'me' for yourself",
- "example": 1
- }
- ],
- "responses": {
- "200": {
- "description": "200 response",
- "content": {
- "application/json": {
- "examples": {
- "default": {
- "value": {
- "id": 1,
- "created_on": "2020-01-30T09:36:08.000Z",
- "modified_on": "2020-01-30T09:41:04.000Z",
- "is_disabled": 0,
- "email": "jc@jc21.com",
- "name": "Jamie Curnow",
- "nickname": "James",
- "avatar": "//www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?default=mm",
- "roles": [
- "admin"
- ]
- }
- }
- },
- "schema": {
- "$ref": "#/components/schemas/UserObject"
- }
- }
- }
- }
- }
- },
- "put": {
- "operationId": "updateUser",
- "summary": "Update a User",
- "tags": [
- "Users"
- ],
- "security": [
- {
- "BearerAuth": [
- "users"
- ]
- }
- ],
- "parameters": [
- {
- "in": "path",
- "name": "userID",
- "schema": {
- "oneOf": [
- {
- "type": "string",
- "pattern": "^me$"
- },
- {
- "type": "integer",
- "minimum": 1
- }
- ]
- },
- "required": true,
- "description": "User ID or 'me' for yourself",
- "example": 2
- },
- {
- "in": "body",
- "name": "user",
- "description": "User Payload",
- "required": true,
- "schema": {
- "$ref": "#/components/schemas/UserObject"
- }
- }
- ],
- "responses": {
- "200": {
- "description": "200 response",
- "content": {
- "application/json": {
- "examples": {
- "default": {
- "value": {
- "id": 2,
- "created_on": "2020-01-30T09:36:08.000Z",
- "modified_on": "2020-01-30T09:41:04.000Z",
- "is_disabled": 0,
- "email": "jc@jc21.com",
- "name": "Jamie Curnow",
- "nickname": "James",
- "avatar": "//www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?default=mm",
- "roles": [
- "admin"
- ]
- }
- }
- },
- "schema": {
- "$ref": "#/components/schemas/UserObject"
- }
- }
- }
- }
- }
- },
- "delete": {
- "operationId": "deleteUser",
- "summary": "Delete a User",
- "tags": [
- "Users"
- ],
- "security": [
- {
- "BearerAuth": [
- "users"
- ]
- }
- ],
- "parameters": [
- {
- "in": "path",
- "name": "userID",
- "schema": {
- "type": "integer",
- "minimum": 1
- },
- "required": true,
- "description": "User ID",
- "example": 2
- }
- ],
- "responses": {
- "200": {
- "description": "200 response",
- "content": {
- "application/json": {
- "examples": {
- "default": {
- "value": true
- }
- },
- "schema": {
- "type": "boolean"
- }
- }
- }
- }
- }
- }
- },
- "/users/{userID}/auth": {
- "put": {
- "operationId": "updateUserAuth",
- "summary": "Update a User's Authentication",
- "tags": [
- "Users"
- ],
- "security": [
- {
- "BearerAuth": [
- "users"
- ]
- }
- ],
- "parameters": [
- {
- "in": "path",
- "name": "userID",
- "schema": {
- "oneOf": [
- {
- "type": "string",
- "pattern": "^me$"
- },
- {
- "type": "integer",
- "minimum": 1
- }
- ]
- },
- "required": true,
- "description": "User ID or 'me' for yourself",
- "example": 2
- },
- {
- "in": "body",
- "name": "user",
- "description": "User Payload",
- "required": true,
- "schema": {
- "$ref": "#/components/schemas/AuthObject"
- }
- }
- ],
- "responses": {
- "200": {
- "description": "200 response",
- "content": {
- "application/json": {
- "examples": {
- "default": {
- "value": true
- }
- },
- "schema": {
- "type": "boolean"
- }
- }
- }
- }
- }
- }
- },
- "/users/{userID}/permissions": {
- "put": {
- "operationId": "updateUserPermissions",
- "summary": "Update a User's Permissions",
- "tags": [
- "Users"
- ],
- "security": [
- {
- "BearerAuth": [
- "users"
- ]
- }
- ],
- "parameters": [
- {
- "in": "path",
- "name": "userID",
- "schema": {
- "type": "integer",
- "minimum": 1
- },
- "required": true,
- "description": "User ID",
- "example": 2
- },
- {
- "in": "body",
- "name": "user",
- "description": "Permissions Payload",
- "required": true,
- "schema": {
- "$ref": "#/components/schemas/PermissionsObject"
- }
- }
- ],
- "responses": {
- "200": {
- "description": "200 response",
- "content": {
- "application/json": {
- "examples": {
- "default": {
- "value": true
- }
- },
- "schema": {
- "type": "boolean"
- }
- }
- }
- }
- }
- }
- },
- "/users/{userID}/login": {
- "put": {
- "operationId": "loginAsUser",
- "summary": "Login as this user",
- "tags": [
- "Users"
- ],
- "security": [
- {
- "BearerAuth": [
- "users"
- ]
- }
- ],
- "parameters": [
- {
- "in": "path",
- "name": "userID",
- "schema": {
- "type": "integer",
- "minimum": 1
- },
- "required": true,
- "description": "User ID",
- "example": 2
- }
- ],
- "responses": {
- "200": {
- "description": "200 response",
- "content": {
- "application/json": {
- "examples": {
- "default": {
- "value": {
- "token": "eyJhbGciOiJSUzI1NiIsInR...16OjT8B3NLyXg",
- "expires": "2020-01-31T10:56:23.239Z",
- "user": {
- "id": 1,
- "created_on": "2020-01-30T10:43:44.000Z",
- "modified_on": "2020-01-30T10:43:44.000Z",
- "is_disabled": 0,
- "email": "jc@jc21.com",
- "name": "Jamie Curnow",
- "nickname": "James",
- "avatar": "//www.gravatar.com/avatar/3c8d73f45fd8763f827b964c76e6032a?default=mm",
- "roles": [
- "admin"
- ]
- }
- }
- }
- },
- "schema": {
- "type": "object",
- "description": "Login object",
- "required": [
- "expires",
- "token",
- "user"
- ],
- "additionalProperties": false,
- "properties": {
- "expires": {
- "description": "Token Expiry Unix Time",
- "example": 1566540249,
- "minimum": 1,
- "type": "number"
- },
- "token": {
- "description": "JWT Token",
- "example": "eyJhbGciOiJSUzUxMiIsInR5cCI6IkpXVCJ9.ey...xaHKYr3Kk6MvkUjcC4",
- "type": "string"
- },
- "user": {
- "$ref": "#/components/schemas/UserObject"
- }
- }
- }
- }
- }
- }
- }
- }
- },
- "/reports/hosts": {
- "get": {
- "operationId": "reportsHosts",
- "summary": "Report on Host Statistics",
- "tags": [
- "Reports"
- ],
- "security": [
- {
- "BearerAuth": [
- "reports"
- ]
- }
- ],
- "responses": {
- "200": {
- "description": "200 response",
- "content": {
- "application/json": {
- "examples": {
- "default": {
- "value": {
- "proxy": 20,
- "redirection": 1,
- "stream": 0,
- "dead": 1
- }
- }
- },
- "schema": {
- "$ref": "#/components/schemas/HostReportObject"
- }
- }
- }
- }
- }
- }
- },
- "/audit-log": {
- "get": {
- "operationId": "getAuditLog",
- "summary": "Get Audit Log",
- "tags": [
- "Audit Log"
- ],
- "security": [
- {
- "BearerAuth": [
- "audit-log"
- ]
- }
- ],
- "responses": {
- "200": {
- "description": "200 response",
- "content": {
- "application/json": {
- "examples": {
- "default": {
- "value": {
- "proxy": 20,
- "redirection": 1,
- "stream": 0,
- "dead": 1
- }
- }
- },
- "schema": {
- "$ref": "#/components/schemas/HostReportObject"
- }
- }
- }
- }
- }
- }
- }
- },
- "components": {
- "securitySchemes": {
- "BearerAuth": {
- "type": "http",
- "scheme": "bearer"
- }
- },
- "schemas": {
- "HealthObject": {
- "type": "object",
- "description": "Health object",
- "additionalProperties": false,
- "required": [
- "status",
- "version"
- ],
- "properties": {
- "status": {
- "type": "string",
- "description": "Healthy",
- "example": "OK"
- },
- "version": {
- "type": "object",
- "description": "The version object",
- "example": {
- "major": 2,
- "minor": 0,
- "revision": 0
- },
- "additionalProperties": false,
- "required": [
- "major",
- "minor",
- "revision"
- ],
- "properties": {
- "major": {
- "type": "integer",
- "minimum": 0
- },
- "minor": {
- "type": "integer",
- "minimum": 0
- },
- "revision": {
- "type": "integer",
- "minimum": 0
- }
- }
- }
- }
- },
- "TokenObject": {
- "type": "object",
- "description": "Token object",
- "required": [
- "expires",
- "token"
- ],
- "additionalProperties": false,
- "properties": {
- "expires": {
- "description": "Token Expiry Unix Time",
- "example": 1566540249,
- "minimum": 1,
- "type": "number"
- },
- "token": {
- "description": "JWT Token",
- "example": "eyJhbGciOiJSUzUxMiIsInR5cCI6IkpXVCJ9.ey...xaHKYr3Kk6MvkUjcC4",
- "type": "string"
- }
- }
- },
- "SettingObject": {
- "type": "object",
- "description": "Setting object",
- "required": [
- "id",
- "name",
- "description",
- "value",
- "meta"
- ],
- "additionalProperties": false,
- "properties": {
- "id": {
- "type": "string",
- "description": "Setting ID",
- "minLength": 1,
- "example": "default-site"
- },
- "name": {
- "type": "string",
- "description": "Setting Display Name",
- "minLength": 1,
- "example": "Default Site"
- },
- "description": {
- "type": "string",
- "description": "Meaningful description",
- "minLength": 1,
- "example": "What to show when Nginx is hit with an unknown Host"
- },
- "value": {
- "description": "Value in almost any form",
- "example": "congratulations",
- "oneOf": [
- {
- "type": "string",
- "minLength": 1
- },
- {
- "type": "integer"
- },
- {
- "type": "object"
- },
- {
- "type": "number"
- },
- {
- "type": "array"
- }
- ]
- },
- "meta": {
- "description": "Extra metadata",
- "example": {},
- "type": "object"
- }
- }
- },
- "SettingsList": {
- "type": "array",
- "description": "Setting list",
- "items": {
- "$ref": "#/components/schemas/SettingObject"
- }
- },
- "UserObject": {
- "type": "object",
- "description": "User object",
- "required": [
- "id",
- "created_on",
- "modified_on",
- "is_disabled",
- "email",
- "name",
- "nickname",
- "avatar",
- "roles"
- ],
- "additionalProperties": false,
- "properties": {
- "id": {
- "type": "integer",
- "description": "User ID",
- "minimum": 1,
- "example": 1
- },
- "created_on": {
- "type": "string",
- "description": "Created Date",
- "example": "2020-01-30T09:36:08.000Z"
- },
- "modified_on": {
- "type": "string",
- "description": "Modified Date",
- "example": "2020-01-30T09:41:04.000Z"
- },
- "is_disabled": {
- "type": "integer",
- "minimum": 0,
- "maximum": 1,
- "description": "Is user Disabled (0 = false, 1 = true)",
- "example": 0
- },
- "email": {
- "type": "string",
- "description": "Email",
- "minLength": 3,
- "example": "jc@jc21.com"
- },
- "name": {
- "type": "string",
- "description": "Name",
- "minLength": 1,
- "example": "Jamie Curnow"
- },
- "nickname": {
- "type": "string",
- "description": "Nickname",
- "example": "James"
- },
- "avatar": {
- "type": "string",
- "description": "Gravatar URL based on email, without scheme",
- "example": "//www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?default=mm"
- },
- "roles": {
- "description": "Roles applied",
- "example": [
- "admin"
- ],
- "type": "array",
- "items": {
- "type": "string"
- }
- }
- }
- },
- "UsersList": {
- "type": "array",
- "description": "User list",
- "items": {
- "$ref": "#/components/schemas/UserObject"
- }
- },
- "AuthObject": {
- "type": "object",
- "description": "Authentication Object",
- "required": [
- "type",
- "secret"
- ],
- "properties": {
- "type": {
- "type": "string",
- "pattern": "^password$",
- "example": "password"
- },
- "current": {
- "type": "string",
- "minLength": 1,
- "maxLength": 64,
- "example": "changeme"
- },
- "secret": {
- "type": "string",
- "minLength": 8,
- "maxLength": 64,
- "example": "mySuperN3wP@ssword!"
- }
- }
- },
- "PermissionsObject": {
- "type": "object",
- "properties": {
- "visibility": {
- "type": "string",
- "description": "Visibility Type",
- "enum": [
- "all",
- "user"
- ]
- },
- "access_lists": {
- "type": "string",
- "description": "Access Lists Permissions",
- "enum": [
- "hidden",
- "view",
- "manage"
- ]
- },
- "dead_hosts": {
- "type": "string",
- "description": "404 Hosts Permissions",
- "enum": [
- "hidden",
- "view",
- "manage"
- ]
- },
- "proxy_hosts": {
- "type": "string",
- "description": "Proxy Hosts Permissions",
- "enum": [
- "hidden",
- "view",
- "manage"
- ]
- },
- "redirection_hosts": {
- "type": "string",
- "description": "Redirection Permissions",
- "enum": [
- "hidden",
- "view",
- "manage"
- ]
- },
- "streams": {
- "type": "string",
- "description": "Streams Permissions",
- "enum": [
- "hidden",
- "view",
- "manage"
- ]
- },
- "certificates": {
- "type": "string",
- "description": "Certificates Permissions",
- "enum": [
- "hidden",
- "view",
- "manage"
- ]
- }
- }
- },
- "HostReportObject": {
- "type": "object",
- "properties": {
- "proxy": {
- "type": "integer",
- "description": "Proxy Hosts Count"
- },
- "redirection": {
- "type": "integer",
- "description": "Redirection Hosts Count"
- },
- "stream": {
- "type": "integer",
- "description": "Streams Count"
- },
- "dead": {
- "type": "integer",
- "description": "404 Hosts Count"
- }
- }
- }
- }
- }
-}
\ No newline at end of file
diff --git a/backend/embed/api_docs/api.swagger.json b/backend/embed/api_docs/api.swagger.json
new file mode 100644
index 00000000..c22f9450
--- /dev/null
+++ b/backend/embed/api_docs/api.swagger.json
@@ -0,0 +1,243 @@
+{
+ "openapi": "3.0.0",
+ "info": {
+ "title": "Nginx Proxy Manager API",
+ "version": "{{VERSION}}"
+ },
+ "paths": {
+ "/": {
+ "get": {
+ "$ref": "file://./paths/get.json"
+ }
+ },
+ "/certificates": {
+ "get": {
+ "$ref": "file://./paths/certificates/get.json"
+ },
+ "post": {
+ "$ref": "file://./paths/certificates/post.json"
+ }
+ },
+ "/certificates/{certificateID}": {
+ "get": {
+ "$ref": "file://./paths/certificates/certificateID/get.json"
+ },
+ "put": {
+ "$ref": "file://./paths/certificates/certificateID/put.json"
+ },
+ "delete": {
+ "$ref": "file://./paths/certificates/certificateID/delete.json"
+ }
+ },
+ "/certificates-authorities": {
+ "get": {
+ "$ref": "file://./paths/certificates-authorities/get.json"
+ },
+ "post": {
+ "$ref": "file://./paths/certificates-authorities/post.json"
+ }
+ },
+ "/certificates-authorities/{caID}": {
+ "get": {
+ "$ref": "file://./paths/certificates-authorities/caID/get.json"
+ },
+ "put": {
+ "$ref": "file://./paths/certificates-authorities/caID/put.json"
+ },
+ "delete": {
+ "$ref": "file://./paths/certificates-authorities/caID/delete.json"
+ }
+ },
+ "/config": {
+ "get": {
+ "$ref": "file://./paths/config/get.json"
+ }
+ },
+ "/dns-providers": {
+ "get": {
+ "$ref": "file://./paths/dns-providers/get.json"
+ },
+ "post": {
+ "$ref": "file://./paths/dns-providers/post.json"
+ }
+ },
+ "/dns-providers/{providerID}": {
+ "get": {
+ "$ref": "file://./paths/dns-providers/providerID/get.json"
+ },
+ "put": {
+ "$ref": "file://./paths/dns-providers/providerID/put.json"
+ },
+ "delete": {
+ "$ref": "file://./paths/dns-providers/providerID/delete.json"
+ }
+ },
+ "/hosts": {
+ "get": {
+ "$ref": "file://./paths/hosts/get.json"
+ },
+ "post": {
+ "$ref": "file://./paths/hosts/post.json"
+ }
+ },
+ "/hosts/{hostID}": {
+ "get": {
+ "$ref": "file://./paths/hosts/hostID/get.json"
+ },
+ "put": {
+ "$ref": "file://./paths/hosts/hostID/put.json"
+ },
+ "delete": {
+ "$ref": "file://./paths/hosts/hostID/delete.json"
+ }
+ },
+ "/schema": {
+ "get": {
+ "$ref": "file://./paths/schema/get.json"
+ }
+ },
+ "/settings": {
+ "get": {
+ "$ref": "file://./paths/settings/get.json"
+ },
+ "post": {
+ "$ref": "file://./paths/settings/post.json"
+ }
+ },
+ "/settings/{name}": {
+ "get": {
+ "$ref": "file://./paths/settings/name/get.json"
+ },
+ "put": {
+ "$ref": "file://./paths/settings/name/put.json"
+ }
+ },
+ "/streams": {
+ "get": {
+ "$ref": "file://./paths/streams/get.json"
+ },
+ "post": {
+ "$ref": "file://./paths/streams/post.json"
+ }
+ },
+ "/streams/{streamID}": {
+ "get": {
+ "$ref": "file://./paths/streams/streamID/get.json"
+ },
+ "put": {
+ "$ref": "file://./paths/streams/streamID/put.json"
+ },
+ "delete": {
+ "$ref": "file://./paths/streams/streamID/delete.json"
+ }
+ },
+ "/tokens": {
+ "get": {
+ "$ref": "file://./paths/tokens/get.json"
+ },
+ "post": {
+ "$ref": "file://./paths/tokens/post.json"
+ }
+ },
+ "/users": {
+ "get": {
+ "$ref": "file://./paths/users/get.json"
+ },
+ "post": {
+ "$ref": "file://./paths/users/post.json"
+ }
+ },
+ "/users/{userID}": {
+ "get": {
+ "$ref": "file://./paths/users/userID/get.json"
+ },
+ "put": {
+ "$ref": "file://./paths/users/userID/put.json"
+ },
+ "delete": {
+ "$ref": "file://./paths/users/userID/delete.json"
+ }
+ },
+ "/users/{userID}/auth": {
+ "post": {
+ "$ref": "file://./paths/users/userID/auth/post.json"
+ }
+ }
+ },
+ "components": {
+ "schemas": {
+ "CertificateAuthorityList": {
+ "$ref": "file://./components/CertificateAuthorityList.json"
+ },
+ "CertificateAuthorityObject": {
+ "$ref": "file://./components/CertificateAuthorityObject.json"
+ },
+ "CertificateList": {
+ "$ref": "file://./components/CertificateList.json"
+ },
+ "CertificateObject": {
+ "$ref": "file://./components/CertificateObject.json"
+ },
+ "ConfigObject": {
+ "$ref": "file://./components/ConfigObject.json"
+ },
+ "DeletedItemResponse": {
+ "$ref": "file://./components/DeletedItemResponse.json"
+ },
+ "DNSProviderList": {
+ "$ref": "file://./components/DNSProviderList.json"
+ },
+ "DNSProviderObject": {
+ "$ref": "file://./components/DNSProviderObject.json"
+ },
+ "ErrorObject": {
+ "$ref": "file://./components/ErrorObject.json"
+ },
+ "FilterObject": {
+ "$ref": "file://./components/FilterObject.json"
+ },
+ "HealthObject": {
+ "$ref": "file://./components/HealthObject.json"
+ },
+ "HostList": {
+ "$ref": "file://./components/HostList.json"
+ },
+ "HostObject": {
+ "$ref": "file://./components/HostObject.json"
+ },
+ "HostTemplateList": {
+ "$ref": "file://./components/HostTemplateList.json"
+ },
+ "HostTemplateObject": {
+ "$ref": "file://./components/HostTemplateObject.json"
+ },
+ "SettingList": {
+ "$ref": "file://./components/SettingList.json"
+ },
+ "SettingObject": {
+ "$ref": "file://./components/SettingObject.json"
+ },
+ "SortObject": {
+ "$ref": "file://./components/SortObject.json"
+ },
+ "StreamList": {
+ "$ref": "file://./components/StreamList.json"
+ },
+ "StreamObject": {
+ "$ref": "file://./components/StreamObject.json"
+ },
+ "TokenObject": {
+ "$ref": "file://./components/TokenObject.json"
+ },
+ "UserAuthObject": {
+ "$ref": "file://./components/UserAuthObject.json"
+ },
+ "UserList": {
+ "$ref": "file://./components/UserList.json"
+ },
+ "UserObject": {
+ "$ref": "file://./components/UserObject.json"
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/CertificateAuthorityList.json b/backend/embed/api_docs/components/CertificateAuthorityList.json
new file mode 100644
index 00000000..131140ef
--- /dev/null
+++ b/backend/embed/api_docs/components/CertificateAuthorityList.json
@@ -0,0 +1,40 @@
+{
+ "type": "object",
+ "description": "CertificateAuthorityList",
+ "additionalProperties": false,
+ "required": ["total", "offset", "limit", "sort"],
+ "properties": {
+ "total": {
+ "type": "integer",
+ "description": "Total number of rows"
+ },
+ "offset": {
+ "type": "integer",
+ "description": "Pagination Offset"
+ },
+ "limit": {
+ "type": "integer",
+ "description": "Pagination Limit"
+ },
+ "sort": {
+ "type": "array",
+ "description": "Sorting",
+ "items": {
+ "$ref": "#/components/schemas/SortObject"
+ }
+ },
+ "filter": {
+ "type": "array",
+ "description": "Filters",
+ "items": {
+ "$ref": "#/components/schemas/FilterObject"
+ }
+ },
+ "items": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/CertificateAuthorityObject"
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/CertificateAuthorityObject.json b/backend/embed/api_docs/components/CertificateAuthorityObject.json
new file mode 100644
index 00000000..a25cad48
--- /dev/null
+++ b/backend/embed/api_docs/components/CertificateAuthorityObject.json
@@ -0,0 +1,55 @@
+{
+ "type": "object",
+ "description": "CertificateAuthorityObject",
+ "additionalProperties": false,
+ "required": [
+ "id",
+ "created_on",
+ "modified_on",
+ "name",
+ "acmesh_server",
+ "ca_bundle",
+ "max_domains",
+ "is_wildcard_supported",
+ "is_readonly"
+ ],
+ "properties": {
+ "id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "created_on": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "modified_on": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "name": {
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 100
+ },
+ "acmesh_server": {
+ "type": "string",
+ "minLength": 2,
+ "maxLength": 255
+ },
+ "ca_bundle": {
+ "type": "string",
+ "minLength": 0,
+ "maxLength": 255
+ },
+ "max_domains": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "is_wildcard_supported": {
+ "type": "boolean"
+ },
+ "is_readonly": {
+ "type": "boolean"
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/CertificateList.json b/backend/embed/api_docs/components/CertificateList.json
new file mode 100644
index 00000000..8fbf2ccc
--- /dev/null
+++ b/backend/embed/api_docs/components/CertificateList.json
@@ -0,0 +1,40 @@
+{
+ "type": "object",
+ "description": "CertificateList",
+ "additionalProperties": false,
+ "required": ["total", "offset", "limit", "sort"],
+ "properties": {
+ "total": {
+ "type": "integer",
+ "description": "Total number of rows"
+ },
+ "offset": {
+ "type": "integer",
+ "description": "Pagination Offset"
+ },
+ "limit": {
+ "type": "integer",
+ "description": "Pagination Limit"
+ },
+ "sort": {
+ "type": "array",
+ "description": "Sorting",
+ "items": {
+ "$ref": "#/components/schemas/SortObject"
+ }
+ },
+ "filter": {
+ "type": "array",
+ "description": "Filters",
+ "items": {
+ "$ref": "#/components/schemas/FilterObject"
+ }
+ },
+ "items": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/CertificateObject"
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/CertificateObject.json b/backend/embed/api_docs/components/CertificateObject.json
new file mode 100644
index 00000000..789eabf6
--- /dev/null
+++ b/backend/embed/api_docs/components/CertificateObject.json
@@ -0,0 +1,82 @@
+{
+ "type": "object",
+ "description": "CertificateObject",
+ "additionalProperties": false,
+ "required": [
+ "id",
+ "created_on",
+ "modified_on",
+ "expires_on",
+ "type",
+ "user_id",
+ "certificate_authority_id",
+ "dns_provider_id",
+ "name",
+ "is_ecc",
+ "status",
+ "domain_names"
+ ],
+ "properties": {
+ "id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "created_on": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "modified_on": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "expires_on": {
+ "type": "integer",
+ "minimum": 1,
+ "nullable": true
+ },
+ "type": {
+ "type": "string",
+ "enum": ["custom", "http", "dns"]
+ },
+ "user_id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "certificate_authority_id": {
+ "type": "integer",
+ "minimum": 0
+ },
+ "certificate_authority": {
+ "$ref": "#/components/schemas/CertificateAuthorityObject"
+ },
+ "dns_provider_id": {
+ "type": "integer",
+ "minimum": 0
+ },
+ "name": {
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 100
+ },
+ "domain_names": {
+ "type": "array",
+ "minItems": 1,
+ "items": {
+ "type": "string",
+ "minLength": 4
+ }
+ },
+ "status": {
+ "type": "string",
+ "enum": ["ready", "requesting", "failed", "provided"]
+ },
+ "is_ecc": {
+ "type": "integer",
+ "minimum": 0,
+ "maximum": 1
+ },
+ "error_message": {
+ "type": "string"
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/ConfigObject.json b/backend/embed/api_docs/components/ConfigObject.json
new file mode 100644
index 00000000..96c4176f
--- /dev/null
+++ b/backend/embed/api_docs/components/ConfigObject.json
@@ -0,0 +1,4 @@
+{
+ "type": "object",
+ "description": "ConfigObject"
+}
diff --git a/backend/embed/api_docs/components/DNSProviderList.json b/backend/embed/api_docs/components/DNSProviderList.json
new file mode 100644
index 00000000..edf8385c
--- /dev/null
+++ b/backend/embed/api_docs/components/DNSProviderList.json
@@ -0,0 +1,40 @@
+{
+ "type": "object",
+ "description": "DNSProviderList",
+ "additionalProperties": false,
+ "required": ["total", "offset", "limit", "sort"],
+ "properties": {
+ "total": {
+ "type": "integer",
+ "description": "Total number of rows"
+ },
+ "offset": {
+ "type": "integer",
+ "description": "Pagination Offset"
+ },
+ "limit": {
+ "type": "integer",
+ "description": "Pagination Limit"
+ },
+ "sort": {
+ "type": "array",
+ "description": "Sorting",
+ "items": {
+ "$ref": "#/components/schemas/SortObject"
+ }
+ },
+ "filter": {
+ "type": "array",
+ "description": "Filters",
+ "items": {
+ "$ref": "#/components/schemas/FilterObject"
+ }
+ },
+ "items": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/DNSProviderObject"
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/DNSProviderObject.json b/backend/embed/api_docs/components/DNSProviderObject.json
new file mode 100644
index 00000000..9ea739ff
--- /dev/null
+++ b/backend/embed/api_docs/components/DNSProviderObject.json
@@ -0,0 +1,49 @@
+{
+ "type": "object",
+ "description": "DNSProviderObject",
+ "additionalProperties": false,
+ "required": [
+ "id",
+ "created_on",
+ "modified_on",
+ "user_id",
+ "name",
+ "acmesh_name",
+ "dns_sleep",
+ "meta"
+ ],
+ "properties": {
+ "id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "created_on": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "modified_on": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "user_id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "name": {
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 100
+ },
+ "acmesh_name": {
+ "type": "string",
+ "minLength": 4,
+ "maxLength": 50
+ },
+ "dns_sleep": {
+ "type": "integer"
+ },
+ "meta": {
+ "type": "object"
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/DeletedItemResponse.json b/backend/embed/api_docs/components/DeletedItemResponse.json
new file mode 100644
index 00000000..0e0a9a0e
--- /dev/null
+++ b/backend/embed/api_docs/components/DeletedItemResponse.json
@@ -0,0 +1,15 @@
+{
+ "type": "object",
+ "description": "DeletedItemResponse",
+ "additionalProperties": false,
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "type": "boolean",
+ "nullable": true
+ },
+ "error": {
+ "$ref": "#/components/schemas/ErrorObject"
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/ErrorObject.json b/backend/embed/api_docs/components/ErrorObject.json
new file mode 100644
index 00000000..a1d77605
--- /dev/null
+++ b/backend/embed/api_docs/components/ErrorObject.json
@@ -0,0 +1,17 @@
+{
+ "type": "object",
+ "description": "ErrorObject",
+ "additionalProperties": false,
+ "required": ["code", "message"],
+ "properties": {
+ "code": {
+ "type": "integer",
+ "description": "Error code",
+ "minimum": 0
+ },
+ "message": {
+ "type": "string",
+ "description": "Error message"
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/FilterObject.json b/backend/embed/api_docs/components/FilterObject.json
new file mode 100644
index 00000000..4ba75766
--- /dev/null
+++ b/backend/embed/api_docs/components/FilterObject.json
@@ -0,0 +1,24 @@
+{
+ "type": "object",
+ "description": "FilterObject",
+ "additionalProperties": false,
+ "required": ["field", "modifier", "value"],
+ "properties": {
+ "field": {
+ "type": "string",
+ "description": "Field to filter with"
+ },
+ "modifier": {
+ "type": "string",
+ "description": "Filter modifier",
+ "pattern": "^(equals|not|min|max|greater|lesser|contains|starts|ends|in|notin)$"
+ },
+ "value": {
+ "type": "array",
+ "description": "Values used for filtering",
+ "items": {
+ "type": "string"
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/HealthObject.json b/backend/embed/api_docs/components/HealthObject.json
new file mode 100644
index 00000000..c58261a7
--- /dev/null
+++ b/backend/embed/api_docs/components/HealthObject.json
@@ -0,0 +1,41 @@
+{
+ "type": "object",
+ "description": "HealthObject",
+ "additionalProperties": false,
+ "required": ["version", "commit", "healthy", "setup", "error_reporting"],
+ "properties": {
+ "version": {
+ "type": "string",
+ "description": "Version",
+ "example": "3.0.0",
+ "minLength": 1
+ },
+ "commit": {
+ "type": "string",
+ "description": "Commit hash",
+ "example": "946b88f",
+ "minLength": 7
+ },
+ "healthy": {
+ "type": "boolean",
+ "description": "Healthy?",
+ "example": true
+ },
+ "setup": {
+ "type": "boolean",
+ "description": "Is the application set up?",
+ "example": true
+ },
+ "error_reporting": {
+ "type": "boolean",
+ "description": "Will the application send any error reporting?",
+ "example": true
+ },
+ "acme.sh": {
+ "type": "string",
+ "description": "Acme.sh version",
+ "example": "v3.0.0",
+ "minLength": 1
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/HostList.json b/backend/embed/api_docs/components/HostList.json
new file mode 100644
index 00000000..8d9d413a
--- /dev/null
+++ b/backend/embed/api_docs/components/HostList.json
@@ -0,0 +1,40 @@
+{
+ "type": "object",
+ "description": "HostList",
+ "additionalProperties": false,
+ "required": ["total", "offset", "limit", "sort"],
+ "properties": {
+ "total": {
+ "type": "integer",
+ "description": "Total number of rows"
+ },
+ "offset": {
+ "type": "integer",
+ "description": "Pagination Offset"
+ },
+ "limit": {
+ "type": "integer",
+ "description": "Pagination Limit"
+ },
+ "sort": {
+ "type": "array",
+ "description": "Sorting",
+ "items": {
+ "$ref": "#/components/schemas/SortObject"
+ }
+ },
+ "filter": {
+ "type": "array",
+ "description": "Filters",
+ "items": {
+ "$ref": "#/components/schemas/FilterObject"
+ }
+ },
+ "items": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/HostObject"
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/HostObject.json b/backend/embed/api_docs/components/HostObject.json
new file mode 100644
index 00000000..4d9eb2c9
--- /dev/null
+++ b/backend/embed/api_docs/components/HostObject.json
@@ -0,0 +1,55 @@
+{
+ "type": "object",
+ "description": "HostObject",
+ "additionalProperties": false,
+ "required": [
+ "id",
+ "created_on",
+ "modified_on",
+ "expires_on",
+ "user_id",
+ "provider",
+ "name",
+ "domain_names"
+ ],
+ "properties": {
+ "id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "created_on": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "modified_on": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "expires_on": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "user_id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "provider": {
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 100
+ },
+ "name": {
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 100
+ },
+ "domain_names": {
+ "type": "array",
+ "minItems": 1,
+ "items": {
+ "type": "string",
+ "minLength": 4
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/HostTemplateList.json b/backend/embed/api_docs/components/HostTemplateList.json
new file mode 100644
index 00000000..a4ad36e1
--- /dev/null
+++ b/backend/embed/api_docs/components/HostTemplateList.json
@@ -0,0 +1,40 @@
+{
+ "type": "object",
+ "description": "HostTemplateList",
+ "additionalProperties": false,
+ "required": ["total", "offset", "limit", "sort"],
+ "properties": {
+ "total": {
+ "type": "integer",
+ "description": "Total number of rows"
+ },
+ "offset": {
+ "type": "integer",
+ "description": "Pagination Offset"
+ },
+ "limit": {
+ "type": "integer",
+ "description": "Pagination Limit"
+ },
+ "sort": {
+ "type": "array",
+ "description": "Sorting",
+ "items": {
+ "$ref": "#/components/schemas/SortObject"
+ }
+ },
+ "filter": {
+ "type": "array",
+ "description": "Filters",
+ "items": {
+ "$ref": "#/components/schemas/FilterObject"
+ }
+ },
+ "items": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/HostTemplateObject"
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/HostTemplateObject.json b/backend/embed/api_docs/components/HostTemplateObject.json
new file mode 100644
index 00000000..ac4937a8
--- /dev/null
+++ b/backend/embed/api_docs/components/HostTemplateObject.json
@@ -0,0 +1,44 @@
+{
+ "type": "object",
+ "description": "HostTemplateObject",
+ "additionalProperties": false,
+ "required": [
+ "id",
+ "created_on",
+ "modified_on",
+ "user_id",
+ "name",
+ "host_type",
+ "template"
+ ],
+ "properties": {
+ "id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "created_on": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "modified_on": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "user_id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "name": {
+ "type": "string",
+ "minLength": 1
+ },
+ "host_type": {
+ "type": "string",
+ "pattern": "^proxy|redirect|dead|stream$"
+ },
+ "template": {
+ "type": "string",
+ "minLength": 20
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/SettingList.json b/backend/embed/api_docs/components/SettingList.json
new file mode 100644
index 00000000..77fd564b
--- /dev/null
+++ b/backend/embed/api_docs/components/SettingList.json
@@ -0,0 +1,40 @@
+{
+ "type": "object",
+ "description": "SettingList",
+ "additionalProperties": false,
+ "required": ["total", "offset", "limit", "sort"],
+ "properties": {
+ "total": {
+ "type": "integer",
+ "description": "Total number of rows"
+ },
+ "offset": {
+ "type": "integer",
+ "description": "Pagination Offset"
+ },
+ "limit": {
+ "type": "integer",
+ "description": "Pagination Limit"
+ },
+ "sort": {
+ "type": "array",
+ "description": "Sorting",
+ "items": {
+ "$ref": "#/components/schemas/SortObject"
+ }
+ },
+ "filter": {
+ "type": "array",
+ "description": "Filters",
+ "items": {
+ "$ref": "#/components/schemas/FilterObject"
+ }
+ },
+ "items": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/SettingObject"
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/SettingObject.json b/backend/embed/api_docs/components/SettingObject.json
new file mode 100644
index 00000000..60aab614
--- /dev/null
+++ b/backend/embed/api_docs/components/SettingObject.json
@@ -0,0 +1,49 @@
+{
+ "type": "object",
+ "description": "SettingObject",
+ "additionalProperties": false,
+ "required": ["id", "name", "value"],
+ "properties": {
+ "id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "created_on": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "modified_on": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "name": {
+ "type": "string",
+ "minLength": 2,
+ "maxLength": 100
+ },
+ "description": {
+ "type": "string",
+ "minLength": 0,
+ "maxLength": 100
+ },
+ "value": {
+ "oneOf": [
+ {
+ "type": "array"
+ },
+ {
+ "type": "boolean"
+ },
+ {
+ "type": "object"
+ },
+ {
+ "type": "integer"
+ },
+ {
+ "type": "string"
+ }
+ ]
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/SortObject.json b/backend/embed/api_docs/components/SortObject.json
new file mode 100644
index 00000000..a2810ce6
--- /dev/null
+++ b/backend/embed/api_docs/components/SortObject.json
@@ -0,0 +1,17 @@
+{
+ "type": "object",
+ "description": "SortObject",
+ "additionalProperties": false,
+ "required": ["field", "direction"],
+ "properties": {
+ "field": {
+ "type": "string",
+ "description": "Field for sorting on"
+ },
+ "direction": {
+ "type": "string",
+ "description": "Sort order",
+ "pattern": "^(ASC|DESC)$"
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/StreamList.json b/backend/embed/api_docs/components/StreamList.json
new file mode 100644
index 00000000..c3dae5ab
--- /dev/null
+++ b/backend/embed/api_docs/components/StreamList.json
@@ -0,0 +1,40 @@
+{
+ "type": "object",
+ "description": "StreamList",
+ "additionalProperties": false,
+ "required": ["total", "offset", "limit", "sort"],
+ "properties": {
+ "total": {
+ "type": "integer",
+ "description": "Total number of rows"
+ },
+ "offset": {
+ "type": "integer",
+ "description": "Pagination Offset"
+ },
+ "limit": {
+ "type": "integer",
+ "description": "Pagination Limit"
+ },
+ "sort": {
+ "type": "array",
+ "description": "Sorting",
+ "items": {
+ "$ref": "#/components/schemas/SortObject"
+ }
+ },
+ "filter": {
+ "type": "array",
+ "description": "Filters",
+ "items": {
+ "$ref": "#/components/schemas/FilterObject"
+ }
+ },
+ "items": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/StreamObject"
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/StreamObject.json b/backend/embed/api_docs/components/StreamObject.json
new file mode 100644
index 00000000..7141e861
--- /dev/null
+++ b/backend/embed/api_docs/components/StreamObject.json
@@ -0,0 +1,55 @@
+{
+ "type": "object",
+ "description": "StreamObject",
+ "additionalProperties": false,
+ "required": [
+ "id",
+ "created_on",
+ "modified_on",
+ "expires_on",
+ "user_id",
+ "provider",
+ "name",
+ "domain_names"
+ ],
+ "properties": {
+ "id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "created_on": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "modified_on": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "expires_on": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "user_id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "provider": {
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 100
+ },
+ "name": {
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 100
+ },
+ "domain_names": {
+ "type": "array",
+ "minItems": 1,
+ "items": {
+ "type": "string",
+ "minLength": 4
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/TokenObject.json b/backend/embed/api_docs/components/TokenObject.json
new file mode 100644
index 00000000..81c5d205
--- /dev/null
+++ b/backend/embed/api_docs/components/TokenObject.json
@@ -0,0 +1,19 @@
+{
+ "type": "object",
+ "description": "TokenObject",
+ "additionalProperties": false,
+ "required": ["expires", "token"],
+ "properties": {
+ "expires": {
+ "type": "number",
+ "description": "Token Expiry Unix Time",
+ "example": 1566540249,
+ "minimum": 1
+ },
+ "token": {
+ "type": "string",
+ "description": "JWT Token",
+ "example": "eyJhbGciOiJSUzUxMiIsInR5cCI6IkpXVCJ9.ey...xaHKYr3Kk6MvkUjcC4"
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/UserAuthObject.json b/backend/embed/api_docs/components/UserAuthObject.json
new file mode 100644
index 00000000..b3ab4b48
--- /dev/null
+++ b/backend/embed/api_docs/components/UserAuthObject.json
@@ -0,0 +1,28 @@
+{
+ "type": "object",
+ "description": "UserAuthObject",
+ "additionalProperties": false,
+ "required": ["id", "user_id", "type", "created_on", "modified_on"],
+ "properties": {
+ "id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "user_id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "type": {
+ "type": "string",
+ "pattern": "^password$"
+ },
+ "created_on": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "modified_on": {
+ "type": "integer",
+ "minimum": 1
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/UserList.json b/backend/embed/api_docs/components/UserList.json
new file mode 100644
index 00000000..a4d502f3
--- /dev/null
+++ b/backend/embed/api_docs/components/UserList.json
@@ -0,0 +1,40 @@
+{
+ "type": "object",
+ "description": "UserList",
+ "additionalProperties": false,
+ "required": ["total", "offset", "limit", "sort"],
+ "properties": {
+ "total": {
+ "type": "integer",
+ "description": "Total number of rows"
+ },
+ "offset": {
+ "type": "integer",
+ "description": "Pagination Offset"
+ },
+ "limit": {
+ "type": "integer",
+ "description": "Pagination Limit"
+ },
+ "sort": {
+ "type": "array",
+ "description": "Sorting",
+ "items": {
+ "$ref": "#/components/schemas/SortObject"
+ }
+ },
+ "filter": {
+ "type": "array",
+ "description": "Filters",
+ "items": {
+ "$ref": "#/components/schemas/FilterObject"
+ }
+ },
+ "items": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/UserObject"
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/UserObject.json b/backend/embed/api_docs/components/UserObject.json
new file mode 100644
index 00000000..997f01a8
--- /dev/null
+++ b/backend/embed/api_docs/components/UserObject.json
@@ -0,0 +1,73 @@
+{
+ "type": "object",
+ "description": "UserObject",
+ "additionalProperties": false,
+ "required": [
+ "id",
+ "name",
+ "nickname",
+ "email",
+ "created_on",
+ "modified_on",
+ "is_disabled"
+ ],
+ "properties": {
+ "id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "name": {
+ "type": "string",
+ "minLength": 2,
+ "maxLength": 100
+ },
+ "nickname": {
+ "type": "string",
+ "minLength": 2,
+ "maxLength": 100
+ },
+ "email": {
+ "type": "string",
+ "minLength": 5,
+ "maxLength": 150
+ },
+ "created_on": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "modified_on": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "gravatar_url": {
+ "type": "string"
+ },
+ "is_disabled": {
+ "type": "boolean"
+ },
+ "is_deleted": {
+ "type": "boolean"
+ },
+ "auth": {
+ "type": "object",
+ "required": ["type"],
+ "properties": {
+ "id": {
+ "type": "integer"
+ },
+ "type": {
+ "type": "string",
+ "pattern": "^password$"
+ }
+ }
+ },
+ "capabilities": {
+ "type": "array",
+ "minItems": 1,
+ "items": {
+ "type": "string",
+ "minLength": 1
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/main.go b/backend/embed/api_docs/main.go
new file mode 100644
index 00000000..95756af7
--- /dev/null
+++ b/backend/embed/api_docs/main.go
@@ -0,0 +1,9 @@
+package doc
+
+import "embed"
+
+// SwaggerFiles contain all the files used for swagger schema generation
+//go:embed api.swagger.json
+//go:embed components
+//go:embed paths
+var SwaggerFiles embed.FS
diff --git a/backend/embed/api_docs/paths/certificates-authorities/caID/delete.json b/backend/embed/api_docs/paths/certificates-authorities/caID/delete.json
new file mode 100644
index 00000000..3ae3bea8
--- /dev/null
+++ b/backend/embed/api_docs/paths/certificates-authorities/caID/delete.json
@@ -0,0 +1,39 @@
+{
+ "operationId": "deleteCertificateAuthority",
+ "summary": "Delete a Certificate Authority",
+ "tags": [
+ "Certificate Authorities"
+ ],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "caID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "Numeric ID of the Certificate Authority",
+ "example": 1
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DeletedItemResponse"
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": true
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/backend/embed/api_docs/paths/certificates-authorities/caID/get.json b/backend/embed/api_docs/paths/certificates-authorities/caID/get.json
new file mode 100644
index 00000000..6bd4d008
--- /dev/null
+++ b/backend/embed/api_docs/paths/certificates-authorities/caID/get.json
@@ -0,0 +1,52 @@
+{
+ "operationId": "getCertificateAuthority",
+ "summary": "Get a Certificate Authority object by ID",
+ "tags": ["Certificate Authorities"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "caID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "ID of the Certificate Authority",
+ "example": 1
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/CertificateAuthorityObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 1,
+ "created_on": 1627531400,
+ "modified_on": 1627531400,
+ "name": "ZeroSSL",
+ "acmesh_server": "zerossl",
+ "ca_bundle": "",
+ "max_domains": 10,
+ "is_wildcard_supported": true,
+ "is_readonly": false
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/certificates-authorities/caID/put.json b/backend/embed/api_docs/paths/certificates-authorities/caID/put.json
new file mode 100644
index 00000000..63199bc6
--- /dev/null
+++ b/backend/embed/api_docs/paths/certificates-authorities/caID/put.json
@@ -0,0 +1,61 @@
+{
+ "operationId": "updateCertificateAuthority",
+ "summary": "Update an existing Certificate Authority",
+ "tags": ["Certificate Authorities"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "caID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "ID of the Certificate Authority",
+ "example": 1
+ }
+ ],
+ "requestBody": {
+ "description": "Certificate Authority details to update",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.UpdateCertificateAuthority}}"
+ }
+ }
+ },
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/CertificateAuthorityObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 1,
+ "created_on": 1627531400,
+ "modified_on": 1627531400,
+ "name": "ZeroSSL",
+ "acmesh_server": "zerossl",
+ "ca_bundle": "",
+ "max_domains": 10,
+ "is_wildcard_supported": true,
+ "is_readonly": false
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/certificates-authorities/get.json b/backend/embed/api_docs/paths/certificates-authorities/get.json
new file mode 100644
index 00000000..54138b75
--- /dev/null
+++ b/backend/embed/api_docs/paths/certificates-authorities/get.json
@@ -0,0 +1,92 @@
+{
+ "operationId": "getCertificateAuthorities",
+ "summary": "Get a list of Certificate Authorities",
+ "tags": ["Certificate Authorities"],
+ "parameters": [
+ {
+ "in": "query",
+ "name": "offset",
+ "schema": {
+ "type": "number"
+ },
+ "description": "The pagination row offset, default 0",
+ "example": 0
+ },
+ {
+ "in": "query",
+ "name": "limit",
+ "schema": {
+ "type": "number"
+ },
+ "description": "The pagination row limit, default 10",
+ "example": 10
+ },
+ {
+ "in": "query",
+ "name": "sort",
+ "schema": {
+ "type": "string"
+ },
+ "description": "The sorting of the list",
+ "example": "id,name.asc,value.desc"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/CertificateAuthorityList"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "total": 2,
+ "offset": 0,
+ "limit": 10,
+ "sort": [
+ {
+ "field": "name",
+ "direction": "ASC"
+ }
+ ],
+ "items": [
+ {
+ "id": 1,
+ "created_on": 1627531400,
+ "modified_on": 1627531400,
+ "name": "ZeroSSL",
+ "acmesh_server": "zerossl",
+ "ca_bundle": "",
+ "max_domains": 10,
+ "is_wildcard_supported": true,
+ "is_setup": true
+ },
+ {
+ "id": 2,
+ "created_on": 1627531400,
+ "modified_on": 1627531400,
+ "name": "Let's Encrypt",
+ "acmesh_server": "https://acme-v02.api.letsencrypt.org/directory",
+ "ca_bundle": "",
+ "max_domains": 10,
+ "is_wildcard_supported": true,
+ "is_setup": true
+ }
+ ]
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/certificates-authorities/post.json b/backend/embed/api_docs/paths/certificates-authorities/post.json
new file mode 100644
index 00000000..93d13797
--- /dev/null
+++ b/backend/embed/api_docs/paths/certificates-authorities/post.json
@@ -0,0 +1,48 @@
+{
+ "operationId": "createCertificateAuthority",
+ "summary": "Create a new Certificate Authority",
+ "tags": ["Certificate Authorities"],
+ "requestBody": {
+ "description": "Certificate Authority to Create",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.CreateCertificateAuthority}}"
+ }
+ }
+ },
+ "responses": {
+ "201": {
+ "description": "201 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/CertificateAuthorityObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 1,
+ "created_on": 1627531400,
+ "modified_on": 1627531400,
+ "name": "ZeroSSL",
+ "acmesh_server": "zerossl",
+ "ca_bundle": "",
+ "max_domains": 10,
+ "is_wildcard_supported": true,
+ "is_readonly": false
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/certificates/certificateID/delete.json b/backend/embed/api_docs/paths/certificates/certificateID/delete.json
new file mode 100644
index 00000000..98acfaf7
--- /dev/null
+++ b/backend/embed/api_docs/paths/certificates/certificateID/delete.json
@@ -0,0 +1,60 @@
+{
+ "operationId": "deleteCertificate",
+ "summary": "Delete a Certificate",
+ "tags": [
+ "Certificates"
+ ],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "certificateID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "Numeric ID of the certificate",
+ "example": 1
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DeletedItemResponse"
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": true
+ }
+ }
+ }
+ }
+ }
+ },
+ "400": {
+ "description": "400 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DeletedItemResponse"
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": null,
+ "error": {
+ "code": 400,
+ "message": "You cannot delete a certificate that is in use!"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/backend/embed/api_docs/paths/certificates/certificateID/get.json b/backend/embed/api_docs/paths/certificates/certificateID/get.json
new file mode 100644
index 00000000..e26988ff
--- /dev/null
+++ b/backend/embed/api_docs/paths/certificates/certificateID/get.json
@@ -0,0 +1,61 @@
+{
+ "operationId": "getCertificate",
+ "summary": "Get a certificate object by ID",
+ "tags": [
+ "Certificates"
+ ],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "certificateID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "ID of the certificate",
+ "example": 1
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": [
+ "result"
+ ],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/CertificateObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 1,
+ "created_on": 1604536109,
+ "modified_on": 1604536109,
+ "expires_on": null,
+ "type": "dns",
+ "user_id": 1,
+ "certificate_authority_id": 2,
+ "dns_provider_id": 1,
+ "name": "test1.jc21.com.au",
+ "domain_names": [
+ "test1.jc21.com.au"
+ ],
+ "is_ecc": 0,
+ "status": "ready"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/backend/embed/api_docs/paths/certificates/certificateID/put.json b/backend/embed/api_docs/paths/certificates/certificateID/put.json
new file mode 100644
index 00000000..3574d87b
--- /dev/null
+++ b/backend/embed/api_docs/paths/certificates/certificateID/put.json
@@ -0,0 +1,70 @@
+{
+ "operationId": "updateCertificate",
+ "summary": "Update an existing Certificate",
+ "tags": [
+ "Certificates"
+ ],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "certificateID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "ID of the certificate",
+ "example": 1
+ }
+ ],
+ "requestBody": {
+ "description": "Certificate details to update",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.UpdateCertificate}}"
+ }
+ }
+ },
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": [
+ "result"
+ ],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/CertificateObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 1,
+ "created_on": 1604536109,
+ "modified_on": 1604536109,
+ "expires_on": null,
+ "type": "dns",
+ "user_id": 1,
+ "certificate_authority_id": 2,
+ "dns_provider_id": 1,
+ "name": "test1.jc21.com.au",
+ "domain_names": [
+ "test1.jc21.com.au"
+ ],
+ "is_ecc": 0,
+ "status": "ready"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/backend/embed/api_docs/paths/certificates/get.json b/backend/embed/api_docs/paths/certificates/get.json
new file mode 100644
index 00000000..d4cfcc79
--- /dev/null
+++ b/backend/embed/api_docs/paths/certificates/get.json
@@ -0,0 +1,90 @@
+{
+ "operationId": "getCertificates",
+ "summary": "Get a list of certificates",
+ "tags": [
+ "Certificates"
+ ],
+ "parameters": [
+ {
+ "in": "query",
+ "name": "offset",
+ "schema": {
+ "type": "number"
+ },
+ "description": "The pagination row offset, default 0",
+ "example": 0
+ },
+ {
+ "in": "query",
+ "name": "limit",
+ "schema": {
+ "type": "number"
+ },
+ "description": "The pagination row limit, default 10",
+ "example": 10
+ },
+ {
+ "in": "query",
+ "name": "sort",
+ "schema": {
+ "type": "string"
+ },
+ "description": "The sorting of the list",
+ "example": "id,name.asc,value.desc"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": [
+ "result"
+ ],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/CertificateList"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "total": 1,
+ "offset": 0,
+ "limit": 10,
+ "sort": [
+ {
+ "field": "name",
+ "direction": "ASC"
+ }
+ ],
+ "items": [
+ {
+ "id": 1,
+ "created_on": 1604536109,
+ "modified_on": 1604536109,
+ "expires_on": null,
+ "type": "dns",
+ "user_id": 1,
+ "certificate_authority_id": 2,
+ "dns_provider_id": 1,
+ "name": "test1.jc21.com.au",
+ "domain_names": [
+ "test1.jc21.com.au"
+ ],
+ "is_ecc": 0,
+ "status": "ready"
+ }
+ ]
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/backend/embed/api_docs/paths/certificates/post.json b/backend/embed/api_docs/paths/certificates/post.json
new file mode 100644
index 00000000..5a332270
--- /dev/null
+++ b/backend/embed/api_docs/paths/certificates/post.json
@@ -0,0 +1,57 @@
+{
+ "operationId": "createCertificate",
+ "summary": "Create a new Certificate",
+ "tags": [
+ "Certificates"
+ ],
+ "requestBody": {
+ "description": "Certificate to create",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.CreateCertificate}}"
+ }
+ }
+ },
+ "responses": {
+ "201": {
+ "description": "201 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": [
+ "result"
+ ],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/CertificateObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 1,
+ "created_on": 1604536109,
+ "modified_on": 1604536109,
+ "expires_on": null,
+ "type": "dns",
+ "user_id": 1,
+ "certificate_authority_id": 2,
+ "dns_provider_id": 1,
+ "name": "test1.jc21.com.au",
+ "domain_names": [
+ "test1.jc21.com.au"
+ ],
+ "is_ecc": 0,
+ "status": "ready"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/backend/embed/api_docs/paths/config/get.json b/backend/embed/api_docs/paths/config/get.json
new file mode 100644
index 00000000..ea1f0701
--- /dev/null
+++ b/backend/embed/api_docs/paths/config/get.json
@@ -0,0 +1,36 @@
+{
+ "operationId": "config",
+ "summary": "Returns the API Service configuration",
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": [
+ "result"
+ ],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/ConfigObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "data": "/data",
+ "log": {
+ "level": "debug",
+ "format": "nice"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/backend/embed/api_docs/paths/dns-providers/get.json b/backend/embed/api_docs/paths/dns-providers/get.json
new file mode 100644
index 00000000..cb7aac8b
--- /dev/null
+++ b/backend/embed/api_docs/paths/dns-providers/get.json
@@ -0,0 +1,82 @@
+{
+ "operationId": "getDNSProviders",
+ "summary": "Get a list of DNS Providers",
+ "tags": ["DNS Providers"],
+ "parameters": [
+ {
+ "in": "query",
+ "name": "offset",
+ "schema": {
+ "type": "number"
+ },
+ "description": "The pagination row offset, default 0",
+ "example": 0
+ },
+ {
+ "in": "query",
+ "name": "limit",
+ "schema": {
+ "type": "number"
+ },
+ "description": "The pagination row limit, default 10",
+ "example": 10
+ },
+ {
+ "in": "query",
+ "name": "sort",
+ "schema": {
+ "type": "string"
+ },
+ "description": "The sorting of the list",
+ "example": "id,name.asc,value.desc"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/DNSProviderList"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "total": 1,
+ "offset": 0,
+ "limit": 10,
+ "sort": [
+ {
+ "field": "name",
+ "direction": "ASC"
+ }
+ ],
+ "items": [
+ {
+ "id": 1,
+ "created_on": 1602593653,
+ "modified_on": 1602593653,
+ "user_id": 1,
+ "name": "Route53",
+ "acmesh_name": "dns_aws",
+ "meta": {
+ "AWS_ACCESS_KEY_ID": "abc123",
+ "AWS_SECRET_ACCESS_KEY": "def098"
+ }
+ }
+ ]
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/dns-providers/post.json b/backend/embed/api_docs/paths/dns-providers/post.json
new file mode 100644
index 00000000..a37d9cd2
--- /dev/null
+++ b/backend/embed/api_docs/paths/dns-providers/post.json
@@ -0,0 +1,49 @@
+{
+ "operationId": "createDNSProvider",
+ "summary": "Create a new DNS Provider",
+ "tags": ["DNS Providers"],
+ "requestBody": {
+ "description": "DNS Provider to Create",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.CreateDNSProvider}}"
+ }
+ }
+ },
+ "responses": {
+ "201": {
+ "description": "201 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/DNSProviderObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 1,
+ "created_on": 1602593653,
+ "modified_on": 1602593653,
+ "user_id": 1,
+ "name": "Route53",
+ "acmesh_name": "dns_aws",
+ "meta": {
+ "AWS_ACCESS_KEY_ID": "abc123",
+ "AWS_SECRET_ACCESS_KEY": "def098"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/dns-providers/providerID/delete.json b/backend/embed/api_docs/paths/dns-providers/providerID/delete.json
new file mode 100644
index 00000000..32b77b0d
--- /dev/null
+++ b/backend/embed/api_docs/paths/dns-providers/providerID/delete.json
@@ -0,0 +1,60 @@
+{
+ "operationId": "deleteDNSProvider",
+ "summary": "Delete a DNS Provider",
+ "tags": [
+ "DNS Providers"
+ ],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "providerID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "Numeric ID of the DNS Provider",
+ "example": 1
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DeletedItemResponse"
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": true
+ }
+ }
+ }
+ }
+ }
+ },
+ "400": {
+ "description": "400 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DeletedItemResponse"
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": null,
+ "error": {
+ "code": 400,
+ "message": "You cannot delete a DNS Provider that is in use!"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/backend/embed/api_docs/paths/dns-providers/providerID/get.json b/backend/embed/api_docs/paths/dns-providers/providerID/get.json
new file mode 100644
index 00000000..eff1a367
--- /dev/null
+++ b/backend/embed/api_docs/paths/dns-providers/providerID/get.json
@@ -0,0 +1,53 @@
+{
+ "operationId": "getDNSProvider",
+ "summary": "Get a DNS Provider object by ID",
+ "tags": ["DNS Providers"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "providerID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "ID of the DNS Provider",
+ "example": 1
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/DNSProviderObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 1,
+ "created_on": 1602593653,
+ "modified_on": 1602593653,
+ "user_id": 1,
+ "name": "Route53",
+ "acmesh_name": "dns_aws",
+ "meta": {
+ "AWS_ACCESS_KEY_ID": "abc123",
+ "AWS_SECRET_ACCESS_KEY": "def098"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/dns-providers/providerID/put.json b/backend/embed/api_docs/paths/dns-providers/providerID/put.json
new file mode 100644
index 00000000..f90d9ef3
--- /dev/null
+++ b/backend/embed/api_docs/paths/dns-providers/providerID/put.json
@@ -0,0 +1,64 @@
+{
+ "operationId": "updateDNSProvider",
+ "summary": "Update an existing DNS Provider",
+ "tags": ["DNS Providers"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "providerID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "ID of the DNS Provider",
+ "example": 1
+ }
+ ],
+ "requestBody": {
+ "description": "DNS Provider details to update",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.UpdateDNSProvider}}"
+ }
+ }
+ },
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/DNSProviderObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "result": {
+ "id": 1,
+ "created_on": 1602593653,
+ "modified_on": 1602593653,
+ "user_id": 1,
+ "name": "Route53",
+ "acmesh_name": "dns_aws",
+ "meta": {
+ "AWS_ACCESS_KEY_ID": "abc123",
+ "AWS_SECRET_ACCESS_KEY": "def098"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/get.json b/backend/embed/api_docs/paths/get.json
new file mode 100644
index 00000000..0f9506f1
--- /dev/null
+++ b/backend/embed/api_docs/paths/get.json
@@ -0,0 +1,47 @@
+{
+ "operationId": "health",
+ "summary": "Returns the API health status",
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": [
+ "result"
+ ],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/HealthObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "version": "3.0.0",
+ "commit": "9f119b6",
+ "healthy": true,
+ "setup": true,
+ "error_reporting": true
+ }
+ }
+ },
+ "unhealthy": {
+ "value": {
+ "result": {
+ "version": "3.0.0",
+ "commit": "9f119b6",
+ "healthy": false,
+ "setup": true,
+ "error_reporting": true
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/backend/embed/api_docs/paths/host-templates/get.json b/backend/embed/api_docs/paths/host-templates/get.json
new file mode 100644
index 00000000..2c3ca700
--- /dev/null
+++ b/backend/embed/api_docs/paths/host-templates/get.json
@@ -0,0 +1,79 @@
+{
+ "operationId": "getHostTemplates",
+ "summary": "Get a list of Host Templates",
+ "tags": ["Hosts"],
+ "parameters": [
+ {
+ "in": "query",
+ "name": "offset",
+ "schema": {
+ "type": "number"
+ },
+ "description": "The pagination row offset, default 0",
+ "example": 0
+ },
+ {
+ "in": "query",
+ "name": "limit",
+ "schema": {
+ "type": "number"
+ },
+ "description": "The pagination row limit, default 10",
+ "example": 10
+ },
+ {
+ "in": "query",
+ "name": "sort",
+ "schema": {
+ "type": "string"
+ },
+ "description": "The sorting of the list",
+ "example": "id,name.asc,value.desc"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/HostTemplateList"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "total": 1,
+ "offset": 0,
+ "limit": 10,
+ "sort": [
+ {
+ "field": "created_on",
+ "direction": "ASC"
+ }
+ ],
+ "items": [
+ {
+ "id": 1,
+ "created_on": 1646218093,
+ "modified_on": 1646218093,
+ "user_id": 1,
+ "name": "Default Proxy Template",
+ "host_type": "proxy",
+ "template": "# this is a proxy template"
+ }
+ ]
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/host-templates/hostTemplateID/delete.json b/backend/embed/api_docs/paths/host-templates/hostTemplateID/delete.json
new file mode 100644
index 00000000..f14fe9d4
--- /dev/null
+++ b/backend/embed/api_docs/paths/host-templates/hostTemplateID/delete.json
@@ -0,0 +1,58 @@
+{
+ "operationId": "deleteHostTemplate",
+ "summary": "Delete a Host Template",
+ "tags": ["Host Templates"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "hostTemplateID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "Numeric ID of the Host Template",
+ "example": 1
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DeletedItemResponse"
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": true
+ }
+ }
+ }
+ }
+ }
+ },
+ "400": {
+ "description": "400 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DeletedItemResponse"
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": null,
+ "error": {
+ "code": 400,
+ "message": "You cannot delete a host template that is in use!"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/host-templates/hostTemplateID/get.json b/backend/embed/api_docs/paths/host-templates/hostTemplateID/get.json
new file mode 100644
index 00000000..6f16a957
--- /dev/null
+++ b/backend/embed/api_docs/paths/host-templates/hostTemplateID/get.json
@@ -0,0 +1,50 @@
+{
+ "operationId": "getHostTemplate",
+ "summary": "Get a Host Template object by ID",
+ "tags": ["Hosts"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "hostTemplateID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "ID of the Host Template",
+ "example": 1
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/HostTemplateObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 1,
+ "created_on": 1646218093,
+ "modified_on": 1646218093,
+ "user_id": 1,
+ "name": "Default Host Template",
+ "host_type": "proxy",
+ "template": "# this is a proxy template"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/host-templates/hostTemplateID/put.json b/backend/embed/api_docs/paths/host-templates/hostTemplateID/put.json
new file mode 100644
index 00000000..2993e5a0
--- /dev/null
+++ b/backend/embed/api_docs/paths/host-templates/hostTemplateID/put.json
@@ -0,0 +1,59 @@
+{
+ "operationId": "updateHostTemplate",
+ "summary": "Update an existing Host Template",
+ "tags": ["Hosts"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "hostTemplateID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "ID of the Host Template",
+ "example": 1
+ }
+ ],
+ "requestBody": {
+ "description": "Host Template details to update",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.UpdateHostTemplate}}"
+ }
+ }
+ },
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/HostTemplateObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 1,
+ "created_on": 1646218093,
+ "modified_on": 1646218093,
+ "user_id": 1,
+ "name": "My renamed proxy template",
+ "host_type": "proxy",
+ "template": "# this is a proxy template"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/host-templates/post.json b/backend/embed/api_docs/paths/host-templates/post.json
new file mode 100644
index 00000000..dd834c87
--- /dev/null
+++ b/backend/embed/api_docs/paths/host-templates/post.json
@@ -0,0 +1,46 @@
+{
+ "operationId": "createHost",
+ "summary": "Create a new Host",
+ "tags": ["Hosts"],
+ "requestBody": {
+ "description": "Host to Create",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.CreateHostTemplate}}"
+ }
+ }
+ },
+ "responses": {
+ "201": {
+ "description": "201 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/HostTemplateObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 10,
+ "created_on": 1646218093,
+ "modified_on": 1646218093,
+ "user_id": 1,
+ "name": "My proxy template",
+ "host_type": "proxy",
+ "template": "# this is a proxy template"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/hosts/get.json b/backend/embed/api_docs/paths/hosts/get.json
new file mode 100644
index 00000000..160980e8
--- /dev/null
+++ b/backend/embed/api_docs/paths/hosts/get.json
@@ -0,0 +1,94 @@
+{
+ "operationId": "getHosts",
+ "summary": "Get a list of Hosts",
+ "tags": ["Hosts"],
+ "parameters": [
+ {
+ "in": "query",
+ "name": "offset",
+ "schema": {
+ "type": "number"
+ },
+ "description": "The pagination row offset, default 0",
+ "example": 0
+ },
+ {
+ "in": "query",
+ "name": "limit",
+ "schema": {
+ "type": "number"
+ },
+ "description": "The pagination row limit, default 10",
+ "example": 10
+ },
+ {
+ "in": "query",
+ "name": "sort",
+ "schema": {
+ "type": "string"
+ },
+ "description": "The sorting of the list",
+ "example": "id,name.asc,value.desc"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/HostList"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "total": 1,
+ "offset": 0,
+ "limit": 10,
+ "sort": [
+ {
+ "field": "domain_names",
+ "direction": "ASC"
+ }
+ ],
+ "items": [
+ {
+ "id": 1,
+ "created_on": 1646279455,
+ "modified_on": 1646279455,
+ "user_id": 2,
+ "type": "proxy",
+ "host_template_id": 1,
+ "listen_interface": "",
+ "domain_names": ["jc21.com"],
+ "upstream_id": 0,
+ "certificate_id": 0,
+ "access_list_id": 0,
+ "ssl_forced": false,
+ "caching_enabled": false,
+ "block_exploits": false,
+ "allow_websocket_upgrade": false,
+ "http2_support": false,
+ "hsts_enabled": false,
+ "hsts_subdomains": false,
+ "paths": "",
+ "upstream_options": "",
+ "advanced_config": "",
+ "is_disabled": false
+ }
+ ]
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/hosts/hostID/delete.json b/backend/embed/api_docs/paths/hosts/hostID/delete.json
new file mode 100644
index 00000000..4df119ad
--- /dev/null
+++ b/backend/embed/api_docs/paths/hosts/hostID/delete.json
@@ -0,0 +1,60 @@
+{
+ "operationId": "deleteHost",
+ "summary": "Delete a Host",
+ "tags": [
+ "Hosts"
+ ],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "hostID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "Numeric ID of the Host",
+ "example": 1
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DeletedItemResponse"
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": true
+ }
+ }
+ }
+ }
+ }
+ },
+ "400": {
+ "description": "400 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DeletedItemResponse"
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": null,
+ "error": {
+ "code": 400,
+ "message": "You cannot delete a host that is in use!"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/backend/embed/api_docs/paths/hosts/hostID/get.json b/backend/embed/api_docs/paths/hosts/hostID/get.json
new file mode 100644
index 00000000..654f7df9
--- /dev/null
+++ b/backend/embed/api_docs/paths/hosts/hostID/get.json
@@ -0,0 +1,65 @@
+{
+ "operationId": "getHost",
+ "summary": "Get a Host object by ID",
+ "tags": ["Hosts"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "hostID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "ID of the Host",
+ "example": 1
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/HostObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 1,
+ "created_on": 1646279455,
+ "modified_on": 1646279455,
+ "user_id": 2,
+ "type": "proxy",
+ "host_template_id": 1,
+ "listen_interface": "",
+ "domain_names": ["jc21.com"],
+ "upstream_id": 0,
+ "certificate_id": 0,
+ "access_list_id": 0,
+ "ssl_forced": false,
+ "caching_enabled": false,
+ "block_exploits": false,
+ "allow_websocket_upgrade": false,
+ "http2_support": false,
+ "hsts_enabled": false,
+ "hsts_subdomains": false,
+ "paths": "",
+ "upstream_options": "",
+ "advanced_config": "",
+ "is_disabled": false
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/hosts/hostID/put.json b/backend/embed/api_docs/paths/hosts/hostID/put.json
new file mode 100644
index 00000000..ec9675c8
--- /dev/null
+++ b/backend/embed/api_docs/paths/hosts/hostID/put.json
@@ -0,0 +1,74 @@
+{
+ "operationId": "updateHost",
+ "summary": "Update an existing Host",
+ "tags": ["Hosts"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "hostID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "ID of the Host",
+ "example": 1
+ }
+ ],
+ "requestBody": {
+ "description": "Host details to update",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.UpdateHost}}"
+ }
+ }
+ },
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/HostObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 1,
+ "created_on": 1646279455,
+ "modified_on": 1646279455,
+ "user_id": 2,
+ "type": "proxy",
+ "host_template_id": 1,
+ "listen_interface": "",
+ "domain_names": ["jc21.com"],
+ "upstream_id": 0,
+ "certificate_id": 0,
+ "access_list_id": 0,
+ "ssl_forced": false,
+ "caching_enabled": false,
+ "block_exploits": false,
+ "allow_websocket_upgrade": false,
+ "http2_support": false,
+ "hsts_enabled": false,
+ "hsts_subdomains": false,
+ "paths": "",
+ "upstream_options": "",
+ "advanced_config": "",
+ "is_disabled": false
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/hosts/post.json b/backend/embed/api_docs/paths/hosts/post.json
new file mode 100644
index 00000000..e0362c68
--- /dev/null
+++ b/backend/embed/api_docs/paths/hosts/post.json
@@ -0,0 +1,61 @@
+{
+ "operationId": "createHost",
+ "summary": "Create a new Host",
+ "tags": ["Hosts"],
+ "requestBody": {
+ "description": "Host to Create",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.CreateHost}}"
+ }
+ }
+ },
+ "responses": {
+ "201": {
+ "description": "201 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/HostObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 1,
+ "created_on": 1645700556,
+ "modified_on": 1645700556,
+ "user_id": 2,
+ "type": "proxy",
+ "host_template_id": 1,
+ "listen_interface": "",
+ "domain_names": ["jc21.com"],
+ "upstream_id": 0,
+ "certificate_id": 0,
+ "access_list_id": 0,
+ "ssl_forced": false,
+ "caching_enabled": false,
+ "block_exploits": false,
+ "allow_websocket_upgrade": false,
+ "http2_support": false,
+ "hsts_enabled": false,
+ "hsts_subdomains": false,
+ "paths": "",
+ "upstream_options": "",
+ "advanced_config": "",
+ "is_disabled": false
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/schema/get.json b/backend/embed/api_docs/paths/schema/get.json
new file mode 100644
index 00000000..e21ae805
--- /dev/null
+++ b/backend/embed/api_docs/paths/schema/get.json
@@ -0,0 +1,9 @@
+{
+ "operationId": "schema",
+ "summary": "Returns this swagger API schema",
+ "responses": {
+ "200": {
+ "description": "200 response"
+ }
+ }
+}
\ No newline at end of file
diff --git a/backend/embed/api_docs/paths/settings/get.json b/backend/embed/api_docs/paths/settings/get.json
new file mode 100644
index 00000000..3455b0ff
--- /dev/null
+++ b/backend/embed/api_docs/paths/settings/get.json
@@ -0,0 +1,84 @@
+{
+ "operationId": "getSettings",
+ "summary": "Get a list of settings",
+ "tags": [
+ "Settings"
+ ],
+ "parameters": [
+ {
+ "in": "query",
+ "name": "offset",
+ "schema": {
+ "type": "number"
+ },
+ "description": "The pagination row offset, default 0",
+ "example": 0
+ },
+ {
+ "in": "query",
+ "name": "limit",
+ "schema": {
+ "type": "number"
+ },
+ "description": "The pagination row limit, default 10",
+ "example": 10
+ },
+ {
+ "in": "query",
+ "name": "sort",
+ "schema": {
+ "type": "string"
+ },
+ "description": "The sorting of the list",
+ "example": "id,name.asc,value.desc"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": [
+ "result"
+ ],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/SettingList"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "total": 1,
+ "offset": 0,
+ "limit": 10,
+ "sort": [
+ {
+ "field": "name",
+ "direction": "ASC"
+ }
+ ],
+ "items": [
+ {
+ "id": 1,
+ "created_on": 1578010090,
+ "modified_on": 1578010095,
+ "name": "default-site",
+ "value": {
+ "html": "not found
",
+ "type": "custom"
+ }
+ }
+ ]
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/backend/embed/api_docs/paths/settings/name/get.json b/backend/embed/api_docs/paths/settings/name/get.json
new file mode 100644
index 00000000..775a4737
--- /dev/null
+++ b/backend/embed/api_docs/paths/settings/name/get.json
@@ -0,0 +1,55 @@
+{
+ "operationId": "getSetting",
+ "summary": "Get a setting object by name",
+ "tags": [
+ "Settings"
+ ],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "name",
+ "schema": {
+ "type": "string",
+ "minLength": 2
+ },
+ "required": true,
+ "description": "Name of the setting",
+ "example": "default-site"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": [
+ "result"
+ ],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/SettingObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 2,
+ "created_on": 1578010090,
+ "modified_on": 1578010095,
+ "name": "default-site",
+ "value": {
+ "html": "not found
",
+ "type": "custom"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/backend/embed/api_docs/paths/settings/name/put.json b/backend/embed/api_docs/paths/settings/name/put.json
new file mode 100644
index 00000000..ee741104
--- /dev/null
+++ b/backend/embed/api_docs/paths/settings/name/put.json
@@ -0,0 +1,64 @@
+{
+ "operationId": "updateSetting",
+ "summary": "Update an existing Setting",
+ "tags": [
+ "Settings"
+ ],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "name",
+ "schema": {
+ "type": "string",
+ "minLength": 2
+ },
+ "required": true,
+ "description": "Name of the setting",
+ "example": "default-site"
+ }
+ ],
+ "requestBody": {
+ "description": "Setting details to update",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.UpdateSetting}}"
+ }
+ }
+ },
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": [
+ "result"
+ ],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/SettingObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 2,
+ "created_on": 1578010090,
+ "modified_on": 1578010090,
+ "name": "default-site",
+ "value": {
+ "html": "not found
",
+ "type": "custom"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/backend/embed/api_docs/paths/settings/post.json b/backend/embed/api_docs/paths/settings/post.json
new file mode 100644
index 00000000..63921da2
--- /dev/null
+++ b/backend/embed/api_docs/paths/settings/post.json
@@ -0,0 +1,51 @@
+{
+ "operationId": "createSetting",
+ "summary": "Create a new Setting",
+ "tags": [
+ "Settings"
+ ],
+ "requestBody": {
+ "description": "Setting to Create",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.CreateSetting}}"
+ }
+ }
+ },
+ "responses": {
+ "201": {
+ "description": "201 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": [
+ "result"
+ ],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/SettingObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 2,
+ "created_on": 1578010090,
+ "modified_on": 1578010090,
+ "name": "default-site",
+ "value": {
+ "html": "not found
",
+ "type": "custom"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/backend/embed/api_docs/paths/streams/get.json b/backend/embed/api_docs/paths/streams/get.json
new file mode 100644
index 00000000..482212d1
--- /dev/null
+++ b/backend/embed/api_docs/paths/streams/get.json
@@ -0,0 +1,75 @@
+{
+ "operationId": "getStreams",
+ "summary": "Get a list of Streams",
+ "tags": [
+ "Streams"
+ ],
+ "parameters": [
+ {
+ "in": "query",
+ "name": "offset",
+ "schema": {
+ "type": "number"
+ },
+ "description": "The pagination row offset, default 0",
+ "example": 0
+ },
+ {
+ "in": "query",
+ "name": "limit",
+ "schema": {
+ "type": "number"
+ },
+ "description": "The pagination row limit, default 10",
+ "example": 10
+ },
+ {
+ "in": "query",
+ "name": "sort",
+ "schema": {
+ "type": "string"
+ },
+ "description": "The sorting of the list",
+ "example": "id,name.asc,value.desc"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": [
+ "result"
+ ],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/StreamList"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "total": 1,
+ "offset": 0,
+ "limit": 10,
+ "sort": [
+ {
+ "field": "name",
+ "direction": "ASC"
+ }
+ ],
+ "items": [
+ "TODO"
+ ]
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/backend/embed/api_docs/paths/streams/post.json b/backend/embed/api_docs/paths/streams/post.json
new file mode 100644
index 00000000..d1949830
--- /dev/null
+++ b/backend/embed/api_docs/paths/streams/post.json
@@ -0,0 +1,42 @@
+{
+ "operationId": "createStream",
+ "summary": "Create a new Stream",
+ "tags": [
+ "Streams"
+ ],
+ "requestBody": {
+ "description": "Host to Create",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.CreateStream}}"
+ }
+ }
+ },
+ "responses": {
+ "201": {
+ "description": "201 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": [
+ "result"
+ ],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/StreamObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": "TODO"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/backend/embed/api_docs/paths/streams/streamID/delete.json b/backend/embed/api_docs/paths/streams/streamID/delete.json
new file mode 100644
index 00000000..d0f35269
--- /dev/null
+++ b/backend/embed/api_docs/paths/streams/streamID/delete.json
@@ -0,0 +1,60 @@
+{
+ "operationId": "deleteStream",
+ "summary": "Delete a Stream",
+ "tags": [
+ "Streams"
+ ],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "streamID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "Numeric ID of the Stream",
+ "example": 1
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DeletedItemResponse"
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": true
+ }
+ }
+ }
+ }
+ }
+ },
+ "400": {
+ "description": "400 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DeletedItemResponse"
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": null,
+ "error": {
+ "code": 400,
+ "message": "You cannot delete a Stream that is in use!"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/backend/embed/api_docs/paths/streams/streamID/get.json b/backend/embed/api_docs/paths/streams/streamID/get.json
new file mode 100644
index 00000000..94e54c3f
--- /dev/null
+++ b/backend/embed/api_docs/paths/streams/streamID/get.json
@@ -0,0 +1,46 @@
+{
+ "operationId": "getStream",
+ "summary": "Get a Stream object by ID",
+ "tags": [
+ "Streams"
+ ],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "streamID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "ID of the Stream",
+ "example": 1
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": [
+ "result"
+ ],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/StreamObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": "TODO"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/backend/embed/api_docs/paths/streams/streamID/put.json b/backend/embed/api_docs/paths/streams/streamID/put.json
new file mode 100644
index 00000000..4baa882e
--- /dev/null
+++ b/backend/embed/api_docs/paths/streams/streamID/put.json
@@ -0,0 +1,55 @@
+{
+ "operationId": "updateStream",
+ "summary": "Update an existing Stream",
+ "tags": [
+ "Streams"
+ ],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "streamID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "ID of the Stream",
+ "example": 1
+ }
+ ],
+ "requestBody": {
+ "description": "Stream details to update",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.UpdateStream}}"
+ }
+ }
+ },
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": [
+ "result"
+ ],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/StreamObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": "TODO"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/backend/embed/api_docs/paths/tokens/get.json b/backend/embed/api_docs/paths/tokens/get.json
new file mode 100644
index 00000000..601697a7
--- /dev/null
+++ b/backend/embed/api_docs/paths/tokens/get.json
@@ -0,0 +1,37 @@
+{
+ "operationId": "refreshToken",
+ "summary": "Refresh your access token",
+ "tags": [
+ "Tokens"
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": [
+ "result"
+ ],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/StreamObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "expires": 1566540510,
+ "token": "eyJhbGciOiJSUzUxMiIsInR5cCI6IkpXVCJ9.ey...xaHKYr3Kk6MvkUjcC4",
+ "scope": "user"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/backend/embed/api_docs/paths/tokens/post.json b/backend/embed/api_docs/paths/tokens/post.json
new file mode 100644
index 00000000..44b95b2b
--- /dev/null
+++ b/backend/embed/api_docs/paths/tokens/post.json
@@ -0,0 +1,79 @@
+{
+ "operationId": "requestToken",
+ "summary": "Request a new access token from credentials",
+ "tags": [
+ "Tokens"
+ ],
+ "requestBody": {
+ "description": "Credentials Payload",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.GetToken}}"
+ }
+ }
+ },
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": [
+ "result"
+ ],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/StreamObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "expires": 1566540510,
+ "token": "eyJhbGciOiJSUzUxMiIsInR5cCI6IkpXVCJ9.ey...xaHKYr3Kk6MvkUjcC4",
+ "scope": "user"
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "403": {
+ "description": "403 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "error"
+ ],
+ "properties": {
+ "result": {
+ "nullable": true
+ },
+ "error": {
+ "$ref": "#/components/schemas/ErrorObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": null,
+ "error": {
+ "code": 403,
+ "message": "Not available during setup phase"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/backend/embed/api_docs/paths/users/get.json b/backend/embed/api_docs/paths/users/get.json
new file mode 100644
index 00000000..647b6d52
--- /dev/null
+++ b/backend/embed/api_docs/paths/users/get.json
@@ -0,0 +1,117 @@
+{
+ "operationId": "getUsers",
+ "summary": "Get a list of users",
+ "tags": ["Users"],
+ "parameters": [
+ {
+ "in": "query",
+ "name": "offset",
+ "schema": {
+ "type": "number"
+ },
+ "description": "The pagination row offset, default 0",
+ "example": 0
+ },
+ {
+ "in": "query",
+ "name": "limit",
+ "schema": {
+ "type": "number"
+ },
+ "description": "The pagination row limit, default 10",
+ "example": 10
+ },
+ {
+ "in": "query",
+ "name": "sort",
+ "schema": {
+ "type": "string"
+ },
+ "description": "The sorting of the list",
+ "example": "name,nickname.desc,email.asc"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/UserList"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "total": 3,
+ "offset": 0,
+ "limit": 100,
+ "sort": [
+ {
+ "field": "name",
+ "direction": "ASC"
+ },
+ {
+ "field": "nickname",
+ "direction": "DESC"
+ },
+ {
+ "field": "email",
+ "direction": "ASC"
+ }
+ ],
+ "items": [
+ {
+ "id": 1,
+ "name": "Jamie Curnow",
+ "nickname": "James",
+ "email": "jc@jc21.com",
+ "created_on": 1578010090,
+ "modified_on": 1578010095,
+ "gravatar_url": "https://www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?d=mm&r=pg&s=128",
+ "is_disabled": false,
+ "capabilities": ["full-admin"]
+ },
+ {
+ "id": 2,
+ "name": "John Doe",
+ "nickname": "John",
+ "email": "johdoe@example.com",
+ "created_on": 1578010100,
+ "modified_on": 1578010105,
+ "gravatar_url": "https://www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?d=mm&r=pg&s=128",
+ "is_disabled": false,
+ "capabilities": [
+ "hosts.view",
+ "hosts.manage"
+ ]
+ },
+ {
+ "id": 3,
+ "name": "Jane Doe",
+ "nickname": "Jane",
+ "email": "janedoe@example.com",
+ "created_on": 1578010110,
+ "modified_on": 1578010115,
+ "gravatar_url": "https://www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?d=mm&r=pg&s=128",
+ "is_disabled": false,
+ "capabilities": [
+ "hosts.view",
+ "hosts.manage"
+ ]
+ }
+ ]
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/users/post.json b/backend/embed/api_docs/paths/users/post.json
new file mode 100644
index 00000000..7305f188
--- /dev/null
+++ b/backend/embed/api_docs/paths/users/post.json
@@ -0,0 +1,79 @@
+{
+ "operationId": "createUser",
+ "summary": "Create a new User",
+ "tags": ["Users"],
+ "requestBody": {
+ "description": "User to Create",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.CreateUser}}"
+ }
+ }
+ },
+ "responses": {
+ "201": {
+ "description": "201 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/UserObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 1,
+ "name": "Jamie Curnow",
+ "nickname": "James",
+ "email": "jc@jc21.com",
+ "created_on": 1578010100,
+ "modified_on": 1578010100,
+ "gravatar_url": "https://www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?d=mm&r=pg&s=128",
+ "is_disabled": false,
+ "auth": {
+ "$ref": "#/components/schemas/UserAuthObject"
+ },
+ "capabilities": ["full-admin"]
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "400": {
+ "description": "400 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": ["error"],
+ "properties": {
+ "result": {
+ "nullable": true
+ },
+ "error": {
+ "$ref": "#/components/schemas/ErrorObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "error": {
+ "code": 400,
+                "message": "A user already exists with this email address"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/users/userID/auth/post.json b/backend/embed/api_docs/paths/users/userID/auth/post.json
new file mode 100644
index 00000000..a71432f8
--- /dev/null
+++ b/backend/embed/api_docs/paths/users/userID/auth/post.json
@@ -0,0 +1,65 @@
+{
+ "operationId": "setPassword",
+ "summary": "Set a User's password",
+ "tags": ["Users"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "userID",
+ "schema": {
+ "oneOf": [
+ {
+ "type": "integer",
+ "minimum": 1
+ },
+ {
+ "type": "string",
+ "pattern": "^me$"
+ }
+ ]
+ },
+ "required": true,
+ "description": "Numeric ID of the user or 'me' to set yourself",
+ "example": 1
+ }
+ ],
+ "requestBody": {
+ "description": "Credentials to set",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.SetAuth}}"
+ }
+ }
+ },
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/UserAuthObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 2,
+ "user_id": 3,
+ "type": "password",
+ "created_on": 1648422222,
+ "modified_on": 1648423979
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/users/userID/delete.json b/backend/embed/api_docs/paths/users/userID/delete.json
new file mode 100644
index 00000000..0ffa7024
--- /dev/null
+++ b/backend/embed/api_docs/paths/users/userID/delete.json
@@ -0,0 +1,60 @@
+{
+ "operationId": "deleteUser",
+ "summary": "Delete a User",
+ "tags": [
+ "Users"
+ ],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "userID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "Numeric ID of the user",
+ "example": 1
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DeletedItemResponse"
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": true
+ }
+ }
+ }
+ }
+ }
+ },
+ "400": {
+ "description": "400 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DeletedItemResponse"
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": null,
+ "error": {
+ "code": 400,
+ "message": "You cannot delete yourself!"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/backend/embed/api_docs/paths/users/userID/get.json b/backend/embed/api_docs/paths/users/userID/get.json
new file mode 100644
index 00000000..d5eebb03
--- /dev/null
+++ b/backend/embed/api_docs/paths/users/userID/get.json
@@ -0,0 +1,60 @@
+{
+ "operationId": "getUser",
+ "summary": "Get a user object by ID or 'me'",
+ "tags": ["Users"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "userID",
+ "schema": {
+ "anyOf": [
+ {
+ "type": "integer",
+ "minimum": 1
+ },
+ {
+ "type": "string",
+ "pattern": "^me$"
+ }
+ ]
+ },
+ "required": true,
+ "description": "Numeric ID of the user or 'me' to get yourself",
+ "example": 1
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/UserObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 1,
+ "name": "Jamie Curnow",
+ "nickname": "James",
+ "email": "jc@jc21.com",
+ "created_on": 1578010100,
+ "modified_on": 1578010105,
+ "gravatar_url": "https://www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?d=mm&r=pg&s=128",
+ "is_disabled": false,
+ "capabilities": ["full-admin"]
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/users/userID/put.json b/backend/embed/api_docs/paths/users/userID/put.json
new file mode 100644
index 00000000..33dccd5c
--- /dev/null
+++ b/backend/embed/api_docs/paths/users/userID/put.json
@@ -0,0 +1,107 @@
+{
+ "operationId": "updateUser",
+ "summary": "Update an existing User",
+ "tags": ["Users"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "userID",
+ "schema": {
+ "anyOf": [
+ {
+ "type": "integer",
+ "minimum": 1
+ },
+ {
+ "type": "string",
+ "pattern": "^me$"
+ }
+ ]
+ },
+ "required": true,
+ "description": "Numeric ID of the user or 'me' to update yourself",
+ "example": 1
+ }
+ ],
+ "requestBody": {
+ "description": "User details to update",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.UpdateUser}}"
+ }
+ }
+ },
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/UserObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 1,
+ "name": "Jamie Curnow",
+ "nickname": "James",
+ "email": "jc@jc21.com",
+ "created_on": 1578010100,
+ "modified_on": 1578010110,
+ "gravatar_url": "https://www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?d=mm&r=pg&s=128",
+ "is_disabled": false,
+ "capabilities": ["full-admin"]
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "400": {
+ "description": "400 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "required": ["error"],
+ "properties": {
+ "result": {
+ "nullable": true
+ },
+ "error": {
+ "$ref": "#/components/schemas/ErrorObject"
+ }
+ }
+ },
+ "examples": {
+ "duplicateemail": {
+ "value": {
+ "result": null,
+ "error": {
+ "code": 400,
+ "message": "A user already exists with this email address"
+ }
+ }
+ },
+ "nodisable": {
+ "value": {
+ "result": null,
+ "error": {
+ "code": 400,
+ "message": "You cannot disable yourself!"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/main.go b/backend/embed/main.go
new file mode 100644
index 00000000..9909cef6
--- /dev/null
+++ b/backend/embed/main.go
@@ -0,0 +1,19 @@
+package embed
+
+import "embed"
+
+// APIDocFiles contain all the files used for swagger schema generation
+//go:embed api_docs
+var APIDocFiles embed.FS
+
+// Assets are frontend assets served from within this app
+//go:embed assets
+var Assets embed.FS
+
+// MigrationFiles are database migrations
+//go:embed migrations/*.sql
+var MigrationFiles embed.FS
+
+// NginxFiles hold nginx config templates
+//go:embed nginx
+var NginxFiles embed.FS
diff --git a/backend/embed/migrations/20201013035318_initial_schema.sql b/backend/embed/migrations/20201013035318_initial_schema.sql
new file mode 100644
index 00000000..417b5406
--- /dev/null
+++ b/backend/embed/migrations/20201013035318_initial_schema.sql
@@ -0,0 +1,209 @@
+-- migrate:up
+
+CREATE TABLE IF NOT EXISTS `user`
+(
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ created_on INTEGER NOT NULL DEFAULT 0,
+ modified_on INTEGER NOT NULL DEFAULT 0,
+ name TEXT NOT NULL,
+ nickname TEXT NOT NULL,
+ email TEXT NOT NULL,
+ is_system INTEGER NOT NULL DEFAULT 0,
+ is_disabled INTEGER NOT NULL DEFAULT 0,
+ is_deleted INTEGER NOT NULL DEFAULT 0
+);
+
+CREATE TABLE IF NOT EXISTS `capability`
+(
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ name TEXT NOT NULL,
+ UNIQUE (name)
+);
+
+CREATE TABLE IF NOT EXISTS `user_has_capability`
+(
+ user_id INTEGER NOT NULL,
+ capability_id INTEGER NOT NULL,
+ UNIQUE (user_id, capability_id),
+ FOREIGN KEY (capability_id) REFERENCES capability (id)
+);
+
+CREATE TABLE IF NOT EXISTS `auth`
+(
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ created_on INTEGER NOT NULL DEFAULT 0,
+ modified_on INTEGER NOT NULL DEFAULT 0,
+ user_id INTEGER NOT NULL,
+ type TEXT NOT NULL,
+ secret TEXT NOT NULL,
+ is_deleted INTEGER NOT NULL DEFAULT 0,
+ FOREIGN KEY (user_id) REFERENCES user (id),
+ UNIQUE (user_id, type)
+);
+
+CREATE TABLE IF NOT EXISTS `setting`
+(
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ created_on INTEGER NOT NULL DEFAULT 0,
+ modified_on INTEGER NOT NULL DEFAULT 0,
+ name TEXT NOT NULL,
+ description TEXT NOT NULL DEFAULT "",
+ value TEXT NOT NULL,
+ UNIQUE (name)
+);
+
+CREATE TABLE IF NOT EXISTS `audit_log`
+(
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ created_on INTEGER NOT NULL DEFAULT 0,
+ modified_on INTEGER NOT NULL DEFAULT 0,
+ user_id INTEGER NOT NULL,
+ object_type TEXT NOT NULL,
+ object_id INTEGER NOT NULL,
+ action TEXT NOT NULL,
+ meta TEXT NOT NULL,
+ FOREIGN KEY (user_id) REFERENCES user (id)
+);
+
+CREATE TABLE IF NOT EXISTS `certificate_authority`
+(
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ created_on INTEGER NOT NULL DEFAULT 0,
+ modified_on INTEGER NOT NULL DEFAULT 0,
+ name TEXT NOT NULL,
+ acmesh_server TEXT NOT NULL DEFAULT "",
+ ca_bundle TEXT NOT NULL DEFAULT "",
+ is_wildcard_supported INTEGER NOT NULL DEFAULT 0, -- specific to each CA, acme v1 doesn't usually have wildcards
+ max_domains INTEGER NOT NULL DEFAULT 5, -- per request
+ is_readonly INTEGER NOT NULL DEFAULT 0,
+ is_deleted INTEGER NOT NULL DEFAULT 0
+);
+
+CREATE TABLE IF NOT EXISTS `dns_provider`
+(
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ created_on INTEGER NOT NULL DEFAULT 0,
+ modified_on INTEGER NOT NULL DEFAULT 0,
+ user_id INTEGER NOT NULL,
+ name TEXT NOT NULL,
+ acmesh_name TEXT NOT NULL,
+ dns_sleep INTEGER NOT NULL DEFAULT 0,
+ meta TEXT NOT NULL,
+ is_deleted INTEGER NOT NULL DEFAULT 0,
+ FOREIGN KEY (user_id) REFERENCES user (id)
+);
+
+CREATE TABLE IF NOT EXISTS `certificate`
+(
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ created_on INTEGER NOT NULL DEFAULT 0,
+ modified_on INTEGER NOT NULL DEFAULT 0,
+ type TEXT NOT NULL, -- custom,dns,http
+ user_id INTEGER NOT NULL,
+ certificate_authority_id INTEGER, -- 0 for a custom cert
+ dns_provider_id INTEGER, -- 0, for a http or custom cert
+ name TEXT NOT NULL,
+ domain_names TEXT NOT NULL,
+ expires_on INTEGER DEFAULT 0,
+ status TEXT NOT NULL, -- ready,requesting,failed,provided
+ error_message text NOT NULL DEFAULT "",
+ meta TEXT NOT NULL,
+ is_ecc INTEGER NOT NULL DEFAULT 0,
+ is_deleted INTEGER NOT NULL DEFAULT 0,
+ FOREIGN KEY (user_id) REFERENCES user (id),
+ FOREIGN KEY (certificate_authority_id) REFERENCES certificate_authority (id),
+ FOREIGN KEY (dns_provider_id) REFERENCES dns_provider (id)
+);
+
+CREATE TABLE IF NOT EXISTS `stream`
+(
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ created_on INTEGER NOT NULL DEFAULT 0,
+ modified_on INTEGER NOT NULL DEFAULT 0,
+ user_id INTEGER NOT NULL,
+ listen_interface TEXT NOT NULL,
+ incoming_port INTEGER NOT NULL,
+ upstream_options TEXT NOT NULL,
+ tcp_forwarding INTEGER NOT NULL DEFAULT 0,
+ udp_forwarding INTEGER NOT NULL DEFAULT 0,
+ advanced_config TEXT NOT NULL,
+ is_disabled INTEGER NOT NULL DEFAULT 0,
+ is_deleted INTEGER NOT NULL DEFAULT 0,
+ FOREIGN KEY (user_id) REFERENCES user (id)
+);
+
+CREATE TABLE IF NOT EXISTS `upstream`
+(
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ created_on INTEGER NOT NULL DEFAULT 0,
+ modified_on INTEGER NOT NULL DEFAULT 0,
+ user_id INTEGER NOT NULL,
+ hosts TEXT NOT NULL,
+ balance_method TEXT NOT NULL,
+ max_fails INTEGER NOT NULL DEFAULT 1,
+ fail_timeout INTEGER NOT NULL DEFAULT 10,
+ advanced_config TEXT NOT NULL,
+ is_deleted INTEGER NOT NULL DEFAULT 0,
+ FOREIGN KEY (user_id) REFERENCES user (id)
+);
+
+CREATE TABLE IF NOT EXISTS `access_list`
+(
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ created_on INTEGER NOT NULL DEFAULT 0,
+ modified_on INTEGER NOT NULL DEFAULT 0,
+ user_id INTEGER NOT NULL,
+ name TEXT NOT NULL,
+ meta TEXT NOT NULL,
+ is_deleted INTEGER NOT NULL DEFAULT 0,
+ FOREIGN KEY (user_id) REFERENCES user (id)
+);
+
+CREATE TABLE IF NOT EXISTS `host_template`
+(
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ created_on INTEGER NOT NULL DEFAULT 0,
+ modified_on INTEGER NOT NULL DEFAULT 0,
+ user_id INTEGER NOT NULL,
+ name TEXT NOT NULL,
+ host_type TEXT NOT NULL,
+ template TEXT NOT NULL,
+ is_deleted INTEGER NOT NULL DEFAULT 0,
+ FOREIGN KEY (user_id) REFERENCES user (id)
+);
+
+CREATE TABLE IF NOT EXISTS `host`
+(
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ created_on INTEGER NOT NULL DEFAULT 0,
+ modified_on INTEGER NOT NULL DEFAULT 0,
+ user_id INTEGER NOT NULL,
+ type TEXT NOT NULL,
+ host_template_id INTEGER NOT NULL,
+ listen_interface TEXT NOT NULL,
+ domain_names TEXT NOT NULL,
+ upstream_id INTEGER NOT NULL,
+ certificate_id INTEGER,
+ access_list_id INTEGER,
+ ssl_forced INTEGER NOT NULL DEFAULT 0,
+ caching_enabled INTEGER NOT NULL DEFAULT 0,
+ block_exploits INTEGER NOT NULL DEFAULT 0,
+ allow_websocket_upgrade INTEGER NOT NULL DEFAULT 0,
+ http2_support INTEGER NOT NULL DEFAULT 0,
+ hsts_enabled INTEGER NOT NULL DEFAULT 0,
+ hsts_subdomains INTEGER NOT NULL DEFAULT 0,
+ paths TEXT NOT NULL,
+ upstream_options TEXT NOT NULL DEFAULT "",
+ advanced_config TEXT NOT NULL DEFAULT "",
+ is_disabled INTEGER NOT NULL DEFAULT 0,
+ is_deleted INTEGER NOT NULL DEFAULT 0,
+ FOREIGN KEY (user_id) REFERENCES user (id),
+ FOREIGN KEY (host_template_id) REFERENCES host_template (id),
+ FOREIGN KEY (upstream_id) REFERENCES upstream (id),
+ FOREIGN KEY (certificate_id) REFERENCES certificate (id),
+ FOREIGN KEY (access_list_id) REFERENCES access_list (id)
+);
+
+-- migrate:down
+
+-- Not allowed to go down from initial
diff --git a/backend/embed/migrations/20201013035839_initial_data.sql b/backend/embed/migrations/20201013035839_initial_data.sql
new file mode 100644
index 00000000..fa71871e
--- /dev/null
+++ b/backend/embed/migrations/20201013035839_initial_data.sql
@@ -0,0 +1,171 @@
+-- migrate:up
+
+-- User permissions
+INSERT INTO `capability` (
+ name
+) VALUES
+ ("full-admin"),
+ ("access-lists.view"),
+ ("access-lists.manage"),
+ ("audit-log.view"),
+ ("certificates.view"),
+ ("certificates.manage"),
+ ("certificate-authorities.view"),
+ ("certificate-authorities.manage"),
+ ("dns-providers.view"),
+ ("dns-providers.manage"),
+ ("hosts.view"),
+ ("hosts.manage"),
+ ("host-templates.view"),
+ ("host-templates.manage"),
+ ("settings.manage"),
+ ("streams.view"),
+ ("streams.manage"),
+ ("users.manage");
+
+-- Default error reporting setting
+INSERT INTO `setting` (
+ created_on,
+ modified_on,
+ name,
+ description,
+ value
+) VALUES (
+ strftime('%s', 'now'),
+ strftime('%s', 'now'),
+ "error-reporting",
+ "If enabled, any application errors are reported to Sentry. Sensitive information is not sent.",
+ "true" -- remember this is json
+);
+
+-- Default site
+INSERT INTO `setting` (
+ created_on,
+ modified_on,
+ name,
+ description,
+ value
+) VALUES (
+ strftime('%s', 'now'),
+ strftime('%s', 'now'),
+ "default-site",
+ "What to show users who hit your Nginx server by default",
+ '"welcome"' -- remember this is json
+);
+
+-- Default Certificate Authorities
+
+INSERT INTO `certificate_authority` (
+ created_on,
+ modified_on,
+ name,
+ acmesh_server,
+ is_wildcard_supported,
+ max_domains,
+ is_readonly
+) VALUES (
+ strftime('%s', 'now'),
+ strftime('%s', 'now'),
+ "ZeroSSL",
+ "zerossl",
+ 1,
+ 10,
+ 1
+), (
+ strftime('%s', 'now'),
+ strftime('%s', 'now'),
+ "Let's Encrypt",
+ "https://acme-v02.api.letsencrypt.org/directory",
+ 1,
+ 10,
+ 1
+), (
+ strftime('%s', 'now'),
+ strftime('%s', 'now'),
+ "Buypass Go SSL",
+ "https://api.buypass.com/acme/directory",
+ 0,
+ 5,
+ 1
+), (
+ strftime('%s', 'now'),
+ strftime('%s', 'now'),
+ "Let's Encrypt (Testing)",
+ "https://acme-staging-v02.api.letsencrypt.org/directory",
+ 1,
+ 10,
+ 1
+), (
+ strftime('%s', 'now'),
+ strftime('%s', 'now'),
+ "Buypass Go SSL (Testing)",
+ "https://api.test4.buypass.no/acme/directory",
+ 0,
+ 5,
+ 1
+), (
+ strftime('%s', 'now'),
+ strftime('%s', 'now'),
+ "SSL.com",
+ "ssl.com",
+ 0,
+ 10,
+ 1
+);
+
+-- System User
+INSERT INTO `user` (
+ created_on,
+ modified_on,
+ name,
+ nickname,
+ email,
+ is_system
+) VALUES (
+ strftime('%s', 'now'),
+ strftime('%s', 'now'),
+ "System",
+ "System",
+ "system@localhost",
+ 1
+);
+
+-- Host Templates
+INSERT INTO `host_template` (
+ created_on,
+ modified_on,
+ user_id,
+ name,
+ host_type,
+ template
+) VALUES (
+ strftime('%s', 'now'),
+ strftime('%s', 'now'),
+ (SELECT id FROM user WHERE is_system = 1 LIMIT 1),
+ "Default Proxy Template",
+ "proxy",
+ "# this is a proxy template"
+), (
+ strftime('%s', 'now'),
+ strftime('%s', 'now'),
+ (SELECT id FROM user WHERE is_system = 1 LIMIT 1),
+ "Default Redirect Template",
+ "redirect",
+ "# this is a redirect template"
+), (
+ strftime('%s', 'now'),
+ strftime('%s', 'now'),
+ (SELECT id FROM user WHERE is_system = 1 LIMIT 1),
+ "Default Dead Template",
+ "dead",
+ "# this is a dead template"
+), (
+ strftime('%s', 'now'),
+ strftime('%s', 'now'),
+ (SELECT id FROM user WHERE is_system = 1 LIMIT 1),
+ "Default Stream Template",
+ "stream",
+ "# this is a stream template"
+);
+
+-- migrate:down
diff --git a/backend/embed/nginx/_assets.conf.hbs b/backend/embed/nginx/_assets.conf.hbs
new file mode 100644
index 00000000..73639767
--- /dev/null
+++ b/backend/embed/nginx/_assets.conf.hbs
@@ -0,0 +1,4 @@
+{{#if caching_enabled}}
+ # Asset Caching
+ include conf.d/include/assets.conf;
+{{/if}}
diff --git a/backend/embed/nginx/_certificates.conf.hbs b/backend/embed/nginx/_certificates.conf.hbs
new file mode 100644
index 00000000..d114f982
--- /dev/null
+++ b/backend/embed/nginx/_certificates.conf.hbs
@@ -0,0 +1,13 @@
+{{#if certificate}}
+ {{#if (equal certificate.certificate_authority_id "0")}}
+ # Custom SSL
+ ssl_certificate {{npm_data_dir}}/custom_ssl/npm-{{certificate.id}}/fullchain.pem;
+ ssl_certificate_key {{npm_data_dir}}/custom_ssl/npm-{{certificate.id}}/privkey.pem;
+ {{else}}
+ # Acme SSL
+ include {{nginx_conf_dir}}/npm/conf.d/acme-challenge.conf;
+ include {{nginx_conf_dir}}/npm/conf.d/include/ssl-ciphers.conf;
+ ssl_certificate {{acme_certs_dir}}/npm-{{certificate.id}}/fullchain.pem;
+ ssl_certificate_key {{acme_certs_dir}}/npm-{{certificate.id}}/privkey.pem;
+ {{/if}}
+{{/if}}
diff --git a/backend/embed/nginx/_forced_ssl.conf.hbs b/backend/embed/nginx/_forced_ssl.conf.hbs
new file mode 100644
index 00000000..970296e0
--- /dev/null
+++ b/backend/embed/nginx/_forced_ssl.conf.hbs
@@ -0,0 +1,6 @@
+{{#if certificate}}
+ {{#if ssl_forced}}
+ # Force SSL
+ include {{nginx_conf_dir}}/npm/conf.d/include/force-ssl.conf;
+ {{/if}}
+{{/if}}
diff --git a/backend/embed/nginx/_hsts.conf.hbs b/backend/embed/nginx/_hsts.conf.hbs
new file mode 100644
index 00000000..c27da5aa
--- /dev/null
+++ b/backend/embed/nginx/_hsts.conf.hbs
@@ -0,0 +1,8 @@
+{{#if certificate}}
+ {{#if ssl_forced}}
+ {{#if hsts_enabled}}
+ # HSTS (ngx_http_headers_module is required) (63072000 seconds = 2 years)
+ add_header Strict-Transport-Security "max-age=63072000;{{#if hsts_subdomains}} includeSubDomains;{{/if}} preload" always;
+ {{/if}}
+ {{/if}}
+{{/if}}
diff --git a/backend/embed/nginx/_listen.conf.hbs b/backend/embed/nginx/_listen.conf.hbs
new file mode 100644
index 00000000..217da00f
--- /dev/null
+++ b/backend/embed/nginx/_listen.conf.hbs
@@ -0,0 +1,18 @@
+listen 80;
+
+{{#if ipv6}}
+ listen [::]:80;
+{{else}}
+ #listen [::]:80;
+{{/if}}
+
+{{#if certificate}}
+  listen 443 ssl{{#if http2_support}} http2{{/if}};
+ {{#if ipv6}}
+ listen [::]:443;
+ {{else}}
+ #listen [::]:443;
+ {{/if}}
+{{/if}}
+
+server_name{{#each domain_names}} {{this}}{{/each}};
diff --git a/backend/embed/nginx/_location.conf.hbs b/backend/embed/nginx/_location.conf.hbs
new file mode 100644
index 00000000..167d46eb
--- /dev/null
+++ b/backend/embed/nginx/_location.conf.hbs
@@ -0,0 +1,40 @@
+location {{path}} {
+ proxy_set_header Host $host;
+ proxy_set_header X-Forwarded-Scheme $scheme;
+ proxy_set_header X-Forwarded-Proto $scheme;
+ proxy_set_header X-Forwarded-For $remote_addr;
+ proxy_set_header X-Real-IP $remote_addr;
+ proxy_pass {{forward_scheme}}://{{forward_host}}:{{forward_port}}{{forward_path}};
+
+ {{#if access_list}}
+ {{#if access_list.items}}
+ # Authorization
+ auth_basic "Authorization required";
+ auth_basic_user_file {{npm_data_dir}}/access/{{access_list.id}};
+ {{access_list.passauth}}
+ {{/if}}
+
+ # Access Rules
+ {{#each access_list.clients as |client clientIdx|}}
+ {{client.rule}};
+ {{/each}}deny all;
+
+ # Access checks must...
+ {{#if access_list.satisfy}}
+ {{access_list.satisfy}};
+ {{/if}}
+ {{/if}}
+
+ {{> inc_assets}}
+ {{> inc_forced_ssl}}
+ {{> inc_hsts}}
+
+ {{#if allow_websocket_upgrade}}
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection $http_connection;
+ proxy_http_version 1.1;
+ {{/if}}
+
+ {{advanced_config}}
+ }
+
diff --git a/backend/embed/nginx/acme-request.conf.hbs b/backend/embed/nginx/acme-request.conf.hbs
new file mode 100644
index 00000000..aac14895
--- /dev/null
+++ b/backend/embed/nginx/acme-request.conf.hbs
@@ -0,0 +1,15 @@
+server {
+ listen 80;
+ {{#if ipv6}}
+ listen [::]:80;
+ {{/if}}
+
+ server_name{{#each domain_names}} {{this}}{{/each}};
+ access_log {{npm_data_dir}}/logs/acme-requests_access.log standard;
+ error_log {{npm_data_dir}}/logs/acme-requests_error.log warn;
+  include {{nginx_conf_dir}}/npm/conf.d/include/letsencrypt-acme-challenge.conf;
+
+ location / {
+ return 404;
+ }
+}
diff --git a/backend/embed/nginx/dead_host.conf.hbs b/backend/embed/nginx/dead_host.conf.hbs
new file mode 100644
index 00000000..289940c8
--- /dev/null
+++ b/backend/embed/nginx/dead_host.conf.hbs
@@ -0,0 +1,20 @@
+{{#if enabled}}
+ server {
+ {{> inc_listen}}
+ {{> inc_certificates}}
+ {{> inc_hsts}}
+ {{> inc_forced_ssl}}
+
+ access_log {{npm_data_dir}}/logs/dead-host-{{id}}_access.log standard;
+ error_log {{npm_data_dir}}/logs/dead-host-{{id}}_error.log warn;
+
+ {{advanced_config}}
+
+ {{#if use_default_location}}
+ location / {
+ {{> inc_hsts}}
+ return 404;
+ }
+ {{/if}}
+ }
+{{/if}}
diff --git a/backend/embed/nginx/default.conf.hbs b/backend/embed/nginx/default.conf.hbs
new file mode 100644
index 00000000..190ec02b
--- /dev/null
+++ b/backend/embed/nginx/default.conf.hbs
@@ -0,0 +1,35 @@
+{{#if (equal value "congratulations")}}
+  # Skipping output, congratulations page configuration is baked in.
+{{else}}
+ server {
+ listen 80 default;
+ {{#if ipv6}}
+ listen [::]:80;
+ {{else}}
+ #listen [::]:80;
+ {{/if}}
+
+ server_name default-host.localhost;
+ access_log {{npm_data_dir}}/logs/default-host_access.log combined;
+ error_log {{npm_data_dir}}/logs/default-host_error.log warn;
+
+ {{#if (equal value "404")}}
+ location / {
+ return 404;
+ }
+ {{/if}}
+
+ {{#if (equal value "redirect")}}
+ location / {
+ return 301 {{meta.redirect}};
+ }
+ {{/if}}
+
+ {{#if (equal value "html")}}
+ root {{npm_data_dir}}/nginx/default_www;
+ location / {
+ try_files $uri /index.html;
+ }
+ {{/if}}
+ }
+{{/if}}
diff --git a/backend/embed/nginx/ip_ranges.conf.hbs b/backend/embed/nginx/ip_ranges.conf.hbs
new file mode 100644
index 00000000..7b7c3d07
--- /dev/null
+++ b/backend/embed/nginx/ip_ranges.conf.hbs
@@ -0,0 +1,3 @@
+{{#each ip_ranges as |range rangeIdx|}}
+ set_real_ip_from {{range}};
+{{/each}}
diff --git a/backend/embed/nginx/proxy_host.conf.hbs b/backend/embed/nginx/proxy_host.conf.hbs
new file mode 100644
index 00000000..e7681f4b
--- /dev/null
+++ b/backend/embed/nginx/proxy_host.conf.hbs
@@ -0,0 +1,62 @@
+{{#if enabled}}
+ server {
+ set $forward_scheme {{forward_scheme}};
+ set $server "{{forward_host}}";
+ set $port {{forward_port}};
+
+ {{> inc_listen}}
+ {{> inc_certificates}}
+ {{> inc_assets}}
+ {{> inc_hsts}}
+ {{> inc_forced_ssl}}
+
+ {{#if allow_websocket_upgrade}}
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection $http_connection;
+ proxy_http_version 1.1;
+ {{/if}}
+
+ access_log {{npm_data_dir}}/logs/proxy-host-{{id}}_access.log proxy;
+ error_log {{npm_data_dir}}/logs/proxy-host-{{id}}_error.log warn;
+
+ {{advanced_config}}
+ {{locations}}
+
+ {{#if use_default_location}}
+ location / {
+ {{#if access_list}}
+ {{#if access_list.items}}
+ # Authorization
+ auth_basic "Authorization required";
+ auth_basic_user_file {{npm_data_dir}}/access/{{access_list.id}};
+ {{access_list.passauth}}
+ {{/if}}
+
+ # Access Rules
+ {{#each access_list.clients as |client clientIdx|}}
+ {{client.rule}};
+ {{/each}}deny all;
+
+ # Access checks must...
+ {{#if access_list.satisfy}}
+ {{access_list.satisfy}};
+ {{/if}}
+ {{/if}}
+
+ {{> inc_hsts}}
+
+ {{#if allow_websocket_upgrade}}
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection $http_connection;
+ proxy_http_version 1.1;
+ {{/if}}
+
+ # Proxy!
+ include {{nginx_conf_dir}}/npm/conf.d/include/proxy.conf;
+ }
+ {{/if}}
+
+ # Custom
+ include {{npm_data_dir}}/nginx/custom/server_proxy[.]conf;
+ }
+{{/if}}
diff --git a/backend/embed/nginx/redirection_host.conf.hbs b/backend/embed/nginx/redirection_host.conf.hbs
new file mode 100644
index 00000000..18208c6c
--- /dev/null
+++ b/backend/embed/nginx/redirection_host.conf.hbs
@@ -0,0 +1,28 @@
+{{#if enabled}}
+ server {
+ {{> inc_listen}}
+ {{> inc_certificates}}
+ {{> inc_assets}}
+ {{> inc_hsts}}
+ {{> inc_forced_ssl}}
+
+ access_log {{npm_data_dir}}/logs/redirection-host-{{ id }}_access.log standard;
+ error_log {{npm_data_dir}}/logs/redirection-host-{{ id }}_error.log warn;
+
+ {{advanced_config}}
+
+ {{#if use_default_location}}
+ location / {
+ {{> inc_hsts}}
+ {{#if preserve_path}}
+ return {{forward_http_code}} {{forward_scheme}}://{{forward_domain_name}}$request_uri;
+ {{else}}
+ return {{forward_http_code}} {{forward_scheme}}://{{forward_domain_name}};
+ {{/if}}
+ }
+ {{/if}}
+
+ # Custom
+ include {{npm_data_dir}}/nginx/custom/server_redirect[.]conf;
+ }
+{{/if}}
diff --git a/backend/embed/nginx/stream.conf.hbs b/backend/embed/nginx/stream.conf.hbs
new file mode 100644
index 00000000..bc85bbfa
--- /dev/null
+++ b/backend/embed/nginx/stream.conf.hbs
@@ -0,0 +1,34 @@
+{{#if enabled}}
+ {{#if tcp_forwarding}}
+ server {
+ listen {{incoming_port}};
+ {{#if ipv6}}
+ listen [::]:{{incoming_port}};
+ {{else}}
+ #listen [::]:{{incoming_port}};
+ {{/if}}
+
+ proxy_pass {{forward_ip}}:{{forwarding_port}};
+
+ # Custom
+ include {{npm_data_dir}}/nginx/custom/server_stream[.]conf;
+ include {{npm_data_dir}}/nginx/custom/server_stream_tcp[.]conf;
+ }
+ {{/if}}
+
+ {{#if udp_forwarding}}
+ server {
+ listen {{incoming_port}} udp;
+ {{#if ipv6}}
+ listen [::]:{{ incoming_port }} udp;
+ {{else}}
+ #listen [::]:{{incoming_port}} udp;
+ {{/if}}
+ proxy_pass {{forward_ip}}:{{forwarding_port}};
+
+ # Custom
+ include {{npm_data_dir}}/nginx/custom/server_stream[.]conf;
+ include {{npm_data_dir}}/nginx/custom/server_stream_udp[.]conf;
+ }
+ {{/if}}
+{{/if}}
diff --git a/backend/go.mod b/backend/go.mod
new file mode 100644
index 00000000..45118bf3
--- /dev/null
+++ b/backend/go.mod
@@ -0,0 +1,39 @@
+module npm
+
+go 1.17
+
+require (
+ github.com/aymerick/raymond v2.0.3-0.20180322193309-b565731e1464+incompatible
+ github.com/dgrijalva/jwt-go v3.2.0+incompatible
+ github.com/drexedam/gravatar v0.0.0-20210327211422-e94eea8c338e
+ github.com/fatih/color v1.13.0
+ github.com/getsentry/sentry-go v0.12.0
+ github.com/go-chi/chi v4.1.2+incompatible
+ github.com/go-chi/cors v1.2.0
+ github.com/go-chi/jwtauth v4.0.4+incompatible
+ github.com/jc21/jsref v0.0.0-20210608024405-a97debfc4760
+ github.com/jmoiron/sqlx v1.3.4
+ github.com/mattn/go-sqlite3 v2.0.3+incompatible
+ github.com/qri-io/jsonschema v0.2.1
+ github.com/stretchr/testify v1.7.0
+ github.com/vrischmann/envconfig v1.3.0
+ golang.org/x/crypto v0.0.0-20220131195533-30dcbda58838
+)
+
+require (
+ github.com/alexflint/go-arg v1.4.3 // indirect
+ github.com/alexflint/go-scalar v1.1.0 // indirect
+ github.com/davecgh/go-spew v1.1.1 // indirect
+ github.com/lestrrat-go/jspointer v0.0.0-20181205001929-82fadba7561c // indirect
+ github.com/lestrrat-go/option v1.0.0 // indirect
+ github.com/lestrrat-go/pdebug/v3 v3.0.1 // indirect
+ github.com/lestrrat-go/structinfo v0.0.0-20210312050401-7f8bd69d6acb // indirect
+ github.com/mattn/go-colorable v0.1.12 // indirect
+ github.com/mattn/go-isatty v0.0.14 // indirect
+ github.com/patrickmn/go-cache v2.1.0+incompatible // indirect
+ github.com/pkg/errors v0.9.1 // indirect
+ github.com/pmezard/go-difflib v1.0.0 // indirect
+ github.com/qri-io/jsonpointer v0.1.1 // indirect
+ golang.org/x/sys v0.0.0-20220204135822-1c1b9b1eba6a // indirect
+ gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776 // indirect
+)
diff --git a/backend/go.sum b/backend/go.sum
new file mode 100644
index 00000000..f980fa1c
--- /dev/null
+++ b/backend/go.sum
@@ -0,0 +1,300 @@
+github.com/AndreasBriese/bbloom v0.0.0-20190306092124-e2d15f34fcf9/go.mod h1:bOvUY6CB00SOBii9/FifXqc0awNKxLFCL/+pkDPuyl8=
+github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
+github.com/CloudyKit/fastprinter v0.0.0-20200109182630-33d98a066a53/go.mod h1:+3IMCy2vIlbG1XG/0ggNQv0SvxCAIpPM5b1nCz56Xno=
+github.com/CloudyKit/jet/v3 v3.0.0/go.mod h1:HKQPgSJmdK8hdoAbKUUWajkHyHo4RaU5rMdUywE7VMo=
+github.com/Joker/hpp v1.0.0/go.mod h1:8x5n+M1Hp5hC0g8okX3sR3vFQwynaX/UgSOM9MeBKzY=
+github.com/Shopify/goreferrer v0.0.0-20181106222321-ec9c9a553398/go.mod h1:a1uqRtAwp2Xwc6WNPJEufxJ7fx3npB4UV/JOLmbu5I0=
+github.com/ajg/form v1.5.1/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY=
+github.com/alexflint/go-arg v1.4.3 h1:9rwwEBpMXfKQKceuZfYcwuc/7YY7tWJbFsgG5cAU/uo=
+github.com/alexflint/go-arg v1.4.3/go.mod h1:3PZ/wp/8HuqRZMUUgu7I+e1qcpUbvmS258mRXkFH4IA=
+github.com/alexflint/go-scalar v1.1.0 h1:aaAouLLzI9TChcPXotr6gUhq+Scr8rl0P9P4PnltbhM=
+github.com/alexflint/go-scalar v1.1.0/go.mod h1:LoFvNMqS1CPrMVltza4LvnGKhaSpc3oyLEBUZVhhS2o=
+github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
+github.com/aymerick/raymond v2.0.3-0.20180322193309-b565731e1464+incompatible h1:Ppm0npCCsmuR9oQaBtRuZcmILVE74aXE+AmrJj8L2ns=
+github.com/aymerick/raymond v2.0.3-0.20180322193309-b565731e1464+incompatible/go.mod h1:osfaiScAUVup+UC9Nfq76eWqDhXlp+4UYaA8uhTBO6g=
+github.com/codegangsta/inject v0.0.0-20150114235600-33e0aa1cb7c0/go.mod h1:4Zcjuz89kmFXt9morQgcfYZAYZ5n8WHjt81YYWIwtTM=
+github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
+github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk=
+github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
+github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE=
+github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/dgraph-io/badger v1.6.0/go.mod h1:zwt7syl517jmP8s94KqSxTlM6IMsdhYy6psNgSztDR4=
+github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM=
+github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
+github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw=
+github.com/drexedam/gravatar v0.0.0-20210327211422-e94eea8c338e h1:2R8DvYLNr5DL25eWwpOdPno1eIbTNjJC0d7v8ti5cus=
+github.com/drexedam/gravatar v0.0.0-20210327211422-e94eea8c338e/go.mod h1:YjikoytuRI4q+GRd3xrOrKJN+Ayi2dwRomHLDDeMHfs=
+github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
+github.com/eknkc/amber v0.0.0-20171010120322-cdade1c07385/go.mod h1:0vRUJqYpeSZifjYj7uP3BG/gKcuzL9xWVV/Y+cK33KM=
+github.com/etcd-io/bbolt v1.3.3/go.mod h1:ZF2nL25h33cCyBtcyWeZ2/I3HQOfTP+0PIEvHjkjCrw=
+github.com/fasthttp-contrib/websocket v0.0.0-20160511215533-1f3b11f56072/go.mod h1:duJ4Jxv5lDcvg4QuQr0oowTf7dz4/CR8NtyCooz9HL8=
+github.com/fatih/color v1.10.0 h1:s36xzo75JdqLaaWoiEHk767eHiwo0598uUxyfiPkDsg=
+github.com/fatih/color v1.10.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM=
+github.com/fatih/color v1.13.0 h1:8LOYc1KYPPmyKMuN8QV2DNRWNbLo6LZ0iLs8+mlH53w=
+github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk=
+github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M=
+github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
+github.com/gavv/httpexpect v2.0.0+incompatible/go.mod h1:x+9tiU1YnrOvnB725RkpoLv1M62hOWzwo5OXotisrKc=
+github.com/getsentry/sentry-go v0.10.0 h1:6gwY+66NHKqyZrdi6O2jGdo7wGdo9b3B69E01NFgT5g=
+github.com/getsentry/sentry-go v0.10.0/go.mod h1:kELm/9iCblqUYh+ZRML7PNdCvEuw24wBvJPYyi86cws=
+github.com/getsentry/sentry-go v0.12.0 h1:era7g0re5iY13bHSdN/xMkyV+5zZppjRVQhZrXCaEIk=
+github.com/getsentry/sentry-go v0.12.0/go.mod h1:NSap0JBYWzHND8oMbyi0+XZhUalc1TBdRL1M71JZW2c=
+github.com/gin-contrib/sse v0.0.0-20190301062529-5545eab6dad3/go.mod h1:VJ0WA2NBN22VlZ2dKZQPAPnyWw5XTlK1KymzLKsr59s=
+github.com/gin-gonic/gin v1.4.0/go.mod h1:OW2EZn3DO8Ln9oIKOvM++LBO+5UPHJJDH72/q/3rZdM=
+github.com/go-check/check v0.0.0-20180628173108-788fd7840127/go.mod h1:9ES+weclKsC9YodN5RgxqK/VD9HM9JsCSh7rNhMZE98=
+github.com/go-chi/chi v4.1.2+incompatible h1:fGFk2Gmi/YKXk0OmGfBh0WgmN3XB8lVnEyNz34tQRec=
+github.com/go-chi/chi v4.1.2+incompatible/go.mod h1:eB3wogJHnLi3x/kFX2A+IbTBlXxmMeXJVKy9tTv1XzQ=
+github.com/go-chi/cors v1.2.0 h1:tV1g1XENQ8ku4Bq3K9ub2AtgG+p16SmzeMSGTwrOKdE=
+github.com/go-chi/cors v1.2.0/go.mod h1:sSbTewc+6wYHBBCW7ytsFSn836hqM7JxpglAy2Vzc58=
+github.com/go-chi/jwtauth v4.0.4+incompatible h1:LGIxg6YfvSBzxU2BljXbrzVc1fMlgqSKBQgKOGAVtPY=
+github.com/go-chi/jwtauth v4.0.4+incompatible/go.mod h1:Q5EIArY/QnD6BdS+IyDw7B2m6iNbnPxtfd6/BcmtWbs=
+github.com/go-errors/errors v1.0.1 h1:LUHzmkK3GUKUrL/1gfBUxAHzcev3apQlezX/+O7ma6w=
+github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q=
+github.com/go-martini/martini v0.0.0-20170121215854-22fa46961aab/go.mod h1:/P9AEU963A2AYjv4d1V5eVL1CQbEJq6aCNHDDjibzu8=
+github.com/go-sql-driver/mysql v1.5.0 h1:ozyZYNQW3x3HtqT1jira07DN2PArx2v7/mN66gGcHOs=
+github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
+github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo=
+github.com/gobwas/pool v0.2.0/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw=
+github.com/gobwas/ws v1.0.2/go.mod h1:szmBTxLgaFppYjEmNtny/v3w89xOydFnnZMcgRRu/EM=
+github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I=
+github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/gomodule/redigo v1.7.1-0.20190724094224-574c33c3df38/go.mod h1:B4C85qUVwatsJoIUNIfCRsp7qO0iAmpGFZ4EELWSbC4=
+github.com/google/go-cmp v0.4.0 h1:xsAVV57WRhGj6kEIi8ReJzQlHHqcBYCElAvkovg3B/4=
+github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck=
+github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
+github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
+github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
+github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
+github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
+github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
+github.com/imkira/go-interpol v1.1.0/go.mod h1:z0h2/2T3XF8kyEPpRgJ3kmNv+C43p+I/CoI+jC3w2iA=
+github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
+github.com/iris-contrib/blackfriday v2.0.0+incompatible/go.mod h1:UzZ2bDEoaSGPbkg6SAB4att1aAwTmVIx/5gCVqeyUdI=
+github.com/iris-contrib/go.uuid v2.0.0+incompatible/go.mod h1:iz2lgM/1UnEf1kP0L/+fafWORmlnuysV2EMP8MW+qe0=
+github.com/iris-contrib/jade v1.1.3/go.mod h1:H/geBymxJhShH5kecoiOCSssPX7QWYH7UaeZTSWddIk=
+github.com/iris-contrib/pongo2 v0.0.1/go.mod h1:Ssh+00+3GAZqSQb30AvBRNxBx7rf0GqwkjqxNd0u65g=
+github.com/iris-contrib/schema v0.0.1/go.mod h1:urYA3uvUNG1TIIjOSCzHr9/LmbQo8LrOcOqfqxa4hXw=
+github.com/jc21/jsref v0.0.0-20210608024405-a97debfc4760 h1:7wxq2DIgtO36KLrFz1RldysO0WVvcYsD49G9tyAs01k=
+github.com/jc21/jsref v0.0.0-20210608024405-a97debfc4760/go.mod h1:yIq2t51OJgVsdRlPY68NAnyVdBH0kYXxDTFtUxOap80=
+github.com/jmoiron/sqlx v1.3.3 h1:j82X0bf7oQ27XeqxicSZsTU5suPwKElg3oyxNn43iTk=
+github.com/jmoiron/sqlx v1.3.3/go.mod h1:2BljVx/86SuTyjE+aPYlHCTNvZrnJXghYGpNiXLBMCQ=
+github.com/jmoiron/sqlx v1.3.4 h1:wv+0IJZfL5z0uZoUjlpKgHkgaFSYD+r9CfrXjEXsO7w=
+github.com/jmoiron/sqlx v1.3.4/go.mod h1:2BljVx/86SuTyjE+aPYlHCTNvZrnJXghYGpNiXLBMCQ=
+github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
+github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
+github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
+github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88/go.mod h1:3w7q1U84EfirKl04SVQ/s7nPm1ZPhiXd34z40TNz36k=
+github.com/kataras/golog v0.0.10/go.mod h1:yJ8YKCmyL+nWjERB90Qwn+bdyBZsaQwU3bTVFgkFIp8=
+github.com/kataras/iris/v12 v12.1.8/go.mod h1:LMYy4VlP67TQ3Zgriz8RE2h2kMZV2SgMYbq3UhfoFmE=
+github.com/kataras/neffos v0.0.14/go.mod h1:8lqADm8PnbeFfL7CLXh1WHw53dG27MC3pgi2R1rmoTE=
+github.com/kataras/pio v0.0.2/go.mod h1:hAoW0t9UmXi4R5Oyq5Z4irTbaTsOemSrDGUtaTl7Dro=
+github.com/kataras/sitemap v0.0.5/go.mod h1:KY2eugMKiPwsJgx7+U103YZehfvNGOXURubcGyk0Bz8=
+github.com/klauspost/compress v1.8.2/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=
+github.com/klauspost/compress v1.9.7/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=
+github.com/klauspost/cpuid v1.2.1/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=
+github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
+github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
+github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
+github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
+github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
+github.com/kyoh86/richgo v0.3.8/go.mod h1:2C8POkF1H04iTOG2Tp1yyZhspCME9nN3cir3VXJ02II=
+github.com/kyoh86/xdg v1.2.0/go.mod h1:/mg8zwu1+qe76oTFUBnyS7rJzk7LLC0VGEzJyJ19DHs=
+github.com/labstack/echo/v4 v4.1.11/go.mod h1:i541M3Fj6f76NZtHSj7TXnyM8n2gaodfvfxNnFqi74g=
+github.com/labstack/echo/v4 v4.5.0/go.mod h1:czIriw4a0C1dFun+ObrXp7ok03xON0N1awStJ6ArI7Y=
+github.com/labstack/gommon v0.3.0/go.mod h1:MULnywXg0yavhxWKc+lOruYdAhDwPK9wf0OL7NoOu+k=
+github.com/lestrrat-go/jspointer v0.0.0-20181205001929-82fadba7561c h1:pGh5EFIfczeDHwgMHgfwjhZzL+8/E3uZF6T7vER/W8c=
+github.com/lestrrat-go/jspointer v0.0.0-20181205001929-82fadba7561c/go.mod h1:xw2Gm4Mg+ST9s8fHR1VkUIyOJMJnSloRZlPQB+wyVpY=
+github.com/lestrrat-go/option v0.0.0-20210103042652-6f1ecfceda35 h1:lea8Wt+1ePkVrI2/WD+NgQT5r/XsLAzxeqtyFLcEs10=
+github.com/lestrrat-go/option v0.0.0-20210103042652-6f1ecfceda35/go.mod h1:5ZHFbivi4xwXxhxY9XHDe2FHo6/Z7WWmtT7T5nBBp3I=
+github.com/lestrrat-go/option v1.0.0 h1:WqAWL8kh8VcSoD6xjSH34/1m8yxluXQbDeKNfvFeEO4=
+github.com/lestrrat-go/option v1.0.0/go.mod h1:5ZHFbivi4xwXxhxY9XHDe2FHo6/Z7WWmtT7T5nBBp3I=
+github.com/lestrrat-go/pdebug/v3 v3.0.1 h1:3G5sX/aw/TbMTtVc9U7IHBWRZtMvwvBziF1e4HoQtv8=
+github.com/lestrrat-go/pdebug/v3 v3.0.1/go.mod h1:za+m+Ve24yCxTEhR59N7UlnJomWwCiIqbJRmKeiADU4=
+github.com/lestrrat-go/structinfo v0.0.0-20190212233437-acd51874663b h1:YUFRoeHK/mvRjBR0bBRDC7ZGygYchoQ8j1xMENlObro=
+github.com/lestrrat-go/structinfo v0.0.0-20190212233437-acd51874663b/go.mod h1:s2U6PowV3/Jobkx/S9d0XiPwOzs6niW3DIouw+7nZC8=
+github.com/lestrrat-go/structinfo v0.0.0-20210312050401-7f8bd69d6acb h1:DDg5u5lk2v8O8qxs8ecQkMUBj3tLW6wkSLzxxOyi1Ig=
+github.com/lestrrat-go/structinfo v0.0.0-20210312050401-7f8bd69d6acb/go.mod h1:i+E8Uf04vf2QjOWyJdGY75vmG+4rxiZW2kIj1lTB5mo=
+github.com/lib/pq v1.2.0 h1:LXpIM/LZ5xGFhOpXAQUIMM1HdyqzVYM13zNdjCEEcA0=
+github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
+github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
+github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
+github.com/mattn/go-colorable v0.1.8 h1:c1ghPdyEDarC70ftn0y+A/Ee++9zz8ljHG1b13eJ0s8=
+github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
+github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
+github.com/mattn/go-colorable v0.1.11/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4=
+github.com/mattn/go-colorable v0.1.12 h1:jF+Du6AlPIjs2BiUiQlKOX0rt3SujHxPnksPKZbaA40=
+github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4=
+github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
+github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
+github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ=
+github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
+github.com/mattn/go-isatty v0.0.13 h1:qdl+GuBjcsKKDco5BsxPJlId98mSWNKqYA+Co0SC1yA=
+github.com/mattn/go-isatty v0.0.13/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
+github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y=
+github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
+github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
+github.com/mattn/go-sqlite3 v2.0.3+incompatible h1:gXHsfypPkaMZrKbD5209QV9jbUTJKjyR5WD3HYQSd+U=
+github.com/mattn/go-sqlite3 v2.0.3+incompatible/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
+github.com/mattn/goveralls v0.0.2/go.mod h1:8d1ZMHsd7fW6IRPKQh46F2WRpyib5/X4FOpevwGNQEw=
+github.com/mediocregopher/radix/v3 v3.4.2/go.mod h1:8FL3F6UQRXHXIBSPUs5h0RybMF8i4n7wVopoX3x7Bv8=
+github.com/microcosm-cc/bluemonday v1.0.2/go.mod h1:iVP4YcDBq+n/5fb23BhYFvIMq/leAFZyRl6bYmGDlGc=
+github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
+github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
+github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
+github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
+github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
+github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
+github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc=
+github.com/moul/http2curl v1.0.0/go.mod h1:8UbvGypXm98wA/IqH45anm5Y2Z6ep6O31QGOAZ3H0fQ=
+github.com/nats-io/jwt v0.3.0/go.mod h1:fRYCDE99xlTsqUzISS1Bi75UBJ6ljOJQOAAu5VglpSg=
+github.com/nats-io/nats.go v1.9.1/go.mod h1:ZjDU1L/7fJ09jvUSRVBR2e7+RnLiiIQyqyzEE/Zbp4w=
+github.com/nats-io/nkeys v0.1.0/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w=
+github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c=
+github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
+github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
+github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
+github.com/onsi/ginkgo v1.10.3/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
+github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY=
+github.com/patrickmn/go-cache v2.1.0+incompatible h1:HRMgzkcYKYpi3C8ajMPV8OFXaaRUnok+kx1WdO15EQc=
+github.com/patrickmn/go-cache v2.1.0+incompatible/go.mod h1:3Qf8kWWT7OJRJbdiICTKqZju1ZixQ/KpMGzzAfe6+WQ=
+github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
+github.com/pingcap/errors v0.11.4 h1:lFuQV/oaUMGcD2tqt+01ROSmJs75VG1ToEOkZIZ4nE4=
+github.com/pingcap/errors v0.11.4/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8=
+github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
+github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
+github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/qri-io/jsonpointer v0.1.1 h1:prVZBZLL6TW5vsSB9fFHFAMBLI4b0ri5vribQlTJiBA=
+github.com/qri-io/jsonpointer v0.1.1/go.mod h1:DnJPaYgiKu56EuDp8TU5wFLdZIcAnb/uH9v37ZaMV64=
+github.com/qri-io/jsonschema v0.2.1 h1:NNFoKms+kut6ABPf6xiKNM5214jzxAhDBrPHCJ97Wg0=
+github.com/qri-io/jsonschema v0.2.1/go.mod h1:g7DPkiOsK1xv6T/Ao5scXRkd+yTFygcANPBaaqW+VrI=
+github.com/rakyll/statik v0.1.7/go.mod h1:AlZONWzMtEnMs7W4e/1LURLiI49pIMmp6V9Unghqrcc=
+github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
+github.com/ryanuber/columnize v2.1.0+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
+github.com/schollz/closestmatch v2.1.0+incompatible/go.mod h1:RtP1ddjLong6gTkbtmuhtR2uUrrJOpYzYRvbcPAid+g=
+github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
+github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
+github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
+github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
+github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
+github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ=
+github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
+github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU=
+github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
+github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
+github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
+github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
+github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
+github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
+github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc=
+github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw=
+github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
+github.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY=
+github.com/urfave/negroni v1.0.0/go.mod h1:Meg73S6kFm/4PpbYdq35yYWoCZ9mS/YSx+lKnmiohz4=
+github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
+github.com/valyala/fasthttp v1.6.0/go.mod h1:FstJa9V+Pj9vQ7OJie2qMHdwemEDaDiSdBnvPM1Su9w=
+github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8=
+github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ=
+github.com/valyala/tcplisten v0.0.0-20161114210144-ceec8f93295a/go.mod h1:v3UYOV9WzVtRmSR+PDvWpU/qWl4Wa5LApYYX4ZtKbio=
+github.com/vrischmann/envconfig v1.3.0 h1:4XIvQTXznxmWMnjouj0ST5lFo/WAYf5Exgl3x82crEk=
+github.com/vrischmann/envconfig v1.3.0/go.mod h1:bbvxFYJdRSpXrhS63mBFtKJzkDiNkyArOLXtY6q0kuI=
+github.com/wacul/ptr v1.0.0/go.mod h1:BD0gjsZrCwtoR+yWDB9v2hQ8STlq9tT84qKfa+3txOc=
+github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
+github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
+github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y=
+github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
+github.com/yalp/jsonpath v0.0.0-20180802001716-5cc68e5049a0/go.mod h1:/LWChgwKmvncFJFHJ7Gvn9wZArjbV5/FppcK2fKk/tI=
+github.com/yudai/gojsondiff v1.0.0/go.mod h1:AY32+k2cwILAkW1fbgxQ5mUmMiZFgLIV+FBNExI05xg=
+github.com/yudai/golcs v0.0.0-20170316035057-ecda9a501e82/go.mod h1:lgjkn3NuSvDfVJdfcVVdX+jpBxNmX4rDAzaS45IcYoM=
+github.com/yudai/pp v2.0.1+incompatible/go.mod h1:PuxR/8QJ7cyCkFp/aUDS+JY727OFEZkTdatxwunjIkc=
+golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
+golang.org/x/crypto v0.0.0-20191227163750-53104e6ec876/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
+golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4=
+golang.org/x/crypto v0.0.0-20210506145944-38f3c27a63bf h1:B2n+Zi5QeYRDAEodEu72OS36gmTWjgpXr2+cWcBW90o=
+golang.org/x/crypto v0.0.0-20210506145944-38f3c27a63bf/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8=
+golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
+golang.org/x/crypto v0.0.0-20220131195533-30dcbda58838 h1:71vQrMauZZhcTVK6KdYM+rklehEEwb3E+ZhaE5jrPrE=
+golang.org/x/crypto v0.0.0-20220131195533-30dcbda58838/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
+golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190327091125-710a502c58a2/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20210226172049-e18ecbb05110 h1:qWPm9rbaAMKs8Bq/9LRpbMqxWRVUAQwMI9fVrssnTfw=
+golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
+golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
+golang.org/x/net v0.0.0-20211008194852-3b03d305991f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
+golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2 h1:CIJ76btIcR3eFI5EgSo6k1qKw9KJexJuRLI9G7Hp5wE=
+golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190626221950-04f50cda93cb/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210326220804-49726bf1d181/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210603125802-9665404d3644 h1:CA1DEQ4NdKphKeL70tvsWNdT5oFh1lOjihRcEDROi0I=
+golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220204135822-1c1b9b1eba6a h1:ppl5mZgokTT8uPkmYOyEUmPTr3ypaKkg5eFOGrAmxxE=
+golang.org/x/sys v0.0.0-20220204135822-1c1b9b1eba6a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
+golang.org/x/text v0.3.3 h1:cokOdA+Jmi5PJGXLlLllQSgYigAEfHXJAERHVMaCc2k=
+golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
+golang.org/x/time v0.0.0-20201208040808-7e3f01d25324/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
+golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20181221001348-537d06c36207/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20190327201419-c70d86f8b7cf/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
+golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
+golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=
+golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU=
+gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
+gopkg.in/go-playground/assert.v1 v1.2.1/go.mod h1:9RXL0bg/zibRAgZUYszZSwO/z8Y/a8bDuhia5mkpMnE=
+gopkg.in/go-playground/validator.v8 v8.18.2/go.mod h1:RX2a/7Ha8BgOhfk7j780h4/u/RRjR0eouCJSH80/M2Y=
+gopkg.in/ini.v1 v1.51.1/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
+gopkg.in/mgo.v2 v2.0.0-20180705113604-9856a29383ce/go.mod h1:yeKp02qBN3iKW1OzL3MGk2IdtZzaj7SFntXj72NppTA=
+gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
+gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
+gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
+gopkg.in/yaml.v3 v3.0.0-20191120175047-4206685974f2/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776 h1:tQIYjPdBoyREyB9XMu+nnTclpTYkz2zFM+lzLJFO4gQ=
+gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
diff --git a/backend/index.js b/backend/index.js
deleted file mode 100644
index 8d42d096..00000000
--- a/backend/index.js
+++ /dev/null
@@ -1,135 +0,0 @@
-#!/usr/bin/env node
-
-const logger = require('./logger').global;
-
-async function appStart () {
- // Create config file db settings if environment variables have been set
- await createDbConfigFromEnvironment();
-
- const migrate = require('./migrate');
- const setup = require('./setup');
- const app = require('./app');
- const apiValidator = require('./lib/validator/api');
- const internalCertificate = require('./internal/certificate');
- const internalIpRanges = require('./internal/ip_ranges');
-
- return migrate.latest()
- .then(setup)
- .then(() => {
- return apiValidator.loadSchemas;
- })
- .then(internalIpRanges.fetch)
- .then(() => {
-
- internalCertificate.initTimer();
- internalIpRanges.initTimer();
-
- const server = app.listen(3000, () => {
- logger.info('Backend PID ' + process.pid + ' listening on port 3000 ...');
-
- process.on('SIGTERM', () => {
- logger.info('PID ' + process.pid + ' received SIGTERM');
- server.close(() => {
- logger.info('Stopping.');
- process.exit(0);
- });
- });
- });
- })
- .catch((err) => {
- logger.error(err.message);
- setTimeout(appStart, 1000);
- });
-}
-
-async function createDbConfigFromEnvironment() {
- return new Promise((resolve, reject) => {
- const envMysqlHost = process.env.DB_MYSQL_HOST || null;
- const envMysqlPort = process.env.DB_MYSQL_PORT || null;
- const envMysqlUser = process.env.DB_MYSQL_USER || null;
- const envMysqlName = process.env.DB_MYSQL_NAME || null;
- let envSqliteFile = process.env.DB_SQLITE_FILE || null;
-
- const fs = require('fs');
- const filename = (process.env.NODE_CONFIG_DIR || './config') + '/' + (process.env.NODE_ENV || 'default') + '.json';
- let configData = {};
-
- try {
- configData = require(filename);
- } catch (err) {
- // do nothing
- }
-
- if (configData.database && configData.database.engine && !configData.database.fromEnv) {
- logger.info('Manual db configuration already exists, skipping config creation from environment variables');
- resolve();
- return;
- }
-
- if ((!envMysqlHost || !envMysqlPort || !envMysqlUser || !envMysqlName) && !envSqliteFile){
- envSqliteFile = '/data/database.sqlite';
- logger.info(`No valid environment variables for database provided, using default SQLite file '${envSqliteFile}'`);
- }
-
- if (envMysqlHost && envMysqlPort && envMysqlUser && envMysqlName) {
- const newConfig = {
- fromEnv: true,
- engine: 'mysql',
- host: envMysqlHost,
- port: envMysqlPort,
- user: envMysqlUser,
- password: process.env.DB_MYSQL_PASSWORD,
- name: envMysqlName,
- };
-
- if (JSON.stringify(configData.database) === JSON.stringify(newConfig)) {
- // Config is unchanged, skip overwrite
- resolve();
- return;
- }
-
- logger.info('Generating MySQL knex configuration from environment variables');
- configData.database = newConfig;
-
- } else {
- const newConfig = {
- fromEnv: true,
- engine: 'knex-native',
- knex: {
- client: 'sqlite3',
- connection: {
- filename: envSqliteFile
- },
- useNullAsDefault: true
- }
- };
- if (JSON.stringify(configData.database) === JSON.stringify(newConfig)) {
- // Config is unchanged, skip overwrite
- resolve();
- return;
- }
-
- logger.info('Generating SQLite knex configuration');
- configData.database = newConfig;
- }
-
- // Write config
- fs.writeFile(filename, JSON.stringify(configData, null, 2), (err) => {
- if (err) {
- logger.error('Could not write db config to config file: ' + filename);
- reject(err);
- } else {
- logger.debug('Wrote db configuration to config file: ' + filename);
- resolve();
- }
- });
- });
-}
-
-try {
- appStart();
-} catch (err) {
- logger.error(err.message, err);
- process.exit(1);
-}
-
diff --git a/backend/internal/access-list.js b/backend/internal/access-list.js
deleted file mode 100644
index 083bfa62..00000000
--- a/backend/internal/access-list.js
+++ /dev/null
@@ -1,534 +0,0 @@
-const _ = require('lodash');
-const fs = require('fs');
-const batchflow = require('batchflow');
-const logger = require('../logger').access;
-const error = require('../lib/error');
-const accessListModel = require('../models/access_list');
-const accessListAuthModel = require('../models/access_list_auth');
-const accessListClientModel = require('../models/access_list_client');
-const proxyHostModel = require('../models/proxy_host');
-const internalAuditLog = require('./audit-log');
-const internalNginx = require('./nginx');
-const utils = require('../lib/utils');
-
-function omissions () {
- return ['is_deleted'];
-}
-
-const internalAccessList = {
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @returns {Promise}
- */
- create: (access, data) => {
- return access.can('access_lists:create', data)
- .then((/*access_data*/) => {
- return accessListModel
- .query()
- .omit(omissions())
- .insertAndFetch({
- name: data.name,
- satisfy_any: data.satisfy_any,
- pass_auth: data.pass_auth,
- owner_user_id: access.token.getUserId(1)
- });
- })
- .then((row) => {
- data.id = row.id;
-
- let promises = [];
-
- // Now add the items
- data.items.map((item) => {
- promises.push(accessListAuthModel
- .query()
- .insert({
- access_list_id: row.id,
- username: item.username,
- password: item.password
- })
- );
- });
-
- // Now add the clients
- if (typeof data.clients !== 'undefined' && data.clients) {
- data.clients.map((client) => {
- promises.push(accessListClientModel
- .query()
- .insert({
- access_list_id: row.id,
- address: client.address,
- directive: client.directive
- })
- );
- });
- }
-
- return Promise.all(promises);
- })
- .then(() => {
- // re-fetch with expansions
- return internalAccessList.get(access, {
- id: data.id,
- expand: ['owner', 'items', 'clients', 'proxy_hosts.access_list.[clients,items]']
- }, true /* <- skip masking */);
- })
- .then((row) => {
- // Audit log
- data.meta = _.assign({}, data.meta || {}, row.meta);
-
- return internalAccessList.build(row)
- .then(() => {
- if (row.proxy_host_count) {
- return internalNginx.bulkGenerateConfigs('proxy_host', row.proxy_hosts);
- }
- })
- .then(() => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'created',
- object_type: 'access-list',
- object_id: row.id,
- meta: internalAccessList.maskItems(data)
- });
- })
- .then(() => {
- return internalAccessList.maskItems(row);
- });
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Integer} data.id
- * @param {String} [data.name]
- * @param {String} [data.items]
- * @return {Promise}
- */
- update: (access, data) => {
- return access.can('access_lists:update', data.id)
- .then((/*access_data*/) => {
- return internalAccessList.get(access, {id: data.id});
- })
- .then((row) => {
- if (row.id !== data.id) {
- // Sanity check that something crazy hasn't happened
- throw new error.InternalValidationError('Access List could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
- }
- })
- .then(() => {
- // patch name if specified
- if (typeof data.name !== 'undefined' && data.name) {
- return accessListModel
- .query()
- .where({id: data.id})
- .patch({
- name: data.name,
- satisfy_any: data.satisfy_any,
- pass_auth: data.pass_auth,
- });
- }
- })
- .then(() => {
- // Check for items and add/update/remove them
- if (typeof data.items !== 'undefined' && data.items) {
- let promises = [];
- let items_to_keep = [];
-
- data.items.map(function (item) {
- if (item.password) {
- promises.push(accessListAuthModel
- .query()
- .insert({
- access_list_id: data.id,
- username: item.username,
- password: item.password
- })
- );
- } else {
- // This was supplied with an empty password, which means keep it but don't change the password
- items_to_keep.push(item.username);
- }
- });
-
- let query = accessListAuthModel
- .query()
- .delete()
- .where('access_list_id', data.id);
-
- if (items_to_keep.length) {
- query.andWhere('username', 'NOT IN', items_to_keep);
- }
-
- return query
- .then(() => {
- // Add new items
- if (promises.length) {
- return Promise.all(promises);
- }
- });
- }
- })
- .then(() => {
- // Check for clients and add/update/remove them
- if (typeof data.clients !== 'undefined' && data.clients) {
- let promises = [];
-
- data.clients.map(function (client) {
- if (client.address) {
- promises.push(accessListClientModel
- .query()
- .insert({
- access_list_id: data.id,
- address: client.address,
- directive: client.directive
- })
- );
- }
- });
-
- let query = accessListClientModel
- .query()
- .delete()
- .where('access_list_id', data.id);
-
- return query
- .then(() => {
- // Add new items
- if (promises.length) {
- return Promise.all(promises);
- }
- });
- }
- })
- .then(internalNginx.reload)
- .then(() => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'updated',
- object_type: 'access-list',
- object_id: data.id,
- meta: internalAccessList.maskItems(data)
- });
- })
- .then(() => {
- // re-fetch with expansions
- return internalAccessList.get(access, {
- id: data.id,
- expand: ['owner', 'items', 'clients', 'proxy_hosts.access_list.[clients,items]']
- }, true /* <- skip masking */);
- })
- .then((row) => {
- return internalAccessList.build(row)
- .then(() => {
- if (row.proxy_host_count) {
- return internalNginx.bulkGenerateConfigs('proxy_host', row.proxy_hosts);
- }
- })
- .then(() => {
- return internalAccessList.maskItems(row);
- });
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Integer} data.id
- * @param {Array} [data.expand]
- * @param {Array} [data.omit]
- * @param {Boolean} [skip_masking]
- * @return {Promise}
- */
- get: (access, data, skip_masking) => {
- if (typeof data === 'undefined') {
- data = {};
- }
-
- return access.can('access_lists:get', data.id)
- .then((access_data) => {
- let query = accessListModel
- .query()
- .select('access_list.*', accessListModel.raw('COUNT(proxy_host.id) as proxy_host_count'))
- .joinRaw('LEFT JOIN `proxy_host` ON `proxy_host`.`access_list_id` = `access_list`.`id` AND `proxy_host`.`is_deleted` = 0')
- .where('access_list.is_deleted', 0)
- .andWhere('access_list.id', data.id)
- .allowEager('[owner,items,clients,proxy_hosts.[*, access_list.[clients,items]]]')
- .omit(['access_list.is_deleted'])
- .first();
-
- if (access_data.permission_visibility !== 'all') {
- query.andWhere('access_list.owner_user_id', access.token.getUserId(1));
- }
-
- // Custom omissions
- if (typeof data.omit !== 'undefined' && data.omit !== null) {
- query.omit(data.omit);
- }
-
- if (typeof data.expand !== 'undefined' && data.expand !== null) {
- query.eager('[' + data.expand.join(', ') + ']');
- }
-
- return query;
- })
- .then((row) => {
- if (row) {
- if (!skip_masking && typeof row.items !== 'undefined' && row.items) {
- row = internalAccessList.maskItems(row);
- }
-
- return _.omit(row, omissions());
- } else {
- throw new error.ItemNotFoundError(data.id);
- }
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Integer} data.id
- * @param {String} [data.reason]
- * @returns {Promise}
- */
- delete: (access, data) => {
- return access.can('access_lists:delete', data.id)
- .then(() => {
- return internalAccessList.get(access, {id: data.id, expand: ['proxy_hosts', 'items', 'clients']});
- })
- .then((row) => {
- if (!row) {
- throw new error.ItemNotFoundError(data.id);
- }
-
- // 1. update row to be deleted
- // 2. update any proxy hosts that were using it (ignoring permissions)
- // 3. reconfigure those hosts
- // 4. audit log
-
- // 1. update row to be deleted
- return accessListModel
- .query()
- .where('id', row.id)
- .patch({
- is_deleted: 1
- })
- .then(() => {
- // 2. update any proxy hosts that were using it (ignoring permissions)
- if (row.proxy_hosts) {
- return proxyHostModel
- .query()
- .where('access_list_id', '=', row.id)
- .patch({access_list_id: 0})
- .then(() => {
- // 3. reconfigure those hosts, then reload nginx
-
- // set the access_list_id to zero for these items
- row.proxy_hosts.map(function (val, idx) {
- row.proxy_hosts[idx].access_list_id = 0;
- });
-
- return internalNginx.bulkGenerateConfigs('proxy_host', row.proxy_hosts);
- })
- .then(() => {
- return internalNginx.reload();
- });
- }
- })
- .then(() => {
- // delete the htpasswd file
- let htpasswd_file = internalAccessList.getFilename(row);
-
- try {
- fs.unlinkSync(htpasswd_file);
- } catch (err) {
- // do nothing
- }
- })
- .then(() => {
- // 4. audit log
- return internalAuditLog.add(access, {
- action: 'deleted',
- object_type: 'access-list',
- object_id: row.id,
- meta: _.omit(internalAccessList.maskItems(row), ['is_deleted', 'proxy_hosts'])
- });
- });
- })
- .then(() => {
- return true;
- });
- },
-
- /**
- * All Lists
- *
- * @param {Access} access
- * @param {Array} [expand]
- * @param {String} [search_query]
- * @returns {Promise}
- */
- getAll: (access, expand, search_query) => {
- return access.can('access_lists:list')
- .then((access_data) => {
- let query = accessListModel
- .query()
- .select('access_list.*', accessListModel.raw('COUNT(proxy_host.id) as proxy_host_count'))
- .joinRaw('LEFT JOIN `proxy_host` ON `proxy_host`.`access_list_id` = `access_list`.`id` AND `proxy_host`.`is_deleted` = 0')
- .where('access_list.is_deleted', 0)
- .groupBy('access_list.id')
- .omit(['access_list.is_deleted'])
- .allowEager('[owner,items,clients]')
- .orderBy('access_list.name', 'ASC');
-
- if (access_data.permission_visibility !== 'all') {
- query.andWhere('access_list.owner_user_id', access.token.getUserId(1));
- }
-
- // Query is used for searching
- if (typeof search_query === 'string') {
- query.where(function () {
- this.where('name', 'like', '%' + search_query + '%');
- });
- }
-
- if (typeof expand !== 'undefined' && expand !== null) {
- query.eager('[' + expand.join(', ') + ']');
- }
-
- return query;
- })
- .then((rows) => {
- if (rows) {
- rows.map(function (row, idx) {
- if (typeof row.items !== 'undefined' && row.items) {
- rows[idx] = internalAccessList.maskItems(row);
- }
- });
- }
-
- return rows;
- });
- },
-
- /**
- * Report use
- *
- * @param {Integer} user_id
- * @param {String} visibility
- * @returns {Promise}
- */
- getCount: (user_id, visibility) => {
- let query = accessListModel
- .query()
- .count('id as count')
- .where('is_deleted', 0);
-
- if (visibility !== 'all') {
- query.andWhere('owner_user_id', user_id);
- }
-
- return query.first()
- .then((row) => {
- return parseInt(row.count, 10);
- });
- },
-
- /**
- * @param {Object} list
- * @returns {Object}
- */
- maskItems: (list) => {
- if (list && typeof list.items !== 'undefined') {
- list.items.map(function (val, idx) {
- let repeat_for = 8;
- let first_char = '*';
-
- if (typeof val.password !== 'undefined' && val.password) {
- repeat_for = val.password.length - 1;
- first_char = val.password.charAt(0);
- }
-
- list.items[idx].hint = first_char + ('*').repeat(repeat_for);
- list.items[idx].password = '';
- });
- }
-
- return list;
- },
-
- /**
- * @param {Object} list
- * @param {Integer} list.id
- * @returns {String}
- */
- getFilename: (list) => {
- return '/data/access/' + list.id;
- },
-
- /**
- * @param {Object} list
- * @param {Integer} list.id
- * @param {String} list.name
- * @param {Array} list.items
- * @returns {Promise}
- */
- build: (list) => {
- logger.info('Building Access file #' + list.id + ' for: ' + list.name);
-
- return new Promise((resolve, reject) => {
- let htpasswd_file = internalAccessList.getFilename(list);
-
- // 1. remove any existing access file
- try {
- fs.unlinkSync(htpasswd_file);
- } catch (err) {
- // do nothing
- }
-
- // 2. create empty access file
- try {
- fs.writeFileSync(htpasswd_file, '', {encoding: 'utf8'});
- resolve(htpasswd_file);
- } catch (err) {
- reject(err);
- }
- })
- .then((htpasswd_file) => {
- // 3. generate password for each user
- if (list.items.length) {
- return new Promise((resolve, reject) => {
- batchflow(list.items).sequential()
- .each((i, item, next) => {
- if (typeof item.password !== 'undefined' && item.password.length) {
- logger.info('Adding: ' + item.username);
-
- utils.exec('/usr/bin/htpasswd -b "' + htpasswd_file + '" "' + item.username + '" "' + item.password + '"')
- .then((/*result*/) => {
- next();
- })
- .catch((err) => {
- logger.error(err);
- next(err);
- });
- }
- })
- .error((err) => {
- logger.error(err);
- reject(err);
- })
- .end((results) => {
- logger.success('Built Access file #' + list.id + ' for: ' + list.name);
- resolve(results);
- });
- });
- }
- });
- }
-};
-
-module.exports = internalAccessList;
diff --git a/backend/internal/acme/acmesh.go b/backend/internal/acme/acmesh.go
new file mode 100644
index 00000000..462eba39
--- /dev/null
+++ b/backend/internal/acme/acmesh.go
@@ -0,0 +1,200 @@
+package acme
+
+// Some light reading:
+// https://github.com/acmesh-official/acme.sh/wiki/How-to-issue-a-cert
+
+import (
+ "fmt"
+ "os"
+ "os/exec"
+ "strings"
+
+ "npm/internal/config"
+ "npm/internal/entity/certificateauthority"
+ "npm/internal/entity/dnsprovider"
+ "npm/internal/logger"
+)
+
+func getAcmeShFilePath() (string, error) {
+ path, err := exec.LookPath("acme.sh")
+ if err != nil {
+	return path, fmt.Errorf("Cannot find acme.sh executable script in PATH")
+ }
+ return path, nil
+}
+
+func getCommonEnvVars() []string {
+ return []string{
+ fmt.Sprintf("ACMESH_CONFIG_HOME=%s", os.Getenv("ACMESH_CONFIG_HOME")),
+ fmt.Sprintf("ACMESH_HOME=%s", os.Getenv("ACMESH_HOME")),
+ fmt.Sprintf("CERT_HOME=%s", os.Getenv("CERT_HOME")),
+ fmt.Sprintf("LE_CONFIG_HOME=%s", os.Getenv("LE_CONFIG_HOME")),
+ fmt.Sprintf("LE_WORKING_DIR=%s", os.Getenv("LE_WORKING_DIR")),
+ }
+}
+
+// GetAcmeShVersion will return the acme.sh script version
+func GetAcmeShVersion() string {
+ if r, err := shExec([]string{"--version"}, nil); err == nil {
+ // modify the output
+ r = strings.Trim(r, "\n")
+ v := strings.Split(r, "\n")
+ return v[len(v)-1]
+ }
+ return ""
+}
+
+// CreateAccountKey is required for each server initially
+func CreateAccountKey(ca *certificateauthority.Model) error {
+ args := []string{"--create-account-key", "--accountkeylength", "2048"}
+ if ca != nil {
+ logger.Info("Acme.sh CreateAccountKey for %s", ca.AcmeshServer)
+ args = append(args, "--server", ca.AcmeshServer)
+ if ca.CABundle != "" {
+ args = append(args, "--ca-bundle", ca.CABundle)
+ }
+ } else {
+ logger.Info("Acme.sh CreateAccountKey")
+ }
+
+ args = append(args, getCommonArgs()...)
+ ret, err := shExec(args, nil)
+ if err != nil {
+ return err
+ }
+
+ logger.Debug("CreateAccountKey returned:\n%+v", ret)
+
+ return nil
+}
+
+// RequestCert does all the heavy lifting
+func RequestCert(domains []string, method, outputFullchainFile, outputKeyFile string, dnsProvider *dnsprovider.Model, ca *certificateauthority.Model, force bool) (string, error) {
+ args, err := buildCertRequestArgs(domains, method, outputFullchainFile, outputKeyFile, dnsProvider, ca, force)
+ if err != nil {
+ return err.Error(), err
+ }
+
+ envs := make([]string, 0)
+ if dnsProvider != nil {
+ envs, err = dnsProvider.GetAcmeShEnvVars()
+ if err != nil {
+ return err.Error(), err
+ }
+ }
+
+ ret, err := shExec(args, envs)
+ if err != nil {
+ return ret, err
+ }
+
+ return "", nil
+}
+
+// shExec executes the acme.sh with arguments
+func shExec(args []string, envs []string) (string, error) {
+ acmeSh, err := getAcmeShFilePath()
+ if err != nil {
+ logger.Error("AcmeShError", err)
+ return "", err
+ }
+
+ logger.Debug("CMD: %s %v", acmeSh, args)
+ // nolint: gosec
+ c := exec.Command(acmeSh, args...)
+ c.Env = append(getCommonEnvVars(), envs...)
+
+ b, e := c.Output()
+
+ if e != nil {
+ logger.Error("AcmeShError", fmt.Errorf("Command error: %s -- %v\n%+v", acmeSh, args, e))
+ logger.Warn(string(b))
+ }
+
+ return string(b), e
+}
+
+func getCommonArgs() []string {
+ args := make([]string, 0)
+
+ if config.Configuration.Acmesh.Home != "" {
+ args = append(args, "--home", config.Configuration.Acmesh.Home)
+ }
+ if config.Configuration.Acmesh.ConfigHome != "" {
+ args = append(args, "--config-home", config.Configuration.Acmesh.ConfigHome)
+ }
+ if config.Configuration.Acmesh.CertHome != "" {
+ args = append(args, "--cert-home", config.Configuration.Acmesh.CertHome)
+ }
+
+ args = append(args, "--log", "/data/logs/acme.sh.log")
+ args = append(args, "--debug", "2")
+
+ return args
+}
+
+// This is split out into its own function so it's testable
+func buildCertRequestArgs(domains []string, method, outputFullchainFile, outputKeyFile string, dnsProvider *dnsprovider.Model, ca *certificateauthority.Model, force bool) ([]string, error) {
+ // The argument order matters.
+ // see https://github.com/acmesh-official/acme.sh/wiki/How-to-issue-a-cert#3-multiple-domains-san-mode--hybrid-mode
+ // for multiple domains and note that the method of validation is required just after the domain arg, each time.
+
+ // TODO log file location configurable
+ args := []string{"--issue"}
+
+ if ca != nil {
+ args = append(args, "--server", ca.AcmeshServer)
+ if ca.CABundle != "" {
+ args = append(args, "--ca-bundle", ca.CABundle)
+ }
+ }
+
+ if outputFullchainFile != "" {
+ args = append(args, "--fullchain-file", outputFullchainFile)
+ }
+
+ if outputKeyFile != "" {
+ args = append(args, "--key-file", outputKeyFile)
+ }
+
+ methodArgs := make([]string, 0)
+ switch method {
+ case "dns":
+ if dnsProvider == nil {
+ return nil, ErrDNSNeedsDNSProvider
+ }
+ methodArgs = append(methodArgs, "--dns", dnsProvider.AcmeshName)
+ if dnsProvider.DNSSleep > 0 {
+ // See: https://github.com/acmesh-official/acme.sh/wiki/dnscheck
+ methodArgs = append(methodArgs, "--dnssleep", fmt.Sprintf("%d", dnsProvider.DNSSleep))
+ }
+
+ case "http":
+ if dnsProvider != nil {
+ return nil, ErrHTTPHasDNSProvider
+ }
+ methodArgs = append(methodArgs, "-w", config.Configuration.Acmesh.GetWellknown())
+ default:
+ return nil, ErrMethodNotSupported
+ }
+
+ hasMethod := false
+
+ // Add domains to args
+ for _, domain := range domains {
+ args = append(args, "-d", domain)
+ // Method has to appear after each domain
+ if !hasMethod {
+ args = append(args, methodArgs...)
+ hasMethod = true
+ }
+ }
+
+ if force {
+ args = append(args, "--force")
+ }
+
+ args = append(args, getCommonArgs()...)
+
+ return args, nil
+}
diff --git a/backend/internal/acme/acmesh_test.go b/backend/internal/acme/acmesh_test.go
new file mode 100644
index 00000000..837aa365
--- /dev/null
+++ b/backend/internal/acme/acmesh_test.go
@@ -0,0 +1,204 @@
+package acme
+
+import (
+ "fmt"
+ "testing"
+
+ "npm/internal/config"
+ "npm/internal/entity/certificateauthority"
+ "npm/internal/entity/dnsprovider"
+
+ "github.com/stretchr/testify/assert"
+)
+
+// Tear up/down
+/*
+func TestMain(m *testing.M) {
+ config.Init(&version, &commit, &sentryDSN)
+ code := m.Run()
+ os.Exit(code)
+}
+*/
+
+// TODO configurable
+const acmeLogFile = "/data/logs/acme.sh.log"
+
+func TestBuildCertRequestArgs(t *testing.T) {
+ type want struct {
+ args []string
+ err error
+ }
+
+ wellknown := config.Configuration.Acmesh.GetWellknown()
+ exampleKey := fmt.Sprintf("%s/example.com.key", config.Configuration.Acmesh.CertHome)
+ exampleChain := fmt.Sprintf("%s/a.crt", config.Configuration.Acmesh.CertHome)
+
+ tests := []struct {
+ name string
+ domains []string
+ method string
+ outputFullchainFile string
+ outputKeyFile string
+ dnsProvider *dnsprovider.Model
+ ca *certificateauthority.Model
+ want want
+ }{
+ {
+ name: "http single domain",
+ domains: []string{"example.com"},
+ method: "http",
+ outputFullchainFile: exampleChain,
+ outputKeyFile: exampleKey,
+ dnsProvider: nil,
+ ca: nil,
+ want: want{
+ args: []string{
+ "--issue",
+ "--fullchain-file",
+ exampleChain,
+ "--key-file",
+ exampleKey,
+ "-d",
+ "example.com",
+ "-w",
+ wellknown,
+ "--log",
+ acmeLogFile,
+ "--debug",
+ "2",
+ },
+ err: nil,
+ },
+ },
+ {
+ name: "http multiple domains",
+ domains: []string{"example.com", "example-two.com", "example-three.com"},
+ method: "http",
+ outputFullchainFile: exampleChain,
+ outputKeyFile: exampleKey,
+ dnsProvider: nil,
+ ca: nil,
+ want: want{
+ args: []string{
+ "--issue",
+ "--fullchain-file",
+ exampleChain,
+ "--key-file",
+ exampleKey,
+ "-d",
+ "example.com",
+ "-w",
+ wellknown,
+ "-d",
+ "example-two.com",
+ "-d",
+ "example-three.com",
+ "--log",
+ acmeLogFile,
+ "--debug",
+ "2",
+ },
+ err: nil,
+ },
+ },
+ {
+ name: "http single domain with dns provider",
+ domains: []string{"example.com"},
+ method: "http",
+ outputFullchainFile: exampleChain,
+ outputKeyFile: exampleKey,
+ dnsProvider: &dnsprovider.Model{
+ AcmeshName: "dns_cf",
+ },
+ ca: nil,
+ want: want{
+ args: nil,
+ err: ErrHTTPHasDNSProvider,
+ },
+ },
+ {
+ name: "dns single domain",
+ domains: []string{"example.com"},
+ method: "dns",
+ outputFullchainFile: exampleChain,
+ outputKeyFile: exampleKey,
+ dnsProvider: &dnsprovider.Model{
+ AcmeshName: "dns_cf",
+ },
+ ca: nil,
+ want: want{
+ args: []string{
+ "--issue",
+ "--fullchain-file",
+ exampleChain,
+ "--key-file",
+ exampleKey,
+ "-d",
+ "example.com",
+ "--dns",
+ "dns_cf",
+ "--log",
+ acmeLogFile,
+ "--debug",
+ "2",
+ },
+ err: nil,
+ },
+ },
+ {
+ name: "dns multiple domains",
+ domains: []string{"example.com", "example-two.com", "example-three.com"},
+ method: "dns",
+ outputFullchainFile: exampleChain,
+ outputKeyFile: exampleKey,
+ dnsProvider: &dnsprovider.Model{
+ AcmeshName: "dns_cf",
+ },
+ ca: nil,
+ want: want{
+ args: []string{
+ "--issue",
+ "--fullchain-file",
+ exampleChain,
+ "--key-file",
+ exampleKey,
+ "-d",
+ "example.com",
+ "--dns",
+ "dns_cf",
+ "-d",
+ "example-two.com",
+ "-d",
+ "example-three.com",
+ "--log",
+ acmeLogFile,
+ "--debug",
+ "2",
+ },
+ err: nil,
+ },
+ },
+ {
+ name: "dns single domain no provider",
+ domains: []string{"example.com"},
+ method: "dns",
+ outputFullchainFile: exampleChain,
+ outputKeyFile: exampleKey,
+ dnsProvider: nil,
+ ca: nil,
+ want: want{
+ args: nil,
+ err: ErrDNSNeedsDNSProvider,
+ },
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ args, err := buildCertRequestArgs(tt.domains, tt.method, tt.outputFullchainFile, tt.outputKeyFile, tt.dnsProvider, tt.ca, false)
+
+ assert.Equal(t, tt.want.args, args)
+ assert.Equal(t, tt.want.err, err)
+ })
+ }
+}
diff --git a/backend/internal/acme/errors.go b/backend/internal/acme/errors.go
new file mode 100644
index 00000000..b929ce66
--- /dev/null
+++ b/backend/internal/acme/errors.go
@@ -0,0 +1,10 @@
+package acme
+
+import "errors"
+
+// All errors relating to Acme.sh use
+var (
+ ErrDNSNeedsDNSProvider = errors.New("RequestCert dns method requires a dns provider")
+ ErrHTTPHasDNSProvider = errors.New("RequestCert http method does not need a dns provider")
+ ErrMethodNotSupported = errors.New("RequestCert method not supported")
+)
diff --git a/backend/internal/api/context/context.go b/backend/internal/api/context/context.go
new file mode 100644
index 00000000..f3bc957b
--- /dev/null
+++ b/backend/internal/api/context/context.go
@@ -0,0 +1,25 @@
+package context
+
+var (
+ // BodyCtxKey is the name of the Body value on the context
+ BodyCtxKey = &contextKey{"Body"}
+ // UserIDCtxKey is the name of the UserID value on the context
+ UserIDCtxKey = &contextKey{"UserID"}
+ // FiltersCtxKey is the name of the Filters value on the context
+ FiltersCtxKey = &contextKey{"Filters"}
+ // PrettyPrintCtxKey is the name of the pretty print context
+ PrettyPrintCtxKey = &contextKey{"Pretty"}
+ // ExpansionCtxKey is the name of the expansion context
+ ExpansionCtxKey = &contextKey{"Expansion"}
+)
+
+// contextKey is a value for use with context.WithValue. It's used as
+// a pointer so it fits in an interface{} without allocation. This technique
+// for defining context keys was copied from Go 1.7's new use of context in net/http.
+type contextKey struct {
+ name string
+}
+
+func (k *contextKey) String() string {
+ return "context value: " + k.name
+}
diff --git a/backend/internal/api/filters/helpers.go b/backend/internal/api/filters/helpers.go
new file mode 100644
index 00000000..5f5d5238
--- /dev/null
+++ b/backend/internal/api/filters/helpers.go
@@ -0,0 +1,208 @@
+package filters
+
+import (
+ "fmt"
+ "strings"
+)
+
+// NewFilterSchema is the main method to specify a new Filter Schema for use in Middleware
+func NewFilterSchema(fieldSchemas []string) string {
+ return fmt.Sprintf(baseFilterSchema, strings.Join(fieldSchemas, ", "))
+}
+
+// BoolFieldSchema returns the Field Schema for a Boolean accepted value field
+func BoolFieldSchema(fieldName string) string {
+ return fmt.Sprintf(`{
+ "type": "object",
+ "properties": {
+ "field": {
+ "type": "string",
+ "pattern": "^%s$"
+ },
+ "modifier": %s,
+ "value": {
+ "oneOf": [
+ %s,
+ {
+ "type": "array",
+ "items": %s
+ }
+ ]
+ }
+ }
+ }`, fieldName, boolModifiers, filterBool, filterBool)
+}
+
+// IntFieldSchema returns the Field Schema for a Integer accepted value field
+func IntFieldSchema(fieldName string) string {
+ return fmt.Sprintf(`{
+ "type": "object",
+ "properties": {
+ "field": {
+ "type": "string",
+ "pattern": "^%s$"
+ },
+ "modifier": %s,
+ "value": {
+ "oneOf": [
+ {
+ "type": "string",
+ "pattern": "^[0-9]+$"
+ },
+ {
+ "type": "array",
+ "items": {
+ "type": "string",
+ "pattern": "^[0-9]+$"
+ }
+ }
+ ]
+ }
+ }
+ }`, fieldName, allModifiers)
+}
+
+// StringFieldSchema returns the Field Schema for a String accepted value field
+func StringFieldSchema(fieldName string) string {
+ return fmt.Sprintf(`{
+ "type": "object",
+ "properties": {
+ "field": {
+ "type": "string",
+ "pattern": "^%s$"
+ },
+ "modifier": %s,
+ "value": {
+ "oneOf": [
+ %s,
+ {
+ "type": "array",
+ "items": %s
+ }
+ ]
+ }
+ }
+ }`, fieldName, stringModifiers, filterString, filterString)
+}
+
+// RegexFieldSchema returns the Field Schema for a String accepted value field matching a Regex
+func RegexFieldSchema(fieldName string, regex string) string {
+ return fmt.Sprintf(`{
+ "type": "object",
+ "properties": {
+ "field": {
+ "type": "string",
+ "pattern": "^%s$"
+ },
+ "modifier": %s,
+ "value": {
+ "oneOf": [
+ {
+ "type": "string",
+ "pattern": "%s"
+ },
+ {
+ "type": "array",
+ "items": {
+ "type": "string",
+ "pattern": "%s"
+ }
+ }
+ ]
+ }
+ }
+ }`, fieldName, stringModifiers, regex, regex)
+}
+
+// DateFieldSchema returns the Field Schema for a String accepted value field matching a Date format
+func DateFieldSchema(fieldName string) string {
+ return fmt.Sprintf(`{
+ "type": "object",
+ "properties": {
+ "field": {
+ "type": "string",
+ "pattern": "^%s$"
+ },
+ "modifier": %s,
+ "value": {
+ "oneOf": [
+ {
+ "type": "string",
+ "pattern": "^([12]\\d{3}-(0[1-9]|1[0-2])-(0[1-9]|[12]\\d|3[01]))$"
+ },
+ {
+ "type": "array",
+ "items": {
+ "type": "string",
+ "pattern": "^([12]\\d{3}-(0[1-9]|1[0-2])-(0[1-9]|[12]\\d|3[01]))$"
+ }
+ }
+ ]
+ }
+ }
+ }`, fieldName, allModifiers)
+}
+
+// DateTimeFieldSchema returns the Field Schema for a String accepted value field matching a Date format
+// 2020-03-01T10:30:00+10:00
+func DateTimeFieldSchema(fieldName string) string {
+ return fmt.Sprintf(`{
+ "type": "object",
+ "properties": {
+ "field": {
+ "type": "string",
+ "pattern": "^%s$"
+ },
+ "modifier": %s,
+ "value": {
+ "oneOf": [
+ {
+ "type": "string",
+ "pattern": "^([12]\\d{3}-(0[1-9]|1[0-2])-(0[1-9]|[12]\\d|3[01]))$"
+ },
+ {
+ "type": "array",
+ "items": {
+ "type": "string",
+ "pattern": "^([12]\\d{3}-(0[1-9]|1[0-2])-(0[1-9]|[12]\\d|3[01]))$"
+ }
+ }
+ ]
+ }
+ }
+ }`, fieldName, allModifiers)
+}
+
+const allModifiers = `{
+ "type": "string",
+ "pattern": "^(equals|not|contains|starts|ends|in|notin|min|max|greater|less)$"
+}`
+
+const boolModifiers = `{
+ "type": "string",
+ "pattern": "^(equals|not)$"
+}`
+
+const stringModifiers = `{
+ "type": "string",
+ "pattern": "^(equals|not|contains|starts|ends|in|notin)$"
+}`
+
+const filterBool = `{
+ "type": "string",
+ "pattern": "^(TRUE|true|t|yes|y|on|1|FALSE|f|false|n|no|off|0)$"
+}`
+
+const filterString = `{
+ "type": "string",
+ "minLength": 1
+}`
+
+const baseFilterSchema = `{
+ "type": "array",
+ "items": {
+ "oneOf": [
+ %s
+ ]
+ }
+}`
diff --git a/backend/internal/api/handler/auth.go b/backend/internal/api/handler/auth.go
new file mode 100644
index 00000000..18f2a989
--- /dev/null
+++ b/backend/internal/api/handler/auth.go
@@ -0,0 +1,93 @@
+package handler
+
+import (
+ "encoding/json"
+ "net/http"
+ "time"
+
+ c "npm/internal/api/context"
+ h "npm/internal/api/http"
+ "npm/internal/entity/auth"
+ "npm/internal/entity/user"
+ "npm/internal/errors"
+ "npm/internal/logger"
+)
+
+type setAuthModel struct {
+ Type string `json:"type" db:"type"`
+ Secret string `json:"secret,omitempty" db:"secret"`
+ CurrentSecret string `json:"current_secret,omitempty"`
+}
+
+// SetAuth sets an auth method. This can be used for "me" and `2` for example
+// Route: POST /users/:userID/auth
+func SetAuth() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+
+ var newAuth setAuthModel
+ err := json.Unmarshal(bodyBytes, &newAuth)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+ return
+ }
+
+ userID, isSelf, userIDErr := getUserIDFromRequest(r)
+ if userIDErr != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, userIDErr.Error(), nil)
+ return
+ }
+
+ // Load user
+ thisUser, thisUserErr := user.GetByID(userID)
+ if thisUserErr != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, thisUserErr.Error(), nil)
+ return
+ }
+
+ if thisUser.IsSystem {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, "Cannot set password for system user", nil)
+ return
+ }
+
+ // Load existing auth for user
+ userAuth, userAuthErr := auth.GetByUserIDType(userID, newAuth.Type)
+ if userAuthErr != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, userAuthErr.Error(), nil)
+ return
+ }
+
+ if isSelf {
+ // confirm that the current_secret given is valid for the one stored in the database
+ validateErr := userAuth.ValidateSecret(newAuth.CurrentSecret)
+ if validateErr != nil {
+ logger.Debug("%s: %s", "Password change: current password was incorrect", validateErr.Error())
+ // Sleep for 1 second to prevent brute force password guessing
+ time.Sleep(time.Second)
+
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, errors.ErrCurrentPasswordInvalid.Error(), nil)
+ return
+ }
+ }
+
+ if newAuth.Type == auth.TypePassword {
+ err := userAuth.SetPassword(newAuth.Secret)
+ if err != nil {
+ logger.Error("SetPasswordError", err)
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ }
+ }
+
+ if err = userAuth.Save(); err != nil {
+ logger.Error("AuthSaveError", err)
+ h.ResultErrorJSON(w, r, http.StatusInternalServerError, "Unable to save Authentication for User", nil)
+ return
+ }
+
+ userAuth.Secret = ""
+
+ // todo: add to audit-log
+
+ h.ResultResponseJSON(w, r, http.StatusOK, userAuth)
+ }
+}
diff --git a/backend/internal/api/handler/certificate_authorities.go b/backend/internal/api/handler/certificate_authorities.go
new file mode 100644
index 00000000..c822cc06
--- /dev/null
+++ b/backend/internal/api/handler/certificate_authorities.go
@@ -0,0 +1,141 @@
+package handler
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+
+ "npm/internal/acme"
+ c "npm/internal/api/context"
+ h "npm/internal/api/http"
+ "npm/internal/api/middleware"
+ "npm/internal/entity/certificateauthority"
+ "npm/internal/logger"
+)
+
+// GetCertificateAuthorities will return a list of Certificate Authorities
+// Route: GET /certificate-authorities
+func GetCertificateAuthorities() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ pageInfo, err := getPageInfoFromRequest(r)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ certificates, err := certificateauthority.List(pageInfo, middleware.GetFiltersFromContext(r))
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ h.ResultResponseJSON(w, r, http.StatusOK, certificates)
+ }
+ }
+}
+
+// GetCertificateAuthority will return a single Certificate Authority
+// Route: GET /certificate-authorities/{caID}
+func GetCertificateAuthority() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var caID int
+ if caID, err = getURLParamInt(r, "caID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ cert, err := certificateauthority.GetByID(caID)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ h.ResultResponseJSON(w, r, http.StatusOK, cert)
+ }
+ }
+}
+
+// CreateCertificateAuthority will create a Certificate Authority
+// Route: POST /certificate-authorities
+func CreateCertificateAuthority() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+
+ var newCA certificateauthority.Model
+ err := json.Unmarshal(bodyBytes, &newCA)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+ return
+ }
+
+ if err = newCA.Check(); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ if err = newCA.Save(); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, fmt.Sprintf("Unable to save Certificate Authority: %s", err.Error()), nil)
+ return
+ }
+
+ if err = acme.CreateAccountKey(&newCA); err != nil {
+ logger.Error("CreateAccountKeyError", err)
+ }
+
+ h.ResultResponseJSON(w, r, http.StatusOK, newCA)
+ }
+}
+
+// UpdateCertificateAuthority updates a ca
+// Route: PUT /certificate-authorities/{caID}
+func UpdateCertificateAuthority() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var caID int
+ if caID, err = getURLParamInt(r, "caID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ ca, err := certificateauthority.GetByID(caID)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+ err := json.Unmarshal(bodyBytes, &ca)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+ return
+ }
+
+ if err = ca.Check(); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ if err = ca.Save(); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ h.ResultResponseJSON(w, r, http.StatusOK, ca)
+ }
+ }
+}
+
+// DeleteCertificateAuthority deletes a ca
+// Route: DELETE /certificate-authorities/{caID}
+func DeleteCertificateAuthority() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var caID int
+ if caID, err = getURLParamInt(r, "caID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ cert, err := certificateauthority.GetByID(caID)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ h.ResultResponseJSON(w, r, http.StatusOK, cert.Delete())
+ }
+ }
+}
diff --git a/backend/internal/api/handler/certificates.go b/backend/internal/api/handler/certificates.go
new file mode 100644
index 00000000..8c48ddd1
--- /dev/null
+++ b/backend/internal/api/handler/certificates.go
@@ -0,0 +1,145 @@
+package handler
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+
+ c "npm/internal/api/context"
+ h "npm/internal/api/http"
+ "npm/internal/api/middleware"
+ "npm/internal/api/schema"
+ "npm/internal/entity/certificate"
+)
+
+// GetCertificates will return a list of Certificates
+// Route: GET /certificates
+func GetCertificates() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ pageInfo, err := getPageInfoFromRequest(r)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ certificates, err := certificate.List(pageInfo, middleware.GetFiltersFromContext(r))
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ h.ResultResponseJSON(w, r, http.StatusOK, certificates)
+ }
+ }
+}
+
+// GetCertificate will return a single Certificate
+// Route: GET /certificates/{certificateID}
+func GetCertificate() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var certificateID int
+ if certificateID, err = getURLParamInt(r, "certificateID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ cert, err := certificate.GetByID(certificateID)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ h.ResultResponseJSON(w, r, http.StatusOK, cert)
+ }
+ }
+}
+
+// CreateCertificate will create a Certificate
+// Route: POST /certificates
+func CreateCertificate() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+
+ var newCertificate certificate.Model
+ err := json.Unmarshal(bodyBytes, &newCertificate)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+ return
+ }
+
+ // Get userID from token
+ userID, _ := r.Context().Value(c.UserIDCtxKey).(int)
+ newCertificate.UserID = userID
+
+ if err = newCertificate.Save(); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, fmt.Sprintf("Unable to save Certificate: %s", err.Error()), nil)
+ return
+ }
+
+ h.ResultResponseJSON(w, r, http.StatusOK, newCertificate)
+ }
+}
+
+// UpdateCertificate updates a cert
+// Route: PUT /certificates/{certificateID}
+func UpdateCertificate() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var certificateID int
+ if certificateID, err = getURLParamInt(r, "certificateID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ certificateObject, err := certificate.GetByID(certificateID)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+
+ // This is a special endpoint, as it needs to verify the schema payload
+ // based on the certificate type, without being given a type in the payload.
+ // The middleware would normally handle this.
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+ schemaErrors, jsonErr := middleware.CheckRequestSchema(r.Context(), schema.UpdateCertificate(certificateObject.Type), bodyBytes)
+ if jsonErr != nil {
+ h.ResultErrorJSON(w, r, http.StatusInternalServerError, fmt.Sprintf("Schema Fatal: %v", jsonErr), nil)
+ return
+ }
+
+ if len(schemaErrors) > 0 {
+ h.ResultSchemaErrorJSON(w, r, schemaErrors)
+ return
+ }
+
+ err := json.Unmarshal(bodyBytes, &certificateObject)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+ return
+ }
+
+ if err = certificateObject.Save(); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ h.ResultResponseJSON(w, r, http.StatusOK, certificateObject)
+ }
+ }
+}
+
+// DeleteCertificate deletes a cert
+// Route: DELETE /certificates/{certificateID}
+func DeleteCertificate() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var certificateID int
+ if certificateID, err = getURLParamInt(r, "certificateID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ cert, err := certificate.GetByID(certificateID)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ h.ResultResponseJSON(w, r, http.StatusOK, cert.Delete())
+ }
+ }
+}
diff --git a/backend/internal/api/handler/config.go b/backend/internal/api/handler/config.go
new file mode 100644
index 00000000..811d7580
--- /dev/null
+++ b/backend/internal/api/handler/config.go
@@ -0,0 +1,15 @@
+package handler
+
+import (
+ "net/http"
+ h "npm/internal/api/http"
+ "npm/internal/config"
+)
+
// Config returns the entire configuration, for debug purposes
// Route: GET /config
// NOTE(review): this serialises config.Configuration wholesale; confirm the
// struct contains no secrets (keys, passwords) and that the route is not
// reachable without authentication before shipping.
func Config() func(http.ResponseWriter, *http.Request) {
	return func(w http.ResponseWriter, r *http.Request) {
		h.ResultResponseJSON(w, r, http.StatusOK, config.Configuration)
	}
}
diff --git a/backend/internal/api/handler/dns_providers.go b/backend/internal/api/handler/dns_providers.go
new file mode 100644
index 00000000..6040b6bd
--- /dev/null
+++ b/backend/internal/api/handler/dns_providers.go
@@ -0,0 +1,159 @@
+package handler
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+
+ c "npm/internal/api/context"
+ h "npm/internal/api/http"
+ "npm/internal/api/middleware"
+ "npm/internal/dnsproviders"
+ "npm/internal/entity/dnsprovider"
+)
+
+// GetDNSProviders will return a list of DNS Providers
+// Route: GET /dns-providers
+func GetDNSProviders() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ pageInfo, err := getPageInfoFromRequest(r)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ items, err := dnsprovider.List(pageInfo, middleware.GetFiltersFromContext(r))
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ h.ResultResponseJSON(w, r, http.StatusOK, items)
+ }
+ }
+}
+
+// GetDNSProvider will return a single DNS Provider
+// Route: GET /dns-providers/{providerID}
+func GetDNSProvider() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var providerID int
+ if providerID, err = getURLParamInt(r, "providerID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ item, err := dnsprovider.GetByID(providerID)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ h.ResultResponseJSON(w, r, http.StatusOK, item)
+ }
+ }
+}
+
+// CreateDNSProvider will create a DNS Provider
+// Route: POST /dns-providers
+func CreateDNSProvider() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+
+ var newItem dnsprovider.Model
+ err := json.Unmarshal(bodyBytes, &newItem)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+ return
+ }
+
+ // Get userID from token
+ userID, _ := r.Context().Value(c.UserIDCtxKey).(int)
+ newItem.UserID = userID
+
+ if err = newItem.Save(); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, fmt.Sprintf("Unable to save DNS Provider: %s", err.Error()), nil)
+ return
+ }
+
+ h.ResultResponseJSON(w, r, http.StatusOK, newItem)
+ }
+}
+
+// UpdateDNSProvider updates a provider
+// Route: PUT /dns-providers/{providerID}
+func UpdateDNSProvider() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var providerID int
+ if providerID, err = getURLParamInt(r, "providerID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ item, err := dnsprovider.GetByID(providerID)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+ err := json.Unmarshal(bodyBytes, &item)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+ return
+ }
+
+ if err = item.Save(); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ h.ResultResponseJSON(w, r, http.StatusOK, item)
+ }
+ }
+}
+
+// DeleteDNSProvider removes a provider
+// Route: DELETE /dns-providers/{providerID}
+func DeleteDNSProvider() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var providerID int
+ if providerID, err = getURLParamInt(r, "providerID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ item, err := dnsprovider.GetByID(providerID)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ h.ResultResponseJSON(w, r, http.StatusOK, item.Delete())
+ }
+ }
+}
+
+// GetAcmeshProviders will return a list of acme.sh providers
+// Route: GET /dns-providers/acmesh
+func GetAcmeshProviders() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ items := dnsproviders.List()
+ h.ResultResponseJSON(w, r, http.StatusOK, items)
+ }
+}
+
+// GetAcmeshProvider will return a single acme.sh provider
+// Route: GET /dns-providers/acmesh/{acmeshID}
+func GetAcmeshProvider() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var acmeshID string
+ var err error
+ if acmeshID, err = getURLParamString(r, "acmeshID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ item, getErr := dnsproviders.Get(acmeshID)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, getErr.Error(), nil)
+ } else {
+ h.ResultResponseJSON(w, r, http.StatusOK, item)
+ }
+ }
+}
diff --git a/backend/internal/api/handler/health.go b/backend/internal/api/handler/health.go
new file mode 100644
index 00000000..b4df7a09
--- /dev/null
+++ b/backend/internal/api/handler/health.go
@@ -0,0 +1,34 @@
+package handler
+
+import (
+ "net/http"
+ "npm/internal/acme"
+ h "npm/internal/api/http"
+ "npm/internal/config"
+)
+
// healthCheckResponse is the JSON payload produced by the /health endpoint.
type healthCheckResponse struct {
	// Version is the application version, taken from config.Version.
	Version string `json:"version"`
	// Commit is the build commit, taken from config.Commit.
	Commit string `json:"commit"`
	// AcmeShVersion reports the bundled acme.sh version.
	AcmeShVersion string `json:"acme.sh"`
	// Healthy is set to true by the Health handler whenever it responds.
	Healthy bool `json:"healthy"`
	// IsSetup mirrors config.IsSetup — presumably whether initial setup has
	// completed; confirm against the config package.
	IsSetup bool `json:"setup"`
	// ErrorReporting mirrors config.ErrorReporting.
	ErrorReporting bool `json:"error_reporting"`
}
+
+// Health returns the health of the api
+// Route: GET /health
+func Health() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ health := healthCheckResponse{
+ Version: config.Version,
+ Commit: config.Commit,
+ Healthy: true,
+ IsSetup: config.IsSetup,
+ AcmeShVersion: acme.GetAcmeShVersion(),
+ ErrorReporting: config.ErrorReporting,
+ }
+
+ h.ResultResponseJSON(w, r, http.StatusOK, health)
+ }
+}
diff --git a/backend/internal/api/handler/helpers.go b/backend/internal/api/handler/helpers.go
new file mode 100644
index 00000000..3d6cf39a
--- /dev/null
+++ b/backend/internal/api/handler/helpers.go
@@ -0,0 +1,175 @@
+package handler
+
+import (
+ "fmt"
+ "net/http"
+ "strconv"
+ "strings"
+ "time"
+
+ "npm/internal/api/context"
+ "npm/internal/model"
+
+ "github.com/go-chi/chi"
+)
+
+const defaultLimit = 10
+
+func getPageInfoFromRequest(r *http.Request) (model.PageInfo, error) {
+ var pageInfo model.PageInfo
+ var err error
+
+ pageInfo.FromDate, pageInfo.ToDate, err = getDateRanges(r)
+ if err != nil {
+ return pageInfo, err
+ }
+
+ pageInfo.Offset, pageInfo.Limit, err = getPagination(r)
+ if err != nil {
+ return pageInfo, err
+ }
+
+ pageInfo.Sort = getSortParameter(r)
+
+ return pageInfo, nil
+}
+
+func getDateRanges(r *http.Request) (time.Time, time.Time, error) {
+ queryValues := r.URL.Query()
+ from := queryValues.Get("from")
+ fromDate := time.Now().AddDate(0, -1, 0) // 1 month ago by default
+ to := queryValues.Get("to")
+ toDate := time.Now()
+
+ if from != "" {
+ var fromErr error
+ fromDate, fromErr = time.Parse(time.RFC3339, from)
+ if fromErr != nil {
+ return fromDate, toDate, fmt.Errorf("From date is not in correct format: %v", strings.ReplaceAll(time.RFC3339, "Z", "+"))
+ }
+ }
+
+ if to != "" {
+ var toErr error
+ toDate, toErr = time.Parse(time.RFC3339, to)
+ if toErr != nil {
+ return fromDate, toDate, fmt.Errorf("To date is not in correct format: %v", strings.ReplaceAll(time.RFC3339, "Z", "+"))
+ }
+ }
+
+ return fromDate, toDate, nil
+}
+
+func getSortParameter(r *http.Request) []model.Sort {
+ var sortFields []model.Sort
+
+ queryValues := r.URL.Query()
+ sortString := queryValues.Get("sort")
+ if sortString == "" {
+ return sortFields
+ }
+
+ // Split sort fields up in to slice
+ sorts := strings.Split(sortString, ",")
+ for _, sortItem := range sorts {
+ if strings.Contains(sortItem, ".") {
+ theseItems := strings.Split(sortItem, ".")
+
+ switch strings.ToLower(theseItems[1]) {
+ case "desc":
+ fallthrough
+ case "descending":
+ theseItems[1] = "DESC"
+ default:
+ theseItems[1] = "ASC"
+ }
+
+ sortFields = append(sortFields, model.Sort{
+ Field: theseItems[0],
+ Direction: theseItems[1],
+ })
+ } else {
+ sortFields = append(sortFields, model.Sort{
+ Field: sortItem,
+ Direction: "ASC",
+ })
+ }
+ }
+
+ return sortFields
+}
+
+func getQueryVarInt(r *http.Request, varName string, required bool, defaultValue int) (int, error) {
+ queryValues := r.URL.Query()
+ varValue := queryValues.Get(varName)
+
+ if varValue == "" && required {
+ return 0, fmt.Errorf("%v was not supplied in the request", varName)
+ } else if varValue == "" {
+ return defaultValue, nil
+ }
+
+ varInt, intErr := strconv.Atoi(varValue)
+ if intErr != nil {
+ return 0, fmt.Errorf("%v is not a valid number", varName)
+ }
+
+ return varInt, nil
+}
+
+func getURLParamInt(r *http.Request, varName string) (int, error) {
+ required := true
+ defaultValue := 0
+ paramStr := chi.URLParam(r, varName)
+ var err error
+ var paramInt int
+
+ if paramStr == "" && required {
+ return 0, fmt.Errorf("%v was not supplied in the request", varName)
+ } else if paramStr == "" {
+ return defaultValue, nil
+ }
+
+ if paramInt, err = strconv.Atoi(paramStr); err != nil {
+ return 0, fmt.Errorf("%v is not a valid number", varName)
+ }
+
+ return paramInt, nil
+}
+
+func getURLParamString(r *http.Request, varName string) (string, error) {
+ required := true
+ defaultValue := ""
+ paramStr := chi.URLParam(r, varName)
+
+ if paramStr == "" && required {
+ return "", fmt.Errorf("%v was not supplied in the request", varName)
+ } else if paramStr == "" {
+ return defaultValue, nil
+ }
+
+ return paramStr, nil
+}
+
+func getPagination(r *http.Request) (int, int, error) {
+ var err error
+ offset, err := getQueryVarInt(r, "offset", false, 0)
+ if err != nil {
+ return 0, 0, err
+ }
+ limit, err := getQueryVarInt(r, "limit", false, defaultLimit)
+ if err != nil {
+ return 0, 0, err
+ }
+
+ return offset, limit, nil
+}
+
+// getExpandFromContext returns the Expansion setting
+func getExpandFromContext(r *http.Request) []string {
+ expand, ok := r.Context().Value(context.ExpansionCtxKey).([]string)
+ if !ok {
+ return nil
+ }
+ return expand
+}
diff --git a/backend/internal/api/handler/host_templates.go b/backend/internal/api/handler/host_templates.go
new file mode 100644
index 00000000..04defd1c
--- /dev/null
+++ b/backend/internal/api/handler/host_templates.go
@@ -0,0 +1,130 @@
+package handler
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+
+ c "npm/internal/api/context"
+ h "npm/internal/api/http"
+ "npm/internal/api/middleware"
+ "npm/internal/entity/host"
+ "npm/internal/entity/hosttemplate"
+)
+
+// GetHostTemplates will return a list of Host Templates
+// Route: GET /host-templates
+func GetHostTemplates() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ pageInfo, err := getPageInfoFromRequest(r)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ hosts, err := hosttemplate.List(pageInfo, middleware.GetFiltersFromContext(r))
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ h.ResultResponseJSON(w, r, http.StatusOK, hosts)
+ }
+ }
+}
+
+// GetHostTemplate will return a single Host Template
+// Route: GET /host-templates/{templateID}
+func GetHostTemplate() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var templateID int
+ if templateID, err = getURLParamInt(r, "templateID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ host, err := hosttemplate.GetByID(templateID)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ h.ResultResponseJSON(w, r, http.StatusOK, host)
+ }
+ }
+}
+
+// CreateHostTemplate will create a Host Template
+// Route: POST /host-templates
+func CreateHostTemplate() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+
+ var newHostTemplate hosttemplate.Model
+ err := json.Unmarshal(bodyBytes, &newHostTemplate)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+ return
+ }
+
+ // Get userID from token
+ userID, _ := r.Context().Value(c.UserIDCtxKey).(int)
+ newHostTemplate.UserID = userID
+
+ if err = newHostTemplate.Save(); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, fmt.Sprintf("Unable to save Host Template: %s", err.Error()), nil)
+ return
+ }
+
+ h.ResultResponseJSON(w, r, http.StatusOK, newHostTemplate)
+ }
+}
+
+// UpdateHostTemplate updates a host template
+// Route: PUT /host-templates/{templateID}
+func UpdateHostTemplate() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var templateID int
+ if templateID, err = getURLParamInt(r, "templateID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ hostTemplate, err := hosttemplate.GetByID(templateID)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+ err := json.Unmarshal(bodyBytes, &hostTemplate)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+ return
+ }
+
+ if err = hostTemplate.Save(); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ h.ResultResponseJSON(w, r, http.StatusOK, hostTemplate)
+ }
+ }
+}
+
+// DeleteHostTemplate removes a host template
+// Route: DELETE /host-templates/{templateID}
+func DeleteHostTemplate() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var templateID int
+ if templateID, err = getURLParamInt(r, "templateID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ hostTemplate, err := host.GetByID(templateID)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ h.ResultResponseJSON(w, r, http.StatusOK, hostTemplate.Delete())
+ }
+ }
+}
diff --git a/backend/internal/api/handler/hosts.go b/backend/internal/api/handler/hosts.go
new file mode 100644
index 00000000..3364d673
--- /dev/null
+++ b/backend/internal/api/handler/hosts.go
@@ -0,0 +1,140 @@
+package handler
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+
+ c "npm/internal/api/context"
+ h "npm/internal/api/http"
+ "npm/internal/api/middleware"
+ "npm/internal/entity/host"
+ "npm/internal/validator"
+)
+
+// GetHosts will return a list of Hosts
+// Route: GET /hosts
+func GetHosts() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ pageInfo, err := getPageInfoFromRequest(r)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ hosts, err := host.List(pageInfo, middleware.GetFiltersFromContext(r), getExpandFromContext(r))
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ h.ResultResponseJSON(w, r, http.StatusOK, hosts)
+ }
+ }
+}
+
+// GetHost will return a single Host
+// Route: GET /hosts/{hostID}
+func GetHost() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var hostID int
+ if hostID, err = getURLParamInt(r, "hostID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ hostObject, err := host.GetByID(hostID)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ // nolint: errcheck,gosec
+ hostObject.Expand(getExpandFromContext(r))
+ h.ResultResponseJSON(w, r, http.StatusOK, hostObject)
+ }
+ }
+}
+
+// CreateHost will create a Host
+// Route: POST /hosts
+func CreateHost() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+
+ var newHost host.Model
+ err := json.Unmarshal(bodyBytes, &newHost)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+ return
+ }
+
+ // Get userID from token
+ userID, _ := r.Context().Value(c.UserIDCtxKey).(int)
+ newHost.UserID = userID
+
+ if err = validator.ValidateHost(newHost); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ if err = newHost.Save(); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, fmt.Sprintf("Unable to save Host: %s", err.Error()), nil)
+ return
+ }
+
+ h.ResultResponseJSON(w, r, http.StatusOK, newHost)
+ }
+}
+
+// UpdateHost updates a host
+// Route: PUT /hosts/{hostID}
+func UpdateHost() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var hostID int
+ if hostID, err = getURLParamInt(r, "hostID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ hostObject, err := host.GetByID(hostID)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+ err := json.Unmarshal(bodyBytes, &hostObject)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+ return
+ }
+
+ if err = hostObject.Save(); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ // nolint: errcheck,gosec
+ hostObject.Expand(getExpandFromContext(r))
+
+ h.ResultResponseJSON(w, r, http.StatusOK, hostObject)
+ }
+ }
+}
+
+// DeleteHost removes a host
+// Route: DELETE /hosts/{hostID}
+func DeleteHost() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var hostID int
+ if hostID, err = getURLParamInt(r, "hostID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ host, err := host.GetByID(hostID)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ h.ResultResponseJSON(w, r, http.StatusOK, host.Delete())
+ }
+ }
+}
diff --git a/backend/internal/api/handler/not_allowed.go b/backend/internal/api/handler/not_allowed.go
new file mode 100644
index 00000000..966debab
--- /dev/null
+++ b/backend/internal/api/handler/not_allowed.go
@@ -0,0 +1,14 @@
+package handler
+
+import (
+ "net/http"
+
+ h "npm/internal/api/http"
+)
+
+// NotAllowed is a json error handler for when method is not allowed
+func NotAllowed() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ h.ResultErrorJSON(w, r, http.StatusNotFound, "Not allowed", nil)
+ }
+}
diff --git a/backend/internal/api/handler/not_found.go b/backend/internal/api/handler/not_found.go
new file mode 100644
index 00000000..07f40c71
--- /dev/null
+++ b/backend/internal/api/handler/not_found.go
@@ -0,0 +1,64 @@
+package handler
+
+import (
+ "errors"
+ "io"
+ "io/fs"
+ "mime"
+ "net/http"
+ "path/filepath"
+ "strings"
+
+ "npm/embed"
+ h "npm/internal/api/http"
+)
+
var (
	// assetsSub is the embedded frontend filesystem rooted at "assets";
	// populated once inside NotFound().
	assetsSub fs.FS
	// errIsDir signals that a requested path resolved to a directory.
	errIsDir = errors.New("path is dir")
)
+
+// NotFound is a json error handler for 404's and method not allowed.
+// It also serves the react frontend as embedded files in the golang binary.
+func NotFound() func(http.ResponseWriter, *http.Request) {
+ assetsSub, _ = fs.Sub(embed.Assets, "assets")
+
+ return func(w http.ResponseWriter, r *http.Request) {
+ path := strings.TrimLeft(r.URL.Path, "/")
+ if path == "" {
+ path = "index.html"
+ }
+
+ err := tryRead(assetsSub, path, w)
+ if err == errIsDir {
+ err = tryRead(assetsSub, "index.html", w)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusNotFound, "Not found", nil)
+ }
+ } else if err == nil {
+ return
+ }
+
+ h.ResultErrorJSON(w, r, http.StatusNotFound, "Not found", nil)
+ }
+}
+
+func tryRead(folder fs.FS, requestedPath string, w http.ResponseWriter) error {
+ f, err := folder.Open(requestedPath)
+ if err != nil {
+ return err
+ }
+
+ // nolint: errcheck
+ defer f.Close()
+
+ stat, _ := f.Stat()
+ if stat.IsDir() {
+ return errIsDir
+ }
+
+ contentType := mime.TypeByExtension(filepath.Ext(requestedPath))
+ w.Header().Set("Content-Type", contentType)
+ _, err = io.Copy(w, f)
+ return err
+}
diff --git a/backend/internal/api/handler/schema.go b/backend/internal/api/handler/schema.go
new file mode 100644
index 00000000..2fb37b50
--- /dev/null
+++ b/backend/internal/api/handler/schema.go
@@ -0,0 +1,108 @@
+package handler
+
+import (
+ "encoding/json"
+ "fmt"
+ "io/fs"
+ "net/http"
+ "strings"
+
+ "npm/embed"
+ "npm/internal/api/schema"
+ "npm/internal/config"
+ "npm/internal/logger"
+
+ jsref "github.com/jc21/jsref"
+ "github.com/jc21/jsref/provider"
+)
+
+var (
+ swaggerSchema []byte
+ apiDocsSub fs.FS
+)
+
+// Schema simply reads the swagger schema from disk and returns is raw
+// Route: GET /schema
+func Schema() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ w.Header().Set("Content-Type", "application/json; charset=utf-8")
+ w.WriteHeader(http.StatusOK)
+ fmt.Fprint(w, string(getSchema()))
+ }
+}
+
+func getSchema() []byte {
+ if swaggerSchema == nil {
+ apiDocsSub, _ = fs.Sub(embed.APIDocFiles, "api_docs")
+
+ // nolint:gosec
+ swaggerSchema, _ = fs.ReadFile(apiDocsSub, "api.swagger.json")
+
+ // Replace {{VERSION}} with Config Version
+ swaggerSchema = []byte(strings.ReplaceAll(string(swaggerSchema), "{{VERSION}}", config.Version))
+
+ // Dereference the JSON Schema:
+ var schema interface{}
+ if err := json.Unmarshal(swaggerSchema, &schema); err != nil {
+ logger.Error("SwaggerUnmarshalError", err)
+ return nil
+ }
+
+ provider := provider.NewIoFS(apiDocsSub, "")
+ resolver := jsref.New()
+ err := resolver.AddProvider(provider)
+ if err != nil {
+ logger.Error("SchemaProviderError", err)
+ }
+
+ result, err := resolver.Resolve(schema, "", []jsref.Option{jsref.WithRecursiveResolution(true)}...)
+ if err != nil {
+ logger.Error("SwaggerResolveError", err)
+ } else {
+ var marshalErr error
+ swaggerSchema, marshalErr = json.MarshalIndent(result, "", " ")
+ if marshalErr != nil {
+ logger.Error("SwaggerMarshalError", err)
+ }
+ }
+ // End dereference
+
+ // Replace incoming schemas with those we actually use in code
+ swaggerSchema = replaceIncomingSchemas(swaggerSchema)
+ }
+ return swaggerSchema
+}
+
+func replaceIncomingSchemas(swaggerSchema []byte) []byte {
+ str := string(swaggerSchema)
+
+ // Remember to include the double quotes in the replacement!
+ str = strings.ReplaceAll(str, `"{{schema.SetAuth}}"`, schema.SetAuth())
+ str = strings.ReplaceAll(str, `"{{schema.GetToken}}"`, schema.GetToken())
+
+ str = strings.ReplaceAll(str, `"{{schema.CreateCertificateAuthority}}"`, schema.CreateCertificateAuthority())
+ str = strings.ReplaceAll(str, `"{{schema.UpdateCertificateAuthority}}"`, schema.UpdateCertificateAuthority())
+
+ str = strings.ReplaceAll(str, `"{{schema.CreateCertificate}}"`, schema.CreateCertificate())
+ str = strings.ReplaceAll(str, `"{{schema.UpdateCertificate}}"`, schema.UpdateCertificate(""))
+
+ str = strings.ReplaceAll(str, `"{{schema.CreateSetting}}"`, schema.CreateSetting())
+ str = strings.ReplaceAll(str, `"{{schema.UpdateSetting}}"`, schema.UpdateSetting())
+
+ str = strings.ReplaceAll(str, `"{{schema.CreateUser}}"`, schema.CreateUser())
+ str = strings.ReplaceAll(str, `"{{schema.UpdateUser}}"`, schema.UpdateUser())
+
+ str = strings.ReplaceAll(str, `"{{schema.CreateHost}}"`, schema.CreateHost())
+ str = strings.ReplaceAll(str, `"{{schema.UpdateHost}}"`, schema.UpdateHost())
+
+ str = strings.ReplaceAll(str, `"{{schema.CreateHostTemplate}}"`, schema.CreateHostTemplate())
+ str = strings.ReplaceAll(str, `"{{schema.UpdateHostTemplate}}"`, schema.UpdateHostTemplate())
+
+ str = strings.ReplaceAll(str, `"{{schema.CreateStream}}"`, schema.CreateStream())
+ str = strings.ReplaceAll(str, `"{{schema.UpdateStream}}"`, schema.UpdateStream())
+
+ str = strings.ReplaceAll(str, `"{{schema.CreateDNSProvider}}"`, schema.CreateDNSProvider())
+ str = strings.ReplaceAll(str, `"{{schema.UpdateDNSProvider}}"`, schema.UpdateDNSProvider())
+
+ return []byte(str)
+}
diff --git a/backend/internal/api/handler/settings.go b/backend/internal/api/handler/settings.go
new file mode 100644
index 00000000..b0e1d7ac
--- /dev/null
+++ b/backend/internal/api/handler/settings.go
@@ -0,0 +1,98 @@
+package handler
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+
+ c "npm/internal/api/context"
+ h "npm/internal/api/http"
+ "npm/internal/api/middleware"
+ "npm/internal/entity/setting"
+
+ "github.com/go-chi/chi"
+)
+
+// GetSettings will return a paginated list of Settings.
+// Pagination and sorting come from query params via getPageInfoFromRequest;
+// filters were validated and stored on the context by the Filters middleware.
+// Route: GET /settings
+func GetSettings() func(http.ResponseWriter, *http.Request) {
+	return func(w http.ResponseWriter, r *http.Request) {
+		pageInfo, err := getPageInfoFromRequest(r)
+		if err != nil {
+			h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+			return
+		}
+
+		settings, err := setting.List(pageInfo, middleware.GetFiltersFromContext(r))
+		if err != nil {
+			h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+		} else {
+			h.ResultResponseJSON(w, r, http.StatusOK, settings)
+		}
+	}
+}
+
+// GetSetting will return a single Setting, looked up by its URL name param
+// Route: GET /settings/{name}
+func GetSetting() func(http.ResponseWriter, *http.Request) {
+	return func(w http.ResponseWriter, r *http.Request) {
+		name := chi.URLParam(r, "name")
+
+		sett, err := setting.GetByName(name)
+		if err != nil {
+			h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+		} else {
+			h.ResultResponseJSON(w, r, http.StatusOK, sett)
+		}
+	}
+}
+
+// CreateSetting will create a Setting from the JSON request body.
+// The body was read and schema-validated by upstream middleware and is
+// fetched here from the request context.
+// Route: POST /settings
+func CreateSetting() func(http.ResponseWriter, *http.Request) {
+	return func(w http.ResponseWriter, r *http.Request) {
+		bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+
+		var newSetting setting.Model
+		err := json.Unmarshal(bodyBytes, &newSetting)
+		if err != nil {
+			h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+			return
+		}
+
+		if err = newSetting.Save(); err != nil {
+			h.ResultErrorJSON(w, r, http.StatusBadRequest, fmt.Sprintf("Unable to save Setting: %s", err.Error()), nil)
+			return
+		}
+
+		h.ResultResponseJSON(w, r, http.StatusOK, newSetting)
+	}
+}
+
+// UpdateSetting fetches an existing Setting by name, merges the JSON request
+// body over it and saves the result.
+// Route: PUT /settings/{name}
+func UpdateSetting() func(http.ResponseWriter, *http.Request) {
+	return func(w http.ResponseWriter, r *http.Request) {
+		settingName := chi.URLParam(r, "name")
+
+		// Named "sett" (matching GetSetting) so the "setting" package
+		// is not shadowed by a local variable, as it previously was
+		sett, err := setting.GetByName(settingName)
+		if err != nil {
+			h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+		} else {
+
+			bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+			err := json.Unmarshal(bodyBytes, &sett)
+			if err != nil {
+				h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+				return
+			}
+
+			if err = sett.Save(); err != nil {
+				h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+				return
+			}
+
+			h.ResultResponseJSON(w, r, http.StatusOK, sett)
+		}
+	}
+}
diff --git a/backend/internal/api/handler/streams.go b/backend/internal/api/handler/streams.go
new file mode 100644
index 00000000..17c668e4
--- /dev/null
+++ b/backend/internal/api/handler/streams.go
@@ -0,0 +1,129 @@
+package handler
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+
+ c "npm/internal/api/context"
+ h "npm/internal/api/http"
+ "npm/internal/api/middleware"
+ "npm/internal/entity/stream"
+)
+
+// GetStreams will return a paginated list of Streams
+// Route: GET /hosts/streams
+func GetStreams() func(http.ResponseWriter, *http.Request) {
+	return func(w http.ResponseWriter, r *http.Request) {
+		pageInfo, err := getPageInfoFromRequest(r)
+		if err != nil {
+			h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+			return
+		}
+
+		// Filters come from validated query params via the Filters middleware
+		hosts, err := stream.List(pageInfo, middleware.GetFiltersFromContext(r))
+		if err != nil {
+			h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+		} else {
+			h.ResultResponseJSON(w, r, http.StatusOK, hosts)
+		}
+	}
+}
+
+// GetStream will return a single Stream by its ID
+// Route: GET /hosts/streams/{hostID}
+func GetStream() func(http.ResponseWriter, *http.Request) {
+	return func(w http.ResponseWriter, r *http.Request) {
+		var err error
+		var hostID int
+		if hostID, err = getURLParamInt(r, "hostID"); err != nil {
+			h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+			return
+		}
+
+		host, err := stream.GetByID(hostID)
+		if err != nil {
+			h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+		} else {
+			h.ResultResponseJSON(w, r, http.StatusOK, host)
+		}
+	}
+}
+
+// CreateStream will create a Stream owned by the authenticated user
+// Route: POST /hosts/streams
+func CreateStream() func(http.ResponseWriter, *http.Request) {
+	return func(w http.ResponseWriter, r *http.Request) {
+		bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+
+		var newHost stream.Model
+		err := json.Unmarshal(bodyBytes, &newHost)
+		if err != nil {
+			h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+			return
+		}
+
+		// Get userID from token so ownership cannot be spoofed via the payload
+		userID, _ := r.Context().Value(c.UserIDCtxKey).(int)
+		newHost.UserID = userID
+
+		if err = newHost.Save(); err != nil {
+			h.ResultErrorJSON(w, r, http.StatusBadRequest, fmt.Sprintf("Unable to save Stream: %s", err.Error()), nil)
+			return
+		}
+
+		h.ResultResponseJSON(w, r, http.StatusOK, newHost)
+	}
+}
+
+// UpdateStream fetches an existing Stream, merges the JSON request body over
+// it and saves the result
+// Route: PUT /hosts/streams/{hostID}
+func UpdateStream() func(http.ResponseWriter, *http.Request) {
+	return func(w http.ResponseWriter, r *http.Request) {
+		var err error
+		var hostID int
+		if hostID, err = getURLParamInt(r, "hostID"); err != nil {
+			h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+			return
+		}
+
+		host, err := stream.GetByID(hostID)
+		if err != nil {
+			h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+		} else {
+			// Unmarshal over the fetched model so unspecified fields keep
+			// their stored values (partial update semantics)
+			bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+			err := json.Unmarshal(bodyBytes, &host)
+			if err != nil {
+				h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+				return
+			}
+
+			if err = host.Save(); err != nil {
+				h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+				return
+			}
+
+			h.ResultResponseJSON(w, r, http.StatusOK, host)
+		}
+	}
+}
+
+// DeleteStream removes a stream and returns the boolean result of the delete
+// Route: DELETE /hosts/streams/{hostID}
+func DeleteStream() func(http.ResponseWriter, *http.Request) {
+	return func(w http.ResponseWriter, r *http.Request) {
+		var err error
+		var hostID int
+		if hostID, err = getURLParamInt(r, "hostID"); err != nil {
+			h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+			return
+		}
+
+		host, err := stream.GetByID(hostID)
+		if err != nil {
+			h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+		} else {
+			h.ResultResponseJSON(w, r, http.StatusOK, host.Delete())
+		}
+	}
+}
diff --git a/backend/internal/api/handler/tokens.go b/backend/internal/api/handler/tokens.go
new file mode 100644
index 00000000..2f317a92
--- /dev/null
+++ b/backend/internal/api/handler/tokens.go
@@ -0,0 +1,89 @@
+package handler
+
+import (
+ "encoding/json"
+ "net/http"
+ h "npm/internal/api/http"
+ "npm/internal/errors"
+ "npm/internal/logger"
+ "time"
+
+ c "npm/internal/api/context"
+ "npm/internal/entity/auth"
+ "npm/internal/entity/user"
+ njwt "npm/internal/jwt"
+)
+
+// tokenPayload is the structure we expect from an incoming login request
+type tokenPayload struct {
+	Type     string `json:"type"`     // auth type, e.g. "password"
+	Identity string `json:"identity"` // user email address
+	Secret   string `json:"secret"`   // the credential for the given type
+}
+
+// NewToken Also known as a Login, requesting a new token with credentials.
+// All credential failures return the same generic ErrInvalidLogin so the
+// response does not reveal whether the account exists.
+// Route: POST /tokens
+func NewToken() func(http.ResponseWriter, *http.Request) {
+	return func(w http.ResponseWriter, r *http.Request) {
+		// Read the bytes from the body
+		bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+
+		var payload tokenPayload
+		err := json.Unmarshal(bodyBytes, &payload)
+		if err != nil {
+			h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+			return
+		}
+
+		// Find user
+		userObj, userErr := user.GetByEmail(payload.Identity)
+		if userErr != nil {
+			h.ResultErrorJSON(w, r, http.StatusBadRequest, errors.ErrInvalidLogin.Error(), nil)
+			return
+		}
+
+		if userObj.IsDisabled {
+			h.ResultErrorJSON(w, r, http.StatusUnauthorized, errors.ErrUserDisabled.Error(), nil)
+			return
+		}
+
+		// Get Auth record matching the requested auth type
+		authObj, authErr := auth.GetByUserIDType(userObj.ID, payload.Type)
+		if authErr != nil {
+			logger.Debug("%s: %s", errors.ErrInvalidLogin.Error(), authErr.Error())
+			h.ResultErrorJSON(w, r, http.StatusBadRequest, errors.ErrInvalidLogin.Error(), nil)
+			return
+		}
+
+		// Verify Auth
+		validateErr := authObj.ValidateSecret(payload.Secret)
+		if validateErr != nil {
+			logger.Debug("%s: %s", errors.ErrInvalidLogin.Error(), validateErr.Error())
+			// Sleep for 1 second to prevent brute force password guessing
+			// NOTE(review): this also makes wrong-password responses slower
+			// than unknown-user responses — a timing side channel to confirm
+			time.Sleep(time.Second)
+			h.ResultErrorJSON(w, r, http.StatusBadRequest, errors.ErrInvalidLogin.Error(), nil)
+			return
+		}
+
+		if response, err := njwt.Generate(&userObj); err != nil {
+			h.ResultErrorJSON(w, r, http.StatusInternalServerError, err.Error(), nil)
+		} else {
+			h.ResultResponseJSON(w, r, http.StatusOK, response)
+		}
+	}
+}
+
+// RefreshToken refreshes an existing token by giving the caller a new one
+// with the same claims.
+// WARNING: this is an unfinished stub — it ignores the caller's identity
+// entirely and issues a token for a hard-coded email address. It must be
+// implemented properly (verify the presented token, look up its user)
+// before any release.
+// Route: GET /tokens
+func RefreshToken() func(http.ResponseWriter, *http.Request) {
+	return func(w http.ResponseWriter, r *http.Request) {
+		// TODO: Use your own methods to verify an existing user is
+		// able to refresh their token and then give them a new one
+		userObj, _ := user.GetByEmail("jc@jc21.com")
+		if response, err := njwt.Generate(&userObj); err != nil {
+			h.ResultErrorJSON(w, r, http.StatusInternalServerError, err.Error(), nil)
+		} else {
+			h.ResultResponseJSON(w, r, http.StatusOK, response)
+		}
+	}
+}
diff --git a/backend/internal/api/handler/users.go b/backend/internal/api/handler/users.go
new file mode 100644
index 00000000..192ff7b2
--- /dev/null
+++ b/backend/internal/api/handler/users.go
@@ -0,0 +1,235 @@
+package handler
+
+import (
+ "encoding/json"
+ "net/http"
+
+ c "npm/internal/api/context"
+ h "npm/internal/api/http"
+ "npm/internal/api/middleware"
+ "npm/internal/config"
+ "npm/internal/entity/auth"
+ "npm/internal/entity/user"
+ "npm/internal/errors"
+ "npm/internal/logger"
+
+ "github.com/go-chi/chi"
+)
+
+// GetUsers returns a paginated list of users, optionally expanded with
+// related data requested via the ?expand= query param
+// Route: GET /users
+func GetUsers() func(http.ResponseWriter, *http.Request) {
+	return func(w http.ResponseWriter, r *http.Request) {
+		pageInfo, err := getPageInfoFromRequest(r)
+		if err != nil {
+			h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+			return
+		}
+
+		users, err := user.List(pageInfo, middleware.GetFiltersFromContext(r), getExpandFromContext(r))
+		if err != nil {
+			h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+		} else {
+			h.ResultResponseJSON(w, r, http.StatusOK, users)
+		}
+	}
+}
+
+// GetUser returns a specific user. The {userID} param may be a numeric ID
+// or the literal "me" for the authenticated user.
+// Route: GET /users/{userID}
+func GetUser() func(http.ResponseWriter, *http.Request) {
+	return func(w http.ResponseWriter, r *http.Request) {
+		userID, _, userIDErr := getUserIDFromRequest(r)
+		if userIDErr != nil {
+			h.ResultErrorJSON(w, r, http.StatusBadRequest, userIDErr.Error(), nil)
+			return
+		}
+
+		userObject, err := user.GetByID(userID)
+		if err != nil {
+			h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+		} else {
+			// Expansion failure is non-fatal; the base object is still returned
+			// nolint: errcheck,gosec
+			userObject.Expand(getExpandFromContext(r))
+			h.ResultResponseJSON(w, r, http.StatusOK, userObject)
+		}
+	}
+}
+
+// UpdateUser updates a user. {userID} may be numeric or "me" (self).
+// A user cannot disable themselves, and capability changes are only applied
+// when editing another user (you cannot change your own capabilities).
+// Route: PUT /users/{userID}
+func UpdateUser() func(http.ResponseWriter, *http.Request) {
+	return func(w http.ResponseWriter, r *http.Request) {
+		userID, self, userIDErr := getUserIDFromRequest(r)
+		if userIDErr != nil {
+			h.ResultErrorJSON(w, r, http.StatusBadRequest, userIDErr.Error(), nil)
+			return
+		}
+
+		userObject, err := user.GetByID(userID)
+		if err != nil {
+			h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+		} else {
+			// Load current capabilities first so a payload without them
+			// doesn't wipe the stored set when unmarshalled over the model
+			// nolint: errcheck,gosec
+			userObject.Expand([]string{"capabilities"})
+			bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+			err := json.Unmarshal(bodyBytes, &userObject)
+			if err != nil {
+				h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+				return
+			}
+
+			if userObject.IsDisabled && self {
+				h.ResultErrorJSON(w, r, http.StatusBadRequest, "You cannot disable yourself!", nil)
+				return
+			}
+
+			if err = userObject.Save(); err != nil {
+				// Known validation errors surface as 400; anything else is a 500
+				if err == errors.ErrDuplicateEmailUser || err == errors.ErrSystemUserReadonly {
+					h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+				} else {
+					logger.Error("UpdateUserError", err)
+					h.ResultErrorJSON(w, r, http.StatusInternalServerError, "Unable to save User", nil)
+				}
+				return
+			}
+
+			if !self {
+				err = userObject.SaveCapabilities()
+				if err != nil {
+					h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+					return
+				}
+			}
+
+			// nolint: errcheck,gosec
+			userObject.Expand(getExpandFromContext(r))
+
+			h.ResultResponseJSON(w, r, http.StatusOK, userObject)
+		}
+	}
+}
+
+// DeleteUser removes a user. Self-deletion is refused.
+// Route: DELETE /users/{userID}
+func DeleteUser() func(http.ResponseWriter, *http.Request) {
+	return func(w http.ResponseWriter, r *http.Request) {
+		var userID int
+		var err error
+		if userID, err = getURLParamInt(r, "userID"); err != nil {
+			h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+			return
+		}
+
+		myUserID, _ := r.Context().Value(c.UserIDCtxKey).(int)
+		if myUserID == userID {
+			h.ResultErrorJSON(w, r, http.StatusBadRequest, "You cannot delete yourself!", nil)
+			return
+		}
+
+		// Named "userObject" (matching GetUser/UpdateUser) so the "user"
+		// package is not shadowed by a local variable, as it previously was
+		userObject, err := user.GetByID(userID)
+		if err != nil {
+			h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+		} else {
+			h.ResultResponseJSON(w, r, http.StatusOK, userObject.Delete())
+		}
+	}
+}
+
+// CreateUser creates a user. The first user created while the system is in
+// Setup Mode is granted full-admin capability and ends Setup Mode. If the
+// payload includes an auth secret, a matching auth record is saved too.
+// Route: POST /users
+func CreateUser() func(http.ResponseWriter, *http.Request) {
+	return func(w http.ResponseWriter, r *http.Request) {
+		bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+
+		var newUser user.Model
+		err := json.Unmarshal(bodyBytes, &newUser)
+		if err != nil {
+			h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+			return
+		}
+
+		if err = newUser.Save(); err != nil {
+			if err == errors.ErrDuplicateEmailUser || err == errors.ErrSystemUserReadonly {
+				h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+			} else {
+				logger.Error("UpdateUserError", err)
+				h.ResultErrorJSON(w, r, http.StatusInternalServerError, "Unable to save User", nil)
+			}
+			return
+		}
+
+		// Set the permissions to full-admin for this user (first user setup)
+		if !config.IsSetup {
+			newUser.Capabilities = []string{user.CapabilityFullAdmin}
+		}
+
+		// nolint: errcheck,gosec
+		err = newUser.SaveCapabilities()
+		if err != nil {
+			h.ResultErrorJSON(w, r, http.StatusInternalServerError, err.Error(), nil)
+			return
+		}
+
+		// newUser has been saved, now save their auth
+		if newUser.Auth.Secret != "" && newUser.Auth.ID == 0 {
+			newUser.Auth.UserID = newUser.ID
+			if newUser.Auth.Type == auth.TypePassword {
+				err = newUser.Auth.SetPassword(newUser.Auth.Secret)
+				if err != nil {
+					// NOTE(review): this error is only logged — the flow then
+					// saves the auth record anyway; confirm that's intended
+					logger.Error("SetPasswordError", err)
+				}
+			}
+
+			if err = newUser.Auth.Save(); err != nil {
+				h.ResultErrorJSON(w, r, http.StatusInternalServerError, "Unable to save Authentication for User", nil)
+				return
+			}
+
+			// Never echo the plaintext secret back to the client
+			newUser.Auth.Secret = ""
+		}
+
+		if !config.IsSetup {
+			config.IsSetup = true
+			logger.Info("A new user was created, leaving Setup Mode")
+		}
+
+		h.ResultResponseJSON(w, r, http.StatusOK, newUser)
+	}
+}
+
+// DeleteUsers wipes ALL users and returns the system to Setup Mode.
+// It is only available in debug mode for cypress tests.
+// Route: DELETE /users
+func DeleteUsers() func(http.ResponseWriter, *http.Request) {
+	return func(w http.ResponseWriter, r *http.Request) {
+		err := user.DeleteAll()
+		if err != nil {
+			h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+		} else {
+			// also change setup to true
+			config.IsSetup = false
+			logger.Info("Users have been wiped, entering Setup Mode")
+			h.ResultResponseJSON(w, r, http.StatusOK, true)
+		}
+	}
+}
+
+// getUserIDFromRequest resolves the {userID} URL param, supporting the
+// literal "me" for the authenticated user. It returns the resolved ID,
+// whether the ID refers to the requester themselves, and any parse error.
+func getUserIDFromRequest(r *http.Request) (int, bool, error) {
+	userIDstr := chi.URLParam(r, "userID")
+	selfUserID, _ := r.Context().Value(c.UserIDCtxKey).(int)
+
+	var userID int
+	self := false
+	if userIDstr == "me" {
+		// Get user id from Token
+		userID = selfUserID
+		self = true
+	} else {
+		var userIDerr error
+		if userID, userIDerr = getURLParamInt(r, "userID"); userIDerr != nil {
+			return 0, false, userIDerr
+		}
+		// A numeric ID may still be the requester's own
+		self = selfUserID == userID
+	}
+	return userID, self, nil
+}
diff --git a/backend/internal/api/http/requests.go b/backend/internal/api/http/requests.go
new file mode 100644
index 00000000..3e7103c0
--- /dev/null
+++ b/backend/internal/api/http/requests.go
@@ -0,0 +1,46 @@
+package http
+
+import (
+ "context"
+ "encoding/json"
+ "errors"
+
+ "github.com/qri-io/jsonschema"
+)
+
+var (
+	// ErrInvalidJSON is an error for invalid json
+	ErrInvalidJSON = errors.New("JSON is invalid")
+	// ErrInvalidPayload is an error for invalid incoming data
+	ErrInvalidPayload = errors.New("Payload is invalid")
+)
+
+// ValidateRequestSchema takes a Schema and the Content to validate against it.
+// It returns (nil, nil) when the body is valid, the per-key schema errors
+// when validation fails, or a non-nil error for malformed JSON / bad schema.
+func ValidateRequestSchema(schema string, requestBody []byte) ([]jsonschema.KeyError, error) {
+	var jsonErrors []jsonschema.KeyError
+	var schemaBytes = []byte(schema)
+
+	// Make sure the body is valid JSON
+	if !isJSON(requestBody) {
+		return jsonErrors, ErrInvalidJSON
+	}
+
+	rs := &jsonschema.Schema{}
+	if err := json.Unmarshal(schemaBytes, rs); err != nil {
+		return jsonErrors, err
+	}
+
+	var validationErr error
+	ctx := context.TODO()
+	if jsonErrors, validationErr = rs.ValidateBytes(ctx, requestBody); len(jsonErrors) > 0 {
+		return jsonErrors, validationErr
+	}
+
+	// Valid
+	return nil, nil
+}
+
+// isJSON reports whether bytes parse as a JSON object.
+// NOTE(review): because it unmarshals into a map, top-level JSON arrays and
+// scalars are rejected — confirm every request body is expected to be an object.
+func isJSON(bytes []byte) bool {
+	var js map[string]interface{}
+	return json.Unmarshal(bytes, &js) == nil
+}
diff --git a/backend/internal/api/http/responses.go b/backend/internal/api/http/responses.go
new file mode 100644
index 00000000..c65736d3
--- /dev/null
+++ b/backend/internal/api/http/responses.go
@@ -0,0 +1,91 @@
+package http
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+ "reflect"
+
+ c "npm/internal/api/context"
+ "npm/internal/errors"
+ "npm/internal/logger"
+
+ "github.com/qri-io/jsonschema"
+)
+
+// Response is the standard envelope for all API results:
+// {"result": ...} on success, {"result": null, "error": {...}} on failure
+type Response struct {
+	Result interface{} `json:"result"`
+	Error  interface{} `json:"error,omitempty"`
+}
+
+// ErrorResponse interface for errors returned via the API
+type ErrorResponse struct {
+	Code    interface{} `json:"code"`
+	Message interface{} `json:"message"`
+	Invalid interface{} `json:"invalid,omitempty"`
+}
+
+// ResultResponseJSON will write the result as json to the http output.
+// ErrorResponse values are placed under "error"; everything else under "result".
+func ResultResponseJSON(w http.ResponseWriter, r *http.Request, status int, result interface{}) {
+	w.Header().Set("Content-Type", "application/json; charset=utf-8")
+	w.WriteHeader(status)
+
+	var response Response
+	// NOTE(review): detecting ErrorResponse by its reflected type NAME is
+	// brittle (breaks on rename/move); a type switch would be safer
+	resultClass := fmt.Sprintf("%v", reflect.TypeOf(result))
+
+	if resultClass == "http.ErrorResponse" {
+		response = Response{
+			Error: result,
+		}
+	} else {
+		response = Response{
+			Result: result,
+		}
+	}
+
+	var payload []byte
+	var err error
+	if getPrettyPrintFromContext(r) {
+		payload, err = json.MarshalIndent(response, "", "  ")
+	} else {
+		payload, err = json.Marshal(response)
+	}
+
+	// Marshal failure is logged but still writes the (empty) payload;
+	// the status header has already been sent at this point
+	if err != nil {
+		logger.Error("ResponseMarshalError", err)
+	}
+
+	fmt.Fprint(w, string(payload))
+}
+
+// ResultSchemaErrorJSON will format the result as a standard error object and send it for output
+func ResultSchemaErrorJSON(w http.ResponseWriter, r *http.Request, errs []jsonschema.KeyError) {
+	errorResponse := ErrorResponse{
+		Code: http.StatusBadRequest,
+		// NOTE(review): this is an error VALUE, not a string — json.Marshal
+		// renders a plain error as "{}"; consider .Error() here
+		Message: errors.ErrValidationFailed,
+		Invalid: errs,
+	}
+
+	ResultResponseJSON(w, r, http.StatusBadRequest, errorResponse)
+}
+
+// ResultErrorJSON will format the result as a standard error object and send it for output
+func ResultErrorJSON(w http.ResponseWriter, r *http.Request, status int, message string, extended interface{}) {
+	errorResponse := ErrorResponse{
+		Code:    status,
+		Message: message,
+		Invalid: extended,
+	}
+
+	ResultResponseJSON(w, r, status, errorResponse)
+}
+
+// getPrettyPrintFromContext returns the PrettyPrint setting stored on the
+// request context by the PrettyPrint middleware; defaults to false
+func getPrettyPrintFromContext(r *http.Request) bool {
+	pretty, ok := r.Context().Value(c.PrettyPrintCtxKey).(bool)
+	if !ok {
+		return false
+	}
+	return pretty
+}
diff --git a/backend/internal/api/middleware/access_control.go b/backend/internal/api/middleware/access_control.go
new file mode 100644
index 00000000..18bca31b
--- /dev/null
+++ b/backend/internal/api/middleware/access_control.go
@@ -0,0 +1,13 @@
+package middleware
+
+import (
+ "net/http"
+)
+
+// AccessControl sets http headers for responses.
+// NOTE(review): the wildcard allows any origin to read API responses —
+// confirm this is intended for authenticated endpoints.
+func AccessControl(next http.Handler) http.Handler {
+	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		w.Header().Set("Access-Control-Allow-Origin", "*")
+		next.ServeHTTP(w, r)
+	})
+}
diff --git a/backend/internal/api/middleware/auth.go b/backend/internal/api/middleware/auth.go
new file mode 100644
index 00000000..65249f0b
--- /dev/null
+++ b/backend/internal/api/middleware/auth.go
@@ -0,0 +1,94 @@
+package middleware
+
+import (
+ "context"
+ "fmt"
+ "net/http"
+
+ c "npm/internal/api/context"
+ h "npm/internal/api/http"
+ "npm/internal/config"
+ "npm/internal/entity/user"
+ njwt "npm/internal/jwt"
+ "npm/internal/logger"
+ "npm/internal/util"
+
+ "github.com/go-chi/jwtauth"
+)
+
+// DecodeAuth decodes an auth header: builds an RS256 verifier from the
+// configured keypair and returns the jwtauth middleware that parses the
+// token into the request context (verification happens later in Enforce).
+// Key-parse failures are logged but not fatal here.
+func DecodeAuth() func(http.Handler) http.Handler {
+	privateKey, privateKeyParseErr := njwt.GetPrivateKey()
+	if privateKeyParseErr != nil && privateKey == nil {
+		logger.Error("PrivateKeyParseError", privateKeyParseErr)
+	}
+
+	publicKey, publicKeyParseErr := njwt.GetPublicKey()
+	if publicKeyParseErr != nil && publicKey == nil {
+		logger.Error("PublicKeyParseError", publicKeyParseErr)
+	}
+
+	tokenAuth := jwtauth.New("RS256", privateKey, publicKey)
+	return jwtauth.Verifier(tokenAuth)
+}
+
+// Enforce is an authentication middleware to enforce access from the
+// jwtauth.Verifier middleware request context values. The Authenticator sends a 401 Unauthorised
+// response for any unverified tokens and passes the good ones through.
+// When permission is non-empty the user must additionally hold that
+// capability (or full-admin); capabilities are read from the DB via a
+// short-lived memory cache. During Setup Mode nothing is enforced.
+func Enforce(permission string) func(http.Handler) http.Handler {
+	return func(next http.Handler) http.Handler {
+		return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+			ctx := r.Context()
+
+			if config.IsSetup {
+				token, claims, err := jwtauth.FromContext(ctx)
+
+				if err != nil {
+					h.ResultErrorJSON(w, r, http.StatusUnauthorized, err.Error(), nil)
+					return
+				}
+
+				// NOTE(review): this assertion panics if "uid" is missing or
+				// not numeric in the claims — consider a comma-ok check
+				userID := int(claims["uid"].(float64))
+				_, enabled := user.IsEnabled(userID)
+				if token == nil || !token.Valid || !enabled {
+					h.ResultErrorJSON(w, r, http.StatusUnauthorized, "Unauthorised", nil)
+					return
+				}
+
+				// Check if permissions exist for this user
+				if permission != "" {
+					// Since the permission that we require is not on the token, we have to get it from the DB
+					// So we don't go crazy with hits, we will use a memory cache
+					cacheKey := fmt.Sprintf("userCapabilties.%v", userID)
+					cacheItem, found := AuthCache.Get(cacheKey)
+
+					var userCapabilities []string
+					if found {
+						userCapabilities = cacheItem.([]string)
+					} else {
+						// Get from db and store it
+						userCapabilities, err = user.GetCapabilities(userID)
+						// Fixed: cache only on SUCCESS. The previous condition
+						// was inverted (err != nil), so successful lookups were
+						// never cached and failed lookups cached an empty list.
+						if err == nil {
+							AuthCacheSet(cacheKey, userCapabilities)
+						}
+					}
+
+					// Now check that they have the permission in their admin capabilities
+					// full-admin can do anything
+					if !util.SliceContainsItem(userCapabilities, user.CapabilityFullAdmin) && !util.SliceContainsItem(userCapabilities, permission) {
+						// Access denied
+						logger.Debug("User has: %+v but needs %s", userCapabilities, permission)
+						h.ResultErrorJSON(w, r, http.StatusForbidden, "Forbidden", nil)
+						return
+					}
+				}
+
+				// Add claims to context
+				ctx = context.WithValue(ctx, c.UserIDCtxKey, userID)
+			}
+
+			// Token is authenticated, continue as normal
+			next.ServeHTTP(w, r.WithContext(ctx))
+		})
+	}
+}
diff --git a/backend/internal/api/middleware/auth_cache.go b/backend/internal/api/middleware/auth_cache.go
new file mode 100644
index 00000000..66becfe7
--- /dev/null
+++ b/backend/internal/api/middleware/auth_cache.go
@@ -0,0 +1,23 @@
+package middleware
+
+import (
+ "time"
+
+ "npm/internal/logger"
+
+ cache "github.com/patrickmn/go-cache"
+)
+
+// AuthCache is a cache item that stores the Admin API data for each admin that has been requesting endpoints
+var AuthCache *cache.Cache
+
+// AuthCacheInit will create a new Memory Cache.
+// Items default-expire after 1 minute; expired entries are purged every 5.
+// Must be called before any AuthCacheSet / AuthCache.Get usage.
+func AuthCacheInit() {
+	logger.Debug("Creating a new AuthCache")
+	AuthCache = cache.New(1*time.Minute, 5*time.Minute)
+}
+
+// AuthCacheSet will store the item in memory for the expiration time
+// configured in AuthCacheInit (cache.DefaultExpiration)
+func AuthCacheSet(k string, x interface{}) {
+	AuthCache.Set(k, x, cache.DefaultExpiration)
+}
diff --git a/backend/internal/api/middleware/body_context.go b/backend/internal/api/middleware/body_context.go
new file mode 100644
index 00000000..68cfaa08
--- /dev/null
+++ b/backend/internal/api/middleware/body_context.go
@@ -0,0 +1,26 @@
+package middleware
+
+import (
+ "context"
+ "io/ioutil"
+ "net/http"
+
+ c "npm/internal/api/context"
+)
+
+// BodyContext simply adds the body data to a context item so downstream
+// middleware and handlers can read it multiple times (the body stream
+// itself can only be consumed once).
+func BodyContext() func(http.Handler) http.Handler {
+	return func(next http.Handler) http.Handler {
+		return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+			// Grab the Body Data; read errors leave body nil/empty by design
+			// NOTE(review): ioutil.ReadAll is deprecated since Go 1.16 in
+			// favour of io.ReadAll — worth switching if the toolchain allows
+			var body []byte
+			if r.Body != nil {
+				body, _ = ioutil.ReadAll(r.Body)
+			}
+			// Add it to the context
+			ctx := r.Context()
+			ctx = context.WithValue(ctx, c.BodyCtxKey, body)
+			next.ServeHTTP(w, r.WithContext(ctx))
+		})
+	}
+}
diff --git a/backend/internal/api/middleware/cors.go b/backend/internal/api/middleware/cors.go
new file mode 100644
index 00000000..f2de6d72
--- /dev/null
+++ b/backend/internal/api/middleware/cors.go
@@ -0,0 +1,88 @@
+package middleware
+
+import (
+ "fmt"
+ "net/http"
+ "strings"
+
+ "github.com/go-chi/chi"
+)
+
+// methodMap lists every HTTP method probed when discovering which methods a
+// route supports (note: OPTIONS is intentionally absent — it is appended
+// separately by Cors)
+var methodMap = []string{
+	http.MethodGet,
+	http.MethodHead,
+	http.MethodPost,
+	http.MethodPut,
+	http.MethodPatch,
+	http.MethodDelete,
+	http.MethodConnect,
+	http.MethodTrace,
+}
+
+// getRouteMethods returns the subset of methodMap that the router would
+// accept for the given path
+func getRouteMethods(routes chi.Router, path string) []string {
+	var methods []string
+	tctx := chi.NewRouteContext()
+	for _, method := range methodMap {
+		if routes.Match(tctx, method, path) {
+			methods = append(methods, method)
+		}
+	}
+	return methods
+}
+
+// headersAllowedByCORS is the allow-list advertised in
+// Access-Control-Allow-Headers for matched routes
+var headersAllowedByCORS = []string{
+	"Authorization",
+	"Host",
+	"Content-Type",
+	"Connection",
+	"User-Agent",
+	"Cache-Control",
+	"Accept-Encoding",
+	"X-Jumbo-AppKey",
+	"X-Jumbo-SKey",
+	"X-Jumbo-SV",
+	"X-Jumbo-Timestamp",
+	"X-Jumbo-Version",
+	"X-Jumbo-Customer-Id",
+}
+
+// Cors handles cors headers: advertises the methods the matched route
+// actually supports (plus OPTIONS) and the allowed request headers
+func Cors(routes chi.Router) func(http.Handler) http.Handler {
+	return func(next http.Handler) http.Handler {
+		return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+			methods := getRouteMethods(routes, r.URL.Path)
+			if len(methods) == 0 {
+				// no route no cors
+				next.ServeHTTP(w, r)
+				return
+			}
+			methods = append(methods, http.MethodOptions)
+			w.Header().Set("Access-Control-Allow-Methods", strings.Join(methods, ","))
+			w.Header().Set("Access-Control-Allow-Headers",
+				strings.Join(headersAllowedByCORS, ","),
+			)
+			next.ServeHTTP(w, r)
+		})
+	}
+}
+
+// Options handles options requests: answers CORS preflight for known routes
+// with an empty JSON body instead of passing them to the handler chain
+func Options(routes chi.Router) func(http.Handler) http.Handler {
+	return func(next http.Handler) http.Handler {
+		return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+			methods := getRouteMethods(routes, r.URL.Path)
+			if len(methods) == 0 {
+				// no route shouldn't have options
+				next.ServeHTTP(w, r)
+				return
+			}
+			if r.Method == http.MethodOptions {
+				w.Header().Set("Access-Control-Allow-Origin", "*")
+				w.Header().Set("Content-Type", "application/json")
+				fmt.Fprint(w, "{}")
+				return
+			}
+			next.ServeHTTP(w, r)
+		})
+	}
+}
diff --git a/backend/internal/api/middleware/enforce_setup.go b/backend/internal/api/middleware/enforce_setup.go
new file mode 100644
index 00000000..3c75ccd9
--- /dev/null
+++ b/backend/internal/api/middleware/enforce_setup.go
@@ -0,0 +1,28 @@
+package middleware
+
+import (
+ "fmt"
+ "net/http"
+
+ h "npm/internal/api/http"
+ "npm/internal/config"
+)
+
+// EnforceSetup will error if the config setup doesn't match what is required:
+// pass true for routes only valid after setup, false for setup-only routes
+// (e.g. creating the first user). Mismatches get a 403.
+func EnforceSetup(shouldBeSetup bool) func(http.Handler) http.Handler {
+	return func(next http.Handler) http.Handler {
+		return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+			if config.IsSetup != shouldBeSetup {
+				// Word the message for whichever phase we're actually in
+				state := "during"
+				if config.IsSetup {
+					state = "after"
+				}
+				h.ResultErrorJSON(w, r, http.StatusForbidden, fmt.Sprintf("Not available %s setup phase", state), nil)
+				return
+			}
+
+			// All good
+			next.ServeHTTP(w, r)
+		})
+	}
+}
diff --git a/backend/internal/api/middleware/expansion.go b/backend/internal/api/middleware/expansion.go
new file mode 100644
index 00000000..871bd27e
--- /dev/null
+++ b/backend/internal/api/middleware/expansion.go
@@ -0,0 +1,24 @@
+package middleware
+
+import (
+ "context"
+ "net/http"
+ "strings"
+
+ c "npm/internal/api/context"
+)
+
+// Expansion will determine whether the request should have objects expanded
+// with ?expand=a,b,c — the comma-separated names are stored on the request
+// context as a []string for handlers to pass into model Expand() calls
+func Expansion(next http.Handler) http.Handler {
+	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		expandStr := r.URL.Query().Get("expand")
+		if expandStr != "" {
+			ctx := r.Context()
+			ctx = context.WithValue(ctx, c.ExpansionCtxKey, strings.Split(expandStr, ","))
+			next.ServeHTTP(w, r.WithContext(ctx))
+		} else {
+			next.ServeHTTP(w, r)
+		}
+	})
+}
diff --git a/backend/internal/api/middleware/filters.go b/backend/internal/api/middleware/filters.go
new file mode 100644
index 00000000..885defef
--- /dev/null
+++ b/backend/internal/api/middleware/filters.go
@@ -0,0 +1,115 @@
+package middleware
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "net/http"
+
+ c "npm/internal/api/context"
+ h "npm/internal/api/http"
+ "npm/internal/model"
+ "npm/internal/util"
+ "strings"
+
+ "github.com/qri-io/jsonschema"
+)
+
+// Filters will accept a pre-defined schemaData to validate against the GET query params
+// passed in to this endpoint. This will ensure that the filters are not injecting SQL.
+// After we have determined what the Filters are to be, they are saved on the Context
+// to be used later in other endpoints.
+//
+// Query params take the form "field[:modifier]=value" (default modifier is
+// "equals"); "in"/"notin" values are further split on commas.
+func Filters(schemaData string) func(http.Handler) http.Handler {
+	// Params with routing/paging meaning are never treated as filters
+	reservedFilterKeys := []string{
+		"limit",
+		"offset",
+		"sort",
+		"order",
+		"expand",
+		"t", // This is used as a timestamp parameter in some clients and can be ignored
+	}
+
+	return func(next http.Handler) http.Handler {
+		return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+			var filters []model.Filter
+			for key, val := range r.URL.Query() {
+				key = strings.ToLower(key)
+
+				// Split out the modifier from the field name and set a default modifier
+				var keyParts []string
+				keyParts = strings.Split(key, ":")
+				if len(keyParts) == 1 {
+					// Default modifier
+					keyParts = append(keyParts, "equals")
+				}
+
+				// Only use this filter if it's not a reserved get param
+				if !util.SliceContainsItem(reservedFilterKeys, keyParts[0]) {
+					for _, valItem := range val {
+						// Check that the val isn't empty
+						if len(strings.TrimSpace(valItem)) > 0 {
+							valSlice := []string{valItem}
+							// Multi-value modifiers carry comma-separated values
+							if keyParts[1] == "in" || keyParts[1] == "notin" {
+								valSlice = strings.Split(valItem, ",")
+							}
+
+							filters = append(filters, model.Filter{
+								Field:    keyParts[0],
+								Modifier: keyParts[1],
+								Value:    valSlice,
+							})
+						}
+					}
+				}
+			}
+
+			// Only validate schema if there are filters to validate
+			if len(filters) > 0 {
+				ctx := r.Context()
+
+				// Marshal the Filters in to a JSON string so that the Schema Validation works against it
+				filterData, marshalErr := json.MarshalIndent(filters, "", "  ")
+				if marshalErr != nil {
+					h.ResultErrorJSON(w, r, http.StatusInternalServerError, fmt.Sprintf("Schema Fatal: %v", marshalErr), nil)
+					return
+				}
+
+				// Create root schema
+				rs := &jsonschema.Schema{}
+				if err := json.Unmarshal([]byte(schemaData), rs); err != nil {
+					h.ResultErrorJSON(w, r, http.StatusInternalServerError, fmt.Sprintf("Schema Fatal: %v", err), nil)
+					return
+				}
+
+				// Validate it
+				errors, jsonError := rs.ValidateBytes(ctx, filterData)
+				if jsonError != nil {
+					h.ResultErrorJSON(w, r, http.StatusBadRequest, jsonError.Error(), nil)
+					return
+				}
+
+				if len(errors) > 0 {
+					h.ResultErrorJSON(w, r, http.StatusBadRequest, "Invalid Filters", errors)
+					return
+				}
+
+				ctx = context.WithValue(ctx, c.FiltersCtxKey, filters)
+				next.ServeHTTP(w, r.WithContext(ctx))
+
+			} else {
+				next.ServeHTTP(w, r)
+			}
+		})
+	}
+}
+
+// GetFiltersFromContext returns the Filters stored by the Filters middleware,
+// or an empty slice when none were set for this request
+func GetFiltersFromContext(r *http.Request) []model.Filter {
+	filters, ok := r.Context().Value(c.FiltersCtxKey).([]model.Filter)
+	if !ok {
+		// the assertion failed
+		var emptyFilters []model.Filter
+		return emptyFilters
+	}
+	return filters
+}
diff --git a/backend/internal/api/middleware/pretty_print.go b/backend/internal/api/middleware/pretty_print.go
new file mode 100644
index 00000000..270d2a24
--- /dev/null
+++ b/backend/internal/api/middleware/pretty_print.go
@@ -0,0 +1,23 @@
+package middleware
+
+import (
+ "context"
+ "net/http"
+
+ c "npm/internal/api/context"
+)
+
+// PrettyPrint will determine whether the request should be pretty printed in output
+// with ?pretty=1 or ?pretty=true
+func PrettyPrint(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ prettyStr := r.URL.Query().Get("pretty")
+ if prettyStr == "1" || prettyStr == "true" {
+ ctx := r.Context()
+ ctx = context.WithValue(ctx, c.PrettyPrintCtxKey, true)
+ next.ServeHTTP(w, r.WithContext(ctx))
+ } else {
+ next.ServeHTTP(w, r)
+ }
+ })
+}
diff --git a/backend/internal/api/middleware/schema.go b/backend/internal/api/middleware/schema.go
new file mode 100644
index 00000000..3254e042
--- /dev/null
+++ b/backend/internal/api/middleware/schema.go
@@ -0,0 +1,55 @@
+package middleware
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "net/http"
+
+ c "npm/internal/api/context"
+ h "npm/internal/api/http"
+
+ "github.com/qri-io/jsonschema"
+)
+
+// CheckRequestSchema checks the payload against schema
+func CheckRequestSchema(ctx context.Context, schemaData string, payload []byte) ([]jsonschema.KeyError, error) {
+ // Create root schema
+ rs := &jsonschema.Schema{}
+ if err := json.Unmarshal([]byte(schemaData), rs); err != nil {
+ return nil, fmt.Errorf("Schema Fatal: %v", err)
+ }
+
+ // Validate it
+ schemaErrors, jsonError := rs.ValidateBytes(ctx, payload)
+ if jsonError != nil {
+ return nil, jsonError
+ }
+
+ return schemaErrors, nil
+}
+
+// EnforceRequestSchema accepts a schema and validates the request body against it
+func EnforceRequestSchema(schemaData string) func(http.Handler) http.Handler {
+ return func(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+
+ // Get content from context
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+
+ schemaErrors, err := CheckRequestSchema(r.Context(), schemaData, bodyBytes)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusInternalServerError, err.Error(), nil)
+ return
+ }
+
+ if len(schemaErrors) > 0 {
+ h.ResultSchemaErrorJSON(w, r, schemaErrors)
+ return
+ }
+
+ // All good
+ next.ServeHTTP(w, r)
+ })
+ }
+}
diff --git a/backend/internal/api/router.go b/backend/internal/api/router.go
new file mode 100644
index 00000000..27bc32bd
--- /dev/null
+++ b/backend/internal/api/router.go
@@ -0,0 +1,198 @@
+package api
+
+import (
+ "net/http"
+ "time"
+
+ "npm/internal/api/handler"
+ "npm/internal/api/middleware"
+ "npm/internal/api/schema"
+ "npm/internal/config"
+ "npm/internal/entity/certificate"
+ "npm/internal/entity/certificateauthority"
+ "npm/internal/entity/dnsprovider"
+ "npm/internal/entity/host"
+ "npm/internal/entity/hosttemplate"
+ "npm/internal/entity/setting"
+ "npm/internal/entity/stream"
+ "npm/internal/entity/user"
+ "npm/internal/logger"
+
+ "github.com/go-chi/chi"
+ chiMiddleware "github.com/go-chi/chi/middleware"
+ "github.com/go-chi/cors"
+)
+
+// NewRouter returns a new router object
+func NewRouter() http.Handler {
+ // Cors
+ cors := cors.New(cors.Options{
+ AllowedOrigins: []string{"*"},
+ AllowedMethods: []string{"GET", "POST", "PUT", "DELETE", "OPTIONS"},
+ AllowedHeaders: []string{"Accept", "Authorization", "Content-Type", "X-Requested-With"},
+ AllowCredentials: true,
+ MaxAge: 300,
+ })
+
+ r := chi.NewRouter()
+ r.Use(
+ middleware.AccessControl,
+ middleware.Cors(r),
+ middleware.Options(r),
+ cors.Handler,
+ chiMiddleware.RealIP,
+ chiMiddleware.Recoverer,
+ chiMiddleware.Throttle(5),
+ chiMiddleware.Timeout(30*time.Second),
+ middleware.PrettyPrint,
+ middleware.Expansion,
+ middleware.DecodeAuth(),
+ middleware.BodyContext(),
+ )
+
+ return applyRoutes(r)
+}
+
+// applyRoutes is where the magic happens
+func applyRoutes(r chi.Router) chi.Router {
+ middleware.AuthCacheInit()
+ r.NotFound(handler.NotFound())
+ r.MethodNotAllowed(handler.NotAllowed())
+
+ // API
+ r.Route("/api", func(r chi.Router) {
+ r.Get("/", handler.Health())
+ r.Get("/schema", handler.Schema())
+ r.With(middleware.EnforceSetup(true), middleware.Enforce("")).
+ Get("/config", handler.Config())
+
+ // Tokens
+ r.With(middleware.EnforceSetup(true)).Route("/tokens", func(r chi.Router) {
+ r.With(middleware.EnforceRequestSchema(schema.GetToken())).
+ Post("/", handler.NewToken())
+ r.With(middleware.Enforce("")).
+ Get("/", handler.RefreshToken())
+ })
+
+ // Users
+ r.Route("/users", func(r chi.Router) {
+ r.With(middleware.EnforceSetup(true), middleware.Enforce("")).Get("/{userID:(?:me)}", handler.GetUser())
+ r.With(middleware.EnforceSetup(true), middleware.Enforce(user.CapabilityUsersManage)).Get("/{userID:(?:[0-9]+)}", handler.GetUser())
+
+ r.With(middleware.EnforceSetup(true), middleware.Enforce(user.CapabilityUsersManage)).Delete("/{userID:(?:[0-9]+|me)}", handler.DeleteUser())
+ r.With(middleware.EnforceSetup(true), middleware.Enforce(user.CapabilityUsersManage)).With(middleware.Filters(user.GetFilterSchema())).
+ Get("/", handler.GetUsers())
+ r.With(middleware.EnforceRequestSchema(schema.CreateUser()), middleware.Enforce(user.CapabilityUsersManage)).
+ Post("/", handler.CreateUser())
+
+ r.With(middleware.EnforceSetup(true)).With(middleware.EnforceRequestSchema(schema.UpdateUser()), middleware.Enforce("")).
+ Put("/{userID:(?:me)}", handler.UpdateUser())
+ r.With(middleware.EnforceSetup(true)).With(middleware.EnforceRequestSchema(schema.UpdateUser()), middleware.Enforce(user.CapabilityUsersManage)).
+ Put("/{userID:(?:[0-9]+)}", handler.UpdateUser())
+
+ // Auth
+ r.With(middleware.EnforceSetup(true)).With(middleware.EnforceRequestSchema(schema.SetAuth()), middleware.Enforce("")).
+ Post("/{userID:(?:me)}/auth", handler.SetAuth())
+ r.With(middleware.EnforceSetup(true)).With(middleware.EnforceRequestSchema(schema.SetAuth()), middleware.Enforce(user.CapabilityUsersManage)).
+ Post("/{userID:(?:[0-9]+)}/auth", handler.SetAuth())
+ })
+
+ // Only available in debug mode: delete users without auth
+ if config.GetLogLevel() == logger.DebugLevel {
+ r.Delete("/users", handler.DeleteUsers())
+ }
+
+ // Settings
+ r.With(middleware.EnforceSetup(true), middleware.Enforce(user.CapabilitySettingsManage)).Route("/settings", func(r chi.Router) {
+ r.With(middleware.Filters(setting.GetFilterSchema())).
+ Get("/", handler.GetSettings())
+ r.Get("/{name}", handler.GetSetting())
+ r.With(middleware.EnforceRequestSchema(schema.CreateSetting())).
+ Post("/", handler.CreateSetting())
+ r.With(middleware.EnforceRequestSchema(schema.UpdateSetting())).
+ Put("/{name}", handler.UpdateSetting())
+ })
+
+ // DNS Providers
+ r.With(middleware.EnforceSetup(true)).Route("/dns-providers", func(r chi.Router) {
+ r.With(middleware.Filters(dnsprovider.GetFilterSchema()), middleware.Enforce(user.CapabilityDNSProvidersView)).
+ Get("/", handler.GetDNSProviders())
+ r.With(middleware.Enforce(user.CapabilityDNSProvidersView)).Get("/{providerID:[0-9]+}", handler.GetDNSProvider())
+ r.With(middleware.Enforce(user.CapabilityDNSProvidersManage)).Delete("/{providerID:[0-9]+}", handler.DeleteDNSProvider())
+ r.With(middleware.Enforce(user.CapabilityDNSProvidersManage)).With(middleware.EnforceRequestSchema(schema.CreateDNSProvider())).
+ Post("/", handler.CreateDNSProvider())
+ r.With(middleware.Enforce(user.CapabilityDNSProvidersManage)).With(middleware.EnforceRequestSchema(schema.UpdateDNSProvider())).
+ Put("/{providerID:[0-9]+}", handler.UpdateDNSProvider())
+
+ r.With(middleware.EnforceSetup(true), middleware.Enforce(user.CapabilityDNSProvidersView)).Route("/acmesh", func(r chi.Router) {
+ r.Get("/{acmeshID:[a-z0-9_]+}", handler.GetAcmeshProvider())
+ r.Get("/", handler.GetAcmeshProviders())
+ })
+ })
+
+ // Certificate Authorities
+ r.With(middleware.EnforceSetup(true)).Route("/certificate-authorities", func(r chi.Router) {
+ r.With(middleware.Enforce(user.CapabilityCertificateAuthoritiesView), middleware.Filters(certificateauthority.GetFilterSchema())).
+ Get("/", handler.GetCertificateAuthorities())
+ r.With(middleware.Enforce(user.CapabilityCertificateAuthoritiesView)).Get("/{caID:[0-9]+}", handler.GetCertificateAuthority())
+ r.With(middleware.Enforce(user.CapabilityCertificateAuthoritiesManage)).Delete("/{caID:[0-9]+}", handler.DeleteCertificateAuthority())
+ r.With(middleware.Enforce(user.CapabilityCertificateAuthoritiesManage)).With(middleware.EnforceRequestSchema(schema.CreateCertificateAuthority())).
+ Post("/", handler.CreateCertificateAuthority())
+ r.With(middleware.Enforce(user.CapabilityCertificateAuthoritiesManage)).With(middleware.EnforceRequestSchema(schema.UpdateCertificateAuthority())).
+ Put("/{caID:[0-9]+}", handler.UpdateCertificateAuthority())
+ })
+
+ // Certificates
+ r.With(middleware.EnforceSetup(true)).Route("/certificates", func(r chi.Router) {
+ r.With(middleware.Enforce(user.CapabilityCertificatesView), middleware.Filters(certificate.GetFilterSchema())).
+ Get("/", handler.GetCertificates())
+ r.With(middleware.Enforce(user.CapabilityCertificatesView)).Get("/{certificateID:[0-9]+}", handler.GetCertificate())
+ r.With(middleware.Enforce(user.CapabilityCertificatesManage)).Delete("/{certificateID:[0-9]+}", handler.DeleteCertificate())
+ r.With(middleware.Enforce(user.CapabilityCertificatesManage)).With(middleware.EnforceRequestSchema(schema.CreateCertificate())).
+ Post("/", handler.CreateCertificate())
+ /*
+ r.With(middleware.EnforceRequestSchema(schema.UpdateCertificate())).
+ Put("/{certificateID:[0-9]+}", handler.UpdateCertificate())
+ */
+ r.With(middleware.Enforce(user.CapabilityCertificatesManage)).Put("/{certificateID:[0-9]+}", handler.UpdateCertificate())
+ })
+
+ // Hosts
+ r.With(middleware.EnforceSetup(true)).Route("/hosts", func(r chi.Router) {
+ r.With(middleware.Enforce(user.CapabilityHostsView), middleware.Filters(host.GetFilterSchema())).
+ Get("/", handler.GetHosts())
+ r.With(middleware.Enforce(user.CapabilityHostsView)).Get("/{hostID:[0-9]+}", handler.GetHost())
+ r.With(middleware.Enforce(user.CapabilityHostsManage)).Delete("/{hostID:[0-9]+}", handler.DeleteHost())
+ r.With(middleware.Enforce(user.CapabilityHostsManage)).With(middleware.EnforceRequestSchema(schema.CreateHost())).
+ Post("/", handler.CreateHost())
+ r.With(middleware.Enforce(user.CapabilityHostsManage)).With(middleware.EnforceRequestSchema(schema.UpdateHost())).
+ Put("/{hostID:[0-9]+}", handler.UpdateHost())
+ })
+
+ // Host Templates
+ r.With(middleware.EnforceSetup(true)).Route("/host-templates", func(r chi.Router) {
+ r.With(middleware.Enforce(user.CapabilityHostTemplatesView), middleware.Filters(hosttemplate.GetFilterSchema())).
+ Get("/", handler.GetHostTemplates())
+		r.With(middleware.Enforce(user.CapabilityHostTemplatesView)).Get("/{templateID:[0-9]+}", handler.GetHostTemplate())
+ r.With(middleware.Enforce(user.CapabilityHostTemplatesManage)).Delete("/{templateID:[0-9]+}", handler.DeleteHostTemplate())
+ r.With(middleware.Enforce(user.CapabilityHostTemplatesManage)).With(middleware.EnforceRequestSchema(schema.CreateHostTemplate())).
+ Post("/", handler.CreateHostTemplate())
+ r.With(middleware.Enforce(user.CapabilityHostTemplatesManage)).With(middleware.EnforceRequestSchema(schema.UpdateHostTemplate())).
+ Put("/{templateID:[0-9]+}", handler.UpdateHostTemplate())
+ })
+
+ // Streams
+ r.With(middleware.EnforceSetup(true)).Route("/streams", func(r chi.Router) {
+ r.With(middleware.Enforce(user.CapabilityStreamsView), middleware.Filters(stream.GetFilterSchema())).
+ Get("/", handler.GetStreams())
+ r.With(middleware.Enforce(user.CapabilityStreamsView)).Get("/{hostID:[0-9]+}", handler.GetStream())
+ r.With(middleware.Enforce(user.CapabilityStreamsManage)).Delete("/{hostID:[0-9]+}", handler.DeleteStream())
+ r.With(middleware.Enforce(user.CapabilityStreamsManage)).With(middleware.EnforceRequestSchema(schema.CreateStream())).
+ Post("/", handler.CreateStream())
+ r.With(middleware.Enforce(user.CapabilityStreamsManage)).With(middleware.EnforceRequestSchema(schema.UpdateStream())).
+ Put("/{hostID:[0-9]+}", handler.UpdateStream())
+ })
+ })
+
+ return r
+}
diff --git a/backend/internal/api/router_test.go b/backend/internal/api/router_test.go
new file mode 100644
index 00000000..78ec784f
--- /dev/null
+++ b/backend/internal/api/router_test.go
@@ -0,0 +1,44 @@
+package api
+
+import (
+ "net/http"
+ "net/http/httptest"
+ "os"
+ "testing"
+
+ "npm/internal/config"
+
+ "github.com/stretchr/testify/assert"
+)
+
+var (
+ r = NewRouter()
+ version = "3.0.0"
+ commit = "abcdefgh"
+ sentryDSN = ""
+)
+
+// Tear up/down
+func TestMain(m *testing.M) {
+ config.Init(&version, &commit, &sentryDSN)
+ code := m.Run()
+ os.Exit(code)
+}
+
+func TestGetHealthz(t *testing.T) {
+ respRec := httptest.NewRecorder()
+ req, _ := http.NewRequest("GET", "/api/", nil)
+
+ r.ServeHTTP(respRec, req)
+ assert.Equal(t, http.StatusOK, respRec.Code)
+ assert.Contains(t, respRec.Body.String(), "healthy")
+}
+
+func TestNonExistent(t *testing.T) {
+ respRec := httptest.NewRecorder()
+ req, _ := http.NewRequest("GET", "/non-existent-endpoint", nil)
+
+ r.ServeHTTP(respRec, req)
+ assert.Equal(t, http.StatusNotFound, respRec.Code)
+	assert.Equal(t, `{"result":null,"error":{"code":404,"message":"Not found"}}`, respRec.Body.String(), "404 Message should match")
+}
diff --git a/backend/internal/api/schema/certificates.go b/backend/internal/api/schema/certificates.go
new file mode 100644
index 00000000..1326f6f1
--- /dev/null
+++ b/backend/internal/api/schema/certificates.go
@@ -0,0 +1,209 @@
+package schema
+
+import (
+ "fmt"
+
+ "npm/internal/entity/certificate"
+)
+
+// This validation is strictly for Custom certificates
+// and the combination of values that must be defined
+func createCertificateCustom() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "type",
+ "name",
+ "domain_names"
+ ],
+ "properties": {
+ "type": %s,
+ "name": %s,
+ "domain_names": %s,
+ "meta": {
+ "type": "object"
+ }
+ }
+ }`, strictString("custom"), stringMinMax(1, 100), domainNames())
+}
+
+// This validation is strictly for HTTP certificates
+// and the combination of values that must be defined
+func createCertificateHTTP() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "type",
+ "certificate_authority_id",
+ "name",
+ "domain_names"
+ ],
+ "properties": {
+ "type": %s,
+ "certificate_authority_id": %s,
+ "name": %s,
+ "domain_names": %s,
+ "meta": {
+ "type": "object"
+ },
+ "is_ecc": {
+ "type": "integer",
+ "minimum": 0,
+ "maximum": 1
+ }
+ }
+ }`, strictString("http"), intMinOne, stringMinMax(1, 100), domainNames())
+}
+
+// This validation is strictly for DNS certificates
+// and the combination of values that must be defined
+func createCertificateDNS() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "type",
+ "certificate_authority_id",
+ "dns_provider_id",
+ "name",
+ "domain_names"
+ ],
+ "properties": {
+ "type": %s,
+ "certificate_authority_id": %s,
+ "dns_provider_id": %s,
+ "name": %s,
+ "domain_names": %s,
+ "meta": {
+ "type": "object"
+ },
+ "is_ecc": {
+ "type": "integer",
+ "minimum": 0,
+ "maximum": 1
+ }
+ }
+ }`, strictString("dns"), intMinOne, intMinOne, stringMinMax(1, 100), domainNames())
+}
+
+// This validation is strictly for MKCERT certificates
+// and the combination of values that must be defined
+func createCertificateMkcert() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "type",
+ "name",
+ "domain_names"
+ ],
+ "properties": {
+ "type": %s,
+ "name": %s,
+ "domain_names": %s,
+ "meta": {
+ "type": "object"
+ }
+ }
+ }`, strictString("mkcert"), stringMinMax(1, 100), domainNames())
+}
+
+func updateCertificateHTTP() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "minProperties": 1,
+ "properties": {
+ "certificate_authority_id": %s,
+ "name": %s,
+ "domain_names": %s,
+ "meta": {
+ "type": "object"
+ }
+ }
+ }`, intMinOne, stringMinMax(1, 100), domainNames())
+}
+
+func updateCertificateDNS() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "minProperties": 1,
+ "properties": {
+ "certificate_authority_id": %s,
+ "dns_provider_id": %s,
+ "name": %s,
+ "domain_names": %s,
+ "meta": {
+ "type": "object"
+ }
+ }
+ }`, intMinOne, intMinOne, stringMinMax(1, 100), domainNames())
+}
+
+func updateCertificateCustom() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "minProperties": 1,
+ "properties": {
+ "name": %s,
+ "domain_names": %s,
+ "meta": {
+ "type": "object"
+ }
+ }
+ }`, stringMinMax(1, 100), domainNames())
+}
+
+func updateCertificateMkcert() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "minProperties": 1,
+ "properties": {
+ "name": %s,
+ "domain_names": %s,
+ "meta": {
+ "type": "object"
+ }
+ }
+ }`, stringMinMax(1, 100), domainNames())
+}
+
+// CreateCertificate is the schema for incoming data validation
+func CreateCertificate() string {
+ return fmt.Sprintf(`
+ {
+ "oneOf": [%s, %s, %s, %s]
+ }`, createCertificateHTTP(), createCertificateDNS(), createCertificateCustom(), createCertificateMkcert())
+}
+
+// UpdateCertificate is the schema for incoming data validation
+func UpdateCertificate(certificateType string) string {
+ switch certificateType {
+ case certificate.TypeHTTP:
+ return updateCertificateHTTP()
+ case certificate.TypeDNS:
+ return updateCertificateDNS()
+ case certificate.TypeCustom:
+ return updateCertificateCustom()
+ case certificate.TypeMkcert:
+ return updateCertificateMkcert()
+ default:
+ return fmt.Sprintf(`
+ {
+ "oneOf": [%s, %s, %s, %s]
+ }`, updateCertificateHTTP(), updateCertificateDNS(), updateCertificateCustom(), updateCertificateMkcert())
+ }
+}
diff --git a/backend/internal/api/schema/common.go b/backend/internal/api/schema/common.go
new file mode 100644
index 00000000..9313bcb6
--- /dev/null
+++ b/backend/internal/api/schema/common.go
@@ -0,0 +1,70 @@
+package schema
+
+import "fmt"
+
+func strictString(value string) string {
+ return fmt.Sprintf(`{
+ "type": "string",
+ "pattern": "^%s$"
+ }`, value)
+}
+
+const intMinOne = `
+{
+ "type": "integer",
+ "minimum": 1
+}
+`
+
+const boolean = `
+{
+ "type": "boolean"
+}
+`
+
+func stringMinMax(minLength, maxLength int) string {
+ return fmt.Sprintf(`{
+ "type": "string",
+ "minLength": %d,
+ "maxLength": %d
+ }`, minLength, maxLength)
+}
+
+func capabilties() string {
+ return `{
+ "type": "array",
+ "minItems": 1,
+ "items": {
+ "type": "string",
+ "minLength": 1
+ }
+ }`
+}
+
+func domainNames() string {
+ return fmt.Sprintf(`
+ {
+ "type": "array",
+ "minItems": 1,
+ "items": %s
+ }`, stringMinMax(4, 255))
+}
+
+const anyType = `
+{
+ "anyOf": [
+ {
+ "type": "array"
+ },
+ {
+ "type": "boolean"
+ },
+ {
+ "type": "object"
+ },
+ {
+ "type": "integer"
+ }
+ ]
+}
+`
diff --git a/backend/internal/api/schema/create_certificate_authority.go b/backend/internal/api/schema/create_certificate_authority.go
new file mode 100644
index 00000000..113c2ce3
--- /dev/null
+++ b/backend/internal/api/schema/create_certificate_authority.go
@@ -0,0 +1,25 @@
+package schema
+
+import "fmt"
+
+// CreateCertificateAuthority is the schema for incoming data validation
+func CreateCertificateAuthority() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "name",
+ "acmesh_server",
+ "max_domains"
+ ],
+ "properties": {
+ "name": %s,
+ "acmesh_server": %s,
+ "max_domains": %s,
+ "ca_bundle": %s,
+ "is_wildcard_supported": %s
+ }
+ }
+ `, stringMinMax(1, 100), stringMinMax(2, 255), intMinOne, stringMinMax(2, 255), boolean)
+}
diff --git a/backend/internal/api/schema/create_dns_provider.go b/backend/internal/api/schema/create_dns_provider.go
new file mode 100644
index 00000000..03b2000d
--- /dev/null
+++ b/backend/internal/api/schema/create_dns_provider.go
@@ -0,0 +1,51 @@
+package schema
+
+import (
+ "fmt"
+ "strings"
+
+ "npm/internal/dnsproviders"
+ "npm/internal/util"
+)
+
+// CreateDNSProvider is the schema for incoming data validation
+func CreateDNSProvider() string {
+ allProviders := dnsproviders.GetAll()
+ fmtStr := fmt.Sprintf(`{"oneOf": [%s]}`, strings.TrimRight(strings.Repeat("\n%s,", len(allProviders)), ","))
+
+ allSchemasWrapped := make([]string, 0)
+ for providerName, provider := range allProviders {
+ allSchemasWrapped = append(allSchemasWrapped, createDNSProviderType(providerName, provider.Schema))
+ }
+
+ return fmt.Sprintf(fmtStr, util.ConvertStringSliceToInterface(allSchemasWrapped)...)
+}
+
+func createDNSProviderType(name, metaSchema string) string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "acmesh_name",
+ "name",
+ "meta"
+ ],
+ "properties": {
+ "acmesh_name": {
+ "type": "string",
+ "pattern": "^%s$"
+ },
+ "name": {
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 100
+ },
+ "dns_sleep": {
+ "type": "integer"
+ },
+ "meta": %s
+ }
+ }
+ `, name, metaSchema)
+}
diff --git a/backend/internal/api/schema/create_host.go b/backend/internal/api/schema/create_host.go
new file mode 100644
index 00000000..0c448427
--- /dev/null
+++ b/backend/internal/api/schema/create_host.go
@@ -0,0 +1,80 @@
+package schema
+
+import "fmt"
+
+// CreateHost is the schema for incoming data validation
+// This schema supports 3 possible types with different data combinations:
+// - proxy
+// - redirection
+// - dead
+func CreateHost() string {
+ return fmt.Sprintf(`
+ {
+ "oneOf": [
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "type",
+ "domain_names",
+ "host_template_id"
+ ],
+ "properties": {
+ "type": {
+ "type": "string",
+ "pattern": "^proxy$"
+ },
+ "host_template_id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "listen_interface": %s,
+ "domain_names": %s,
+ "upstream_id": {
+ "type": "integer"
+ },
+ "certificate_id": {
+ "type": "integer"
+ },
+ "access_list_id": {
+ "type": "integer"
+ },
+ "ssl_forced": {
+ "type": "boolean"
+ },
+ "caching_enabled": {
+ "type": "boolean"
+ },
+ "block_exploits": {
+ "type": "boolean"
+ },
+ "allow_websocket_upgrade": {
+ "type": "boolean"
+ },
+ "http2_support": {
+ "type": "boolean"
+ },
+ "hsts_enabled": {
+ "type": "boolean"
+ },
+ "hsts_subdomains": {
+ "type": "boolean"
+ },
+ "paths": {
+ "type": "string"
+ },
+ "upstream_options": {
+ "type": "string"
+ },
+ "advanced_config": {
+ "type": "string"
+ },
+ "is_disabled": {
+ "type": "boolean"
+ }
+ }
+ }
+ ]
+ }
+ `, stringMinMax(0, 255), domainNames())
+}
diff --git a/backend/internal/api/schema/create_host_template.go b/backend/internal/api/schema/create_host_template.go
new file mode 100644
index 00000000..ebb0cf42
--- /dev/null
+++ b/backend/internal/api/schema/create_host_template.go
@@ -0,0 +1,30 @@
+package schema
+
+// CreateHostTemplate is the schema for incoming data validation
+func CreateHostTemplate() string {
+ return `
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "name",
+ "host_type",
+ "template"
+ ],
+ "properties": {
+ "name": {
+ "type": "string",
+ "minLength": 1
+ },
+ "host_type": {
+ "type": "string",
+        "pattern": "^(proxy|redirect|dead|stream)$"
+ },
+ "template": {
+ "type": "string",
+ "minLength": 20
+ }
+ }
+ }
+ `
+}
diff --git a/backend/internal/api/schema/create_setting.go b/backend/internal/api/schema/create_setting.go
new file mode 100644
index 00000000..dca3869c
--- /dev/null
+++ b/backend/internal/api/schema/create_setting.go
@@ -0,0 +1,21 @@
+package schema
+
+import "fmt"
+
+// CreateSetting is the schema for incoming data validation
+func CreateSetting() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "name",
+ "value"
+ ],
+ "properties": {
+ "name": %s,
+ "value": %s
+ }
+ }
+ `, stringMinMax(2, 100), anyType)
+}
diff --git a/backend/internal/api/schema/create_stream.go b/backend/internal/api/schema/create_stream.go
new file mode 100644
index 00000000..792b8818
--- /dev/null
+++ b/backend/internal/api/schema/create_stream.go
@@ -0,0 +1,27 @@
+package schema
+
+import "fmt"
+
+// CreateStream is the schema for incoming data validation
+func CreateStream() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "provider",
+ "name",
+ "domain_names"
+ ],
+ "properties": {
+ "provider": %s,
+ "name": %s,
+ "domain_names": %s,
+ "expires_on": %s,
+ "meta": {
+ "type": "object"
+ }
+ }
+ }
+ `, stringMinMax(2, 100), stringMinMax(1, 100), domainNames(), intMinOne)
+}
diff --git a/backend/internal/api/schema/create_user.go b/backend/internal/api/schema/create_user.go
new file mode 100644
index 00000000..ee17617a
--- /dev/null
+++ b/backend/internal/api/schema/create_user.go
@@ -0,0 +1,42 @@
+package schema
+
+import "fmt"
+
+// CreateUser is the schema for incoming data validation
+func CreateUser() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "name",
+ "email",
+ "is_disabled",
+ "capabilities"
+ ],
+ "properties": {
+ "name": %s,
+ "nickname": %s,
+ "email": %s,
+ "is_disabled": {
+ "type": "boolean"
+ },
+ "auth": {
+ "type": "object",
+ "required": [
+ "type",
+ "secret"
+ ],
+ "properties": {
+ "type": {
+ "type": "string",
+ "pattern": "^password$"
+ },
+ "secret": %s
+ }
+ },
+ "capabilities": %s
+ }
+ }
+ `, stringMinMax(2, 100), stringMinMax(2, 100), stringMinMax(5, 150), stringMinMax(8, 255), capabilties())
+}
diff --git a/backend/internal/api/schema/get_token.go b/backend/internal/api/schema/get_token.go
new file mode 100644
index 00000000..fe1a9502
--- /dev/null
+++ b/backend/internal/api/schema/get_token.go
@@ -0,0 +1,28 @@
+package schema
+
+import "fmt"
+
+// GetToken is the schema for incoming data validation
+// nolint: gosec
+func GetToken() string {
+ stdField := stringMinMax(1, 255)
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "type",
+ "identity",
+ "secret"
+ ],
+ "properties": {
+ "type": {
+ "type": "string",
+ "pattern": "^password$"
+ },
+ "identity": %s,
+ "secret": %s
+ }
+ }
+ `, stdField, stdField)
+}
diff --git a/backend/internal/api/schema/set_auth.go b/backend/internal/api/schema/set_auth.go
new file mode 100644
index 00000000..f2df26aa
--- /dev/null
+++ b/backend/internal/api/schema/set_auth.go
@@ -0,0 +1,25 @@
+package schema
+
+import "fmt"
+
+// SetAuth is the schema for incoming data validation
+func SetAuth() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "type",
+ "secret"
+ ],
+ "properties": {
+ "type": {
+ "type": "string",
+ "pattern": "^password$"
+ },
+ "secret": %s,
+ "current_secret": %s
+ }
+ }
+	`, stringMinMax(8, 255), stringMinMax(8, 255))
+}
diff --git a/backend/internal/api/schema/update_certificate_authority.go b/backend/internal/api/schema/update_certificate_authority.go
new file mode 100644
index 00000000..e53db5c2
--- /dev/null
+++ b/backend/internal/api/schema/update_certificate_authority.go
@@ -0,0 +1,21 @@
+package schema
+
+import "fmt"
+
+// UpdateCertificateAuthority is the schema for incoming data validation
+func UpdateCertificateAuthority() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "minProperties": 1,
+ "properties": {
+ "name": %s,
+ "acmesh_server": %s,
+ "max_domains": %s,
+ "ca_bundle": %s,
+ "is_wildcard_supported": %s
+ }
+ }
+ `, stringMinMax(1, 100), stringMinMax(2, 255), intMinOne, stringMinMax(2, 255), boolean)
+}
diff --git a/backend/internal/api/schema/update_dns_provider.go b/backend/internal/api/schema/update_dns_provider.go
new file mode 100644
index 00000000..b852c88a
--- /dev/null
+++ b/backend/internal/api/schema/update_dns_provider.go
@@ -0,0 +1,20 @@
+package schema
+
+import "fmt"
+
+// UpdateDNSProvider is the schema for incoming data validation
+func UpdateDNSProvider() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "minProperties": 1,
+ "properties": {
+ "name": %s,
+ "meta": {
+ "type": "object"
+ }
+ }
+ }
+ `, stringMinMax(1, 100))
+}
diff --git a/backend/internal/api/schema/update_host.go b/backend/internal/api/schema/update_host.go
new file mode 100644
index 00000000..78d1322f
--- /dev/null
+++ b/backend/internal/api/schema/update_host.go
@@ -0,0 +1,27 @@
+package schema
+
+import "fmt"
+
+// UpdateHost is the schema for incoming data validation
+func UpdateHost() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "minProperties": 1,
+ "properties": {
+ "host_template_id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "provider": %s,
+ "name": %s,
+ "domain_names": %s,
+ "expires_on": %s,
+ "meta": {
+ "type": "object"
+ }
+ }
+ }
+ `, stringMinMax(2, 100), stringMinMax(1, 100), domainNames(), intMinOne)
+}
diff --git a/backend/internal/api/schema/update_host_template.go b/backend/internal/api/schema/update_host_template.go
new file mode 100644
index 00000000..d51cf342
--- /dev/null
+++ b/backend/internal/api/schema/update_host_template.go
@@ -0,0 +1,22 @@
+package schema
+
+// UpdateHostTemplate is the schema for incoming data validation
+func UpdateHostTemplate() string {
+ return `
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "minProperties": 1,
+ "properties": {
+ "name": {
+ "type": "string",
+ "minLength": 1
+ },
+ "template": {
+ "type": "string",
+ "minLength": 20
+ }
+ }
+ }
+ `
+}
diff --git a/backend/internal/api/schema/update_setting.go b/backend/internal/api/schema/update_setting.go
new file mode 100644
index 00000000..e9af221b
--- /dev/null
+++ b/backend/internal/api/schema/update_setting.go
@@ -0,0 +1,17 @@
+package schema
+
+import "fmt"
+
+// UpdateSetting is the schema for incoming data validation
+func UpdateSetting() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "minProperties": 1,
+ "properties": {
+ "value": %s
+ }
+ }
+ `, anyType)
+}
diff --git a/backend/internal/api/schema/update_stream.go b/backend/internal/api/schema/update_stream.go
new file mode 100644
index 00000000..51d85ff6
--- /dev/null
+++ b/backend/internal/api/schema/update_stream.go
@@ -0,0 +1,23 @@
+package schema
+
+import "fmt"
+
+// UpdateStream is the schema for incoming data validation
+func UpdateStream() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "minProperties": 1,
+ "properties": {
+ "provider": %s,
+ "name": %s,
+ "domain_names": %s,
+ "expires_on": %s,
+ "meta": {
+ "type": "object"
+ }
+ }
+ }
+ `, stringMinMax(2, 100), stringMinMax(1, 100), domainNames(), intMinOne)
+}
diff --git a/backend/internal/api/schema/update_user.go b/backend/internal/api/schema/update_user.go
new file mode 100644
index 00000000..5eb4fd8a
--- /dev/null
+++ b/backend/internal/api/schema/update_user.go
@@ -0,0 +1,23 @@
+package schema
+
+import "fmt"
+
+// UpdateUser is the schema for incoming data validation
+func UpdateUser() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "minProperties": 1,
+ "properties": {
+ "name": %s,
+ "nickname": %s,
+ "email": %s,
+ "is_disabled": {
+ "type": "boolean"
+ },
+ "capabilities": %s
+ }
+ }
+ `, stringMinMax(2, 100), stringMinMax(2, 100), stringMinMax(5, 150), capabilties())
+}
diff --git a/backend/internal/api/server.go b/backend/internal/api/server.go
new file mode 100644
index 00000000..c64de44a
--- /dev/null
+++ b/backend/internal/api/server.go
@@ -0,0 +1,19 @@
+package api
+
+import (
+ "fmt"
+ "net/http"
+
+ "npm/internal/logger"
+)
+
+const httpPort = 3000
+
+// StartServer creates a http server
+func StartServer() {
+ logger.Info("Server starting on port %v", httpPort)
+ err := http.ListenAndServe(fmt.Sprintf(":%v", httpPort), NewRouter())
+ if err != nil {
+ logger.Error("HttpListenError", err)
+ }
+}
diff --git a/backend/internal/audit-log.js b/backend/internal/audit-log.js
deleted file mode 100644
index 422b4f46..00000000
--- a/backend/internal/audit-log.js
+++ /dev/null
@@ -1,78 +0,0 @@
-const error = require('../lib/error');
-const auditLogModel = require('../models/audit-log');
-
-const internalAuditLog = {
-
- /**
- * All logs
- *
- * @param {Access} access
- * @param {Array} [expand]
- * @param {String} [search_query]
- * @returns {Promise}
- */
- getAll: (access, expand, search_query) => {
- return access.can('auditlog:list')
- .then(() => {
- let query = auditLogModel
- .query()
- .orderBy('created_on', 'DESC')
- .orderBy('id', 'DESC')
- .limit(100)
- .allowEager('[user]');
-
- // Query is used for searching
- if (typeof search_query === 'string') {
- query.where(function () {
- this.where('meta', 'like', '%' + search_query + '%');
- });
- }
-
- if (typeof expand !== 'undefined' && expand !== null) {
- query.eager('[' + expand.join(', ') + ']');
- }
-
- return query;
- });
- },
-
- /**
- * This method should not be publicly used, it doesn't check certain things. It will be assumed
- * that permission to add to audit log is already considered, however the access token is used for
- * default user id determination.
- *
- * @param {Access} access
- * @param {Object} data
- * @param {String} data.action
- * @param {Number} [data.user_id]
- * @param {Number} [data.object_id]
- * @param {Number} [data.object_type]
- * @param {Object} [data.meta]
- * @returns {Promise}
- */
- add: (access, data) => {
- return new Promise((resolve, reject) => {
- // Default the user id
- if (typeof data.user_id === 'undefined' || !data.user_id) {
- data.user_id = access.token.getUserId(1);
- }
-
- if (typeof data.action === 'undefined' || !data.action) {
- reject(new error.InternalValidationError('Audit log entry must contain an Action'));
- } else {
- // Make sure at least 1 of the IDs are set and action
- resolve(auditLogModel
- .query()
- .insert({
- user_id: data.user_id,
- action: data.action,
- object_type: data.object_type || '',
- object_id: data.object_id || 0,
- meta: data.meta || {}
- }));
- }
- });
- }
-};
-
-module.exports = internalAuditLog;
diff --git a/backend/internal/cache/cache.go b/backend/internal/cache/cache.go
new file mode 100644
index 00000000..347ce92f
--- /dev/null
+++ b/backend/internal/cache/cache.go
@@ -0,0 +1,51 @@
+package cache
+
+import (
+ "time"
+
+ "npm/internal/entity/setting"
+ "npm/internal/logger"
+)
+
+// Cache is a memory cache
+type Cache struct {
+ Settings *map[string]setting.Model
+}
+
+// Status is the status of last update
+type Status struct {
+ LastUpdate time.Time
+ Valid bool
+}
+
+// NewCache will create and return a new Cache object
+func NewCache() *Cache {
+ return &Cache{
+ Settings: nil,
+ }
+}
+
+// Refresh will refresh all cache items
+func (c *Cache) Refresh() {
+ c.RefreshSettings()
+}
+
+// Clear will clear the cache
+func (c *Cache) Clear() {
+ c.Settings = nil
+}
+
+// RefreshSettings will refresh the settings from the db (currently a logged stub; refresh logic is commented out below)
+func (c *Cache) RefreshSettings() {
+ logger.Info("Cache refreshing Settings")
+ /*
+ c.ProductOffers = client.GetProductOffers()
+
+ if c.ProductOffers != nil {
+ c.Status["product_offers"] = Status{
+ LastUpdate: time.Now(),
+ Valid: true,
+ }
+ }
+ */
+}
diff --git a/backend/internal/certificate.js b/backend/internal/certificate.js
deleted file mode 100644
index 7c8fddee..00000000
--- a/backend/internal/certificate.js
+++ /dev/null
@@ -1,1223 +0,0 @@
-const _ = require('lodash');
-const fs = require('fs');
-const https = require('https');
-const tempWrite = require('temp-write');
-const moment = require('moment');
-const logger = require('../logger').ssl;
-const error = require('../lib/error');
-const utils = require('../lib/utils');
-const certificateModel = require('../models/certificate');
-const dnsPlugins = require('../global/certbot-dns-plugins');
-const internalAuditLog = require('./audit-log');
-const internalNginx = require('./nginx');
-const internalHost = require('./host');
-const letsencryptStaging = process.env.NODE_ENV !== 'production';
-const letsencryptConfig = '/etc/letsencrypt.ini';
-const certbotCommand = 'certbot';
-const archiver = require('archiver');
-const path = require('path');
-const { isArray } = require('lodash');
-
-function omissions() {
- return ['is_deleted'];
-}
-
-const internalCertificate = {
-
- allowedSslFiles: ['certificate', 'certificate_key', 'intermediate_certificate'],
- intervalTimeout: 1000 * 60 * 60, // 1 hour
- interval: null,
- intervalProcessing: false,
-
- initTimer: () => {
- logger.info('Let\'s Encrypt Renewal Timer initialized');
- internalCertificate.interval = setInterval(internalCertificate.processExpiringHosts, internalCertificate.intervalTimeout);
- // And do this now as well
- internalCertificate.processExpiringHosts();
- },
-
- /**
- * Triggered by a timer, this will check for expiring hosts and renew their ssl certs if required
- */
- processExpiringHosts: () => {
- if (!internalCertificate.intervalProcessing) {
- internalCertificate.intervalProcessing = true;
- logger.info('Renewing SSL certs close to expiry...');
-
- const cmd = certbotCommand + ' renew --non-interactive --quiet ' +
- '--config "' + letsencryptConfig + '" ' +
- '--preferred-challenges "dns,http" ' +
- '--disable-hook-validation ' +
- (letsencryptStaging ? '--staging' : '');
-
- return utils.exec(cmd)
- .then((result) => {
- if (result) {
- logger.info('Renew Result: ' + result);
- }
-
- return internalNginx.reload()
- .then(() => {
- logger.info('Renew Complete');
- return result;
- });
- })
- .then(() => {
- // Now go and fetch all the letsencrypt certs from the db and query the files and update expiry times
- return certificateModel
- .query()
- .where('is_deleted', 0)
- .andWhere('provider', 'letsencrypt')
- .then((certificates) => {
- if (certificates && certificates.length) {
- let promises = [];
-
- certificates.map(function (certificate) {
- promises.push(
- internalCertificate.getCertificateInfoFromFile('/etc/letsencrypt/live/npm-' + certificate.id + '/fullchain.pem')
- .then((cert_info) => {
- return certificateModel
- .query()
- .where('id', certificate.id)
- .andWhere('provider', 'letsencrypt')
- .patch({
- expires_on: moment(cert_info.dates.to, 'X').format('YYYY-MM-DD HH:mm:ss')
- });
- })
- .catch((err) => {
- // Don't want to stop the train here, just log the error
- logger.error(err.message);
- })
- );
- });
-
- return Promise.all(promises);
- }
- });
- })
- .then(() => {
- internalCertificate.intervalProcessing = false;
- })
- .catch((err) => {
- logger.error(err);
- internalCertificate.intervalProcessing = false;
- });
- }
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @returns {Promise}
- */
- create: (access, data) => {
- return access.can('certificates:create', data)
- .then(() => {
- data.owner_user_id = access.token.getUserId(1);
-
- if (data.provider === 'letsencrypt') {
- data.nice_name = data.domain_names.join(', ');
- }
-
- return certificateModel
- .query()
- .omit(omissions())
- .insertAndFetch(data);
- })
- .then((certificate) => {
- if (certificate.provider === 'letsencrypt') {
- // Request a new Cert from LE. Let the fun begin.
-
- // 1. Find out any hosts that are using any of the hostnames in this cert
- // 2. Disable them in nginx temporarily
- // 3. Generate the LE config
- // 4. Request cert
- // 5. Remove LE config
- // 6. Re-instate previously disabled hosts
-
- // 1. Find out any hosts that are using any of the hostnames in this cert
- return internalHost.getHostsWithDomains(certificate.domain_names)
- .then((in_use_result) => {
- // 2. Disable them in nginx temporarily
- return internalCertificate.disableInUseHosts(in_use_result)
- .then(() => {
- return in_use_result;
- });
- })
- .then((in_use_result) => {
- // With DNS challenge no config is needed, so skip 3 and 5.
- if (certificate.meta.dns_challenge) {
- return internalNginx.reload().then(() => {
- // 4. Request cert
- return internalCertificate.requestLetsEncryptSslWithDnsChallenge(certificate);
- })
- .then(internalNginx.reload)
- .then(() => {
- // 6. Re-instate previously disabled hosts
- return internalCertificate.enableInUseHosts(in_use_result);
- })
- .then(() => {
- return certificate;
- })
- .catch((err) => {
- // In the event of failure, revert things and throw err back
- return internalCertificate.enableInUseHosts(in_use_result)
- .then(internalNginx.reload)
- .then(() => {
- throw err;
- });
- });
- } else {
- // 3. Generate the LE config
- return internalNginx.generateLetsEncryptRequestConfig(certificate)
- .then(internalNginx.reload)
- .then(async() => await new Promise((r) => setTimeout(r, 5000)))
- .then(() => {
- // 4. Request cert
- return internalCertificate.requestLetsEncryptSsl(certificate);
- })
- .then(() => {
- // 5. Remove LE config
- return internalNginx.deleteLetsEncryptRequestConfig(certificate);
- })
- .then(internalNginx.reload)
- .then(() => {
- // 6. Re-instate previously disabled hosts
- return internalCertificate.enableInUseHosts(in_use_result);
- })
- .then(() => {
- return certificate;
- })
- .catch((err) => {
- // In the event of failure, revert things and throw err back
- return internalNginx.deleteLetsEncryptRequestConfig(certificate)
- .then(() => {
- return internalCertificate.enableInUseHosts(in_use_result);
- })
- .then(internalNginx.reload)
- .then(() => {
- throw err;
- });
- });
- }
- })
- .then(() => {
- // At this point, the letsencrypt cert should exist on disk.
- // Lets get the expiry date from the file and update the row silently
- return internalCertificate.getCertificateInfoFromFile('/etc/letsencrypt/live/npm-' + certificate.id + '/fullchain.pem')
- .then((cert_info) => {
- return certificateModel
- .query()
- .patchAndFetchById(certificate.id, {
- expires_on: moment(cert_info.dates.to, 'X').format('YYYY-MM-DD HH:mm:ss')
- })
- .then((saved_row) => {
- // Add cert data for audit log
- saved_row.meta = _.assign({}, saved_row.meta, {
- letsencrypt_certificate: cert_info
- });
-
- return saved_row;
- });
- });
- }).catch(async (error) => {
- // Delete the certificate from the database if it was not created successfully
- await certificateModel
- .query()
- .deleteById(certificate.id);
-
- throw error;
- });
- } else {
- return certificate;
- }
- }).then((certificate) => {
-
- data.meta = _.assign({}, data.meta || {}, certificate.meta);
-
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'created',
- object_type: 'certificate',
- object_id: certificate.id,
- meta: data
- })
- .then(() => {
- return certificate;
- });
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {String} [data.email]
- * @param {String} [data.name]
- * @return {Promise}
- */
- update: (access, data) => {
- return access.can('certificates:update', data.id)
- .then((/*access_data*/) => {
- return internalCertificate.get(access, {id: data.id});
- })
- .then((row) => {
- if (row.id !== data.id) {
- // Sanity check that something crazy hasn't happened
- throw new error.InternalValidationError('Certificate could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
- }
-
- return certificateModel
- .query()
- .omit(omissions())
- .patchAndFetchById(row.id, data)
- .then((saved_row) => {
- saved_row.meta = internalCertificate.cleanMeta(saved_row.meta);
- data.meta = internalCertificate.cleanMeta(data.meta);
-
- // Add row.nice_name for custom certs
- if (saved_row.provider === 'other') {
- data.nice_name = saved_row.nice_name;
- }
-
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'updated',
- object_type: 'certificate',
- object_id: row.id,
- meta: _.omit(data, ['expires_on']) // this prevents json circular reference because expires_on might be raw
- })
- .then(() => {
- return _.omit(saved_row, omissions());
- });
- });
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {Array} [data.expand]
- * @param {Array} [data.omit]
- * @return {Promise}
- */
- get: (access, data) => {
- if (typeof data === 'undefined') {
- data = {};
- }
-
- return access.can('certificates:get', data.id)
- .then((access_data) => {
- let query = certificateModel
- .query()
- .where('is_deleted', 0)
- .andWhere('id', data.id)
- .allowEager('[owner]')
- .first();
-
- if (access_data.permission_visibility !== 'all') {
- query.andWhere('owner_user_id', access.token.getUserId(1));
- }
-
- // Custom omissions
- if (typeof data.omit !== 'undefined' && data.omit !== null) {
- query.omit(data.omit);
- }
-
- if (typeof data.expand !== 'undefined' && data.expand !== null) {
- query.eager('[' + data.expand.join(', ') + ']');
- }
-
- return query;
- })
- .then((row) => {
- if (row) {
- return _.omit(row, omissions());
- } else {
- throw new error.ItemNotFoundError(data.id);
- }
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @returns {Promise}
- */
- download: (access, data) => {
- return new Promise((resolve, reject) => {
- access.can('certificates:get', data)
- .then(() => {
- return internalCertificate.get(access, data);
- })
- .then((certificate) => {
- if (certificate.provider === 'letsencrypt') {
- const zipDirectory = '/etc/letsencrypt/live/npm-' + data.id;
-
- if (!fs.existsSync(zipDirectory)) {
- throw new error.ItemNotFoundError('Certificate ' + certificate.nice_name + ' does not exists');
- }
-
- let certFiles = fs.readdirSync(zipDirectory)
- .filter((fn) => fn.endsWith('.pem'))
- .map((fn) => fs.realpathSync(path.join(zipDirectory, fn)));
- const downloadName = 'npm-' + data.id + '-' + `${Date.now()}.zip`;
- const opName = '/tmp/' + downloadName;
- internalCertificate.zipFiles(certFiles, opName)
- .then(() => {
- logger.debug('zip completed : ', opName);
- const resp = {
- fileName: opName
- };
- resolve(resp);
- }).catch((err) => reject(err));
- } else {
- throw new error.ValidationError('Only Let\'sEncrypt certificates can be downloaded');
- }
- }).catch((err) => reject(err));
- });
- },
-
- /**
- * @param {String} source
- * @param {String} out
- * @returns {Promise}
- */
- zipFiles(source, out) {
- const archive = archiver('zip', { zlib: { level: 9 } });
- const stream = fs.createWriteStream(out);
-
- return new Promise((resolve, reject) => {
- source
- .map((fl) => {
- let fileName = path.basename(fl);
- logger.debug(fl, 'added to certificate zip');
- archive.file(fl, { name: fileName });
- });
- archive
- .on('error', (err) => reject(err))
- .pipe(stream);
-
- stream.on('close', () => resolve());
- archive.finalize();
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {String} [data.reason]
- * @returns {Promise}
- */
- delete: (access, data) => {
- return access.can('certificates:delete', data.id)
- .then(() => {
- return internalCertificate.get(access, {id: data.id});
- })
- .then((row) => {
- if (!row) {
- throw new error.ItemNotFoundError(data.id);
- }
-
- return certificateModel
- .query()
- .where('id', row.id)
- .patch({
- is_deleted: 1
- })
- .then(() => {
- // Add to audit log
- row.meta = internalCertificate.cleanMeta(row.meta);
-
- return internalAuditLog.add(access, {
- action: 'deleted',
- object_type: 'certificate',
- object_id: row.id,
- meta: _.omit(row, omissions())
- });
- })
- .then(() => {
- if (row.provider === 'letsencrypt') {
- // Revoke the cert
- return internalCertificate.revokeLetsEncryptSsl(row);
- }
- });
- })
- .then(() => {
- return true;
- });
- },
-
- /**
- * All Certs
- *
- * @param {Access} access
- * @param {Array} [expand]
- * @param {String} [search_query]
- * @returns {Promise}
- */
- getAll: (access, expand, search_query) => {
- return access.can('certificates:list')
- .then((access_data) => {
- let query = certificateModel
- .query()
- .where('is_deleted', 0)
- .groupBy('id')
- .omit(['is_deleted'])
- .allowEager('[owner]')
- .orderBy('nice_name', 'ASC');
-
- if (access_data.permission_visibility !== 'all') {
- query.andWhere('owner_user_id', access.token.getUserId(1));
- }
-
- // Query is used for searching
- if (typeof search_query === 'string') {
- query.where(function () {
- this.where('nice_name', 'like', '%' + search_query + '%');
- });
- }
-
- if (typeof expand !== 'undefined' && expand !== null) {
- query.eager('[' + expand.join(', ') + ']');
- }
-
- return query;
- });
- },
-
- /**
- * Report use
- *
- * @param {Number} user_id
- * @param {String} visibility
- * @returns {Promise}
- */
- getCount: (user_id, visibility) => {
- let query = certificateModel
- .query()
- .count('id as count')
- .where('is_deleted', 0);
-
- if (visibility !== 'all') {
- query.andWhere('owner_user_id', user_id);
- }
-
- return query.first()
- .then((row) => {
- return parseInt(row.count, 10);
- });
- },
-
- /**
- * @param {Object} certificate
- * @returns {Promise}
- */
- writeCustomCert: (certificate) => {
- logger.info('Writing Custom Certificate:', certificate);
-
- const dir = '/data/custom_ssl/npm-' + certificate.id;
-
- return new Promise((resolve, reject) => {
- if (certificate.provider === 'letsencrypt') {
- reject(new Error('Refusing to write letsencrypt certs here'));
- return;
- }
-
- let certData = certificate.meta.certificate;
- if (typeof certificate.meta.intermediate_certificate !== 'undefined') {
- certData = certData + '\n' + certificate.meta.intermediate_certificate;
- }
-
- try {
- if (!fs.existsSync(dir)) {
- fs.mkdirSync(dir);
- }
- } catch (err) {
- reject(err);
- return;
- }
-
- fs.writeFile(dir + '/fullchain.pem', certData, function (err) {
- if (err) {
- reject(err);
- } else {
- resolve();
- }
- });
- })
- .then(() => {
- return new Promise((resolve, reject) => {
- fs.writeFile(dir + '/privkey.pem', certificate.meta.certificate_key, function (err) {
- if (err) {
- reject(err);
- } else {
- resolve();
- }
- });
- });
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Array} data.domain_names
- * @param {String} data.meta.letsencrypt_email
- * @param {Boolean} data.meta.letsencrypt_agree
- * @returns {Promise}
- */
- createQuickCertificate: (access, data) => {
- return internalCertificate.create(access, {
- provider: 'letsencrypt',
- domain_names: data.domain_names,
- meta: data.meta
- });
- },
-
- /**
- * Validates that the certs provided are good.
- * No access required here, nothing is changed or stored.
- *
- * @param {Object} data
- * @param {Object} data.files
- * @returns {Promise}
- */
- validate: (data) => {
- return new Promise((resolve) => {
- // Put file contents into an object
- let files = {};
- _.map(data.files, (file, name) => {
- if (internalCertificate.allowedSslFiles.indexOf(name) !== -1) {
- files[name] = file.data.toString();
- }
- });
-
- resolve(files);
- })
- .then((files) => {
- // For each file, create a temp file and write the contents to it
- // Then test it depending on the file type
- let promises = [];
- _.map(files, (content, type) => {
- promises.push(new Promise((resolve) => {
- if (type === 'certificate_key') {
- resolve(internalCertificate.checkPrivateKey(content));
- } else {
- // this should handle `certificate` and intermediate certificate
- resolve(internalCertificate.getCertificateInfo(content, true));
- }
- }).then((res) => {
- return {[type]: res};
- }));
- });
-
- return Promise.all(promises)
- .then((files) => {
- let data = {};
-
- _.each(files, (file) => {
- data = _.assign({}, data, file);
- });
-
- return data;
- });
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {Object} data.files
- * @returns {Promise}
- */
- upload: (access, data) => {
- return internalCertificate.get(access, {id: data.id})
- .then((row) => {
- if (row.provider !== 'other') {
- throw new error.ValidationError('Cannot upload certificates for this type of provider');
- }
-
- return internalCertificate.validate(data)
- .then((validations) => {
- if (typeof validations.certificate === 'undefined') {
- throw new error.ValidationError('Certificate file was not provided');
- }
-
- _.map(data.files, (file, name) => {
- if (internalCertificate.allowedSslFiles.indexOf(name) !== -1) {
- row.meta[name] = file.data.toString();
- }
- });
-
- // TODO: This uses a mysql only raw function that won't translate to postgres
- return internalCertificate.update(access, {
- id: data.id,
- expires_on: moment(validations.certificate.dates.to, 'X').format('YYYY-MM-DD HH:mm:ss'),
- domain_names: [validations.certificate.cn],
- meta: _.clone(row.meta) // Prevent the update method from changing this value that we'll use later
- })
- .then((certificate) => {
- console.log('ROWMETA:', row.meta);
- certificate.meta = row.meta;
- return internalCertificate.writeCustomCert(certificate);
- });
- })
- .then(() => {
- return _.pick(row.meta, internalCertificate.allowedSslFiles);
- });
- });
- },
-
- /**
- * Uses the openssl command to validate the private key.
- * It will save the file to disk first, then run commands on it, then delete the file.
- *
- * @param {String} private_key This is the entire key contents as a string
- */
- checkPrivateKey: (private_key) => {
- return tempWrite(private_key, '/tmp')
- .then((filepath) => {
- return new Promise((resolve, reject) => {
- const failTimeout = setTimeout(() => {
- reject(new error.ValidationError('Result Validation Error: Validation timed out. This could be due to the key being passphrase-protected.'));
- }, 10000);
- utils
- .exec('openssl pkey -in ' + filepath + ' -check -noout 2>&1 ')
- .then((result) => {
- clearTimeout(failTimeout);
- if (!result.toLowerCase().includes('key is valid')) {
- reject(new error.ValidationError('Result Validation Error: ' + result));
- }
- fs.unlinkSync(filepath);
- resolve(true);
- })
- .catch((err) => {
- clearTimeout(failTimeout);
- fs.unlinkSync(filepath);
- reject(new error.ValidationError('Certificate Key is not valid (' + err.message + ')', err));
- });
- });
- });
- },
-
- /**
- * Uses the openssl command to both validate and get info out of the certificate.
- * It will save the file to disk first, then run commands on it, then delete the file.
- *
- * @param {String} certificate This is the entire cert contents as a string
- * @param {Boolean} [throw_expired] Throw when the certificate is out of date
- */
- getCertificateInfo: (certificate, throw_expired) => {
- return tempWrite(certificate, '/tmp')
- .then((filepath) => {
- return internalCertificate.getCertificateInfoFromFile(filepath, throw_expired)
- .then((certData) => {
- fs.unlinkSync(filepath);
- return certData;
- }).catch((err) => {
- fs.unlinkSync(filepath);
- throw err;
- });
- });
- },
-
- /**
- * Uses the openssl command to both validate and get info out of the certificate.
- * It will save the file to disk first, then run commands on it, then delete the file.
- *
- * @param {String} certificate_file The file location on disk
- * @param {Boolean} [throw_expired] Throw when the certificate is out of date
- */
- getCertificateInfoFromFile: (certificate_file, throw_expired) => {
- let certData = {};
-
- return utils.exec('openssl x509 -in ' + certificate_file + ' -subject -noout')
- .then((result) => {
- // subject=CN = something.example.com
- const regex = /(?:subject=)?[^=]+=\s+(\S+)/gim;
- const match = regex.exec(result);
-
- if (typeof match[1] === 'undefined') {
- throw new error.ValidationError('Could not determine subject from certificate: ' + result);
- }
-
- certData['cn'] = match[1];
- })
- .then(() => {
- return utils.exec('openssl x509 -in ' + certificate_file + ' -issuer -noout');
- })
- .then((result) => {
- // issuer=C = US, O = Let's Encrypt, CN = Let's Encrypt Authority X3
- const regex = /^(?:issuer=)?(.*)$/gim;
- const match = regex.exec(result);
-
- if (typeof match[1] === 'undefined') {
- throw new error.ValidationError('Could not determine issuer from certificate: ' + result);
- }
-
- certData['issuer'] = match[1];
- })
- .then(() => {
- return utils.exec('openssl x509 -in ' + certificate_file + ' -dates -noout');
- })
- .then((result) => {
- // notBefore=Jul 14 04:04:29 2018 GMT
- // notAfter=Oct 12 04:04:29 2018 GMT
- let validFrom = null;
- let validTo = null;
-
- const lines = result.split('\n');
- lines.map(function (str) {
- const regex = /^(\S+)=(.*)$/gim;
- const match = regex.exec(str.trim());
-
- if (match && typeof match[2] !== 'undefined') {
- const date = parseInt(moment(match[2], 'MMM DD HH:mm:ss YYYY z').format('X'), 10);
-
- if (match[1].toLowerCase() === 'notbefore') {
- validFrom = date;
- } else if (match[1].toLowerCase() === 'notafter') {
- validTo = date;
- }
- }
- });
-
- if (!validFrom || !validTo) {
- throw new error.ValidationError('Could not determine dates from certificate: ' + result);
- }
-
- if (throw_expired && validTo < parseInt(moment().format('X'), 10)) {
- throw new error.ValidationError('Certificate has expired');
- }
-
- certData['dates'] = {
- from: validFrom,
- to: validTo
- };
-
- return certData;
- }).catch((err) => {
- throw new error.ValidationError('Certificate is not valid (' + err.message + ')', err);
- });
- },
-
- /**
- * Cleans the ssl keys from the meta object and sets them to "true"
- *
- * @param {Object} meta
- * @param {Boolean} [remove]
- * @returns {Object}
- */
- cleanMeta: function (meta, remove) {
- internalCertificate.allowedSslFiles.map((key) => {
- if (typeof meta[key] !== 'undefined' && meta[key]) {
- if (remove) {
- delete meta[key];
- } else {
- meta[key] = true;
- }
- }
- });
-
- return meta;
- },
-
- /**
- * Request a certificate using the http challenge
- * @param {Object} certificate the certificate row
- * @returns {Promise}
- */
- requestLetsEncryptSsl: (certificate) => {
- logger.info('Requesting Let\'sEncrypt certificates for Cert #' + certificate.id + ': ' + certificate.domain_names.join(', '));
-
- const cmd = certbotCommand + ' certonly ' +
- '--config "' + letsencryptConfig + '" ' +
- '--cert-name "npm-' + certificate.id + '" ' +
- '--agree-tos ' +
- '--authenticator webroot ' +
- '--email "' + certificate.meta.letsencrypt_email + '" ' +
- '--preferred-challenges "dns,http" ' +
- '--domains "' + certificate.domain_names.join(',') + '" ' +
- (letsencryptStaging ? '--staging' : '');
-
- logger.info('Command:', cmd);
-
- return utils.exec(cmd)
- .then((result) => {
- logger.success(result);
- return result;
- });
- },
-
- /**
- * @param {Object} certificate the certificate row
- * @param {String} dns_provider the dns provider name (key used in `certbot-dns-plugins.js`)
- * @param {String | null} credentials the content of this providers credentials file
- * @param {String} propagation_seconds the cloudflare api token
- * @returns {Promise}
- */
- requestLetsEncryptSslWithDnsChallenge: (certificate) => {
- const dns_plugin = dnsPlugins[certificate.meta.dns_provider];
-
- if (!dns_plugin) {
- throw Error(`Unknown DNS provider '${certificate.meta.dns_provider}'`);
- }
-
- logger.info(`Requesting Let'sEncrypt certificates via ${dns_plugin.display_name} for Cert #${certificate.id}: ${certificate.domain_names.join(', ')}`);
-
- const credentialsLocation = '/etc/letsencrypt/credentials/credentials-' + certificate.id;
- // Escape single quotes and backslashes
- const escapedCredentials = certificate.meta.dns_provider_credentials.replaceAll('\'', '\\\'').replaceAll('\\', '\\\\');
- const credentialsCmd = 'mkdir -p /etc/letsencrypt/credentials 2> /dev/null; echo \'' + escapedCredentials + '\' > \'' + credentialsLocation + '\' && chmod 600 \'' + credentialsLocation + '\'';
- const prepareCmd = 'pip install ' + dns_plugin.package_name + (dns_plugin.version_requirement || '') + ' ' + dns_plugin.dependencies;
-
- // Whether the plugin has a ---credentials argument
- const hasConfigArg = certificate.meta.dns_provider !== 'route53';
-
- let mainCmd = certbotCommand + ' certonly ' +
- '--config "' + letsencryptConfig + '" ' +
- '--cert-name "npm-' + certificate.id + '" ' +
- '--agree-tos ' +
- '--email "' + certificate.meta.letsencrypt_email + '" ' +
- '--domains "' + certificate.domain_names.join(',') + '" ' +
- '--authenticator ' + dns_plugin.full_plugin_name + ' ' +
- (
- hasConfigArg
- ? '--' + dns_plugin.full_plugin_name + '-credentials "' + credentialsLocation + '"'
- : ''
- ) +
- (
- certificate.meta.propagation_seconds !== undefined
- ? ' --' + dns_plugin.full_plugin_name + '-propagation-seconds ' + certificate.meta.propagation_seconds
- : ''
- ) +
- (letsencryptStaging ? ' --staging' : '');
-
- // Prepend the path to the credentials file as an environment variable
- if (certificate.meta.dns_provider === 'route53') {
- mainCmd = 'AWS_CONFIG_FILE=\'' + credentialsLocation + '\' ' + mainCmd;
- }
-
- logger.info('Command:', `${credentialsCmd} && ${prepareCmd} && ${mainCmd}`);
-
- return utils.exec(credentialsCmd)
- .then(() => {
- return utils.exec(prepareCmd)
- .then(() => {
- return utils.exec(mainCmd)
- .then(async (result) => {
- logger.info(result);
- return result;
- });
- });
- }).catch(async (err) => {
- // Don't fail if file does not exist
- const delete_credentialsCmd = `rm -f '${credentialsLocation}' || true`;
- await utils.exec(delete_credentialsCmd);
- throw err;
- });
- },
-
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @returns {Promise}
- */
- renew: (access, data) => {
- return access.can('certificates:update', data)
- .then(() => {
- return internalCertificate.get(access, data);
- })
- .then((certificate) => {
- if (certificate.provider === 'letsencrypt') {
- const renewMethod = certificate.meta.dns_challenge ? internalCertificate.renewLetsEncryptSslWithDnsChallenge : internalCertificate.renewLetsEncryptSsl;
-
- return renewMethod(certificate)
- .then(() => {
- return internalCertificate.getCertificateInfoFromFile('/etc/letsencrypt/live/npm-' + certificate.id + '/fullchain.pem');
- })
- .then((cert_info) => {
- return certificateModel
- .query()
- .patchAndFetchById(certificate.id, {
- expires_on: moment(cert_info.dates.to, 'X').format('YYYY-MM-DD HH:mm:ss')
- });
- })
- .then((updated_certificate) => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'renewed',
- object_type: 'certificate',
- object_id: updated_certificate.id,
- meta: updated_certificate
- })
- .then(() => {
- return updated_certificate;
- });
- });
- } else {
- throw new error.ValidationError('Only Let\'sEncrypt certificates can be renewed');
- }
- });
- },
-
- /**
- * @param {Object} certificate the certificate row
- * @returns {Promise}
- */
- renewLetsEncryptSsl: (certificate) => {
- logger.info('Renewing Let\'sEncrypt certificates for Cert #' + certificate.id + ': ' + certificate.domain_names.join(', '));
-
- const cmd = certbotCommand + ' renew --force-renewal ' +
- '--config "' + letsencryptConfig + '" ' +
- '--cert-name "npm-' + certificate.id + '" ' +
- '--preferred-challenges "dns,http" ' +
- '--no-random-sleep-on-renew ' +
- '--disable-hook-validation ' +
- (letsencryptStaging ? '--staging' : '');
-
- logger.info('Command:', cmd);
-
- return utils.exec(cmd)
- .then((result) => {
- logger.info(result);
- return result;
- });
- },
-
- /**
- * @param {Object} certificate the certificate row
- * @returns {Promise}
- */
- renewLetsEncryptSslWithDnsChallenge: (certificate) => {
- const dns_plugin = dnsPlugins[certificate.meta.dns_provider];
-
- if (!dns_plugin) {
- throw Error(`Unknown DNS provider '${certificate.meta.dns_provider}'`);
- }
-
- logger.info(`Renewing Let'sEncrypt certificates via ${dns_plugin.display_name} for Cert #${certificate.id}: ${certificate.domain_names.join(', ')}`);
-
- let mainCmd = certbotCommand + ' renew ' +
- '--config "' + letsencryptConfig + '" ' +
- '--cert-name "npm-' + certificate.id + '" ' +
- '--disable-hook-validation ' +
- '--no-random-sleep-on-renew ' +
- (letsencryptStaging ? ' --staging' : '');
-
- // Prepend the path to the credentials file as an environment variable
- if (certificate.meta.dns_provider === 'route53') {
- const credentialsLocation = '/etc/letsencrypt/credentials/credentials-' + certificate.id;
- mainCmd = 'AWS_CONFIG_FILE=\'' + credentialsLocation + '\' ' + mainCmd;
- }
-
- logger.info('Command:', mainCmd);
-
- return utils.exec(mainCmd)
- .then(async (result) => {
- logger.info(result);
- return result;
- });
- },
-
- /**
- * @param {Object} certificate the certificate row
- * @param {Boolean} [throw_errors]
- * @returns {Promise}
- */
- revokeLetsEncryptSsl: (certificate, throw_errors) => {
- logger.info('Revoking Let\'sEncrypt certificates for Cert #' + certificate.id + ': ' + certificate.domain_names.join(', '));
-
- const mainCmd = certbotCommand + ' revoke ' +
- '--config "' + letsencryptConfig + '" ' +
- '--cert-path "/etc/letsencrypt/live/npm-' + certificate.id + '/fullchain.pem" ' +
- '--delete-after-revoke ' +
- (letsencryptStaging ? '--staging' : '');
-
- // Don't fail command if file does not exist
- const delete_credentialsCmd = `rm -f '/etc/letsencrypt/credentials/credentials-${certificate.id}' || true`;
-
- logger.info('Command:', mainCmd + '; ' + delete_credentialsCmd);
-
- return utils.exec(mainCmd)
- .then(async (result) => {
- await utils.exec(delete_credentialsCmd);
- logger.info(result);
- return result;
- })
- .catch((err) => {
- logger.error(err.message);
-
- if (throw_errors) {
- throw err;
- }
- });
- },
-
- /**
- * @param {Object} certificate
- * @returns {Boolean}
- */
- hasLetsEncryptSslCerts: (certificate) => {
- const letsencryptPath = '/etc/letsencrypt/live/npm-' + certificate.id;
-
- return fs.existsSync(letsencryptPath + '/fullchain.pem') && fs.existsSync(letsencryptPath + '/privkey.pem');
- },
-
- /**
- * @param {Object} in_use_result
- * @param {Number} in_use_result.total_count
- * @param {Array} in_use_result.proxy_hosts
- * @param {Array} in_use_result.redirection_hosts
- * @param {Array} in_use_result.dead_hosts
- */
- disableInUseHosts: (in_use_result) => {
- if (in_use_result.total_count) {
- let promises = [];
-
- if (in_use_result.proxy_hosts.length) {
- promises.push(internalNginx.bulkDeleteConfigs('proxy_host', in_use_result.proxy_hosts));
- }
-
- if (in_use_result.redirection_hosts.length) {
- promises.push(internalNginx.bulkDeleteConfigs('redirection_host', in_use_result.redirection_hosts));
- }
-
- if (in_use_result.dead_hosts.length) {
- promises.push(internalNginx.bulkDeleteConfigs('dead_host', in_use_result.dead_hosts));
- }
-
- return Promise.all(promises);
-
- } else {
- return Promise.resolve();
- }
- },
-
- /**
- * @param {Object} in_use_result
- * @param {Number} in_use_result.total_count
- * @param {Array} in_use_result.proxy_hosts
- * @param {Array} in_use_result.redirection_hosts
- * @param {Array} in_use_result.dead_hosts
- */
- enableInUseHosts: (in_use_result) => {
- if (in_use_result.total_count) {
- let promises = [];
-
- if (in_use_result.proxy_hosts.length) {
- promises.push(internalNginx.bulkGenerateConfigs('proxy_host', in_use_result.proxy_hosts));
- }
-
- if (in_use_result.redirection_hosts.length) {
- promises.push(internalNginx.bulkGenerateConfigs('redirection_host', in_use_result.redirection_hosts));
- }
-
- if (in_use_result.dead_hosts.length) {
- promises.push(internalNginx.bulkGenerateConfigs('dead_host', in_use_result.dead_hosts));
- }
-
- return Promise.all(promises);
-
- } else {
- return Promise.resolve();
- }
- },
-
- testHttpsChallenge: async (access, domains) => {
- await access.can('certificates:list');
-
- if (!isArray(domains)) {
- throw new error.InternalValidationError('Domains must be an array of strings');
- }
- if (domains.length === 0) {
- throw new error.InternalValidationError('No domains provided');
- }
-
- // Create a test challenge file
- const testChallengeDir = '/data/letsencrypt-acme-challenge/.well-known/acme-challenge';
- const testChallengeFile = testChallengeDir + '/test-challenge';
- fs.mkdirSync(testChallengeDir, {recursive: true});
- fs.writeFileSync(testChallengeFile, 'Success', {encoding: 'utf8'});
-
- async function performTestForDomain (domain) {
- logger.info('Testing http challenge for ' + domain);
- const url = `http://${domain}/.well-known/acme-challenge/test-challenge`;
- const formBody = `method=G&url=${encodeURI(url)}&bodytype=T&requestbody=&headername=User-Agent&headervalue=None&locationid=1&ch=false&cc=false`;
- const options = {
- method: 'POST',
- headers: {
- 'Content-Type': 'application/x-www-form-urlencoded',
- 'Content-Length': Buffer.byteLength(formBody)
- }
- };
-
- const result = await new Promise((resolve) => {
-
- const req = https.request('https://www.site24x7.com/tools/restapi-tester', options, function (res) {
- let responseBody = '';
-
- res.on('data', (chunk) => responseBody = responseBody + chunk);
- res.on('end', function () {
- const parsedBody = JSON.parse(responseBody + '');
- if (res.statusCode !== 200) {
- logger.warn(`Failed to test HTTP challenge for domain ${domain}`, res);
- resolve(undefined);
- }
- resolve(parsedBody);
- });
- });
-
- // Make sure to write the request body.
- req.write(formBody);
- req.end();
- req.on('error', function (e) { logger.warn(`Failed to test HTTP challenge for domain ${domain}`, e);
- resolve(undefined); });
- });
-
- if (!result) {
- // Some error occurred while trying to get the data
- return 'failed';
- } else if (`${result.responsecode}` === '200' && result.htmlresponse === 'Success') {
- // Server exists and has responded with the correct data
- return 'ok';
- } else if (`${result.responsecode}` === '200') {
- // Server exists but has responded with wrong data
- logger.info(`HTTP challenge test failed for domain ${domain} because of invalid returned data:`, result.htmlresponse);
- return 'wrong-data';
- } else if (`${result.responsecode}` === '404') {
- // Server exists but responded with a 404
- logger.info(`HTTP challenge test failed for domain ${domain} because code 404 was returned`);
- return '404';
- } else if (`${result.responsecode}` === '0' || (typeof result.reason === 'string' && result.reason.toLowerCase() === 'host unavailable')) {
- // Server does not exist at domain
- logger.info(`HTTP challenge test failed for domain ${domain} the host was not found`);
- return 'no-host';
- } else {
- // Other errors
- logger.info(`HTTP challenge test failed for domain ${domain} because code ${result.responsecode} was returned`);
- return `other:${result.responsecode}`;
- }
- }
-
- const results = {};
-
- for (const domain of domains){
- results[domain] = await performTestForDomain(domain);
- }
-
- // Remove the test challenge file
- fs.unlinkSync(testChallengeFile);
-
- return results;
- }
-};
-
-module.exports = internalCertificate;
diff --git a/backend/internal/config/args.go b/backend/internal/config/args.go
new file mode 100644
index 00000000..6bade68d
--- /dev/null
+++ b/backend/internal/config/args.go
@@ -0,0 +1,28 @@
+package config
+
+import (
+ "fmt"
+ "os"
+
+ "github.com/alexflint/go-arg"
+)
+
+// ArgConfig is the settings for passing arguments to the command
+type ArgConfig struct {
+ Version bool `arg:"-v" help:"print version and exit"`
+}
+
+var (
+ appArguments ArgConfig
+)
+
+// InitArgs will parse arg vars
+func InitArgs(version, commit *string) {
+ // nolint: errcheck, gosec
+ arg.MustParse(&appArguments)
+
+ if appArguments.Version {
+ fmt.Printf("v%s (%s)\n", *version, *commit)
+ os.Exit(0)
+ }
+}
diff --git a/backend/internal/config/config.go b/backend/internal/config/config.go
new file mode 100644
index 00000000..d3d5944f
--- /dev/null
+++ b/backend/internal/config/config.go
@@ -0,0 +1,79 @@
+package config
+
+import (
+ "fmt"
+ golog "log"
+ "runtime"
+
+ "npm/internal/logger"
+
+ "github.com/getsentry/sentry-go"
+ "github.com/vrischmann/envconfig"
+)
+
+// Init will parse environment variables into the Env struct
+func Init(version, commit, sentryDSN *string) {
+ // ErrorReporting is enabled until we load the status of it from the DB later
+ ErrorReporting = true
+
+ Version = *version
+ Commit = *commit
+
+ if err := envconfig.InitWithPrefix(&Configuration, "NPM"); err != nil {
+ fmt.Printf("%+v\n", err)
+ }
+
+ initLogger(*sentryDSN)
+ logger.Info("Build Version: %s (%s)", Version, Commit)
+ createDataFolders()
+ loadKeys()
+}
+
+// Init initialises the Log object and return it
+func initLogger(sentryDSN string) {
+ // this removes timestamp prefixes from logs
+ golog.SetFlags(0)
+
+ switch Configuration.Log.Level {
+ case "debug":
+ logLevel = logger.DebugLevel
+ case "warn":
+ logLevel = logger.WarnLevel
+ case "error":
+ logLevel = logger.ErrorLevel
+ default:
+ logLevel = logger.InfoLevel
+ }
+
+ err := logger.Configure(&logger.Config{
+ LogThreshold: logLevel,
+ Formatter: Configuration.Log.Format,
+ SentryConfig: sentry.ClientOptions{
+ // This is the jc21 NginxProxyManager Sentry project,
+ // errors will be reported here (if error reporting is enable)
+ // and this project is private. No personal information should
+ // be sent in any error messages, only stacktraces.
+ Dsn: sentryDSN,
+ Release: Commit,
+ Dist: Version,
+ Environment: fmt.Sprintf("%s-%s", runtime.GOOS, runtime.GOARCH),
+ },
+ })
+
+ if err != nil {
+ logger.Error("LoggerConfigurationError", err)
+ }
+}
+
+// GetLogLevel returns the logger const level
+func GetLogLevel() logger.Level {
+ return logLevel
+}
+
+func isError(errorClass string, err error) bool {
+ if err != nil {
+ logger.Error(errorClass, err)
+ return true
+ }
+ return false
+}
diff --git a/backend/internal/config/folders.go b/backend/internal/config/folders.go
new file mode 100644
index 00000000..bf3d43cf
--- /dev/null
+++ b/backend/internal/config/folders.go
@@ -0,0 +1,34 @@
+package config
+
+import (
+ "fmt"
+ "npm/internal/logger"
+ "os"
+)
+
+// createDataFolders will recursively create these folders within the
+// data folder defined in configuration.
+func createDataFolders() {
+ folders := []string{
+ "access",
+ "certificates",
+ "logs",
+ // Acme.sh:
+ Configuration.Acmesh.GetWellknown(),
+ // Nginx:
+ "nginx/hosts",
+ "nginx/streams",
+ "nginx/temp",
+ }
+
+ for _, folder := range folders {
+ path := folder
+ if path[0:1] != "/" {
+ path = fmt.Sprintf("%s/%s", Configuration.DataFolder, folder)
+ }
+ logger.Debug("Creating folder: %s", path)
+ if err := os.MkdirAll(path, os.ModePerm); err != nil {
+ logger.Error("CreateDataFolderError", err)
+ }
+ }
+}
diff --git a/backend/internal/config/keys.go b/backend/internal/config/keys.go
new file mode 100644
index 00000000..9ac3e903
--- /dev/null
+++ b/backend/internal/config/keys.go
@@ -0,0 +1,112 @@
+package config
+
+import (
+ "bytes"
+ "crypto/rand"
+ "crypto/rsa"
+ "crypto/x509"
+ "encoding/asn1"
+ "encoding/pem"
+ "fmt"
+ "io/ioutil"
+ "os"
+
+ "npm/internal/logger"
+)
+
+var keysFolder string
+var publicKeyFile string
+var privateKeyFile string
+
+func loadKeys() {
+ // check if keys folder exists in data folder
+ keysFolder = fmt.Sprintf("%s/keys", Configuration.DataFolder)
+ publicKeyFile = fmt.Sprintf("%s/public.key", keysFolder)
+ privateKeyFile = fmt.Sprintf("%s/private.key", keysFolder)
+
+ if _, err := os.Stat(keysFolder); os.IsNotExist(err) {
+ // nolint:errcheck,gosec
+ os.Mkdir(keysFolder, 0700)
+ }
+
+ // check if keys exist on disk
+ _, publicKeyErr := os.Stat(publicKeyFile)
+ _, privateKeyErr := os.Stat(privateKeyFile)
+
+ // generate keys if either one doesn't exist
+ if os.IsNotExist(publicKeyErr) || os.IsNotExist(privateKeyErr) {
+ generateKeys()
+ saveKeys()
+ }
+
+ // Load keys from disk
+ // nolint:gosec
+ publicKeyBytes, publicKeyBytesErr := ioutil.ReadFile(publicKeyFile)
+ // nolint:gosec
+ privateKeyBytes, privateKeyBytesErr := ioutil.ReadFile(privateKeyFile)
+ PublicKey = string(publicKeyBytes)
+ PrivateKey = string(privateKeyBytes)
+
+ if isError("PublicKeyReadError", publicKeyBytesErr) || isError("PrivateKeyReadError", privateKeyBytesErr) || PublicKey == "" || PrivateKey == "" {
+ logger.Warn("There was an error loading keys, proceeding to generate new RSA keys")
+ generateKeys()
+ saveKeys()
+ }
+}
+
+func generateKeys() {
+ reader := rand.Reader
+ bitSize := 4096
+
+ key, err := rsa.GenerateKey(reader, bitSize)
+ if isError("RSAGenerateError", err) {
+ return
+ }
+
+ privateKey := &pem.Block{
+ Type: "PRIVATE KEY",
+ Bytes: x509.MarshalPKCS1PrivateKey(key),
+ }
+
+ privateKeyBuffer := new(bytes.Buffer)
+ err = pem.Encode(privateKeyBuffer, privateKey)
+ if isError("PrivatePEMEncodeError", err) {
+ return
+ }
+
+ asn1Bytes, err2 := asn1.Marshal(key.PublicKey)
+ if isError("RSAMarshalError", err2) {
+ return
+ }
+
+ publicKey := &pem.Block{
+ Type: "PUBLIC KEY",
+ Bytes: asn1Bytes,
+ }
+
+ publicKeyBuffer := new(bytes.Buffer)
+ err = pem.Encode(publicKeyBuffer, publicKey)
+ if isError("PublicPEMEncodeError", err) {
+ return
+ }
+
+ PublicKey = publicKeyBuffer.String()
+ PrivateKey = privateKeyBuffer.String()
+ logger.Info("Generated new RSA keys")
+}
+
+func saveKeys() {
+ err := ioutil.WriteFile(publicKeyFile, []byte(PublicKey), 0600)
+ if err != nil {
+ logger.Error("PublicKeyWriteError", err)
+ } else {
+ logger.Info("Saved Public Key: %s", publicKeyFile)
+ }
+
+ err = ioutil.WriteFile(privateKeyFile, []byte(PrivateKey), 0600)
+ if err != nil {
+ logger.Error("PrivateKeyWriteError", err)
+ } else {
+ logger.Info("Saved Private Key: %s", privateKeyFile)
+ }
+}
diff --git a/backend/internal/config/vars.go b/backend/internal/config/vars.go
new file mode 100644
index 00000000..38207355
--- /dev/null
+++ b/backend/internal/config/vars.go
@@ -0,0 +1,49 @@
+package config
+
+import (
+ "fmt"
+ "npm/internal/logger"
+)
+
+// Version is the version set by ldflags
+var Version string
+
+// Commit is the git commit set by ldflags
+var Commit string
+
+// IsSetup defines whether we have an admin user or not
+var IsSetup bool
+
+// ErrorReporting defines whether we will send errors to Sentry
+var ErrorReporting bool
+
+// PublicKey is the public key
+var PublicKey string
+
+// PrivateKey is the private key
+var PrivateKey string
+
+var logLevel logger.Level
+
+type log struct {
+ Level string `json:"level" envconfig:"optional,default=info"`
+ Format string `json:"format" envconfig:"optional,default=nice"`
+}
+
+type acmesh struct {
+ Home string `json:"home" envconfig:"optional,default=/data/.acme.sh"`
+ ConfigHome string `json:"config_home" envconfig:"optional,default=/data/.acme.sh/config"`
+ CertHome string `json:"cert_home" envconfig:"optional,default=/data/.acme.sh/certs"`
+}
+
+// Configuration is the main configuration object
+var Configuration struct {
+ DataFolder string `json:"data_folder" envconfig:"optional,default=/data"`
+ Acmesh acmesh `json:"acmesh"`
+ Log log `json:"log"`
+}
+
+// GetWellknown returns the well known path
+func (a *acmesh) GetWellknown() string {
+ return fmt.Sprintf("%s/.well-known", a.Home)
+}
diff --git a/backend/internal/database/helpers.go b/backend/internal/database/helpers.go
new file mode 100644
index 00000000..7553af3b
--- /dev/null
+++ b/backend/internal/database/helpers.go
@@ -0,0 +1,46 @@
+package database
+
+import (
+ "fmt"
+ "strings"
+
+ "npm/internal/errors"
+ "npm/internal/model"
+ "npm/internal/util"
+)
+
+const (
+ // DateFormat for DateFormat
+ DateFormat = "2006-01-02"
+ // DateTimeFormat for DateTimeFormat
+ DateTimeFormat = "2006-01-02T15:04:05"
+)
+
+// GetByQuery returns a row given a query, populating the model given
+func GetByQuery(model interface{}, query string, params []interface{}) error {
+ db := GetInstance()
+ if db != nil {
+ err := db.Get(model, query, params...)
+ return err
+ }
+
+ return errors.ErrDatabaseUnavailable
+}
+
+// BuildOrderBySQL takes a `Sort` slice and constructs a query fragment
+func BuildOrderBySQL(columns []string, sort *[]model.Sort) (string, []model.Sort) {
+ var sortStrings []string
+ var newSort []model.Sort
+ for _, sortItem := range *sort {
+ if util.SliceContainsItem(columns, sortItem.Field) {
+ sortStrings = append(sortStrings, fmt.Sprintf("`%s` %s", sortItem.Field, sortItem.Direction))
+ newSort = append(newSort, sortItem)
+ }
+ }
+
+ if len(sortStrings) > 0 {
+ return fmt.Sprintf("ORDER BY %s", strings.Join(sortStrings, ", ")), newSort
+ }
+
+ return "", newSort
+}
diff --git a/backend/internal/database/migrator.go b/backend/internal/database/migrator.go
new file mode 100644
index 00000000..8b81c408
--- /dev/null
+++ b/backend/internal/database/migrator.go
@@ -0,0 +1,202 @@
+package database
+
+import (
+ "database/sql"
+ "fmt"
+ "io/fs"
+ "path"
+ "path/filepath"
+ "strings"
+ "sync"
+ "time"
+
+ "npm/embed"
+ "npm/internal/logger"
+ "npm/internal/util"
+
+ "github.com/jmoiron/sqlx"
+)
+
+// MigrationConfiguration options for the migrator.
+type MigrationConfiguration struct {
+ Table string `json:"table"`
+ mux sync.Mutex
+}
+
+// Default migrator configuration
+var mConfiguration = MigrationConfiguration{
+ Table: "migration",
+}
+
+// ConfigureMigrator and will return error if missing required fields.
+func ConfigureMigrator(c *MigrationConfiguration) error {
+ // ensure updates to the config are atomic
+ mConfiguration.mux.Lock()
+ defer mConfiguration.mux.Unlock()
+ if c == nil {
+ return fmt.Errorf("a non nil Configuration is mandatory")
+ }
+ if strings.TrimSpace(c.Table) != "" {
+ mConfiguration.Table = c.Table
+ }
+ mConfiguration.Table = c.Table
+ return nil
+}
+
+type afterMigrationComplete func()
+
+// Migrate will perform the migration from start to finish
+func Migrate(followup afterMigrationComplete) bool {
+ logger.Info("Migration: Started")
+
+ // Try to connect to the database sleeping for 15 seconds in between
+ var db *sqlx.DB
+ for {
+ db = GetInstance()
+ if db == nil {
+ logger.Warn("Database is unavailable for migration, retrying in 15 seconds")
+ time.Sleep(15 * time.Second)
+ } else {
+ break
+ }
+ }
+
+ // Check for migration table existence
+ if !tableExists(db, mConfiguration.Table) {
+ err := createMigrationTable(db)
+ if err != nil {
+ logger.Error("MigratorError", err)
+ return false
+ }
+ logger.Info("Migration: Migration Table created")
+ }
+
+ // DO MIGRATION
+ migrationCount, migrateErr := performFileMigrations(db)
+ if migrateErr != nil {
+ logger.Error("MigratorError", migrateErr)
+ }
+
+ if migrateErr == nil {
+ logger.Info("Migration: Completed %v migration files", migrationCount)
+ followup()
+ return true
+ }
+ return false
+}
+
+// createMigrationTable performs a query to create the migration table
+// with the name specified in the configuration
+func createMigrationTable(db *sqlx.DB) error {
+ logger.Info("Migration: Creating Migration Table: %v", mConfiguration.Table)
+ // nolint:lll
+ query := fmt.Sprintf("CREATE TABLE IF NOT EXISTS `%v` (filename TEXT PRIMARY KEY, migrated_on INTEGER NOT NULL DEFAULT 0)", mConfiguration.Table)
+ _, err := db.Exec(query)
+ return err
+}
+
+// tableExists will check the database for the existence of the specified table.
+func tableExists(db *sqlx.DB, tableName string) bool {
+ query := `SELECT CASE name WHEN $1 THEN true ELSE false END AS found FROM sqlite_master WHERE type='table' AND name = $1`
+
+ row := db.QueryRowx(query, tableName)
+ if row == nil {
+ logger.Error("MigratorError", fmt.Errorf("Cannot check if table exists, no row returned: %v", tableName))
+ return false
+ }
+
+ var exists *bool
+ if err := row.Scan(&exists); err != nil {
+ if err == sql.ErrNoRows {
+ return false
+ }
+ logger.Error("MigratorError", err)
+ return false
+ }
+ return *exists
+}
+
+// performFileMigrations will perform the actual migration,
+// importing files and updating the database with the rows imported.
+func performFileMigrations(db *sqlx.DB) (int, error) {
+ var importedCount = 0
+
+ // Grab a list of previously ran migrations from the database:
+ previousMigrations, prevErr := getPreviousMigrations(db)
+ if prevErr != nil {
+ return importedCount, prevErr
+ }
+
+ // List up the ".sql" files on disk
+ err := fs.WalkDir(embed.MigrationFiles, ".", func(file string, d fs.DirEntry, err error) error {
+ if !d.IsDir() {
+ shortFile := filepath.Base(file)
+
+ // Check if this file already exists in the previous migrations
+ // and if so, ignore it
+ if util.SliceContainsItem(previousMigrations, shortFile) {
+ return nil
+ }
+
+ logger.Info("Migration: Importing %v", shortFile)
+
+ sqlContents, ioErr := embed.MigrationFiles.ReadFile(path.Clean(file))
+ if ioErr != nil {
+ return ioErr
+ }
+
+ sqlString := string(sqlContents)
+
+ tx := db.MustBegin()
+ if _, execErr := tx.Exec(sqlString); execErr != nil {
+ return execErr
+ }
+ if commitErr := tx.Commit(); commitErr != nil {
+ return commitErr
+ }
+ if markErr := markMigrationSuccessful(db, shortFile); markErr != nil {
+ return markErr
+ }
+
+ importedCount++
+ }
+ return nil
+ })
+
+ return importedCount, err
+}
+
+// getPreviousMigrations will query the migration table for names
+// of migrations we can ignore because they should have already
+// been imported
+func getPreviousMigrations(db *sqlx.DB) ([]string, error) {
+ var existingMigrations []string
+ // nolint:gosec
+ query := fmt.Sprintf("SELECT filename FROM `%v` ORDER BY filename", mConfiguration.Table)
+ rows, err := db.Queryx(query)
+ if err != nil {
+ if err == sql.ErrNoRows {
+ return existingMigrations, nil
+ }
+ return existingMigrations, err
+ }
+
+ for rows.Next() {
+ var filename *string
+ err := rows.Scan(&filename)
+ if err != nil {
+ return existingMigrations, err
+ }
+ existingMigrations = append(existingMigrations, *filename)
+ }
+
+ return existingMigrations, nil
+}
+
+// markMigrationSuccessful will add a row to the migration table
+func markMigrationSuccessful(db *sqlx.DB, filename string) error {
+ // nolint:gosec
+ query := fmt.Sprintf("INSERT INTO `%v` (filename) VALUES ($1)", mConfiguration.Table)
+ _, err := db.Exec(query, filename)
+ return err
+}
diff --git a/backend/internal/database/setup.go b/backend/internal/database/setup.go
new file mode 100644
index 00000000..b4101db5
--- /dev/null
+++ b/backend/internal/database/setup.go
@@ -0,0 +1,37 @@
+package database
+
+import (
+ "database/sql"
+
+ "npm/internal/config"
+ "npm/internal/errors"
+ "npm/internal/logger"
+)
+
+// CheckSetup Quick check by counting the number of users in the database
+func CheckSetup() {
+ query := `SELECT COUNT(*) FROM "user" WHERE is_deleted = $1 AND is_disabled = $1 AND is_system = $1`
+ db := GetInstance()
+
+ if db != nil {
+ row := db.QueryRowx(query, false)
+ var totalRows int
+ queryErr := row.Scan(&totalRows)
+ if queryErr != nil && queryErr != sql.ErrNoRows {
+ logger.Error("SetupError", queryErr)
+ return
+ }
+ if totalRows == 0 {
+ logger.Warn("No users found, starting in Setup Mode")
+ } else {
+ config.IsSetup = true
+ logger.Info("Application is setup")
+ }
+
+ if config.ErrorReporting {
+ logger.Warn("Error reporting is enabled - Application Errors WILL be sent to Sentry, you can disable this in the Settings interface")
+ }
+ } else {
+ logger.Error("DatabaseError", errors.ErrDatabaseUnavailable)
+ }
+}
diff --git a/backend/internal/database/sqlite.go b/backend/internal/database/sqlite.go
new file mode 100644
index 00000000..ebfa75f6
--- /dev/null
+++ b/backend/internal/database/sqlite.go
@@ -0,0 +1,74 @@
+package database
+
+import (
+ "fmt"
+ "os"
+
+ "npm/internal/config"
+ "npm/internal/logger"
+
+ "github.com/jmoiron/sqlx"
+
+ // Blank import for Sqlite
+ _ "github.com/mattn/go-sqlite3"
+)
+
+var dbInstance *sqlx.DB
+
+// NewDB creates a new connection
+func NewDB() {
+ logger.Info("Creating new DB instance")
+ db := SqliteDB()
+ if db != nil {
+ dbInstance = db
+ }
+}
+
+// GetInstance returns an existing or new instance
+func GetInstance() *sqlx.DB {
+ if dbInstance == nil {
+ NewDB()
+ } else if err := dbInstance.Ping(); err != nil {
+ NewDB()
+ }
+
+ return dbInstance
+}
+
+// SqliteDB Create sqlite client
+func SqliteDB() *sqlx.DB {
+ dbFile := fmt.Sprintf("%s/nginxproxymanager.db", config.Configuration.DataFolder)
+ autocreate(dbFile)
+ db, err := sqlx.Open("sqlite3", dbFile)
+ if err != nil {
+ logger.Error("SqliteError", err)
+ return nil
+ }
+
+ return db
+}
+
+// Commit will close and reopen the db file
+func Commit() *sqlx.DB {
+ if dbInstance != nil {
+ err := dbInstance.Close()
+ if err != nil {
+ logger.Error("DatabaseCloseError", err)
+ }
+ }
+ NewDB()
+ return dbInstance
+}
+
+func autocreate(dbFile string) {
+ if _, err := os.Stat(dbFile); os.IsNotExist(err) {
+ // Create it
+ logger.Info("Creating Sqlite DB: %s", dbFile)
+ // nolint: gosec
+ _, err = os.Create(dbFile)
+ if err != nil {
+ logger.Error("FileCreateError", err)
+ }
+ Commit()
+ }
+}
diff --git a/backend/internal/dead-host.js b/backend/internal/dead-host.js
deleted file mode 100644
index d35fec25..00000000
--- a/backend/internal/dead-host.js
+++ /dev/null
@@ -1,461 +0,0 @@
-const _ = require('lodash');
-const error = require('../lib/error');
-const deadHostModel = require('../models/dead_host');
-const internalHost = require('./host');
-const internalNginx = require('./nginx');
-const internalAuditLog = require('./audit-log');
-const internalCertificate = require('./certificate');
-
-function omissions () {
- return ['is_deleted'];
-}
-
-const internalDeadHost = {
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @returns {Promise}
- */
- create: (access, data) => {
- let create_certificate = data.certificate_id === 'new';
-
- if (create_certificate) {
- delete data.certificate_id;
- }
-
- return access.can('dead_hosts:create', data)
- .then((/*access_data*/) => {
- // Get a list of the domain names and check each of them against existing records
- let domain_name_check_promises = [];
-
- data.domain_names.map(function (domain_name) {
- domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name));
- });
-
- return Promise.all(domain_name_check_promises)
- .then((check_results) => {
- check_results.map(function (result) {
- if (result.is_taken) {
- throw new error.ValidationError(result.hostname + ' is already in use');
- }
- });
- });
- })
- .then(() => {
- // At this point the domains should have been checked
- data.owner_user_id = access.token.getUserId(1);
- data = internalHost.cleanSslHstsData(data);
-
- return deadHostModel
- .query()
- .omit(omissions())
- .insertAndFetch(data);
- })
- .then((row) => {
- if (create_certificate) {
- return internalCertificate.createQuickCertificate(access, data)
- .then((cert) => {
- // update host with cert id
- return internalDeadHost.update(access, {
- id: row.id,
- certificate_id: cert.id
- });
- })
- .then(() => {
- return row;
- });
- } else {
- return row;
- }
- })
- .then((row) => {
- // re-fetch with cert
- return internalDeadHost.get(access, {
- id: row.id,
- expand: ['certificate', 'owner']
- });
- })
- .then((row) => {
- // Configure nginx
- return internalNginx.configure(deadHostModel, 'dead_host', row)
- .then(() => {
- return row;
- });
- })
- .then((row) => {
- data.meta = _.assign({}, data.meta || {}, row.meta);
-
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'created',
- object_type: 'dead-host',
- object_id: row.id,
- meta: data
- })
- .then(() => {
- return row;
- });
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @return {Promise}
- */
- update: (access, data) => {
- let create_certificate = data.certificate_id === 'new';
-
- if (create_certificate) {
- delete data.certificate_id;
- }
-
- return access.can('dead_hosts:update', data.id)
- .then((/*access_data*/) => {
- // Get a list of the domain names and check each of them against existing records
- let domain_name_check_promises = [];
-
- if (typeof data.domain_names !== 'undefined') {
- data.domain_names.map(function (domain_name) {
- domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name, 'dead', data.id));
- });
-
- return Promise.all(domain_name_check_promises)
- .then((check_results) => {
- check_results.map(function (result) {
- if (result.is_taken) {
- throw new error.ValidationError(result.hostname + ' is already in use');
- }
- });
- });
- }
- })
- .then(() => {
- return internalDeadHost.get(access, {id: data.id});
- })
- .then((row) => {
- if (row.id !== data.id) {
- // Sanity check that something crazy hasn't happened
- throw new error.InternalValidationError('404 Host could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
- }
-
- if (create_certificate) {
- return internalCertificate.createQuickCertificate(access, {
- domain_names: data.domain_names || row.domain_names,
- meta: _.assign({}, row.meta, data.meta)
- })
- .then((cert) => {
- // update host with cert id
- data.certificate_id = cert.id;
- })
- .then(() => {
- return row;
- });
- } else {
- return row;
- }
- })
- .then((row) => {
- // Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
- data = _.assign({}, {
- domain_names: row.domain_names
- }, data);
-
- data = internalHost.cleanSslHstsData(data, row);
-
- return deadHostModel
- .query()
- .where({id: data.id})
- .patch(data)
- .then((saved_row) => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'updated',
- object_type: 'dead-host',
- object_id: row.id,
- meta: data
- })
- .then(() => {
- return _.omit(saved_row, omissions());
- });
- });
- })
- .then(() => {
- return internalDeadHost.get(access, {
- id: data.id,
- expand: ['owner', 'certificate']
- })
- .then((row) => {
- // Configure nginx
- return internalNginx.configure(deadHostModel, 'dead_host', row)
- .then((new_meta) => {
- row.meta = new_meta;
- row = internalHost.cleanRowCertificateMeta(row);
- return _.omit(row, omissions());
- });
- });
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {Array} [data.expand]
- * @param {Array} [data.omit]
- * @return {Promise}
- */
- get: (access, data) => {
- if (typeof data === 'undefined') {
- data = {};
- }
-
- return access.can('dead_hosts:get', data.id)
- .then((access_data) => {
- let query = deadHostModel
- .query()
- .where('is_deleted', 0)
- .andWhere('id', data.id)
- .allowEager('[owner,certificate]')
- .first();
-
- if (access_data.permission_visibility !== 'all') {
- query.andWhere('owner_user_id', access.token.getUserId(1));
- }
-
- // Custom omissions
- if (typeof data.omit !== 'undefined' && data.omit !== null) {
- query.omit(data.omit);
- }
-
- if (typeof data.expand !== 'undefined' && data.expand !== null) {
- query.eager('[' + data.expand.join(', ') + ']');
- }
-
- return query;
- })
- .then((row) => {
- if (row) {
- row = internalHost.cleanRowCertificateMeta(row);
- return _.omit(row, omissions());
- } else {
- throw new error.ItemNotFoundError(data.id);
- }
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {String} [data.reason]
- * @returns {Promise}
- */
- delete: (access, data) => {
- return access.can('dead_hosts:delete', data.id)
- .then(() => {
- return internalDeadHost.get(access, {id: data.id});
- })
- .then((row) => {
- if (!row) {
- throw new error.ItemNotFoundError(data.id);
- }
-
- return deadHostModel
- .query()
- .where('id', row.id)
- .patch({
- is_deleted: 1
- })
- .then(() => {
- // Delete Nginx Config
- return internalNginx.deleteConfig('dead_host', row)
- .then(() => {
- return internalNginx.reload();
- });
- })
- .then(() => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'deleted',
- object_type: 'dead-host',
- object_id: row.id,
- meta: _.omit(row, omissions())
- });
- });
- })
- .then(() => {
- return true;
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {String} [data.reason]
- * @returns {Promise}
- */
- enable: (access, data) => {
- return access.can('dead_hosts:update', data.id)
- .then(() => {
- return internalDeadHost.get(access, {
- id: data.id,
- expand: ['certificate', 'owner']
- });
- })
- .then((row) => {
- if (!row) {
- throw new error.ItemNotFoundError(data.id);
- } else if (row.enabled) {
- throw new error.ValidationError('Host is already enabled');
- }
-
- row.enabled = 1;
-
- return deadHostModel
- .query()
- .where('id', row.id)
- .patch({
- enabled: 1
- })
- .then(() => {
- // Configure nginx
- return internalNginx.configure(deadHostModel, 'dead_host', row);
- })
- .then(() => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'enabled',
- object_type: 'dead-host',
- object_id: row.id,
- meta: _.omit(row, omissions())
- });
- });
- })
- .then(() => {
- return true;
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {String} [data.reason]
- * @returns {Promise}
- */
- disable: (access, data) => {
- return access.can('dead_hosts:update', data.id)
- .then(() => {
- return internalDeadHost.get(access, {id: data.id});
- })
- .then((row) => {
- if (!row) {
- throw new error.ItemNotFoundError(data.id);
- } else if (!row.enabled) {
- throw new error.ValidationError('Host is already disabled');
- }
-
- row.enabled = 0;
-
- return deadHostModel
- .query()
- .where('id', row.id)
- .patch({
- enabled: 0
- })
- .then(() => {
- // Delete Nginx Config
- return internalNginx.deleteConfig('dead_host', row)
- .then(() => {
- return internalNginx.reload();
- });
- })
- .then(() => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'disabled',
- object_type: 'dead-host',
- object_id: row.id,
- meta: _.omit(row, omissions())
- });
- });
- })
- .then(() => {
- return true;
- });
- },
-
- /**
- * All Hosts
- *
- * @param {Access} access
- * @param {Array} [expand]
- * @param {String} [search_query]
- * @returns {Promise}
- */
- getAll: (access, expand, search_query) => {
- return access.can('dead_hosts:list')
- .then((access_data) => {
- let query = deadHostModel
- .query()
- .where('is_deleted', 0)
- .groupBy('id')
- .omit(['is_deleted'])
- .allowEager('[owner,certificate]')
- .orderBy('domain_names', 'ASC');
-
- if (access_data.permission_visibility !== 'all') {
- query.andWhere('owner_user_id', access.token.getUserId(1));
- }
-
- // Query is used for searching
- if (typeof search_query === 'string') {
- query.where(function () {
- this.where('domain_names', 'like', '%' + search_query + '%');
- });
- }
-
- if (typeof expand !== 'undefined' && expand !== null) {
- query.eager('[' + expand.join(', ') + ']');
- }
-
- return query;
- })
- .then((rows) => {
- if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) {
- return internalHost.cleanAllRowsCertificateMeta(rows);
- }
-
- return rows;
- });
- },
-
- /**
- * Report use
- *
- * @param {Number} user_id
- * @param {String} visibility
- * @returns {Promise}
- */
- getCount: (user_id, visibility) => {
- let query = deadHostModel
- .query()
- .count('id as count')
- .where('is_deleted', 0);
-
- if (visibility !== 'all') {
- query.andWhere('owner_user_id', user_id);
- }
-
- return query.first()
- .then((row) => {
- return parseInt(row.count, 10);
- });
- }
-};
-
-module.exports = internalDeadHost;
diff --git a/backend/internal/dnsproviders/common.go b/backend/internal/dnsproviders/common.go
new file mode 100644
index 00000000..5b155efe
--- /dev/null
+++ b/backend/internal/dnsproviders/common.go
@@ -0,0 +1,135 @@
+package dnsproviders
+
+import (
+ "errors"
+ "npm/internal/util"
+)
+
+type providerField struct {
+ Name string `json:"name"`
+ Type string `json:"type"`
+ IsRequired bool `json:"is_required"`
+ IsSecret bool `json:"is_secret"`
+ MetaKey string `json:"meta_key"`
+ EnvKey string `json:"-"` // not exposed in api
+}
+
+// Provider is a simple struct
+type Provider struct {
+ AcmeshName string `json:"acmesh_name"`
+ Schema string `json:"-"`
+ Fields []providerField `json:"fields"`
+}
+
+// GetAcmeEnvVars will map the meta given to the env var required for
+// acme.sh to use this dns provider
+func (p *Provider) GetAcmeEnvVars(meta interface{}) map[string]string {
+ res := make(map[string]string)
+ for _, field := range p.Fields {
+ if acmeShEnvValue, found := util.FindItemInInterface(field.MetaKey, meta); found {
+ res[field.EnvKey] = acmeShEnvValue.(string)
+ }
+ }
+ return res
+}
+
+// List returns an array of providers
+func List() []Provider {
+ return []Provider{
+ getDNSAd(),
+ getDNSAli(),
+ getDNSAws(),
+ getDNSCf(),
+ getDNSCloudns(),
+ getDNSCx(),
+ getDNSCyon(),
+ getDNSDgon(),
+ getDNSDNSimple(),
+ getDNSDp(),
+ getDNSDuckDNS(),
+ getDNSDyn(),
+ getDNSDynu(),
+ getDNSFreeDNS(),
+ getDNSGandiLiveDNS(),
+ getDNSGd(),
+ getDNSHe(),
+ getDNSInfoblox(),
+ getDNSIspconfig(),
+ getDNSLinodeV4(),
+ getDNSLua(),
+ getDNSMe(),
+ getDNSNamecom(),
+ getDNSOne(),
+ getDNSPDNS(),
+ getDNSUnoeuro(),
+ getDNSVscale(),
+ getDNSYandex(),
+ }
+}
+
+// GetAll returns all the configured providers
+func GetAll() map[string]Provider {
+ mp := make(map[string]Provider)
+ items := List()
+ for _, item := range items {
+ mp[item.AcmeshName] = item
+ }
+ return mp
+}
+
+// Get returns a single provider by name
+func Get(provider string) (Provider, error) {
+ all := GetAll()
+ if item, found := all[provider]; found {
+ return item, nil
+ }
+ return Provider{}, errors.New("provider_not_found")
+}
+
+// GetAllSchemas returns a flat array with just the schemas
+func GetAllSchemas() []string {
+ items := List()
+ mp := make([]string, 0)
+ for _, item := range items {
+ mp = append(mp, item.Schema)
+ }
+ return mp
+}
+
+const commonKeySchema = `
+{
+ "type": "object",
+ "required": [
+ "api_key"
+ ],
+ "additionalProperties": false,
+ "properties": {
+ "api_key": {
+ "type": "string",
+ "minLength": 1
+ }
+ }
+}
+`
+
+// nolint: gosec
+const commonKeySecretSchema = `
+{
+ "type": "object",
+ "required": [
+ "api_key",
+ "secret"
+ ],
+ "additionalProperties": false,
+ "properties": {
+ "api_key": {
+ "type": "string",
+ "minLength": 1
+ },
+ "secret": {
+ "type": "string",
+ "minLength": 1
+ }
+ }
+}
+`
diff --git a/backend/internal/dnsproviders/dns_ad.go b/backend/internal/dnsproviders/dns_ad.go
new file mode 100644
index 00000000..f3c5e588
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_ad.go
@@ -0,0 +1,17 @@
+package dnsproviders
+
+func getDNSAd() Provider {
+ return Provider{
+ AcmeshName: "dns_ad",
+ Schema: commonKeySchema,
+ Fields: []providerField{
+ {
+ Name: "API Key",
+ Type: "password",
+ MetaKey: "api_key",
+ EnvKey: "AD_API_KEY",
+ IsRequired: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_ali.go b/backend/internal/dnsproviders/dns_ali.go
new file mode 100644
index 00000000..d4906dfc
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_ali.go
@@ -0,0 +1,25 @@
+package dnsproviders
+
+func getDNSAli() Provider {
+ return Provider{
+ AcmeshName: "dns_ali",
+ Schema: commonKeySecretSchema,
+ Fields: []providerField{
+ {
+ Name: "Key",
+ Type: "text",
+ MetaKey: "api_key",
+ EnvKey: "Ali_Key",
+ IsRequired: true,
+ },
+ {
+ Name: "Secret",
+ Type: "password",
+ MetaKey: "secret",
+ EnvKey: "Ali_Secret",
+ IsRequired: true,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_aws.go b/backend/internal/dnsproviders/dns_aws.go
new file mode 100644
index 00000000..88139e88
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_aws.go
@@ -0,0 +1,56 @@
+package dnsproviders
+
+const route53Schema = `
+{
+ "type": "object",
+ "required": [
+ "access_key_id",
+ "access_key"
+ ],
+ "additionalProperties": false,
+ "properties": {
+ "access_key_id": {
+ "type": "string",
+ "minLength": 10
+ },
+ "access_key": {
+ "type": "string",
+ "minLength": 10
+ },
+ "slow_rate": {
+ "type": "string",
+ "minLength": 1
+ }
+ }
+}
+`
+
+func getDNSAws() Provider {
+ return Provider{
+ AcmeshName: "dns_aws",
+ Schema: route53Schema,
+ Fields: []providerField{
+ {
+ Name: "Access Key ID",
+ Type: "text",
+ MetaKey: "access_key_id",
+ EnvKey: "AWS_ACCESS_KEY_ID",
+ IsRequired: true,
+ },
+ {
+ Name: "Secret Access Key",
+ Type: "password",
+ MetaKey: "access_key",
+ EnvKey: "AWS_SECRET_ACCESS_KEY",
+ IsRequired: true,
+ IsSecret: true,
+ },
+ {
+ Name: "Slow Rate",
+ Type: "number",
+ MetaKey: "slow_rate",
+ EnvKey: "AWS_DNS_SLOWRATE",
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_cf.go b/backend/internal/dnsproviders/dns_cf.go
new file mode 100644
index 00000000..dab20548
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_cf.go
@@ -0,0 +1,80 @@
+package dnsproviders
+
+const cloudflareSchema = `
+{
+ "type": "object",
+ "required": [
+ "api_key",
+ "email",
+ "token",
+ "account_id"
+ ],
+ "additionalProperties": false,
+ "properties": {
+ "api_key": {
+ "type": "string",
+ "minLength": 1
+ },
+ "email": {
+ "type": "string",
+ "minLength": 5
+ },
+ "token": {
+ "type": "string",
+ "minLength": 5
+ },
+ "account_id": {
+ "type": "string",
+ "minLength": 1
+ },
+ "zone_id": {
+ "type": "string",
+ "minLength": 1
+ }
+ }
+}
+`
+
+func getDNSCf() Provider {
+ return Provider{
+ AcmeshName: "dns_cf",
+ Schema: cloudflareSchema,
+ Fields: []providerField{
+ {
+ Name: "API Key",
+ Type: "password",
+ MetaKey: "api_key",
+ EnvKey: "CF_Key",
+ IsRequired: true,
+ },
+ {
+ Name: "Email",
+ Type: "text",
+ MetaKey: "email",
+ EnvKey: "CF_Email",
+ IsRequired: true,
+ },
+ {
+ Name: "Token",
+ Type: "text",
+ MetaKey: "token",
+ EnvKey: "CF_Token",
+ IsRequired: true,
+ IsSecret: true,
+ },
+ {
+ Name: "Account ID",
+ Type: "text",
+ MetaKey: "account_id",
+ EnvKey: "CF_Account_ID",
+ IsRequired: true,
+ },
+ {
+ Name: "Zone ID",
+ Type: "string",
+ MetaKey: "zone_id",
+ EnvKey: "CF_Zone_ID",
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_cloudns.go b/backend/internal/dnsproviders/dns_cloudns.go
new file mode 100644
index 00000000..7848a3a3
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_cloudns.go
@@ -0,0 +1,54 @@
+package dnsproviders
+
+const clouDNSNetSchema = `
+{
+ "type": "object",
+ "required": [
+ "password"
+ ],
+ "additionalProperties": false,
+ "properties": {
+ "auth_id": {
+ "type": "string",
+ "minLength": 1
+ },
+ "sub_auth_id": {
+ "type": "string",
+ "minLength": 1
+ },
+ "password": {
+ "type": "string",
+ "minLength": 1
+ }
+ }
+}
+`
+
+func getDNSCloudns() Provider {
+ return Provider{
+ AcmeshName: "dns_cloudns",
+ Schema: clouDNSNetSchema,
+ Fields: []providerField{
+ {
+ Name: "Auth ID",
+ Type: "text",
+ MetaKey: "auth_id",
+ EnvKey: "CLOUDNS_AUTH_ID",
+ },
+ {
+ Name: "Sub Auth ID",
+ Type: "text",
+ MetaKey: "sub_auth_id",
+ EnvKey: "CLOUDNS_SUB_AUTH_ID",
+ },
+ {
+ Name: "Password",
+ Type: "password",
+ MetaKey: "password",
+ EnvKey: "CLOUDNS_AUTH_PASSWORD",
+ IsRequired: true,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_cx.go b/backend/internal/dnsproviders/dns_cx.go
new file mode 100644
index 00000000..39d23256
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_cx.go
@@ -0,0 +1,25 @@
+package dnsproviders
+
+func getDNSCx() Provider {
+ return Provider{
+ AcmeshName: "dns_cx",
+ Schema: commonKeySecretSchema,
+ Fields: []providerField{
+ {
+ Name: "Key",
+ Type: "text",
+ MetaKey: "api_key",
+ EnvKey: "CX_Key",
+ IsRequired: true,
+ },
+ {
+ Name: "Secret",
+ Type: "password",
+ MetaKey: "secret",
+ EnvKey: "CX_Secret",
+ IsRequired: true,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_cyon.go b/backend/internal/dnsproviders/dns_cyon.go
new file mode 100644
index 00000000..cacd25d2
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_cyon.go
@@ -0,0 +1,57 @@
+package dnsproviders
+
+const cyonChSchema = `
+{
+ "type": "object",
+ "required": [
+ "user",
+ "password"
+ ],
+ "additionalProperties": false,
+ "properties": {
+ "user": {
+ "type": "string",
+ "minLength": 1
+ },
+ "password": {
+ "type": "string",
+ "minLength": 1
+ },
+ "otp_secret": {
+ "type": "string",
+ "minLength": 1
+ }
+ }
+}
+`
+
+func getDNSCyon() Provider {
+ return Provider{
+ AcmeshName: "dns_cyon",
+ Schema: cyonChSchema,
+ Fields: []providerField{
+ {
+ Name: "User",
+ Type: "text",
+ MetaKey: "user",
+ EnvKey: "CY_Username",
+ IsRequired: true,
+ },
+ {
+ Name: "Password",
+ Type: "password",
+ MetaKey: "password",
+ EnvKey: "CY_Password",
+ IsRequired: true,
+ IsSecret: true,
+ },
+ {
+ Name: "OTP Secret",
+ Type: "password",
+ MetaKey: "otp_secret",
+ EnvKey: "CY_OTP_Secret",
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_dgon.go b/backend/internal/dnsproviders/dns_dgon.go
new file mode 100644
index 00000000..0d8aaa1e
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_dgon.go
@@ -0,0 +1,18 @@
+package dnsproviders
+
+func getDNSDgon() Provider {
+ return Provider{
+ AcmeshName: "dns_dgon",
+ Schema: commonKeySchema,
+ Fields: []providerField{
+ {
+ Name: "API Key",
+ Type: "password",
+ MetaKey: "api_key",
+ EnvKey: "DO_API_KEY",
+ IsRequired: true,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_dnsimple.go b/backend/internal/dnsproviders/dns_dnsimple.go
new file mode 100644
index 00000000..7fb7e983
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_dnsimple.go
@@ -0,0 +1,18 @@
+package dnsproviders
+
+func getDNSDNSimple() Provider {
+ return Provider{
+ AcmeshName: "dns_dnsimple",
+ Schema: commonKeySchema,
+ Fields: []providerField{
+ {
+ Name: "OAuth Token",
+ Type: "text",
+ MetaKey: "api_key",
+ EnvKey: "DNSimple_OAUTH_TOKEN",
+ IsRequired: true,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_dp.go b/backend/internal/dnsproviders/dns_dp.go
new file mode 100644
index 00000000..0c0579f9
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_dp.go
@@ -0,0 +1,46 @@
+package dnsproviders
+
+const dnsPodCnSchema = `
+{
+ "type": "object",
+ "required": [
+ "id",
+ "api_key"
+ ],
+ "additionalProperties": false,
+ "properties": {
+ "id": {
+ "type": "string",
+ "minLength": 1
+ },
+ "api_key": {
+ "type": "string",
+ "minLength": 1
+ }
+ }
+}
+`
+
+func getDNSDp() Provider {
+ return Provider{
+ AcmeshName: "dns_dp",
+ Schema: dnsPodCnSchema,
+ Fields: []providerField{
+ {
+ Name: "ID",
+ Type: "text",
+ MetaKey: "id",
+ EnvKey: "DP_Id",
+ IsRequired: true,
+ },
+ {
+ Name: "Key",
+ Type: "password",
+ MetaKey: "api_key",
+ EnvKey: "DP_Key",
+ IsRequired: true,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_duckdns.go b/backend/internal/dnsproviders/dns_duckdns.go
new file mode 100644
index 00000000..f88e3cbe
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_duckdns.go
@@ -0,0 +1,18 @@
+package dnsproviders
+
+func getDNSDuckDNS() Provider {
+ return Provider{
+ AcmeshName: "dns_duckdns",
+ Schema: commonKeySchema,
+ Fields: []providerField{
+ {
+ Name: "Token",
+ Type: "password",
+ MetaKey: "api_key",
+ EnvKey: "DuckDNS_Token",
+ IsRequired: true,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_dyn.go b/backend/internal/dnsproviders/dns_dyn.go
new file mode 100644
index 00000000..a3b75fe8
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_dyn.go
@@ -0,0 +1,58 @@
+package dnsproviders
+
+const dynSchema = `
+{
+ "type": "object",
+ "required": [
+ "customer",
+ "username",
+ "password"
+ ],
+ "additionalProperties": false,
+ "properties": {
+ "customer": {
+ "type": "string",
+ "minLength": 1
+ },
+ "username": {
+ "type": "string",
+ "minLength": 1
+ },
+ "password": {
+ "type": "string",
+ "minLength": 1
+ }
+ }
+}
+`
+
+func getDNSDyn() Provider {
+ return Provider{
+ AcmeshName: "dns_dyn",
+ Schema: dynSchema,
+ Fields: []providerField{
+ {
+ Name: "Customer",
+ Type: "text",
+ MetaKey: "customer",
+ EnvKey: "DYN_Customer",
+ IsRequired: true,
+ },
+ {
+ Name: "Username",
+ Type: "text",
+ MetaKey: "username",
+ EnvKey: "DYN_Username",
+ IsRequired: true,
+ },
+ {
+ Name: "Password",
+ Type: "password",
+ MetaKey: "password",
+ EnvKey: "DYN_Password",
+ IsRequired: true,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_dynu.go b/backend/internal/dnsproviders/dns_dynu.go
new file mode 100644
index 00000000..468b7f8b
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_dynu.go
@@ -0,0 +1,25 @@
+package dnsproviders
+
+func getDNSDynu() Provider {
+ return Provider{
+ AcmeshName: "dns_dynu",
+ Schema: commonKeySecretSchema,
+ Fields: []providerField{
+ {
+ Name: "Client ID",
+ Type: "text",
+ MetaKey: "api_key",
+ EnvKey: "Dynu_ClientId",
+ IsRequired: true,
+ },
+ {
+ Name: "Secret",
+ Type: "password",
+ MetaKey: "secret",
+ EnvKey: "Dynu_Secret",
+ IsRequired: true,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_freedns.go b/backend/internal/dnsproviders/dns_freedns.go
new file mode 100644
index 00000000..3c56069d
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_freedns.go
@@ -0,0 +1,46 @@
+package dnsproviders
+
+const freeDNSSchema = `
+{
+ "type": "object",
+ "required": [
+ "user",
+ "password"
+ ],
+ "additionalProperties": false,
+ "properties": {
+ "user": {
+ "type": "string",
+ "minLength": 1
+ },
+ "password": {
+ "type": "string",
+ "minLength": 1
+ }
+ }
+}
+`
+
+func getDNSFreeDNS() Provider {
+ return Provider{
+ AcmeshName: "dns_freedns",
+ Schema: freeDNSSchema,
+ Fields: []providerField{
+ {
+ Name: "User",
+ Type: "text",
+ MetaKey: "user",
+ EnvKey: "FREEDNS_User",
+ IsRequired: true,
+ },
+ {
+ Name: "Password",
+ Type: "password",
+ MetaKey: "password",
+ EnvKey: "FREEDNS_Password",
+ IsRequired: true,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_gandi_livedns.go b/backend/internal/dnsproviders/dns_gandi_livedns.go
new file mode 100644
index 00000000..214b0619
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_gandi_livedns.go
@@ -0,0 +1,17 @@
+package dnsproviders
+
+func getDNSGandiLiveDNS() Provider {
+ return Provider{
+ AcmeshName: "dns_gandi_livedns",
+ Schema: commonKeySchema,
+ Fields: []providerField{
+ {
+ Name: "Key",
+ Type: "password",
+ MetaKey: "api_key",
+ EnvKey: "GANDI_LIVEDNS_KEY",
+ IsRequired: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_gd.go b/backend/internal/dnsproviders/dns_gd.go
new file mode 100644
index 00000000..0429c73b
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_gd.go
@@ -0,0 +1,25 @@
+package dnsproviders
+
+func getDNSGd() Provider {
+ return Provider{
+ AcmeshName: "dns_gd",
+ Schema: commonKeySecretSchema,
+ Fields: []providerField{
+ {
+ Name: "Key",
+ Type: "text",
+ MetaKey: "api_key",
+ EnvKey: "GD_Key",
+ IsRequired: true,
+ },
+ {
+ Name: "Secret",
+ Type: "password",
+ MetaKey: "secret",
+ EnvKey: "GD_Secret",
+ IsRequired: true,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_he.go b/backend/internal/dnsproviders/dns_he.go
new file mode 100644
index 00000000..dc6911e2
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_he.go
@@ -0,0 +1,47 @@
+package dnsproviders
+
+// nolint: gosec
+const commonUserPassSchema = `
+{
+ "type": "object",
+ "required": [
+ "username",
+ "password"
+ ],
+ "additionalProperties": false,
+ "properties": {
+ "username": {
+ "type": "string",
+ "minLength": 1
+ },
+ "password": {
+ "type": "string",
+ "minLength": 1
+ }
+ }
+}
+`
+
+func getDNSHe() Provider {
+ return Provider{
+ AcmeshName: "dns_he",
+ Schema: commonUserPassSchema,
+ Fields: []providerField{
+ {
+ Name: "Username",
+ Type: "text",
+ MetaKey: "username",
+ EnvKey: "HE_Username",
+ IsRequired: true,
+ },
+ {
+ Name: "Password",
+ Type: "password",
+ MetaKey: "password",
+ EnvKey: "HE_Password",
+ IsRequired: true,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_infoblox.go b/backend/internal/dnsproviders/dns_infoblox.go
new file mode 100644
index 00000000..97368ca3
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_infoblox.go
@@ -0,0 +1,46 @@
+package dnsproviders
+
+const infobloxSchema = `
+{
+ "type": "object",
+ "required": [
+ "credentials",
+ "server"
+ ],
+ "additionalProperties": false,
+ "properties": {
+ "credentials": {
+ "type": "string",
+ "minLength": 1
+ },
+ "server": {
+ "type": "string",
+ "minLength": 1
+ }
+ }
+}
+`
+
+func getDNSInfoblox() Provider {
+ return Provider{
+ AcmeshName: "dns_infoblox",
+ Schema: infobloxSchema,
+ Fields: []providerField{
+ {
+ Name: "Credentials",
+ Type: "text",
+ MetaKey: "credentials",
+ EnvKey: "Infoblox_Creds",
+ IsRequired: true,
+ IsSecret: true,
+ },
+ {
+ Name: "Server",
+ Type: "text",
+ MetaKey: "server",
+ EnvKey: "Infoblox_Server",
+ IsRequired: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_ispconfig.go b/backend/internal/dnsproviders/dns_ispconfig.go
new file mode 100644
index 00000000..00c1dd1a
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_ispconfig.go
@@ -0,0 +1,67 @@
+package dnsproviders
+
+const ispConfigSchema = `
+{
+ "type": "object",
+ "required": [
+ "user",
+ "password",
+ "api_url"
+ ],
+ "additionalProperties": false,
+ "properties": {
+ "user": {
+ "type": "string",
+ "minLength": 1
+ },
+ "password": {
+ "type": "string",
+ "minLength": 1
+ },
+ "api_url": {
+ "type": "string",
+ "minLength": 1
+ },
+ "insecure": {
+ "type": "string"
+ }
+ }
+}
+`
+
+func getDNSIspconfig() Provider {
+ return Provider{
+ AcmeshName: "dns_ispconfig",
+ Schema: ispConfigSchema,
+ Fields: []providerField{
+ {
+ Name: "User",
+ Type: "text",
+ MetaKey: "user",
+ EnvKey: "ISPC_User",
+ IsRequired: true,
+ },
+ {
+ Name: "Password",
+ Type: "password",
+ MetaKey: "password",
+ EnvKey: "ISPC_Password",
+ IsRequired: true,
+ IsSecret: true,
+ },
+ {
+ Name: "API URL",
+ Type: "text",
+ MetaKey: "api_url",
+ EnvKey: "ISPC_Api",
+ IsRequired: true,
+ },
+ {
+ Name: "Insecure",
+ Type: "bool",
+ MetaKey: "insecure",
+ EnvKey: "ISPC_Api_Insecure",
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_linode_v4.go b/backend/internal/dnsproviders/dns_linode_v4.go
new file mode 100644
index 00000000..b79ff532
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_linode_v4.go
@@ -0,0 +1,20 @@
+package dnsproviders
+
+// Note: https://github.com/acmesh-official/acme.sh/wiki/dnsapi#14-use-linode-domain-api
+// needs 15 minute sleep, not currently implemented
+func getDNSLinodeV4() Provider {
+ return Provider{
+ AcmeshName: "dns_linode_v4",
+ Schema: commonKeySchema,
+ Fields: []providerField{
+ {
+ Name: "API Key",
+ Type: "text",
+ MetaKey: "api_key",
+ EnvKey: "LINODE_V4_API_KEY",
+ IsRequired: true,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_lua.go b/backend/internal/dnsproviders/dns_lua.go
new file mode 100644
index 00000000..6c24a456
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_lua.go
@@ -0,0 +1,46 @@
+package dnsproviders
+
+const luaDNSSchema = `
+{
+ "type": "object",
+ "required": [
+ "api_key",
+ "email"
+ ],
+ "additionalProperties": false,
+ "properties": {
+ "api_key": {
+ "type": "string",
+ "minLength": 1
+ },
+ "email": {
+ "type": "string",
+ "minLength": 5
+ }
+ }
+}
+`
+
+func getDNSLua() Provider {
+ return Provider{
+ AcmeshName: "dns_lua",
+ Schema: luaDNSSchema,
+ Fields: []providerField{
+ {
+ Name: "Key",
+ Type: "text",
+ MetaKey: "api_key",
+ EnvKey: "LUA_Key",
+ IsRequired: true,
+ IsSecret: true,
+ },
+ {
+ Name: "Email",
+ Type: "text",
+ MetaKey: "email",
+ EnvKey: "LUA_Email",
+ IsRequired: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_me.go b/backend/internal/dnsproviders/dns_me.go
new file mode 100644
index 00000000..bf888ca5
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_me.go
@@ -0,0 +1,25 @@
+package dnsproviders
+
+func getDNSMe() Provider {
+ return Provider{
+ AcmeshName: "dns_me",
+ Schema: commonKeySecretSchema,
+ Fields: []providerField{
+ {
+ Name: "Key",
+ Type: "text",
+ MetaKey: "api_key",
+ EnvKey: "ME_Key",
+ IsRequired: true,
+ },
+ {
+ Name: "Secret",
+ Type: "password",
+ MetaKey: "secret",
+ EnvKey: "ME_Secret",
+ IsRequired: true,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_namecom.go b/backend/internal/dnsproviders/dns_namecom.go
new file mode 100644
index 00000000..5d040d92
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_namecom.go
@@ -0,0 +1,46 @@
+package dnsproviders
+
+const nameComSchema = `
+{
+ "type": "object",
+ "required": [
+ "username",
+ "token"
+ ],
+ "additionalProperties": false,
+ "properties": {
+ "username": {
+ "type": "string",
+ "minLength": 1
+ },
+ "token": {
+ "type": "string",
+ "minLength": 1
+ }
+ }
+}
+`
+
+func getDNSNamecom() Provider {
+ return Provider{
+ AcmeshName: "dns_namecom",
+ Schema: nameComSchema,
+ Fields: []providerField{
+ {
+ Name: "Username",
+ Type: "text",
+ MetaKey: "username",
+ EnvKey: "Namecom_Username",
+ IsRequired: true,
+ },
+ {
+ Name: "Token",
+ Type: "text",
+ MetaKey: "token",
+ EnvKey: "Namecom_Token",
+ IsRequired: true,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_nsone.go b/backend/internal/dnsproviders/dns_nsone.go
new file mode 100644
index 00000000..b02a1788
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_nsone.go
@@ -0,0 +1,18 @@
+package dnsproviders
+
+func getDNSOne() Provider {
+ return Provider{
+ AcmeshName: "dns_nsone",
+ Schema: commonKeySchema,
+ Fields: []providerField{
+ {
+ Name: "Key",
+ Type: "password",
+ MetaKey: "api_key",
+ EnvKey: "NS1_Key",
+ IsRequired: true,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_pdns.go b/backend/internal/dnsproviders/dns_pdns.go
new file mode 100644
index 00000000..35db50ee
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_pdns.go
@@ -0,0 +1,69 @@
+package dnsproviders
+
+const powerDNSSchema = `
+{
+ "type": "object",
+ "required": [
+ "url",
+ "server_id",
+ "token",
+ "ttl"
+ ],
+ "additionalProperties": false,
+ "properties": {
+ "url": {
+ "type": "string",
+ "minLength": 1
+ },
+ "server_id": {
+ "type": "string",
+ "minLength": 1
+ },
+ "token": {
+ "type": "string",
+ "minLength": 1
+ },
+ "ttl": {
+ "type": "string",
+ "minLength": 1
+ }
+ }
+}
+`
+
+func getDNSPDNS() Provider {
+ return Provider{
+ AcmeshName: "dns_pdns",
+ Schema: powerDNSSchema,
+ Fields: []providerField{
+ {
+ Name: "URL",
+ Type: "text",
+ MetaKey: "url",
+ EnvKey: "PDNS_Url",
+ IsRequired: true,
+ },
+ {
+ Name: "Server ID",
+ Type: "text",
+ MetaKey: "server_id",
+ EnvKey: "PDNS_ServerId",
+ IsRequired: true,
+ },
+ {
+ Name: "Token",
+ Type: "text",
+ MetaKey: "token",
+ EnvKey: "PDNS_Token",
+ IsRequired: true,
+ },
+ {
+ Name: "TTL",
+ Type: "number",
+ MetaKey: "ttl",
+ EnvKey: "PDNS_Ttl",
+ IsRequired: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_unoeuro.go b/backend/internal/dnsproviders/dns_unoeuro.go
new file mode 100644
index 00000000..8524a89b
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_unoeuro.go
@@ -0,0 +1,47 @@
+package dnsproviders
+
+const unoEuroSchema = `
+{
+ "type": "object",
+ "required": [
+ "api_key",
+ "user"
+ ],
+ "additionalProperties": false,
+ "properties": {
+ "api_key": {
+ "type": "string",
+ "minLength": 1
+ },
+ "user": {
+ "type": "string",
+ "minLength": 1
+ }
+ }
+}
+`
+
+func getDNSUnoeuro() Provider {
+ return Provider{
+ AcmeshName: "dns_unoeuro",
+ Schema: unoEuroSchema,
+ Fields: []providerField{
+ {
+ Name: "Key",
+ Type: "password",
+ MetaKey: "api_key",
+ EnvKey: "UNO_Key",
+ IsRequired: true,
+ IsSecret: true,
+ },
+ {
+ Name: "User",
+ Type: "text",
+ MetaKey: "user",
+ EnvKey: "UNO_User",
+ IsRequired: true,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_vscale.go b/backend/internal/dnsproviders/dns_vscale.go
new file mode 100644
index 00000000..59463cf8
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_vscale.go
@@ -0,0 +1,17 @@
+package dnsproviders
+
+func getDNSVscale() Provider {
+ return Provider{
+ AcmeshName: "dns_vscale",
+ Schema: commonKeySchema,
+ Fields: []providerField{
+ {
+ Name: "API Key",
+ Type: "password",
+ MetaKey: "api_key",
+ EnvKey: "VSCALE_API_KEY",
+ IsRequired: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_yandex.go b/backend/internal/dnsproviders/dns_yandex.go
new file mode 100644
index 00000000..7812069e
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_yandex.go
@@ -0,0 +1,18 @@
+package dnsproviders
+
+func getDNSYandex() Provider {
+ return Provider{
+ AcmeshName: "dns_yandex",
+ Schema: commonKeySchema,
+ Fields: []providerField{
+ {
+ Name: "Token",
+ Type: "password",
+ MetaKey: "api_key",
+ EnvKey: "PDD_Token",
+ IsRequired: true,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/entity/auth/methods.go b/backend/internal/entity/auth/methods.go
new file mode 100644
index 00000000..9cfb7faa
--- /dev/null
+++ b/backend/internal/entity/auth/methods.go
@@ -0,0 +1,82 @@
+package auth
+
+import (
+ goerrors "errors"
+ "fmt"
+
+ "npm/internal/database"
+)
+
+// GetByID finds an auth row by its ID
+func GetByID(id int) (Model, error) {
+ var m Model
+ err := m.LoadByID(id)
+ return m, err
+}
+
+// GetByUserIDType finds an auth row by user ID and auth type
+func GetByUserIDType(userID int, authType string) (Model, error) {
+ var m Model
+ err := m.LoadByUserIDType(userID, authType)
+ return m, err
+}
+
+// Create will create an Auth row from this model
+func Create(auth *Model) (int, error) {
+ if auth.ID != 0 {
+ return 0, goerrors.New("Cannot create auth when model already has an ID")
+ }
+
+ auth.Touch(true)
+
+ db := database.GetInstance()
+ // nolint: gosec
+ result, err := db.NamedExec(`INSERT INTO `+fmt.Sprintf("`%s`", tableName)+` (
+ created_on,
+ modified_on,
+ user_id,
+ type,
+ secret,
+ is_deleted
+ ) VALUES (
+ :created_on,
+ :modified_on,
+ :user_id,
+ :type,
+ :secret,
+ :is_deleted
+ )`, auth)
+
+ if err != nil {
+ return 0, err
+ }
+
+ last, lastErr := result.LastInsertId()
+ if lastErr != nil {
+ return 0, lastErr
+ }
+
+ return int(last), nil
+}
+
+// Update will update an existing Auth row from this model
+func Update(auth *Model) error {
+ if auth.ID == 0 {
+ return goerrors.New("Cannot update auth when model doesn't have an ID")
+ }
+
+ auth.Touch(false)
+
+ db := database.GetInstance()
+ // nolint: gosec
+ _, err := db.NamedExec(`UPDATE `+fmt.Sprintf("`%s`", tableName)+` SET
+ created_on = :created_on,
+ modified_on = :modified_on,
+ user_id = :user_id,
+ type = :type,
+ secret = :secret,
+ is_deleted = :is_deleted
+ WHERE id = :id`, auth)
+
+ return err
+}
diff --git a/backend/internal/entity/auth/model.go b/backend/internal/entity/auth/model.go
new file mode 100644
index 00000000..b5640bbf
--- /dev/null
+++ b/backend/internal/entity/auth/model.go
@@ -0,0 +1,98 @@
+package auth
+
+import (
+ goerrors "errors"
+ "fmt"
+ "time"
+
+ "npm/internal/database"
+ "npm/internal/types"
+
+ "golang.org/x/crypto/bcrypt"
+)
+
+const (
+ tableName = "auth"
+
+ // TypePassword is the Password Type
+ TypePassword = "password"
+)
+
+// Model is the auth model
+type Model struct {
+ ID int `json:"id" db:"id"`
+ UserID int `json:"user_id" db:"user_id"`
+ Type string `json:"type" db:"type"`
+ Secret string `json:"secret,omitempty" db:"secret"`
+ CreatedOn types.DBDate `json:"created_on" db:"created_on"`
+ ModifiedOn types.DBDate `json:"modified_on" db:"modified_on"`
+ IsDeleted bool `json:"is_deleted,omitempty" db:"is_deleted"`
+}
+
+func (m *Model) getByQuery(query string, params []interface{}) error {
+ return database.GetByQuery(m, query, params)
+}
+
+// LoadByID will load from an ID
+func (m *Model) LoadByID(id int) error {
+ query := fmt.Sprintf("SELECT * FROM `%s` WHERE id = ? LIMIT 1", tableName)
+ params := []interface{}{id}
+ return m.getByQuery(query, params)
+}
+
+// LoadByUserIDType will load an auth row by user ID and auth type
+func (m *Model) LoadByUserIDType(userID int, authType string) error {
+ query := fmt.Sprintf("SELECT * FROM `%s` WHERE user_id = ? AND type = ? LIMIT 1", tableName)
+ params := []interface{}{userID, authType}
+ return m.getByQuery(query, params)
+}
+
+// Touch will update model's timestamp(s)
+func (m *Model) Touch(created bool) {
+ var d types.DBDate
+ d.Time = time.Now()
+ if created {
+ m.CreatedOn = d
+ }
+ m.ModifiedOn = d
+}
+
+// Save will save this model to the DB
+func (m *Model) Save() error {
+ var err error
+
+ if m.ID == 0 {
+ m.ID, err = Create(m)
+ } else {
+ err = Update(m)
+ }
+
+ return err
+}
+
+// SetPassword will generate a hashed password based on given string
+func (m *Model) SetPassword(password string) error {
+ hash, err := bcrypt.GenerateFromPassword([]byte(password), bcrypt.MinCost+2)
+ if err != nil {
+ return err
+ }
+
+ m.Type = TypePassword
+ m.Secret = string(hash)
+
+ return nil
+}
+
+// ValidateSecret will check if a given secret matches the encrypted secret
+func (m *Model) ValidateSecret(secret string) error {
+ if m.Type != TypePassword {
+ return goerrors.New("Could not validate Secret, auth type is not a Password")
+ }
+
+ err := bcrypt.CompareHashAndPassword([]byte(m.Secret), []byte(secret))
+ if err != nil {
+ return goerrors.New("Invalid Password")
+ }
+
+ return nil
+}
diff --git a/backend/internal/entity/certificate/filters.go b/backend/internal/entity/certificate/filters.go
new file mode 100644
index 00000000..1c034c15
--- /dev/null
+++ b/backend/internal/entity/certificate/filters.go
@@ -0,0 +1,25 @@
+package certificate
+
+import (
+ "npm/internal/entity"
+)
+
+var filterMapFunctions = make(map[string]entity.FilterMapFunction)
+
+// getFilterMapFunctions returns a map of functions that should be executed
+// during the filtering process, if a field is defined here then the value in
+// the filter will be given to the defined function and it will return a new
+// value for use in the sql query.
+func getFilterMapFunctions() map[string]entity.FilterMapFunction {
+ // if len(filterMapFunctions) == 0 {
+ // TODO: See internal/model/file_item.go:620 for an example
+ // }
+
+ return filterMapFunctions
+}
+
+// GetFilterSchema returns filter schema
+func GetFilterSchema() string {
+ var m Model
+ return entity.GetFilterSchema(m)
+}
diff --git a/backend/internal/entity/certificate/methods.go b/backend/internal/entity/certificate/methods.go
new file mode 100644
index 00000000..e24181be
--- /dev/null
+++ b/backend/internal/entity/certificate/methods.go
@@ -0,0 +1,174 @@
+package certificate
+
+import (
+ "database/sql"
+ goerrors "errors"
+ "fmt"
+
+ "npm/internal/database"
+ "npm/internal/entity"
+ "npm/internal/errors"
+ "npm/internal/logger"
+ "npm/internal/model"
+)
+
+// GetByID fetches a single certificate row by its ID.
+func GetByID(id int) (Model, error) {
+	var row Model
+	if loadErr := row.LoadByID(id); loadErr != nil {
+		return row, loadErr
+	}
+	return row, nil
+}
+
+// Create will insert a new certificate row from this model and return the
+// new row's ID. The model must not already have an ID.
+func Create(certificate *Model) (int, error) {
+	if certificate.ID != 0 {
+		return 0, goerrors.New("Cannot create certificate when model already has an ID")
+	}
+
+	// Sets created_on and modified_on to now
+	certificate.Touch(true)
+
+	db := database.GetInstance()
+	// nolint: gosec
+	result, err := db.NamedExec(`INSERT INTO `+fmt.Sprintf("`%s`", tableName)+` (
+		created_on,
+		modified_on,
+		user_id,
+		type,
+		certificate_authority_id,
+		dns_provider_id,
+		name,
+		domain_names,
+		expires_on,
+		status,
+		meta,
+		is_ecc,
+		is_deleted
+	) VALUES (
+		:created_on,
+		:modified_on,
+		:user_id,
+		:type,
+		:certificate_authority_id,
+		:dns_provider_id,
+		:name,
+		:domain_names,
+		:expires_on,
+		:status,
+		:meta,
+		:is_ecc,
+		:is_deleted
+	)`, certificate)
+
+	if err != nil {
+		return 0, err
+	}
+
+	last, lastErr := result.LastInsertId()
+	if lastErr != nil {
+		return 0, lastErr
+	}
+
+	return int(last), nil
+}
+
+// Update will update an existing certificate row from this model.
+// The model must already have an ID.
+func Update(certificate *Model) error {
+	if certificate.ID == 0 {
+		return goerrors.New("Cannot update certificate when model doesn't have an ID")
+	}
+
+	// Refreshes modified_on; created_on is also written below from the model
+	certificate.Touch(false)
+
+	db := database.GetInstance()
+	// nolint: gosec
+	_, err := db.NamedExec(`UPDATE `+fmt.Sprintf("`%s`", tableName)+` SET
+		created_on = :created_on,
+		modified_on = :modified_on,
+		type = :type,
+		user_id = :user_id,
+		certificate_authority_id = :certificate_authority_id,
+		dns_provider_id = :dns_provider_id,
+		name = :name,
+		domain_names = :domain_names,
+		expires_on = :expires_on,
+		status = :status,
+		meta = :meta,
+		is_ecc = :is_ecc,
+		is_deleted = :is_deleted
+	WHERE id = :id`, certificate)
+
+	return err
+}
+
+// List will return a paginated, filtered list of certificates, sorted by
+// name ascending unless the page info specifies otherwise.
+func List(pageInfo model.PageInfo, filters []model.Filter) (ListResponse, error) {
+	var result ListResponse
+	var exampleModel Model
+
+	defaultSort := model.Sort{
+		Field:     "name",
+		Direction: "ASC",
+	}
+
+	db := database.GetInstance()
+	if db == nil {
+		return result, errors.ErrDatabaseUnavailable
+	}
+
+	// Get count of items in this search
+	query, params := entity.ListQueryBuilder(exampleModel, tableName, &pageInfo, defaultSort, filters, getFilterMapFunctions(), true)
+	countRow := db.QueryRowx(query, params...)
+	var totalRows int
+	queryErr := countRow.Scan(&totalRows)
+	if queryErr != nil && queryErr != sql.ErrNoRows {
+		// Log the failure, consistent with the List() implementations
+		// in the certificateauthority and dnsprovider packages.
+		logger.Error("ListCertificatesError", queryErr)
+		logger.Debug("%s -- %+v", query, params)
+		return result, queryErr
+	}
+
+	// Get rows
+	var items []Model
+	query, params = entity.ListQueryBuilder(exampleModel, tableName, &pageInfo, defaultSort, filters, getFilterMapFunctions(), false)
+	err := db.Select(&items, query, params...)
+	if err != nil {
+		logger.Error("ListCertificatesError", err)
+		logger.Debug("%s -- %+v", query, params)
+		return result, err
+	}
+
+	result = ListResponse{
+		Items:  items,
+		Total:  totalRows,
+		Limit:  pageInfo.Limit,
+		Offset: pageInfo.Offset,
+		Sort:   pageInfo.Sort,
+		Filter: filters,
+	}
+
+	return result, nil
+}
+
+// GetByStatus returns certificates with the given status that are eligible
+// for requesting: http/dns type, linked to a CA, and not soft-deleted.
+func GetByStatus(status string) ([]Model, error) {
+	models := make([]Model, 0)
+	db := database.GetInstance()
+
+	query := fmt.Sprintf(`
+		SELECT
+			t.*
+		FROM "%s" t
+		INNER JOIN "certificate_authority" c ON c."id" = t."certificate_authority_id"
+		WHERE
+			t."type" IN ("http", "dns") AND
+			t."status" = ? AND
+			t."certificate_authority_id" > 0 AND
+			t."is_deleted" = 0
+	`, tableName)
+
+	// Bug fix: the status argument was previously ignored and StatusReady
+	// was always bound to the placeholder, making the parameter useless.
+	params := []interface{}{status}
+	err := db.Select(&models, query, params...)
+	if err != nil && err != sql.ErrNoRows {
+		logger.Error("GetByStatusError", err)
+		logger.Debug("Query: %s -- %+v", query, params)
+	}
+
+	return models, err
+}
diff --git a/backend/internal/entity/certificate/model.go b/backend/internal/entity/certificate/model.go
new file mode 100644
index 00000000..556e1699
--- /dev/null
+++ b/backend/internal/entity/certificate/model.go
@@ -0,0 +1,266 @@
+package certificate
+
+import (
+ "errors"
+ "fmt"
+ "os"
+ "regexp"
+ "strings"
+ "time"
+
+ "npm/internal/acme"
+ "npm/internal/config"
+ "npm/internal/database"
+ "npm/internal/entity/certificateauthority"
+ "npm/internal/entity/dnsprovider"
+ "npm/internal/logger"
+ "npm/internal/types"
+)
+
+const (
+ tableName = "certificate"
+
+ // TypeCustom custom cert type
+ TypeCustom = "custom"
+ // TypeHTTP http cert type
+ TypeHTTP = "http"
+ // TypeDNS dns cert type
+ TypeDNS = "dns"
+ // TypeMkcert mkcert cert type
+ TypeMkcert = "mkcert"
+
+ // StatusReady is ready for certificate to be requested
+ StatusReady = "ready"
+ // StatusRequesting is process of being requested
+ StatusRequesting = "requesting"
+ // StatusFailed is a certicifate that failed to request
+ StatusFailed = "failed"
+ // StatusProvided is a certificate provided and ready for actual use
+ StatusProvided = "provided"
+)
+
+// Model is the certificate model, mapped to the `certificate` table.
+// Filter tags drive the list-endpoint filter schema; db tags drive sqlx.
+type Model struct {
+	ID                     int                  `json:"id" db:"id" filter:"id,integer"`
+	CreatedOn              types.DBDate         `json:"created_on" db:"created_on" filter:"created_on,integer"`
+	ModifiedOn             types.DBDate         `json:"modified_on" db:"modified_on" filter:"modified_on,integer"`
+	ExpiresOn              types.NullableDBDate `json:"expires_on" db:"expires_on" filter:"expires_on,integer"`
+	Type                   string               `json:"type" db:"type" filter:"type,string"`
+	UserID                 int                  `json:"user_id" db:"user_id" filter:"user_id,integer"`
+	CertificateAuthorityID int                  `json:"certificate_authority_id" db:"certificate_authority_id" filter:"certificate_authority_id,integer"`
+	DNSProviderID          int                  `json:"dns_provider_id" db:"dns_provider_id" filter:"dns_provider_id,integer"`
+	Name                   string               `json:"name" db:"name" filter:"name,string"`
+	DomainNames            types.JSONB          `json:"domain_names" db:"domain_names" filter:"domain_names,string"`
+	Status                 string               `json:"status" db:"status" filter:"status,string"`
+	ErrorMessage           string               `json:"error_message" db:"error_message" filter:"error_message,string"`
+	Meta                   types.JSONB          `json:"-" db:"meta"`
+	IsECC                  int                  `json:"is_ecc" db:"is_ecc" filter:"is_ecc,integer"`
+	IsDeleted              bool                 `json:"is_deleted,omitempty" db:"is_deleted"`
+	// Expansions: populated by Expand(), not stored in this table
+	CertificateAuthority *certificateauthority.Model `json:"certificate_authority,omitempty"`
+	DNSProvider          *dnsprovider.Model          `json:"dns_provider,omitempty"`
+}
+
+// getByQuery loads this model from the first row returned by the query.
+func (m *Model) getByQuery(query string, params []interface{}) error {
+	return database.GetByQuery(m, query, params)
+}
+
+// LoadByID populates the model from the row with the given ID,
+// skipping soft-deleted rows.
+func (m *Model) LoadByID(id int) error {
+	query := fmt.Sprintf("SELECT * FROM `%s` WHERE id = ? AND is_deleted = ? LIMIT 1", tableName)
+	return m.getByQuery(query, []interface{}{id, 0})
+}
+
+// Touch will update model's timestamp(s)
+func (m *Model) Touch(created bool) {
+ var d types.DBDate
+ d.Time = time.Now()
+ if created {
+ m.CreatedOn = d
+ }
+ m.ModifiedOn = d
+}
+
+// Save validates and persists this certificate, creating or updating
+// depending on whether it already has an ID. Requires a UserID, a valid
+// type/ID combination (Validate) and wildcard support on the CA when
+// wildcard domains are present.
+func (m *Model) Save() error {
+	var err error
+
+	if m.UserID == 0 {
+		return fmt.Errorf("User ID must be specified")
+	}
+
+	if !m.Validate() {
+		return fmt.Errorf("Certificate data is incorrect or incomplete for this type")
+	}
+
+	if !m.ValidateWildcardSupport() {
+		return fmt.Errorf("Cannot use Wildcard domains with this CA")
+	}
+
+	// Only assigns a status to brand-new certificates
+	m.setDefaultStatus()
+
+	if m.ID == 0 {
+		m.ID, err = Create(m)
+	} else {
+		err = Update(m)
+	}
+
+	return err
+}
+
+// Delete will mark a certificate as deleted (soft delete) and save it.
+// Returns false on failure; the underlying error is discarded, so callers
+// cannot distinguish validation failures from database errors.
+func (m *Model) Delete() bool {
+	m.Touch(false)
+	m.IsDeleted = true
+	if err := m.Save(); err != nil {
+		return false
+	}
+	return true
+}
+
+// Validate checks that the CA/DNS-provider ID combination on the model is
+// consistent with the certificate type. Unknown types are invalid.
+func (m *Model) Validate() bool {
+	if m.Type == TypeMkcert {
+		return true
+	}
+	if m.Type == TypeCustom {
+		// TODO: make sure meta contains required fields
+		return m.DNSProviderID == 0 && m.CertificateAuthorityID == 0
+	}
+	if m.Type == TypeHTTP {
+		return m.DNSProviderID == 0 && m.CertificateAuthorityID > 0
+	}
+	if m.Type == TypeDNS {
+		return m.DNSProviderID > 0 && m.CertificateAuthorityID > 0
+	}
+	return false
+}
+
+// ValidateWildcardSupport returns false when the domain list contains at
+// least one wildcard but the configured CA does not support wildcards.
+// Also returns false if the domain list cannot be decoded.
+func (m *Model) ValidateWildcardSupport() bool {
+	domains, err := m.DomainNames.AsStringArray()
+	if err != nil {
+		logger.Error("ValidateWildcardSupportError", err)
+		return false
+	}
+
+	for _, domain := range domains {
+		if strings.Contains(domain, "*") {
+			// Found a wildcard: the CA must support them.
+			m.Expand()
+			return m.CertificateAuthority.IsWildcardSupported
+		}
+	}
+
+	// No wildcards present, nothing to check.
+	return true
+}
+
+// setDefaultStatus assigns an initial status to unsaved certificates:
+// custom certs are already provided, everything else is ready to request.
+func (m *Model) setDefaultStatus() {
+	if m.ID != 0 {
+		// Existing certificate: leave status untouched.
+		return
+	}
+	if m.Type == TypeCustom {
+		m.Status = StatusProvided
+	} else {
+		m.Status = StatusReady
+	}
+}
+
+// Expand will populate attached CA and DNS-provider objects for the model.
+// NOTE(review): lookup errors are silently discarded, so on failure the
+// expansion pointers hold zero-value models rather than nil — confirm
+// callers tolerate this.
+func (m *Model) Expand() {
+	if m.CertificateAuthorityID > 0 {
+		certificateAuthority, _ := certificateauthority.GetByID(m.CertificateAuthorityID)
+		m.CertificateAuthority = &certificateAuthority
+	}
+	if m.DNSProviderID > 0 {
+		dnsProvider, _ := dnsprovider.GetByID(m.DNSProviderID)
+		m.DNSProvider = &dnsProvider
+	}
+}
+
+// GetCertificateLocations will return the paths on disk where the SSL
+// certs should or would be. The folder name combines the row ID with a
+// sanitized, truncated version of the certificate name.
+// Returns: (key, fullchain, certFolder); empty strings if called before
+// the certificate has been saved (no ID yet).
+func (m *Model) GetCertificateLocations() (string, string, string) {
+	if m.ID == 0 {
+		logger.Error("GetCertificateLocationsError", errors.New("GetCertificateLocations called before certificate was saved"))
+		return "", "", ""
+	}
+
+	certFolder := fmt.Sprintf("%s/certificates", config.Configuration.DataFolder)
+
+	// Generate a unique folder name for this cert:
+	// replace anything outside [A-Za-z0-9.] with underscores
+	m1 := regexp.MustCompile(`[^A-Za-z0-9\.]`)
+
+	niceName := m1.ReplaceAllString(m.Name, "_")
+	if len(niceName) > 20 {
+		niceName = niceName[:20]
+	}
+	// The ID prefix keeps the folder unique even after truncation
+	folderName := fmt.Sprintf("%d-%s", m.ID, niceName)
+
+	return fmt.Sprintf("%s/%s/key.pem", certFolder, folderName),
+		fmt.Sprintf("%s/%s/fullchain.pem", certFolder, folderName),
+		fmt.Sprintf("%s/%s", certFolder, folderName)
+}
+
+// Request makes a certificate request via acme.sh. It moves the model
+// through the status lifecycle (requesting -> provided/failed), persisting
+// after each transition. A failed acme request is recorded on the model
+// and returns nil; only infrastructure errors return non-nil.
+func (m *Model) Request() error {
+	logger.Info("Requesting certificate for: #%d %v", m.ID, m.Name)
+
+	// Load CA and DNS provider objects needed by the acme call below
+	m.Expand()
+	m.Status = StatusRequesting
+	if err := m.Save(); err != nil {
+		logger.Error("CertificateSaveError", err)
+		return err
+	}
+
+	// do request
+	domains, err := m.DomainNames.AsStringArray()
+	if err != nil {
+		logger.Error("CertificateRequestError", err)
+		return err
+	}
+
+	certKeyFile, certFullchainFile, certFolder := m.GetCertificateLocations()
+
+	// ensure certFolder is created
+	if err := os.MkdirAll(certFolder, os.ModePerm); err != nil {
+		logger.Error("CreateFolderError", err)
+		return err
+	}
+
+	errMsg, err := acme.RequestCert(domains, m.Type, certFullchainFile, certKeyFile, m.DNSProvider, m.CertificateAuthority, true)
+	if err != nil {
+		// acme request failed: record the failure on the row, but this is
+		// not an error for the caller
+		m.Status = StatusFailed
+		m.ErrorMessage = errMsg
+		if err := m.Save(); err != nil {
+			logger.Error("CertificateSaveError", err)
+			return err
+		}
+		return nil
+	}
+
+	// If done
+	m.Status = StatusProvided
+	t := time.Now()
+	// TODO: placeholder — this records "now", not the real certificate
+	// expiry; parse the expiry from the issued certificate instead
+	m.ExpiresOn.Time = &t
+	if err := m.Save(); err != nil {
+		logger.Error("CertificateSaveError", err)
+		return err
+	}
+
+	logger.Info("Request for certificate for: #%d %v was completed", m.ID, m.Name)
+	return nil
+}
diff --git a/backend/internal/entity/certificate/structs.go b/backend/internal/entity/certificate/structs.go
new file mode 100644
index 00000000..a9b99369
--- /dev/null
+++ b/backend/internal/entity/certificate/structs.go
@@ -0,0 +1,15 @@
+package certificate
+
+import (
+ "npm/internal/model"
+)
+
+// ListResponse is the JSON response for the certificates list endpoint.
+type ListResponse struct {
+	Total  int            `json:"total"`
+	Offset int            `json:"offset"`
+	Limit  int            `json:"limit"`
+	Sort   []model.Sort   `json:"sort"`
+	Filter []model.Filter `json:"filter,omitempty"`
+	Items  []Model        `json:"items,omitempty"`
+}
diff --git a/backend/internal/entity/certificateauthority/filters.go b/backend/internal/entity/certificateauthority/filters.go
new file mode 100644
index 00000000..a16ba01e
--- /dev/null
+++ b/backend/internal/entity/certificateauthority/filters.go
@@ -0,0 +1,25 @@
+package certificateauthority
+
+import (
+ "npm/internal/entity"
+)
+
+// filterMapFunctions maps filter field names to value-transform functions.
+// Currently empty for the certificate authority entity.
+var filterMapFunctions = make(map[string]entity.FilterMapFunction)
+
+// getFilterMapFunctions is a map of functions that should be executed
+// during the filtering process, if a field is defined here then the value in
+// the filter will be given to the defined function and it will return a new
+// value for use in the sql query.
+func getFilterMapFunctions() map[string]entity.FilterMapFunction {
+	// if len(filterMapFunctions) == 0 {
+	// TODO: See internal/model/file_item.go:620 for an example
+	// }
+
+	return filterMapFunctions
+}
+
+// GetFilterSchema returns the filter schema derived from the `filter`
+// struct tags on the certificate authority Model.
+func GetFilterSchema() string {
+	var m Model
+	return entity.GetFilterSchema(m)
+}
diff --git a/backend/internal/entity/certificateauthority/methods.go b/backend/internal/entity/certificateauthority/methods.go
new file mode 100644
index 00000000..28941e21
--- /dev/null
+++ b/backend/internal/entity/certificateauthority/methods.go
@@ -0,0 +1,134 @@
+package certificateauthority
+
+import (
+ "database/sql"
+ goerrors "errors"
+ "fmt"
+
+ "npm/internal/database"
+ "npm/internal/entity"
+ "npm/internal/errors"
+ "npm/internal/logger"
+ "npm/internal/model"
+)
+
+// GetByID fetches a single certificate authority row by its ID.
+func GetByID(id int) (Model, error) {
+	var row Model
+	loadErr := row.LoadByID(id)
+	return row, loadErr
+}
+
+// Create will insert a new certificate authority row from this model and
+// return the new row's ID. The model must not already have an ID.
+func Create(ca *Model) (int, error) {
+	if ca.ID != 0 {
+		return 0, goerrors.New("Cannot create certificate authority when model already has an ID")
+	}
+
+	// Sets created_on and modified_on to now
+	ca.Touch(true)
+
+	db := database.GetInstance()
+	// nolint: gosec
+	result, err := db.NamedExec(`INSERT INTO `+fmt.Sprintf("`%s`", tableName)+` (
+		created_on,
+		modified_on,
+		name,
+		acmesh_server,
+		ca_bundle,
+		max_domains,
+		is_wildcard_supported,
+		is_deleted
+	) VALUES (
+		:created_on,
+		:modified_on,
+		:name,
+		:acmesh_server,
+		:ca_bundle,
+		:max_domains,
+		:is_wildcard_supported,
+		:is_deleted
+	)`, ca)
+
+	if err != nil {
+		return 0, err
+	}
+
+	last, lastErr := result.LastInsertId()
+	if lastErr != nil {
+		return 0, lastErr
+	}
+
+	return int(last), nil
+}
+
+// Update will update an existing certificate authority row from this model.
+// The model must already have an ID.
+func Update(ca *Model) error {
+	if ca.ID == 0 {
+		return goerrors.New("Cannot update certificate authority when model doesn't have an ID")
+	}
+
+	// Refreshes modified_on
+	ca.Touch(false)
+
+	db := database.GetInstance()
+	// nolint: gosec
+	_, err := db.NamedExec(`UPDATE `+fmt.Sprintf("`%s`", tableName)+` SET
+		created_on = :created_on,
+		modified_on = :modified_on,
+		name = :name,
+		acmesh_server = :acmesh_server,
+		ca_bundle = :ca_bundle,
+		max_domains = :max_domains,
+		is_wildcard_supported = :is_wildcard_supported,
+		is_deleted = :is_deleted
+	WHERE id = :id`, ca)
+
+	return err
+}
+
+// List will return a paginated, filtered list of certificate authorities,
+// sorted by name ascending unless the page info specifies otherwise.
+func List(pageInfo model.PageInfo, filters []model.Filter) (ListResponse, error) {
+	var result ListResponse
+	var exampleModel Model
+
+	defaultSort := model.Sort{
+		Field:     "name",
+		Direction: "ASC",
+	}
+
+	db := database.GetInstance()
+	if db == nil {
+		return result, errors.ErrDatabaseUnavailable
+	}
+
+	// Get count of items in this search
+	query, params := entity.ListQueryBuilder(exampleModel, tableName, &pageInfo, defaultSort, filters, getFilterMapFunctions(), true)
+	countRow := db.QueryRowx(query, params...)
+	var totalRows int
+	queryErr := countRow.Scan(&totalRows)
+	if queryErr != nil && queryErr != sql.ErrNoRows {
+		logger.Error("ListCertificateAuthoritiesError", queryErr)
+		logger.Debug("%s -- %+v", query, params)
+		return result, queryErr
+	}
+
+	// Get rows
+	var items []Model
+	query, params = entity.ListQueryBuilder(exampleModel, tableName, &pageInfo, defaultSort, filters, getFilterMapFunctions(), false)
+	err := db.Select(&items, query, params...)
+	if err != nil {
+		logger.Error("ListCertificateAuthoritiesError", err)
+		logger.Debug("%s -- %+v", query, params)
+		return result, err
+	}
+
+	result = ListResponse{
+		Items:  items,
+		Total:  totalRows,
+		Limit:  pageInfo.Limit,
+		Offset: pageInfo.Offset,
+		Sort:   pageInfo.Sort,
+		Filter: filters,
+	}
+
+	return result, nil
+}
diff --git a/backend/internal/entity/certificateauthority/model.go b/backend/internal/entity/certificateauthority/model.go
new file mode 100644
index 00000000..e5817d0a
--- /dev/null
+++ b/backend/internal/entity/certificateauthority/model.go
@@ -0,0 +1,88 @@
+package certificateauthority
+
+import (
+ goerrors "errors"
+ "fmt"
+ "os"
+ "path/filepath"
+ "time"
+
+ "npm/internal/database"
+ "npm/internal/errors"
+ "npm/internal/types"
+)
+
+const (
+ tableName = "certificate_authority"
+)
+
+// Model is the certificate authority model, mapped to the
+// `certificate_authority` table.
+type Model struct {
+	ID                  int          `json:"id" db:"id" filter:"id,integer"`
+	CreatedOn           types.DBDate `json:"created_on" db:"created_on" filter:"created_on,integer"`
+	ModifiedOn          types.DBDate `json:"modified_on" db:"modified_on" filter:"modified_on,integer"`
+	Name                string       `json:"name" db:"name" filter:"name,string"`
+	AcmeshServer        string       `json:"acmesh_server" db:"acmesh_server" filter:"acmesh_server,string"`
+	CABundle            string       `json:"ca_bundle" db:"ca_bundle" filter:"ca_bundle,string"`
+	MaxDomains          int          `json:"max_domains" db:"max_domains" filter:"max_domains,integer"`
+	IsWildcardSupported bool         `json:"is_wildcard_supported" db:"is_wildcard_supported" filter:"is_wildcard_supported,boolean"`
+	IsReadonly          bool         `json:"is_readonly" db:"is_readonly" filter:"is_readonly,boolean"`
+	IsDeleted           bool         `json:"is_deleted,omitempty" db:"is_deleted"`
+}
+
+// getByQuery loads this model from the first row returned by the query.
+func (m *Model) getByQuery(query string, params []interface{}) error {
+	return database.GetByQuery(m, query, params)
+}
+
+// LoadByID will load from an ID, skipping soft-deleted rows
+func (m *Model) LoadByID(id int) error {
+	query := fmt.Sprintf("SELECT * FROM `%s` WHERE id = ? AND is_deleted = ? LIMIT 1", tableName)
+	params := []interface{}{id, 0}
+	return m.getByQuery(query, params)
+}
+
+// Touch will update the modified timestamp, and the created timestamp
+// as well when created is true
+func (m *Model) Touch(created bool) {
+	var d types.DBDate
+	d.Time = time.Now()
+	if created {
+		m.CreatedOn = d
+	}
+	m.ModifiedOn = d
+}
+
+// Save persists this certificate authority, creating or updating
+// depending on whether it already has an ID.
+func (m *Model) Save() error {
+	var err error
+
+	if m.ID == 0 {
+		m.ID, err = Create(m)
+	} else {
+		err = Update(m)
+	}
+
+	return err
+}
+
+// Delete will mark a certificate authority as deleted (soft delete) and
+// save it. Returns false on failure; the underlying error is discarded.
+func (m *Model) Delete() bool {
+	m.Touch(false)
+	m.IsDeleted = true
+	if err := m.Save(); err != nil {
+		return false
+	}
+	return true
+}
+
+// Check will ensure the ca bundle path exists if it's set
+func (m *Model) Check() error {
+ var err error
+
+ if m.CABundle != "" {
+ if _, fileerr := os.Stat(filepath.Clean(m.CABundle)); goerrors.Is(fileerr, os.ErrNotExist) {
+ err = errors.ErrCABundleDoesNotExist
+ }
+ }
+
+ return err
+}
diff --git a/backend/internal/entity/certificateauthority/structs.go b/backend/internal/entity/certificateauthority/structs.go
new file mode 100644
index 00000000..85e3521a
--- /dev/null
+++ b/backend/internal/entity/certificateauthority/structs.go
@@ -0,0 +1,15 @@
+package certificateauthority
+
+import (
+ "npm/internal/model"
+)
+
+// ListResponse is the JSON response for the certificate authorities list
+// endpoint.
+type ListResponse struct {
+	Total  int            `json:"total"`
+	Offset int            `json:"offset"`
+	Limit  int            `json:"limit"`
+	Sort   []model.Sort   `json:"sort"`
+	Filter []model.Filter `json:"filter,omitempty"`
+	Items  []Model        `json:"items,omitempty"`
+}
diff --git a/backend/internal/entity/dnsprovider/filters.go b/backend/internal/entity/dnsprovider/filters.go
new file mode 100644
index 00000000..30ab6c52
--- /dev/null
+++ b/backend/internal/entity/dnsprovider/filters.go
@@ -0,0 +1,25 @@
+package dnsprovider
+
+import (
+ "npm/internal/entity"
+)
+
+// filterMapFunctions maps filter field names to value-transform functions.
+// Currently empty for the dns provider entity.
+var filterMapFunctions = make(map[string]entity.FilterMapFunction)
+
+// getFilterMapFunctions is a map of functions that should be executed
+// during the filtering process, if a field is defined here then the value in
+// the filter will be given to the defined function and it will return a new
+// value for use in the sql query.
+func getFilterMapFunctions() map[string]entity.FilterMapFunction {
+	// if len(filterMapFunctions) == 0 {
+	// TODO: See internal/model/file_item.go:620 for an example
+	// }
+
+	return filterMapFunctions
+}
+
+// GetFilterSchema returns the filter schema derived from the `filter`
+// struct tags on the dns provider Model.
+func GetFilterSchema() string {
+	var m Model
+	return entity.GetFilterSchema(m)
+}
diff --git a/backend/internal/entity/dnsprovider/methods.go b/backend/internal/entity/dnsprovider/methods.go
new file mode 100644
index 00000000..c1a1e0e1
--- /dev/null
+++ b/backend/internal/entity/dnsprovider/methods.go
@@ -0,0 +1,134 @@
+package dnsprovider
+
+import (
+ "database/sql"
+ goerrors "errors"
+ "fmt"
+
+ "npm/internal/database"
+ "npm/internal/entity"
+ "npm/internal/errors"
+ "npm/internal/logger"
+ "npm/internal/model"
+)
+
+// GetByID finds a dns provider row by ID
+func GetByID(id int) (Model, error) {
+	var m Model
+	err := m.LoadByID(id)
+	return m, err
+}
+
+// Create will insert a new dns provider row from this model and return the
+// new row's ID. The model must not already have an ID.
+func Create(provider *Model) (int, error) {
+	if provider.ID != 0 {
+		return 0, goerrors.New("Cannot create dns provider when model already has an ID")
+	}
+
+	// Sets created_on and modified_on to now
+	provider.Touch(true)
+
+	db := database.GetInstance()
+	// nolint: gosec
+	result, err := db.NamedExec(`INSERT INTO `+fmt.Sprintf("`%s`", tableName)+` (
+		created_on,
+		modified_on,
+		user_id,
+		name,
+		acmesh_name,
+		dns_sleep,
+		meta,
+		is_deleted
+	) VALUES (
+		:created_on,
+		:modified_on,
+		:user_id,
+		:name,
+		:acmesh_name,
+		:dns_sleep,
+		:meta,
+		:is_deleted
+	)`, provider)
+
+	if err != nil {
+		return 0, err
+	}
+
+	last, lastErr := result.LastInsertId()
+	if lastErr != nil {
+		return 0, lastErr
+	}
+
+	return int(last), nil
+}
+
+// Update will update an existing dns provider row from this model.
+// The model must already have an ID.
+func Update(provider *Model) error {
+	if provider.ID == 0 {
+		return goerrors.New("Cannot update dns provider when model doesn't have an ID")
+	}
+
+	// Refreshes modified_on
+	provider.Touch(false)
+
+	db := database.GetInstance()
+	// nolint: gosec
+	_, err := db.NamedExec(`UPDATE `+fmt.Sprintf("`%s`", tableName)+` SET
+		created_on = :created_on,
+		modified_on = :modified_on,
+		user_id = :user_id,
+		name = :name,
+		acmesh_name = :acmesh_name,
+		dns_sleep = :dns_sleep,
+		meta = :meta,
+		is_deleted = :is_deleted
+	WHERE id = :id`, provider)
+
+	return err
+}
+
+// List will return a paginated, filtered list of dns providers, sorted by
+// name ascending unless the page info specifies otherwise.
+func List(pageInfo model.PageInfo, filters []model.Filter) (ListResponse, error) {
+	var result ListResponse
+	var exampleModel Model
+
+	defaultSort := model.Sort{
+		Field:     "name",
+		Direction: "ASC",
+	}
+
+	db := database.GetInstance()
+	if db == nil {
+		return result, errors.ErrDatabaseUnavailable
+	}
+
+	// Get count of items in this search
+	query, params := entity.ListQueryBuilder(exampleModel, tableName, &pageInfo, defaultSort, filters, getFilterMapFunctions(), true)
+	countRow := db.QueryRowx(query, params...)
+	var totalRows int
+	queryErr := countRow.Scan(&totalRows)
+	if queryErr != nil && queryErr != sql.ErrNoRows {
+		logger.Error("ListDnsProvidersError", queryErr)
+		logger.Debug("%s -- %+v", query, params)
+		return result, queryErr
+	}
+
+	// Get rows
+	var items []Model
+	query, params = entity.ListQueryBuilder(exampleModel, tableName, &pageInfo, defaultSort, filters, getFilterMapFunctions(), false)
+	err := db.Select(&items, query, params...)
+	if err != nil {
+		logger.Error("ListDnsProvidersError", err)
+		logger.Debug("%s -- %+v", query, params)
+		return result, err
+	}
+
+	result = ListResponse{
+		Items:  items,
+		Total:  totalRows,
+		Limit:  pageInfo.Limit,
+		Offset: pageInfo.Offset,
+		Sort:   pageInfo.Sort,
+		Filter: filters,
+	}
+
+	return result, nil
+}
diff --git a/backend/internal/entity/dnsprovider/model.go b/backend/internal/entity/dnsprovider/model.go
new file mode 100644
index 00000000..34253bea
--- /dev/null
+++ b/backend/internal/entity/dnsprovider/model.go
@@ -0,0 +1,101 @@
+package dnsprovider
+
+import (
+ "fmt"
+ "time"
+
+ "npm/internal/database"
+ "npm/internal/dnsproviders"
+ "npm/internal/logger"
+ "npm/internal/types"
+)
+
+const (
+ tableName = "dns_provider"
+)
+
+// Model is the dns provider model, mapped to the `dns_provider` table.
+// Also see: https://github.com/acmesh-official/acme.sh/wiki/dnscheck
+type Model struct {
+	ID         int          `json:"id" db:"id" filter:"id,integer"`
+	CreatedOn  types.DBDate `json:"created_on" db:"created_on" filter:"created_on,integer"`
+	ModifiedOn types.DBDate `json:"modified_on" db:"modified_on" filter:"modified_on,integer"`
+	UserID     int          `json:"user_id" db:"user_id" filter:"user_id,integer"`
+	Name       string       `json:"name" db:"name" filter:"name,string"`
+	AcmeshName string       `json:"acmesh_name" db:"acmesh_name" filter:"acmesh_name,string"`
+	DNSSleep   int          `json:"dns_sleep" db:"dns_sleep" filter:"dns_sleep,integer"`
+	Meta       types.JSONB  `json:"meta" db:"meta"`
+	IsDeleted  bool         `json:"is_deleted,omitempty" db:"is_deleted"`
+}
+
+// getByQuery loads this model from the first row returned by the query.
+func (m *Model) getByQuery(query string, params []interface{}) error {
+	return database.GetByQuery(m, query, params)
+}
+
+// LoadByID will load from an ID, skipping soft-deleted rows
+func (m *Model) LoadByID(id int) error {
+	query := fmt.Sprintf("SELECT * FROM `%s` WHERE id = ? AND is_deleted = ? LIMIT 1", tableName)
+	params := []interface{}{id, 0}
+	return m.getByQuery(query, params)
+}
+
+// Touch will update the modified timestamp, and the created timestamp
+// as well when created is true
+func (m *Model) Touch(created bool) {
+	var d types.DBDate
+	d.Time = time.Now()
+	if created {
+		m.CreatedOn = d
+	}
+	m.ModifiedOn = d
+}
+
+// Save persists this dns provider, creating or updating depending on
+// whether it already has an ID. Requires a UserID.
+func (m *Model) Save() error {
+	var err error
+
+	if m.UserID == 0 {
+		return fmt.Errorf("User ID must be specified")
+	}
+
+	if m.ID == 0 {
+		m.ID, err = Create(m)
+	} else {
+		err = Update(m)
+	}
+
+	return err
+}
+
+// Delete will mark a dns provider as deleted (soft delete) and save it.
+// Returns false on failure; the underlying error is discarded.
+func (m *Model) Delete() bool {
+	m.Touch(false)
+	m.IsDeleted = true
+	if err := m.Save(); err != nil {
+		return false
+	}
+	return true
+}
+
+// GetAcmeShEnvVars returns the env vars required for acme.sh dns cert
+// requests, as "NAME=value" strings, by translating this provider's stored
+// Meta through the acme.sh provider definition for m.AcmeshName.
+func (m *Model) GetAcmeShEnvVars() ([]string, error) {
+	logger.Debug("GetAcmeShEnvVars for: %s", m.AcmeshName)
+	// First, fetch the provider obj with this AcmeShName
+	acmeDNSProvider, err := dnsproviders.Get(m.AcmeshName)
+	logger.Debug("acmeDNSProvider: %+v", acmeDNSProvider)
+	if err != nil {
+		logger.Error("GetAcmeShEnvVarsError", err)
+		return nil, err
+	}
+
+	envs := make([]string, 0)
+
+	// Then, using the meta, convert to env vars
+	envPairs := acmeDNSProvider.GetAcmeEnvVars(m.Meta.Decoded)
+	logger.Debug("meta: %+v", m.Meta)
+	logger.Debug("envPairs: %+v", envPairs)
+	for envName, envValue := range envPairs {
+		envs = append(envs, fmt.Sprintf(`%s=%v`, envName, envValue))
+	}
+
+	return envs, nil
+}
diff --git a/backend/internal/entity/dnsprovider/model_test.go b/backend/internal/entity/dnsprovider/model_test.go
new file mode 100644
index 00000000..5c4fef28
--- /dev/null
+++ b/backend/internal/entity/dnsprovider/model_test.go
@@ -0,0 +1,82 @@
+package dnsprovider
+
+import (
+ "encoding/json"
+ "npm/internal/types"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+// TestModelGetAcmeShEnvVars checks that provider meta JSON is translated
+// into the expected acme.sh environment variable pairs for a few providers.
+func TestModelGetAcmeShEnvVars(t *testing.T) {
+	type want struct {
+		envs []string
+		err  error
+	}
+
+	tests := []struct {
+		name        string
+		dnsProvider Model
+		metaJSON    string
+		want        want
+	}{
+		{
+			name: "dns_aws",
+			dnsProvider: Model{
+				AcmeshName: "dns_aws",
+			},
+			metaJSON: `{"access_key_id":"sdfsdfsdfljlbjkljlkjsdfoiwje","access_key":"xxxxxxx"}`,
+			want: want{
+				envs: []string{
+					`AWS_ACCESS_KEY_ID=sdfsdfsdfljlbjkljlkjsdfoiwje`,
+					`AWS_SECRET_ACCESS_KEY=xxxxxxx`,
+				},
+				err: nil,
+			},
+		},
+		{
+			name: "dns_cf",
+			dnsProvider: Model{
+				AcmeshName: "dns_cf",
+			},
+			metaJSON: `{"api_key":"sdfsdfsdfljlbjkljlkjsdfoiwje","email":"me@example.com","token":"dkfjghdk","account_id":"hgbdjfg","zone_id":"ASDASD"}`,
+			want: want{
+				envs: []string{
+					`CF_Token=dkfjghdk`,
+					`CF_Account_ID=hgbdjfg`,
+					`CF_Zone_ID=ASDASD`,
+					`CF_Key=sdfsdfsdfljlbjkljlkjsdfoiwje`,
+					`CF_Email=me@example.com`,
+				},
+				err: nil,
+			},
+		},
+		{
+			name: "dns_duckdns",
+			dnsProvider: Model{
+				AcmeshName: "dns_duckdns",
+			},
+			metaJSON: `{"api_key":"aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"}`,
+			want: want{
+				envs: []string{
+					`DuckDNS_Token=aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee`,
+				},
+				err: nil,
+			},
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			var meta types.JSONB
+			err := json.Unmarshal([]byte(tt.metaJSON), &meta.Decoded)
+			assert.Equal(t, nil, err)
+			tt.dnsProvider.Meta = meta
+			envs, err := tt.dnsProvider.GetAcmeShEnvVars()
+			assert.Equal(t, tt.want.err, err)
+			// Env var map iteration order is random, so check membership
+			// rather than exact slice equality
+			for _, i := range tt.want.envs {
+				assert.Contains(t, envs, i)
+			}
+		})
+	}
+}
diff --git a/backend/internal/entity/dnsprovider/structs.go b/backend/internal/entity/dnsprovider/structs.go
new file mode 100644
index 00000000..835c947b
--- /dev/null
+++ b/backend/internal/entity/dnsprovider/structs.go
@@ -0,0 +1,15 @@
+package dnsprovider
+
+import (
+ "npm/internal/model"
+)
+
+// ListResponse is the JSON response for the list
+type ListResponse struct {
+ Total int `json:"total"`
+ Offset int `json:"offset"`
+ Limit int `json:"limit"`
+ Sort []model.Sort `json:"sort"`
+ Filter []model.Filter `json:"filter,omitempty"`
+ Items []Model `json:"items,omitempty"`
+}
diff --git a/backend/internal/entity/filters.go b/backend/internal/entity/filters.go
new file mode 100644
index 00000000..9709fa16
--- /dev/null
+++ b/backend/internal/entity/filters.go
@@ -0,0 +1,158 @@
+package entity
+
+import (
+ "fmt"
+ "reflect"
+ "strings"
+
+ "npm/internal/model"
+)
+
+// FilterMapFunction is a filter map function
+type FilterMapFunction func(value []string) []string
+
+// FilterTagName tag name used for filter pickups
+const FilterTagName = "filter"
+
+// DBTagName tag name used for field name pickups
+const DBTagName = "db"
+
+// GenerateSQLFromFilters will return a Query and params for use as WHERE clause in SQL queries
+// This will use an AND where clause approach.
+func GenerateSQLFromFilters(filters []model.Filter, fieldMap map[string]string, fieldMapFunctions map[string]FilterMapFunction) (string, []interface{}) {
+ clauses := make([]string, 0)
+ params := make([]interface{}, 0)
+
+ for _, filter := range filters {
+ // Lookup this filter field from the functions map
+ if _, ok := fieldMapFunctions[filter.Field]; ok {
+ filter.Value = fieldMapFunctions[filter.Field](filter.Value)
+ }
+
+ // Lookup this filter field from the name map
+ if _, ok := fieldMap[filter.Field]; ok {
+ filter.Field = fieldMap[filter.Field]
+ }
+
+ // Special case for LIKE queries, the column needs to be uppercase for comparison
+ fieldName := fmt.Sprintf("`%s`", filter.Field)
+ if strings.ToLower(filter.Modifier) == "contains" || strings.ToLower(filter.Modifier) == "starts" || strings.ToLower(filter.Modifier) == "ends" {
+ fieldName = fmt.Sprintf("UPPER(`%s`)", filter.Field)
+ }
+
+ clauses = append(clauses, fmt.Sprintf("%s %s", fieldName, getSQLAssignmentFromModifier(filter, &params)))
+ }
+
+ return strings.Join(clauses, " AND "), params
+}
+
+func getSQLAssignmentFromModifier(filter model.Filter, params *[]interface{}) string {
+ var clause string
+
+ // Quick hacks
+ if filter.Modifier == "in" && len(filter.Value) == 1 {
+ filter.Modifier = "equals"
+ } else if filter.Modifier == "notin" && len(filter.Value) == 1 {
+ filter.Modifier = "not"
+ }
+
+ switch strings.ToLower(filter.Modifier) {
+ default:
+ clause = "= ?"
+ case "not":
+ clause = "!= ?"
+ case "min":
+ clause = ">= ?"
+ case "max":
+ clause = "<= ?"
+ case "greater":
+ clause = "> ?"
+ case "lesser":
+ clause = "< ?"
+
+ // LIKE modifiers:
+ case "contains":
+ *params = append(*params, strings.ToUpper(filter.Value[0]))
+ return "LIKE '%' || ? || '%'"
+ case "starts":
+ *params = append(*params, strings.ToUpper(filter.Value[0]))
+ return "LIKE ? || '%'"
+ case "ends":
+ *params = append(*params, strings.ToUpper(filter.Value[0]))
+ return "LIKE '%' || ?"
+
+ // Array parameter modifiers:
+ case "in":
+ s, p := buildInArray(filter.Value)
+ *params = append(*params, p...)
+ return fmt.Sprintf("IN (%s)", s)
+ case "notin":
+ s, p := buildInArray(filter.Value)
+ *params = append(*params, p...)
+ return fmt.Sprintf("NOT IN (%s)", s)
+ }
+
+ *params = append(*params, filter.Value[0])
+ return clause
+}
+
+// GetFilterMap returns the filter map
+func GetFilterMap(m interface{}) map[string]string {
+ var filterMap = make(map[string]string)
+
+ // TypeOf returns the reflection Type that represents the dynamic type of variable.
+ // If variable is a nil interface value, TypeOf returns nil.
+ t := reflect.TypeOf(m)
+
+ // Iterate over all available fields and read the tag value
+ for i := 0; i < t.NumField(); i++ {
+ // Get the field, returns https://golang.org/pkg/reflect/#StructField
+ field := t.Field(i)
+
+ // Get the field tag value
+ filterTag := field.Tag.Get(FilterTagName)
+ dbTag := field.Tag.Get(DBTagName)
+ if filterTag != "" && dbTag != "" && dbTag != "-" && filterTag != "-" {
+ // Filter tag can be a 2 part thing: name,type
+ // ie: account_id,integer
+ // So we need to split and use the first part
+ parts := strings.Split(filterTag, ",")
+ filterMap[parts[0]] = dbTag
+ filterMap[filterTag] = dbTag
+ }
+ }
+
+ return filterMap
+}
+
+// GetDBColumns returns the db columns
+func GetDBColumns(m interface{}) []string {
+ var columns []string
+ t := reflect.TypeOf(m)
+
+ for i := 0; i < t.NumField(); i++ {
+ field := t.Field(i)
+ dbTag := field.Tag.Get(DBTagName)
+ if dbTag != "" && dbTag != "-" {
+ columns = append(columns, dbTag)
+ }
+ }
+
+ return columns
+}
+
+func buildInArray(items []string) (string, []interface{}) {
+ // Query string placeholder
+ strs := make([]string, len(items))
+ for i := 0; i < len(items); i++ {
+ strs[i] = "?"
+ }
+
+ // Params as interface
+ params := make([]interface{}, len(items))
+ for i, v := range items {
+ params[i] = v
+ }
+
+ return strings.Join(strs, ", "), params
+}
diff --git a/backend/internal/entity/filters_schema.go b/backend/internal/entity/filters_schema.go
new file mode 100644
index 00000000..9686293b
--- /dev/null
+++ b/backend/internal/entity/filters_schema.go
@@ -0,0 +1,223 @@
+package entity
+
+import (
+ "fmt"
+ "reflect"
+ "strings"
+)
+
+// GetFilterSchema creates a jsonschema for validating filters, based on the model
+// object given and by reading the struct "filter" tags.
+func GetFilterSchema(m interface{}) string {
+ var schemas []string
+ t := reflect.TypeOf(m)
+
+ for i := 0; i < t.NumField(); i++ {
+ field := t.Field(i)
+ filterTag := field.Tag.Get(FilterTagName)
+
+ if filterTag != "" && filterTag != "-" {
+ // split out tag value "field,filterType"
+ // with a default filter type of string
+ items := strings.Split(filterTag, ",")
+ if len(items) == 1 {
+ items = append(items, "string")
+ }
+
+ switch items[1] {
+ case "int":
+ fallthrough
+ case "integer":
+ schemas = append(schemas, intFieldSchema(items[0]))
+ case "bool":
+ fallthrough
+ case "boolean":
+ schemas = append(schemas, boolFieldSchema(items[0]))
+ case "date":
+ schemas = append(schemas, dateFieldSchema(items[0]))
+ case "regex":
+ if len(items) < 3 {
+ items = append(items, ".*")
+ }
+ schemas = append(schemas, regexFieldSchema(items[0], items[2]))
+
+ default:
+ schemas = append(schemas, stringFieldSchema(items[0]))
+ }
+ }
+ }
+
+ return newFilterSchema(schemas)
+}
+
+// newFilterSchema is the main method to specify a new Filter Schema for use in Middleware
+func newFilterSchema(fieldSchemas []string) string {
+ return fmt.Sprintf(baseFilterSchema, strings.Join(fieldSchemas, ", "))
+}
+
+// boolFieldSchema returns the Field Schema for a Boolean accepted value field
+func boolFieldSchema(fieldName string) string {
+ return fmt.Sprintf(`{
+ "type": "object",
+ "properties": {
+ "field": {
+ "type": "string",
+ "pattern": "^%s$"
+ },
+ "modifier": %s,
+ "value": {
+ "oneOf": [
+ %s,
+ {
+ "type": "array",
+ "items": %s
+ }
+ ]
+ }
+ }
+ }`, fieldName, boolModifiers, filterBool, filterBool)
+}
+
+// intFieldSchema returns the Field Schema for a Integer accepted value field
+func intFieldSchema(fieldName string) string {
+ return fmt.Sprintf(`{
+ "type": "object",
+ "properties": {
+ "field": {
+ "type": "string",
+ "pattern": "^%s$"
+ },
+ "modifier": %s,
+ "value": {
+ "oneOf": [
+ {
+ "type": "string",
+ "pattern": "^[0-9]+$"
+ },
+ {
+ "type": "array",
+ "items": {
+ "type": "string",
+ "pattern": "^[0-9]+$"
+ }
+ }
+ ]
+ }
+ }
+ }`, fieldName, allModifiers)
+}
+
+// stringFieldSchema returns the Field Schema for a String accepted value field
+func stringFieldSchema(fieldName string) string {
+ return fmt.Sprintf(`{
+ "type": "object",
+ "properties": {
+ "field": {
+ "type": "string",
+ "pattern": "^%s$"
+ },
+ "modifier": %s,
+ "value": {
+ "oneOf": [
+ %s,
+ {
+ "type": "array",
+ "items": %s
+ }
+ ]
+ }
+ }
+ }`, fieldName, stringModifiers, filterString, filterString)
+}
+
+// regexFieldSchema returns the Field Schema for a String accepted value field matching a Regex
+func regexFieldSchema(fieldName string, regex string) string {
+ return fmt.Sprintf(`{
+ "type": "object",
+ "properties": {
+ "field": {
+ "type": "string",
+ "pattern": "^%s$"
+ },
+ "modifier": %s,
+ "value": {
+ "oneOf": [
+ {
+ "type": "string",
+ "pattern": "%s"
+ },
+ {
+ "type": "array",
+ "items": {
+ "type": "string",
+ "pattern": "%s"
+ }
+ }
+ ]
+ }
+ }
+ }`, fieldName, stringModifiers, regex, regex)
+}
+
+// dateFieldSchema returns the Field Schema for a String accepted value field matching a Date format
+func dateFieldSchema(fieldName string) string {
+ return fmt.Sprintf(`{
+ "type": "object",
+ "properties": {
+ "field": {
+ "type": "string",
+ "pattern": "^%s$"
+ },
+ "modifier": %s,
+ "value": {
+ "oneOf": [
+ {
+ "type": "string",
+ "pattern": "^([12]\\d{3}-(0[1-9]|1[0-2])-(0[1-9]|[12]\\d|3[01]))$"
+ },
+ {
+ "type": "array",
+ "items": {
+ "type": "string",
+ "pattern": "^([12]\\d{3}-(0[1-9]|1[0-2])-(0[1-9]|[12]\\d|3[01]))$"
+ }
+ }
+ ]
+ }
+ }
+ }`, fieldName, allModifiers)
+}
+
+const allModifiers = `{
+ "type": "string",
+ "pattern": "^(equals|not|contains|starts|ends|in|notin|min|max|greater|less)$"
+}`
+
+const boolModifiers = `{
+ "type": "string",
+ "pattern": "^(equals|not)$"
+}`
+
+const stringModifiers = `{
+ "type": "string",
+ "pattern": "^(equals|not|contains|starts|ends|in|notin)$"
+}`
+
+const filterBool = `{
+ "type": "string",
+ "pattern": "^(TRUE|true|t|yes|y|on|1|FALSE|f|false|n|no|off|0)$"
+}`
+
+const filterString = `{
+ "type": "string",
+ "minLength": 1
+}`
+
+const baseFilterSchema = `{
+ "type": "array",
+ "items": {
+ "oneOf": [
+ %s
+ ]
+ }
+}`
diff --git a/backend/internal/entity/host/filters.go b/backend/internal/entity/host/filters.go
new file mode 100644
index 00000000..7b023917
--- /dev/null
+++ b/backend/internal/entity/host/filters.go
@@ -0,0 +1,25 @@
+package host
+
+import (
+ "npm/internal/entity"
+)
+
+var filterMapFunctions = make(map[string]entity.FilterMapFunction)
+
+// getFilterMapFunctions is a map of functions that should be executed
+// during the filtering process, if a field is defined here then the value in
+// the filter will be given to the defined function and it will return a new
+// value for use in the sql query.
+func getFilterMapFunctions() map[string]entity.FilterMapFunction {
+ // if len(filterMapFunctions) == 0 {
+ // TODO: See internal/model/file_item.go:620 for an example
+ // }
+
+ return filterMapFunctions
+}
+
+// GetFilterSchema returns filter schema
+func GetFilterSchema() string {
+ var m Model
+ return entity.GetFilterSchema(m)
+}
diff --git a/backend/internal/entity/host/methods.go b/backend/internal/entity/host/methods.go
new file mode 100644
index 00000000..ddf64dc1
--- /dev/null
+++ b/backend/internal/entity/host/methods.go
@@ -0,0 +1,183 @@
+package host
+
+import (
+ "database/sql"
+ goerrors "errors"
+ "fmt"
+
+ "npm/internal/database"
+ "npm/internal/entity"
+ "npm/internal/errors"
+ "npm/internal/logger"
+ "npm/internal/model"
+)
+
+// GetByID finds a Host by ID
+func GetByID(id int) (Model, error) {
+ var m Model
+ err := m.LoadByID(id)
+ return m, err
+}
+
+// create will create a Host from this model
+func create(host *Model) (int, error) {
+ if host.ID != 0 {
+ return 0, goerrors.New("Cannot create host when model already has an ID")
+ }
+
+ host.Touch(true)
+
+ db := database.GetInstance()
+ // nolint: gosec
+ result, err := db.NamedExec(`INSERT INTO `+fmt.Sprintf("`%s`", tableName)+` (
+ created_on,
+ modified_on,
+ user_id,
+ type,
+ host_template_id,
+ listen_interface,
+ domain_names,
+ upstream_id,
+ certificate_id,
+ access_list_id,
+ ssl_forced,
+ caching_enabled,
+ block_exploits,
+ allow_websocket_upgrade,
+ http2_support,
+ hsts_enabled,
+ hsts_subdomains,
+ paths,
+ upstream_options,
+ advanced_config,
+ is_disabled,
+ is_deleted
+ ) VALUES (
+ :created_on,
+ :modified_on,
+ :user_id,
+ :type,
+ :host_template_id,
+ :listen_interface,
+ :domain_names,
+ :upstream_id,
+ :certificate_id,
+ :access_list_id,
+ :ssl_forced,
+ :caching_enabled,
+ :block_exploits,
+ :allow_websocket_upgrade,
+ :http2_support,
+ :hsts_enabled,
+ :hsts_subdomains,
+ :paths,
+ :upstream_options,
+ :advanced_config,
+ :is_disabled,
+ :is_deleted
+ )`, host)
+
+ if err != nil {
+ return 0, err
+ }
+
+ last, lastErr := result.LastInsertId()
+ if lastErr != nil {
+ return 0, lastErr
+ }
+
+ return int(last), nil
+}
+
+// update will Update a Host from this model
+func update(host *Model) error {
+ if host.ID == 0 {
+ return goerrors.New("Cannot update host when model doesn't have an ID")
+ }
+
+ host.Touch(false)
+
+ db := database.GetInstance()
+ // nolint: gosec
+ _, err := db.NamedExec(`UPDATE `+fmt.Sprintf("`%s`", tableName)+` SET
+ created_on = :created_on,
+ modified_on = :modified_on,
+ user_id = :user_id,
+ type = :type,
+ host_template_id = :host_template_id,
+ listen_interface = :listen_interface,
+ domain_names = :domain_names,
+ upstream_id = :upstream_id,
+ certificate_id = :certificate_id,
+ access_list_id = :access_list_id,
+ ssl_forced = :ssl_forced,
+ caching_enabled = :caching_enabled,
+ block_exploits = :block_exploits,
+ allow_websocket_upgrade = :allow_websocket_upgrade,
+ http2_support = :http2_support,
+ hsts_enabled = :hsts_enabled,
+ hsts_subdomains = :hsts_subdomains,
+ paths = :paths,
+ upstream_options = :upstream_options,
+ advanced_config = :advanced_config,
+ is_disabled = :is_disabled,
+ is_deleted = :is_deleted
+ WHERE id = :id`, host)
+
+ return err
+}
+
+// List will return a list of hosts
+func List(pageInfo model.PageInfo, filters []model.Filter, expand []string) (ListResponse, error) {
+ var result ListResponse
+ var exampleModel Model
+
+ defaultSort := model.Sort{
+ Field: "domain_names",
+ Direction: "ASC",
+ }
+
+ db := database.GetInstance()
+ if db == nil {
+ return result, errors.ErrDatabaseUnavailable
+ }
+
+ // Get count of items in this search
+ query, params := entity.ListQueryBuilder(exampleModel, tableName, &pageInfo, defaultSort, filters, getFilterMapFunctions(), true)
+ countRow := db.QueryRowx(query, params...)
+ var totalRows int
+ queryErr := countRow.Scan(&totalRows)
+ if queryErr != nil && queryErr != sql.ErrNoRows {
+ logger.Debug("%s -- %+v", query, params)
+ return result, queryErr
+ }
+
+ // Get rows
+ var items []Model
+ query, params = entity.ListQueryBuilder(exampleModel, tableName, &pageInfo, defaultSort, filters, getFilterMapFunctions(), false)
+ err := db.Select(&items, query, params...)
+ if err != nil {
+ logger.Debug("%s -- %+v", query, params)
+ return result, err
+ }
+
+ if expand != nil {
+ for idx := range items {
+ expandErr := items[idx].Expand(expand)
+ if expandErr != nil {
+ logger.Error("HostsExpansionError", expandErr)
+ }
+ }
+ }
+
+ result = ListResponse{
+ Items: items,
+ Total: totalRows,
+ Limit: pageInfo.Limit,
+ Offset: pageInfo.Offset,
+ Sort: pageInfo.Sort,
+ Filter: filters,
+ }
+
+ return result, nil
+}
diff --git a/backend/internal/entity/host/model.go b/backend/internal/entity/host/model.go
new file mode 100644
index 00000000..e9eb88fe
--- /dev/null
+++ b/backend/internal/entity/host/model.go
@@ -0,0 +1,120 @@
+package host
+
+import (
+ "fmt"
+ "time"
+
+ "npm/internal/database"
+ "npm/internal/entity/certificate"
+ "npm/internal/entity/user"
+ "npm/internal/types"
+ "npm/internal/util"
+)
+
+const (
+ tableName = "host"
+
+ // ProxyHostType is self explanatory
+ ProxyHostType = "proxy"
+ // RedirectionHostType is self explanatory
+ RedirectionHostType = "redirection"
+ // DeadHostType is self explanatory
+ DeadHostType = "dead"
+)
+
+// Model is the host model
+type Model struct {
+ ID int `json:"id" db:"id" filter:"id,integer"`
+ CreatedOn types.DBDate `json:"created_on" db:"created_on" filter:"created_on,integer"`
+ ModifiedOn types.DBDate `json:"modified_on" db:"modified_on" filter:"modified_on,integer"`
+ UserID int `json:"user_id" db:"user_id" filter:"user_id,integer"`
+ Type string `json:"type" db:"type" filter:"type,string"`
+ HostTemplateID int `json:"host_template_id" db:"host_template_id" filter:"host_template_id,integer"`
+ ListenInterface string `json:"listen_interface" db:"listen_interface" filter:"listen_interface,string"`
+ DomainNames types.JSONB `json:"domain_names" db:"domain_names" filter:"domain_names,string"`
+ UpstreamID int `json:"upstream_id" db:"upstream_id" filter:"upstream_id,integer"`
+ CertificateID int `json:"certificate_id" db:"certificate_id" filter:"certificate_id,integer"`
+ AccessListID int `json:"access_list_id" db:"access_list_id" filter:"access_list_id,integer"`
+ SSLForced bool `json:"ssl_forced" db:"ssl_forced" filter:"ssl_forced,boolean"`
+ CachingEnabled bool `json:"caching_enabled" db:"caching_enabled" filter:"caching_enabled,boolean"`
+ BlockExploits bool `json:"block_exploits" db:"block_exploits" filter:"block_exploits,boolean"`
+ AllowWebsocketUpgrade bool `json:"allow_websocket_upgrade" db:"allow_websocket_upgrade" filter:"allow_websocket_upgrade,boolean"`
+ HTTP2Support bool `json:"http2_support" db:"http2_support" filter:"http2_support,boolean"`
+ HSTSEnabled bool `json:"hsts_enabled" db:"hsts_enabled" filter:"hsts_enabled,boolean"`
+ HSTSSubdomains bool `json:"hsts_subdomains" db:"hsts_subdomains" filter:"hsts_subdomains,boolean"`
+ Paths string `json:"paths" db:"paths" filter:"paths,string"`
+ UpstreamOptions string `json:"upstream_options" db:"upstream_options" filter:"upstream_options,string"`
+ AdvancedConfig string `json:"advanced_config" db:"advanced_config" filter:"advanced_config,string"`
+ IsDisabled bool `json:"is_disabled" db:"is_disabled" filter:"is_disabled,boolean"`
+ IsDeleted bool `json:"is_deleted,omitempty" db:"is_deleted"`
+ // Expansions
+ Certificate *certificate.Model `json:"certificate,omitempty"`
+ User *user.Model `json:"user,omitempty"`
+}
+
+func (m *Model) getByQuery(query string, params []interface{}) error {
+ return database.GetByQuery(m, query, params)
+}
+
+// LoadByID will load from an ID
+func (m *Model) LoadByID(id int) error {
+ query := fmt.Sprintf("SELECT * FROM `%s` WHERE id = ? AND is_deleted = ? LIMIT 1", tableName)
+ params := []interface{}{id, 0}
+ return m.getByQuery(query, params)
+}
+
+// Touch will update model's timestamp(s)
+func (m *Model) Touch(created bool) {
+ var d types.DBDate
+ d.Time = time.Now()
+ if created {
+ m.CreatedOn = d
+ }
+ m.ModifiedOn = d
+}
+
+// Save will save this model to the DB
+func (m *Model) Save() error {
+ var err error
+
+ if m.UserID == 0 {
+ return fmt.Errorf("User ID must be specified")
+ }
+
+ if m.ID == 0 {
+ m.ID, err = create(m)
+ } else {
+ err = update(m)
+ }
+
+ return err
+}
+
+// Delete will mark a host as deleted
+func (m *Model) Delete() bool {
+ m.Touch(false)
+ m.IsDeleted = true
+ if err := m.Save(); err != nil {
+ return false
+ }
+ return true
+}
+
+// Expand will fill in more properties
+func (m *Model) Expand(items []string) error {
+ var err error
+
+ if util.SliceContainsItem(items, "user") && m.ID > 0 {
+ var usr user.Model
+ usr, err = user.GetByID(m.UserID)
+ m.User = &usr
+ }
+
+ if util.SliceContainsItem(items, "certificate") && m.CertificateID > 0 {
+ var cert certificate.Model
+ cert, err = certificate.GetByID(m.CertificateID)
+ m.Certificate = &cert
+ }
+
+ return err
+}
diff --git a/backend/internal/entity/host/structs.go b/backend/internal/entity/host/structs.go
new file mode 100644
index 00000000..dda3d6fe
--- /dev/null
+++ b/backend/internal/entity/host/structs.go
@@ -0,0 +1,15 @@
+package host
+
+import (
+ "npm/internal/model"
+)
+
+// ListResponse is the JSON response for this list
+type ListResponse struct {
+ Total int `json:"total"`
+ Offset int `json:"offset"`
+ Limit int `json:"limit"`
+ Sort []model.Sort `json:"sort"`
+ Filter []model.Filter `json:"filter,omitempty"`
+ Items []Model `json:"items,omitempty"`
+}
diff --git a/backend/internal/entity/hosttemplate/filters.go b/backend/internal/entity/hosttemplate/filters.go
new file mode 100644
index 00000000..3b4d512e
--- /dev/null
+++ b/backend/internal/entity/hosttemplate/filters.go
@@ -0,0 +1,25 @@
+package hosttemplate
+
+import (
+ "npm/internal/entity"
+)
+
+var filterMapFunctions = make(map[string]entity.FilterMapFunction)
+
+// getFilterMapFunctions is a map of functions that should be executed
+// during the filtering process, if a field is defined here then the value in
+// the filter will be given to the defined function and it will return a new
+// value for use in the sql query.
+func getFilterMapFunctions() map[string]entity.FilterMapFunction {
+ // if len(filterMapFunctions) == 0 {
+ // TODO: See internal/model/file_item.go:620 for an example
+ // }
+
+ return filterMapFunctions
+}
+
+// GetFilterSchema returns filter schema
+func GetFilterSchema() string {
+ var m Model
+ return entity.GetFilterSchema(m)
+}
diff --git a/backend/internal/entity/hosttemplate/methods.go b/backend/internal/entity/hosttemplate/methods.go
new file mode 100644
index 00000000..e75fcfd3
--- /dev/null
+++ b/backend/internal/entity/hosttemplate/methods.go
@@ -0,0 +1,129 @@
+package hosttemplate
+
+import (
+ "database/sql"
+ goerrors "errors"
+ "fmt"
+
+ "npm/internal/database"
+ "npm/internal/entity"
+ "npm/internal/errors"
+ "npm/internal/logger"
+ "npm/internal/model"
+)
+
+// GetByID finds a Host Template by ID
+func GetByID(id int) (Model, error) {
+ var m Model
+ err := m.LoadByID(id)
+ return m, err
+}
+
+// Create will create a Host Template from this model
+func Create(host *Model) (int, error) {
+ if host.ID != 0 {
+ return 0, goerrors.New("Cannot create host template when model already has an ID")
+ }
+
+ host.Touch(true)
+
+ db := database.GetInstance()
+ // nolint: gosec
+ result, err := db.NamedExec(`INSERT INTO `+fmt.Sprintf("`%s`", tableName)+` (
+ created_on,
+ modified_on,
+ user_id,
+ name,
+ host_type,
+ template,
+ is_deleted
+ ) VALUES (
+ :created_on,
+ :modified_on,
+ :user_id,
+ :name,
+ :host_type,
+ :template,
+ :is_deleted
+ )`, host)
+
+ if err != nil {
+ return 0, err
+ }
+
+ last, lastErr := result.LastInsertId()
+ if lastErr != nil {
+ return 0, lastErr
+ }
+
+ return int(last), nil
+}
+
+// Update will Update a Host Template from this model
+func Update(host *Model) error {
+ if host.ID == 0 {
+ return goerrors.New("Cannot update host template when model doesn't have an ID")
+ }
+
+ host.Touch(false)
+
+ db := database.GetInstance()
+ // nolint: gosec
+ _, err := db.NamedExec(`UPDATE `+fmt.Sprintf("`%s`", tableName)+` SET
+ created_on = :created_on,
+ modified_on = :modified_on,
+ user_id = :user_id,
+ name = :name,
+ host_type = :host_type,
+ template = :template,
+ is_deleted = :is_deleted
+ WHERE id = :id`, host)
+
+ return err
+}
+
+// List will return a list of host templates
+func List(pageInfo model.PageInfo, filters []model.Filter) (ListResponse, error) {
+ var result ListResponse
+ var exampleModel Model
+
+ defaultSort := model.Sort{
+ Field: "created_on",
+ Direction: "ASC",
+ }
+
+ db := database.GetInstance()
+ if db == nil {
+ return result, errors.ErrDatabaseUnavailable
+ }
+
+ // Get count of items in this search
+ query, params := entity.ListQueryBuilder(exampleModel, tableName, &pageInfo, defaultSort, filters, getFilterMapFunctions(), true)
+ countRow := db.QueryRowx(query, params...)
+ var totalRows int
+ queryErr := countRow.Scan(&totalRows)
+ if queryErr != nil && queryErr != sql.ErrNoRows {
+ logger.Debug("%s -- %+v", query, params)
+ return result, queryErr
+ }
+
+ // Get rows
+ var items []Model
+ query, params = entity.ListQueryBuilder(exampleModel, tableName, &pageInfo, defaultSort, filters, getFilterMapFunctions(), false)
+ err := db.Select(&items, query, params...)
+ if err != nil {
+ logger.Debug("%s -- %+v", query, params)
+ return result, err
+ }
+
+ result = ListResponse{
+ Items: items,
+ Total: totalRows,
+ Limit: pageInfo.Limit,
+ Offset: pageInfo.Offset,
+ Sort: pageInfo.Sort,
+ Filter: filters,
+ }
+
+ return result, nil
+}
diff --git a/backend/internal/entity/hosttemplate/model.go b/backend/internal/entity/hosttemplate/model.go
new file mode 100644
index 00000000..c791537a
--- /dev/null
+++ b/backend/internal/entity/hosttemplate/model.go
@@ -0,0 +1,73 @@
+package hosttemplate
+
+import (
+ "fmt"
+ "time"
+
+ "npm/internal/database"
+ "npm/internal/types"
+)
+
+const (
+ tableName = "host_template"
+)
+
+// Model is the host template model
+type Model struct {
+ ID int `json:"id" db:"id" filter:"id,integer"`
+ CreatedOn types.DBDate `json:"created_on" db:"created_on" filter:"created_on,integer"`
+ ModifiedOn types.DBDate `json:"modified_on" db:"modified_on" filter:"modified_on,integer"`
+ UserID int `json:"user_id" db:"user_id" filter:"user_id,integer"`
+ Name string `json:"name" db:"name" filter:"name,string"`
+ Type string `json:"host_type" db:"host_type" filter:"host_type,string"`
+ Template string `json:"template" db:"template" filter:"template,string"`
+ IsDeleted bool `json:"is_deleted,omitempty" db:"is_deleted"`
+}
+
+func (m *Model) getByQuery(query string, params []interface{}) error {
+ return database.GetByQuery(m, query, params)
+}
+
+// LoadByID will load from an ID
+func (m *Model) LoadByID(id int) error {
+ query := fmt.Sprintf("SELECT * FROM `%s` WHERE id = ? AND is_deleted = ? LIMIT 1", tableName)
+ params := []interface{}{id, 0}
+ return m.getByQuery(query, params)
+}
+
+// Touch will update model's timestamp(s)
+func (m *Model) Touch(created bool) {
+ var d types.DBDate
+ d.Time = time.Now()
+ if created {
+ m.CreatedOn = d
+ }
+ m.ModifiedOn = d
+}
+
+// Save will save this model to the DB
+func (m *Model) Save() error {
+ var err error
+
+ if m.UserID == 0 {
+ return fmt.Errorf("User ID must be specified")
+ }
+
+ if m.ID == 0 {
+ m.ID, err = Create(m)
+ } else {
+ err = Update(m)
+ }
+
+ return err
+}
+
+// Delete will mark a host as deleted
+func (m *Model) Delete() bool {
+ m.Touch(false)
+ m.IsDeleted = true
+ if err := m.Save(); err != nil {
+ return false
+ }
+ return true
+}
diff --git a/backend/internal/entity/hosttemplate/structs.go b/backend/internal/entity/hosttemplate/structs.go
new file mode 100644
index 00000000..4cc54b91
--- /dev/null
+++ b/backend/internal/entity/hosttemplate/structs.go
@@ -0,0 +1,15 @@
+package hosttemplate
+
+import (
+ "npm/internal/model"
+)
+
+// ListResponse is the JSON response for this list
+type ListResponse struct {
+ Total int `json:"total"`
+ Offset int `json:"offset"`
+ Limit int `json:"limit"`
+ Sort []model.Sort `json:"sort"`
+ Filter []model.Filter `json:"filter,omitempty"`
+ Items []Model `json:"items,omitempty"`
+}
diff --git a/backend/internal/entity/lists_query.go b/backend/internal/entity/lists_query.go
new file mode 100644
index 00000000..345f8584
--- /dev/null
+++ b/backend/internal/entity/lists_query.go
@@ -0,0 +1,80 @@
+package entity
+
+import (
+ "fmt"
+ "reflect"
+ "strings"
+
+ "npm/internal/database"
+ "npm/internal/model"
+)
+
+// ListQueryBuilder should be able to return the query and params to get items agnostically based
+// on given params.
+func ListQueryBuilder(modelExample interface{}, tableName string, pageInfo *model.PageInfo, defaultSort model.Sort, filters []model.Filter, filterMapFunctions map[string]FilterMapFunction, returnCount bool) (string, []interface{}) {
+ var queryStrings []string
+ var whereStrings []string
+ var params []interface{}
+
+ if returnCount {
+ queryStrings = append(queryStrings, "SELECT COUNT(*)")
+ } else {
+ queryStrings = append(queryStrings, "SELECT *")
+ }
+
+ // nolint: gosec
+ queryStrings = append(queryStrings, fmt.Sprintf("FROM `%s`", tableName))
+
+ // Append filters to where clause:
+ if filters != nil {
+ filterMap := GetFilterMap(modelExample)
+ filterQuery, filterParams := GenerateSQLFromFilters(filters, filterMap, filterMapFunctions)
+ whereStrings = []string{filterQuery}
+ params = append(params, filterParams...)
+ }
+
+ // Add is_deleted check if model has the field
+ if hasDeletedField(modelExample) {
+ params = append(params, 0)
+ whereStrings = append(whereStrings, "`is_deleted` = ?")
+ }
+
+ // Append where clauses to query
+ if len(whereStrings) > 0 {
+ // nolint: gosec
+ queryStrings = append(queryStrings, fmt.Sprintf("WHERE %s", strings.Join(whereStrings, " AND ")))
+ }
+
+ if !returnCount {
+ var orderBy string
+ columns := GetDBColumns(modelExample)
+ orderBy, pageInfo.Sort = database.BuildOrderBySQL(columns, &pageInfo.Sort)
+
+ if orderBy != "" {
+ queryStrings = append(queryStrings, orderBy)
+ } else {
+ pageInfo.Sort = append(pageInfo.Sort, defaultSort)
+ queryStrings = append(queryStrings, fmt.Sprintf("ORDER BY `%v` COLLATE NOCASE %v", defaultSort.Field, defaultSort.Direction))
+ }
+
+ params = append(params, pageInfo.Offset)
+ params = append(params, pageInfo.Limit)
+ queryStrings = append(queryStrings, "LIMIT ?, ?")
+ }
+
+ return strings.Join(queryStrings, " "), params
+}
+
+func hasDeletedField(modelExample interface{}) bool {
+ t := reflect.TypeOf(modelExample)
+
+ for i := 0; i < t.NumField(); i++ {
+ field := t.Field(i)
+ dbTag := field.Tag.Get(DBTagName)
+ if dbTag == "is_deleted" {
+ return true
+ }
+ }
+
+ return false
+}
diff --git a/backend/internal/entity/setting/apply.go b/backend/internal/entity/setting/apply.go
new file mode 100644
index 00000000..29428975
--- /dev/null
+++ b/backend/internal/entity/setting/apply.go
@@ -0,0 +1,19 @@
+package setting
+
+import (
+ "npm/internal/config"
+ "npm/internal/logger"
+)
+
+// ApplySettings will load settings from the DB and apply them where required
+func ApplySettings() {
+ logger.Debug("Applying Settings")
+
+ // Error-reporting
+ m, err := GetByName("error-reporting")
+ if err != nil {
+ logger.Error("ApplySettingsError", err)
+ } else {
+ config.ErrorReporting = m.Value.Decoded.(bool)
+ }
+}
diff --git a/backend/internal/entity/setting/filters.go b/backend/internal/entity/setting/filters.go
new file mode 100644
index 00000000..c9e92416
--- /dev/null
+++ b/backend/internal/entity/setting/filters.go
@@ -0,0 +1,25 @@
+package setting
+
+import (
+ "npm/internal/entity"
+)
+
+var filterMapFunctions = make(map[string]entity.FilterMapFunction)
+
+// getFilterMapFunctions is a map of functions that should be executed
+// during the filtering process, if a field is defined here then the value in
+// the filter will be given to the defined function and it will return a new
+// value for use in the sql query.
+func getFilterMapFunctions() map[string]entity.FilterMapFunction {
+ // if len(filterMapFunctions) == 0 {
+ // TODO: See internal/model/file_item.go:620 for an example
+ // }
+
+ return filterMapFunctions
+}
+
+// GetFilterSchema returns filter schema
+func GetFilterSchema() string {
+ var m Model
+ return entity.GetFilterSchema(m)
+}
diff --git a/backend/internal/entity/setting/methods.go b/backend/internal/entity/setting/methods.go
new file mode 100644
index 00000000..51b75177
--- /dev/null
+++ b/backend/internal/entity/setting/methods.go
@@ -0,0 +1,127 @@
+package setting
+
+import (
+ "database/sql"
+ goerrors "errors"
+ "fmt"
+
+ "npm/internal/database"
+ "npm/internal/entity"
+ "npm/internal/errors"
+ "npm/internal/logger"
+ "npm/internal/model"
+)
+
+// GetByID finds a setting by ID
+func GetByID(id int) (Model, error) {
+ var m Model
+ err := m.LoadByID(id)
+ return m, err
+}
+
+// GetByName finds a setting by name
+func GetByName(name string) (Model, error) {
+ var m Model
+ err := m.LoadByName(name)
+ return m, err
+}
+
+// Create will Create a Setting from this model
+func Create(setting *Model) (int, error) {
+ if setting.ID != 0 {
+ return 0, goerrors.New("Cannot create setting when model already has an ID")
+ }
+
+ setting.Touch(true)
+
+ db := database.GetInstance()
+ // nolint: gosec
+ result, err := db.NamedExec(`INSERT INTO `+fmt.Sprintf("`%s`", tableName)+` (
+ created_on,
+ modified_on,
+ name,
+ value
+ ) VALUES (
+ :created_on,
+ :modified_on,
+ :name,
+ :value
+ )`, setting)
+
+ if err != nil {
+ return 0, err
+ }
+
+ last, lastErr := result.LastInsertId()
+ if lastErr != nil {
+ return 0, lastErr
+ }
+
+ return int(last), nil
+}
+
+// Update will Update a Setting from this model
+func Update(setting *Model) error {
+ if setting.ID == 0 {
+ return goerrors.New("Cannot update setting when model doesn't have an ID")
+ }
+
+ setting.Touch(false)
+
+ db := database.GetInstance()
+ // nolint: gosec
+ _, err := db.NamedExec(`UPDATE `+fmt.Sprintf("`%s`", tableName)+` SET
+ created_on = :created_on,
+ modified_on = :modified_on,
+ name = :name,
+ value = :value
+ WHERE id = :id`, setting)
+
+ return err
+}
+
+// List will return a list of settings
+func List(pageInfo model.PageInfo, filters []model.Filter) (ListResponse, error) {
+ var result ListResponse
+ var exampleModel Model
+
+ defaultSort := model.Sort{
+ Field: "name",
+ Direction: "ASC",
+ }
+
+ db := database.GetInstance()
+ if db == nil {
+ return result, errors.ErrDatabaseUnavailable
+ }
+
+ // Get count of items in this search
+ query, params := entity.ListQueryBuilder(exampleModel, tableName, &pageInfo, defaultSort, filters, getFilterMapFunctions(), true)
+ countRow := db.QueryRowx(query, params...)
+ var totalRows int
+ queryErr := countRow.Scan(&totalRows)
+ if queryErr != nil && queryErr != sql.ErrNoRows {
+ logger.Debug("%+v", queryErr)
+ return result, queryErr
+ }
+
+ // Get rows
+ var items []Model
+ query, params = entity.ListQueryBuilder(exampleModel, tableName, &pageInfo, defaultSort, filters, getFilterMapFunctions(), false)
+ err := db.Select(&items, query, params...)
+ if err != nil {
+ logger.Debug("%+v", err)
+ return result, err
+ }
+
+ result = ListResponse{
+ Items: items,
+ Total: totalRows,
+ Limit: pageInfo.Limit,
+ Offset: pageInfo.Offset,
+ Sort: pageInfo.Sort,
+ Filter: filters,
+ }
+
+ return result, nil
+}
diff --git a/backend/internal/entity/setting/model.go b/backend/internal/entity/setting/model.go
new file mode 100644
index 00000000..e66d7276
--- /dev/null
+++ b/backend/internal/entity/setting/model.go
@@ -0,0 +1,70 @@
+package setting
+
+import (
+ "fmt"
+ "strings"
+ "time"
+
+ "npm/internal/database"
+ "npm/internal/types"
+)
+
+const (
+	tableName = "setting"
+)
+
+// Model is the setting database model (one named, JSON-valued setting row).
+type Model struct {
+	ID          int          `json:"id" db:"id" filter:"id,integer"`
+	CreatedOn   types.DBDate `json:"created_on" db:"created_on" filter:"created_on,integer"`
+	ModifiedOn  types.DBDate `json:"modified_on" db:"modified_on" filter:"modified_on,integer"`
+	Name        string       `json:"name" db:"name" filter:"name,string"`
+	Description string       `json:"description" db:"description" filter:"description,string"`
+	Value       types.JSONB  `json:"value" db:"value"`
+}
+
+// getByQuery loads the first row returned by the given query into this model.
+func (m *Model) getByQuery(query string, params []interface{}) error {
+	return database.GetByQuery(m, query, params)
+}
+
+// LoadByID populates this model from the setting row with the given ID.
+func (m *Model) LoadByID(id int) error {
+	query := fmt.Sprintf("SELECT * FROM `%s` WHERE `id` = ? LIMIT 1", tableName)
+	params := []interface{}{id}
+	return m.getByQuery(query, params)
+}
+
+// LoadByName populates this model from the setting with the given name.
+// Matching is case-insensitive and ignores surrounding whitespace.
+func (m *Model) LoadByName(name string) error {
+	query := fmt.Sprintf("SELECT * FROM `%s` WHERE LOWER(`name`) = ? LIMIT 1", tableName)
+	params := []interface{}{strings.TrimSpace(strings.ToLower(name))}
+	return m.getByQuery(query, params)
+}
+
+// Touch will update model's timestamp(s)
+func (m *Model) Touch(created bool) {
+ var d types.DBDate
+ d.Time = time.Now()
+ if created {
+ m.CreatedOn = d
+ }
+ m.ModifiedOn = d
+}
+
+// Save inserts (when ID is zero) or updates this setting, then re-applies
+// settings so the change takes effect immediately.
+func (m *Model) Save() error {
+	var err error
+
+	if m.ID == 0 {
+		m.ID, err = Create(m)
+	} else {
+		err = Update(m)
+	}
+
+	// Reapply settings
+	// NOTE(review): any result of ApplySettings is discarded here — confirm
+	// that failures to apply should not surface to the caller.
+	if err == nil {
+		ApplySettings()
+	}
+
+	return err
+}
diff --git a/backend/internal/entity/setting/structs.go b/backend/internal/entity/setting/structs.go
new file mode 100644
index 00000000..ba9851bd
--- /dev/null
+++ b/backend/internal/entity/setting/structs.go
@@ -0,0 +1,15 @@
+package setting
+
+import (
+ "npm/internal/model"
+)
+
+// ListResponse is the JSON response for the settings list endpoint.
+// Items and Filter are omitted from the JSON output when empty.
+type ListResponse struct {
+	Total  int            `json:"total"`
+	Offset int            `json:"offset"`
+	Limit  int            `json:"limit"`
+	Sort   []model.Sort   `json:"sort"`
+	Filter []model.Filter `json:"filter,omitempty"`
+	Items  []Model        `json:"items,omitempty"`
+}
diff --git a/backend/internal/entity/stream/filters.go b/backend/internal/entity/stream/filters.go
new file mode 100644
index 00000000..bf4c8832
--- /dev/null
+++ b/backend/internal/entity/stream/filters.go
@@ -0,0 +1,25 @@
+package stream
+
+import (
+ "npm/internal/entity"
+)
+
+// filterMapFunctions is lazily populated; currently no stream filters
+// require value mapping.
+var filterMapFunctions = make(map[string]entity.FilterMapFunction)
+
+// getFilterMapFunctions is a map of functions that should be executed
+// during the filtering process, if a field is defined here then the value in
+// the filter will be given to the defined function and it will return a new
+// value for use in the sql query.
+func getFilterMapFunctions() map[string]entity.FilterMapFunction {
+	// if len(filterMapFunctions) == 0 {
+	// TODO: See internal/model/file_item.go:620 for an example
+	// }
+
+	return filterMapFunctions
+}
+
+// GetFilterSchema returns filter schema
+func GetFilterSchema() string {
+ var m Model
+ return entity.GetFilterSchema(m)
+}
diff --git a/backend/internal/entity/stream/methods.go b/backend/internal/entity/stream/methods.go
new file mode 100644
index 00000000..02ddd4d6
--- /dev/null
+++ b/backend/internal/entity/stream/methods.go
@@ -0,0 +1,135 @@
+package stream
+
+import (
+ "database/sql"
+ goerrors "errors"
+ "fmt"
+
+ "npm/internal/database"
+ "npm/internal/entity"
+ "npm/internal/errors"
+ "npm/internal/logger"
+ "npm/internal/model"
+)
+
+// GetByID finds a stream by ID.
+func GetByID(id int) (Model, error) {
+	var m Model
+	err := m.LoadByID(id)
+	return m, err
+}
+
+// Create inserts a new stream row from this model and returns the new row
+// ID. The model must not already have an ID.
+func Create(host *Model) (int, error) {
+	if host.ID != 0 {
+		return 0, goerrors.New("Cannot create stream when model already has an ID")
+	}
+
+	// Set both created_on and modified_on
+	host.Touch(true)
+
+	db := database.GetInstance()
+	// nolint: gosec
+	result, err := db.NamedExec(`INSERT INTO `+fmt.Sprintf("`%s`", tableName)+` (
+		created_on,
+		modified_on,
+		user_id,
+		provider,
+		name,
+		domain_names,
+		expires_on,
+		meta,
+		is_deleted
+	) VALUES (
+		:created_on,
+		:modified_on,
+		:user_id,
+		:provider,
+		:name,
+		:domain_names,
+		:expires_on,
+		:meta,
+		:is_deleted
+	)`, host)
+
+	if err != nil {
+		return 0, err
+	}
+
+	last, lastErr := result.LastInsertId()
+	if lastErr != nil {
+		return 0, lastErr
+	}
+
+	return int(last), nil
+}
+
+// Update persists an existing stream row from this model, refreshing its
+// modified timestamp first. The model must already have a non-zero ID.
+func Update(host *Model) error {
+	if host.ID == 0 {
+		return goerrors.New("Cannot update stream when model doesn't have an ID")
+	}
+
+	host.Touch(false)
+
+	db := database.GetInstance()
+	// nolint: gosec
+	_, err := db.NamedExec(`UPDATE `+fmt.Sprintf("`%s`", tableName)+` SET
+		created_on = :created_on,
+		modified_on = :modified_on,
+		user_id = :user_id,
+		provider = :provider,
+		name = :name,
+		domain_names = :domain_names,
+		expires_on = :expires_on,
+		meta = :meta,
+		is_deleted = :is_deleted
+		WHERE id = :id`, host)
+
+	return err
+}
+
+// List returns a page of streams matching the given filters, along with
+// the total row count for the unpaged search.
+func List(pageInfo model.PageInfo, filters []model.Filter) (ListResponse, error) {
+	var result ListResponse
+	var exampleModel Model
+
+	// Used when the caller supplies no explicit sort.
+	defaultSort := model.Sort{
+		Field:     "name",
+		Direction: "ASC",
+	}
+
+	db := database.GetInstance()
+	if db == nil {
+		return result, errors.ErrDatabaseUnavailable
+	}
+
+	// Get count of items in this search (same builder, count mode)
+	query, params := entity.ListQueryBuilder(exampleModel, tableName, &pageInfo, defaultSort, filters, getFilterMapFunctions(), true)
+	countRow := db.QueryRowx(query, params...)
+	var totalRows int
+	queryErr := countRow.Scan(&totalRows)
+	// ErrNoRows just means an empty result set, not a failure.
+	if queryErr != nil && queryErr != sql.ErrNoRows {
+		logger.Debug("%s -- %+v", query, params)
+		return result, queryErr
+	}
+
+	// Get rows for the requested page
+	var items []Model
+	query, params = entity.ListQueryBuilder(exampleModel, tableName, &pageInfo, defaultSort, filters, getFilterMapFunctions(), false)
+	err := db.Select(&items, query, params...)
+	if err != nil {
+		logger.Debug("%s -- %+v", query, params)
+		return result, err
+	}
+
+	result = ListResponse{
+		Items:  items,
+		Total:  totalRows,
+		Limit:  pageInfo.Limit,
+		Offset: pageInfo.Offset,
+		Sort:   pageInfo.Sort,
+		Filter: filters,
+	}
+
+	return result, nil
+}
diff --git a/backend/internal/entity/stream/model.go b/backend/internal/entity/stream/model.go
new file mode 100644
index 00000000..60f47f75
--- /dev/null
+++ b/backend/internal/entity/stream/model.go
@@ -0,0 +1,75 @@
+package stream
+
+import (
+ "fmt"
+ "time"
+
+ "npm/internal/database"
+ "npm/internal/types"
+)
+
+const (
+	tableName = "stream"
+)
+
+// Model is the stream database model. Rows are soft-deleted via IsDeleted
+// rather than removed.
+type Model struct {
+	ID          int          `json:"id" db:"id" filter:"id,integer"`
+	CreatedOn   types.DBDate `json:"created_on" db:"created_on" filter:"created_on,integer"`
+	ModifiedOn  types.DBDate `json:"modified_on" db:"modified_on" filter:"modified_on,integer"`
+	ExpiresOn   types.DBDate `json:"expires_on" db:"expires_on" filter:"expires_on,integer"`
+	UserID      int          `json:"user_id" db:"user_id" filter:"user_id,integer"`
+	Provider    string       `json:"provider" db:"provider" filter:"provider,string"`
+	Name        string       `json:"name" db:"name" filter:"name,string"`
+	DomainNames types.JSONB  `json:"domain_names" db:"domain_names" filter:"domain_names,string"`
+	Meta        types.JSONB  `json:"-" db:"meta"`
+	IsDeleted   bool         `json:"is_deleted,omitempty" db:"is_deleted"`
+}
+
+// getByQuery loads the first row returned by the given query into this model.
+func (m *Model) getByQuery(query string, params []interface{}) error {
+	return database.GetByQuery(m, query, params)
+}
+
+// LoadByID populates this model from the non-deleted stream row with the
+// given ID.
+func (m *Model) LoadByID(id int) error {
+	query := fmt.Sprintf("SELECT * FROM `%s` WHERE id = ? AND is_deleted = ? LIMIT 1", tableName)
+	params := []interface{}{id, 0}
+	return m.getByQuery(query, params)
+}
+
+// Touch will update model's timestamp(s)
+func (m *Model) Touch(created bool) {
+ var d types.DBDate
+ d.Time = time.Now()
+ if created {
+ m.CreatedOn = d
+ }
+ m.ModifiedOn = d
+}
+
+// Save inserts (when ID is zero) or updates this stream. A non-zero
+// UserID is required, as every stream must belong to a user.
+func (m *Model) Save() error {
+	var err error
+
+	if m.UserID == 0 {
+		return fmt.Errorf("User ID must be specified")
+	}
+
+	if m.ID == 0 {
+		m.ID, err = Create(m)
+	} else {
+		err = Update(m)
+	}
+
+	return err
+}
+
+// Delete soft-deletes this stream by setting is_deleted and saving.
+// Returns false on failure; the underlying error is discarded.
+func (m *Model) Delete() bool {
+	m.Touch(false)
+	m.IsDeleted = true
+	if err := m.Save(); err != nil {
+		return false
+	}
+	return true
+}
diff --git a/backend/internal/entity/stream/structs.go b/backend/internal/entity/stream/structs.go
new file mode 100644
index 00000000..ec732c13
--- /dev/null
+++ b/backend/internal/entity/stream/structs.go
@@ -0,0 +1,15 @@
+package stream
+
+import (
+ "npm/internal/model"
+)
+
+// ListResponse is the JSON response for the streams list endpoint.
+// Items and Filter are omitted from the JSON output when empty.
+type ListResponse struct {
+	Total  int            `json:"total"`
+	Offset int            `json:"offset"`
+	Limit  int            `json:"limit"`
+	Sort   []model.Sort   `json:"sort"`
+	Filter []model.Filter `json:"filter,omitempty"`
+	Items  []Model        `json:"items,omitempty"`
+}
diff --git a/backend/internal/entity/user/capabilities.go b/backend/internal/entity/user/capabilities.go
new file mode 100644
index 00000000..704e7b6f
--- /dev/null
+++ b/backend/internal/entity/user/capabilities.go
@@ -0,0 +1,40 @@
+package user
+
+// Capability names as stored in the `capability` table. They are granted
+// to users via the user_has_capability join table (see SetPermissions).
+const (
+	// CapabilityFullAdmin can do anything
+	CapabilityFullAdmin = "full-admin"
+	// CapabilityAccessListsView access lists view
+	CapabilityAccessListsView = "access-lists.view"
+	// CapabilityAccessListsManage access lists manage
+	CapabilityAccessListsManage = "access-lists.manage"
+	// CapabilityAuditLogView audit log view
+	CapabilityAuditLogView = "audit-log.view"
+	// CapabilityCertificatesView certificates view
+	CapabilityCertificatesView = "certificates.view"
+	// CapabilityCertificatesManage certificates manage
+	CapabilityCertificatesManage = "certificates.manage"
+	// CapabilityCertificateAuthoritiesView certificate authorities view
+	CapabilityCertificateAuthoritiesView = "certificate-authorities.view"
+	// CapabilityCertificateAuthoritiesManage certificate authorities manage
+	CapabilityCertificateAuthoritiesManage = "certificate-authorities.manage"
+	// CapabilityDNSProvidersView dns providers view
+	CapabilityDNSProvidersView = "dns-providers.view"
+	// CapabilityDNSProvidersManage dns providers manage
+	CapabilityDNSProvidersManage = "dns-providers.manage"
+	// CapabilityHostsView hosts view
+	CapabilityHostsView = "hosts.view"
+	// CapabilityHostsManage hosts manage
+	CapabilityHostsManage = "hosts.manage"
+	// CapabilityHostTemplatesView host-templates view
+	CapabilityHostTemplatesView = "host-templates.view"
+	// CapabilityHostTemplatesManage host-templates manage
+	CapabilityHostTemplatesManage = "host-templates.manage"
+	// CapabilitySettingsManage settings manage
+	CapabilitySettingsManage = "settings.manage"
+	// CapabilityStreamsView streams view
+	CapabilityStreamsView = "streams.view"
+	// CapabilityStreamsManage streams manage
+	CapabilityStreamsManage = "streams.manage"
+	// CapabilityUsersManage users manage
+	CapabilityUsersManage = "users.manage"
+)
diff --git a/backend/internal/entity/user/filters.go b/backend/internal/entity/user/filters.go
new file mode 100644
index 00000000..bc38ef69
--- /dev/null
+++ b/backend/internal/entity/user/filters.go
@@ -0,0 +1,25 @@
+package user
+
+import (
+ "npm/internal/entity"
+)
+
+// filterMapFunctions is lazily populated; currently no user filters
+// require value mapping.
+var filterMapFunctions = make(map[string]entity.FilterMapFunction)
+
+// getFilterMapFunctions is a map of functions that should be executed
+// during the filtering process, if a field is defined here then the value in
+// the filter will be given to the defined function and it will return a new
+// value for use in the sql query.
+func getFilterMapFunctions() map[string]entity.FilterMapFunction {
+	// if len(filterMapFunctions) == 0 {
+	// TODO: See internal/model/file_item.go:620 for an example
+	// }
+
+	return filterMapFunctions
+}
+
+// GetFilterSchema returns filter schema
+func GetFilterSchema() string {
+ var m Model
+ return entity.GetFilterSchema(m)
+}
diff --git a/backend/internal/entity/user/methods.go b/backend/internal/entity/user/methods.go
new file mode 100644
index 00000000..42b52476
--- /dev/null
+++ b/backend/internal/entity/user/methods.go
@@ -0,0 +1,229 @@
+package user
+
+import (
+ "database/sql"
+ goerrors "errors"
+ "fmt"
+
+ "npm/internal/database"
+ "npm/internal/entity"
+ "npm/internal/errors"
+ "npm/internal/logger"
+ "npm/internal/model"
+)
+
+// GetByID finds a non-deleted user by ID.
+func GetByID(id int) (Model, error) {
+	var m Model
+	err := m.LoadByID(id)
+	return m, err
+}
+
+// GetByEmail finds a non-deleted, non-system user by email address
+// (case-insensitive; see LoadByEmail).
+func GetByEmail(email string) (Model, error) {
+	var m Model
+	err := m.LoadByEmail(email)
+	return m, err
+}
+
+// Create inserts a new user row from the given model and returns the new
+// row ID. Fails with ErrDuplicateEmailUser if a non-deleted user already
+// has the same email.
+func Create(user *Model) (int, error) {
+	// We need to ensure that a user can't be created with the same email
+	// as an existing non-deleted user. Usually you would do this with the
+	// database schema, but it's a bit more complex because of the is_deleted field.
+	// NOTE(review): this check-then-insert is not atomic, so concurrent
+	// creates could still race — confirm this is acceptable.
+
+	if user.ID != 0 {
+		return 0, goerrors.New("Cannot create user when model already has an ID")
+	}
+
+	// Check if an existing user with this email exists
+	_, err := GetByEmail(user.Email)
+	if err == nil {
+		return 0, errors.ErrDuplicateEmailUser
+	}
+
+	// Set both created_on and modified_on
+	user.Touch(true)
+
+	db := database.GetInstance()
+	// nolint: gosec
+	result, err := db.NamedExec(`INSERT INTO `+fmt.Sprintf("`%s`", tableName)+` (
+		created_on,
+		modified_on,
+		name,
+		nickname,
+		email,
+		is_disabled
+	) VALUES (
+		:created_on,
+		:modified_on,
+		:name,
+		:nickname,
+		:email,
+		:is_disabled
+	)`, user)
+
+	if err != nil {
+		return 0, err
+	}
+
+	last, lastErr := result.LastInsertId()
+	if lastErr != nil {
+		return 0, lastErr
+	}
+
+	return int(last), nil
+}
+
+// Update persists an existing user row from this model. Fails with
+// ErrDuplicateEmailUser if the email belongs to a different user.
+func Update(user *Model) error {
+	if user.ID == 0 {
+		return goerrors.New("Cannot update user when model doesn't have an ID")
+	}
+
+	// Check that the email address isn't associated with another user
+	if existingUser, _ := GetByEmail(user.Email); existingUser.ID != 0 && existingUser.ID != user.ID {
+		return errors.ErrDuplicateEmailUser
+	}
+
+	// Refresh modified_on
+	user.Touch(false)
+
+	db := database.GetInstance()
+	// nolint: gosec
+	_, err := db.NamedExec(`UPDATE `+fmt.Sprintf("`%s`", tableName)+` SET
+		created_on = :created_on,
+		modified_on = :modified_on,
+		name = :name,
+		nickname = :nickname,
+		email = :email,
+		is_disabled = :is_disabled,
+		is_deleted = :is_deleted
+		WHERE id = :id`, user)
+
+	return err
+}
+
+// IsEnabled is used by middleware to ensure the user is still enabled
+// returns (userExist, isEnabled).
+// On an unexpected query error it logs the error and reports the user as
+// existing but disabled (the conservative default).
+func IsEnabled(userID int) (bool, bool) {
+	// nolint: gosec
+	query := `SELECT is_disabled FROM ` + fmt.Sprintf("`%s`", tableName) + ` WHERE id = ? AND is_deleted = ?`
+	disabled := true
+	db := database.GetInstance()
+	err := db.QueryRowx(query, userID, 0).Scan(&disabled)
+
+	if err == sql.ErrNoRows {
+		return false, false
+	} else if err != nil {
+		logger.Error("QueryError", err)
+	}
+
+	return true, !disabled
+}
+
+// List returns a page of users matching the given filters, along with the
+// total row count. Gravatar URLs are generated for every returned item,
+// and optional expansions (e.g. "capabilities") are applied per item.
+func List(pageInfo model.PageInfo, filters []model.Filter, expand []string) (ListResponse, error) {
+	var result ListResponse
+	var exampleModel Model
+
+	// Used when the caller supplies no explicit sort.
+	defaultSort := model.Sort{
+		Field:     "name",
+		Direction: "ASC",
+	}
+
+	db := database.GetInstance()
+	if db == nil {
+		return result, errors.ErrDatabaseUnavailable
+	}
+
+	/*
+		filters = append(filters, model.Filter{
+			Field:    "is_system",
+			Modifier: "equals",
+			Value:    []string{"0"},
+		})
+	*/
+
+	// Get count of items in this search (same builder, count mode)
+	query, params := entity.ListQueryBuilder(exampleModel, tableName, &pageInfo, defaultSort, filters, getFilterMapFunctions(), true)
+	countRow := db.QueryRowx(query, params...)
+	var totalRows int
+	queryErr := countRow.Scan(&totalRows)
+	// ErrNoRows just means an empty result set, not a failure.
+	if queryErr != nil && queryErr != sql.ErrNoRows {
+		logger.Debug("Query: %s -- %+v", query, params)
+		return result, queryErr
+	}
+
+	// Get rows for the requested page
+	var items []Model
+	query, params = entity.ListQueryBuilder(exampleModel, tableName, &pageInfo, defaultSort, filters, getFilterMapFunctions(), false)
+	err := db.Select(&items, query, params...)
+	if err != nil {
+		logger.Debug("Query: %s -- %+v", query, params)
+		return result, err
+	}
+
+	for idx := range items {
+		items[idx].generateGravatar()
+	}
+
+	// Expansion failures are logged but do not fail the whole list.
+	if expand != nil {
+		for idx := range items {
+			expandErr := items[idx].Expand(expand)
+			if expandErr != nil {
+				logger.Error("UsersExpansionError", expandErr)
+			}
+		}
+	}
+
+	result = ListResponse{
+		Items:  items,
+		Total:  totalRows,
+		Limit:  pageInfo.Limit,
+		Offset: pageInfo.Offset,
+		Sort:   pageInfo.Sort,
+		Filter: filters,
+	}
+
+	return result, nil
+}
+
+// DeleteAll hard-deletes every user row. It should only be used for
+// testing purposes — this bypasses the soft-delete mechanism entirely.
+func DeleteAll() error {
+	db := database.GetInstance()
+	_, err := db.Exec(fmt.Sprintf("DELETE FROM `%s`", tableName))
+	return err
+}
+
+// GetCapabilities returns the capability names granted to a user via the
+// user_has_capability join table. A user with no capabilities yields a
+// nil slice and no error.
+func GetCapabilities(userID int) ([]string, error) {
+	var capabilities []string
+	db := database.GetInstance()
+	if db == nil {
+		return []string{}, errors.ErrDatabaseUnavailable
+	}
+
+	query := `SELECT c.name FROM "user_has_capability" h
+		INNER JOIN "capability" c ON c.id = h.capability_id
+		WHERE h.user_id = ?`
+
+	// db.Query never returns sql.ErrNoRows, so any error is fatal here.
+	// (The previous ErrNoRows carve-out could have fallen through to a
+	// nil-rows dereference below.)
+	rows, err := db.Query(query, userID)
+	if err != nil {
+		logger.Debug("QUERY: %v -- %v", query, userID)
+		return []string{}, err
+	}
+
+	// nolint: errcheck
+	defer rows.Close()
+
+	for rows.Next() {
+		var name string
+		if scanErr := rows.Scan(&name); scanErr != nil {
+			return []string{}, scanErr
+		}
+
+		capabilities = append(capabilities, name)
+	}
+
+	// Surface any error hit during iteration (e.g. a connection failure
+	// mid-scan), which rows.Next() alone would silently swallow.
+	if iterErr := rows.Err(); iterErr != nil {
+		return []string{}, iterErr
+	}
+
+	return capabilities, nil
+}
diff --git a/backend/internal/entity/user/model.go b/backend/internal/entity/user/model.go
new file mode 100644
index 00000000..bf2f4c84
--- /dev/null
+++ b/backend/internal/entity/user/model.go
@@ -0,0 +1,191 @@
+package user
+
+import (
+ goerrors "errors"
+ "fmt"
+ "strings"
+ "time"
+
+ "npm/internal/database"
+ "npm/internal/entity/auth"
+ "npm/internal/errors"
+ "npm/internal/logger"
+ "npm/internal/types"
+ "npm/internal/util"
+
+ "github.com/drexedam/gravatar"
+)
+
+const (
+	tableName = "user"
+)
+
+// Model is the user database model. Rows are soft-deleted via IsDeleted;
+// GravatarURL is derived (not stored) and Auth/Capabilities are filled by
+// expansions only.
+type Model struct {
+	ID          int          `json:"id" db:"id" filter:"id,integer"`
+	Name        string       `json:"name" db:"name" filter:"name,string"`
+	Nickname    string       `json:"nickname" db:"nickname" filter:"nickname,string"`
+	Email       string       `json:"email" db:"email" filter:"email,email"`
+	CreatedOn   types.DBDate `json:"created_on" db:"created_on" filter:"created_on,integer"`
+	ModifiedOn  types.DBDate `json:"modified_on" db:"modified_on" filter:"modified_on,integer"`
+	GravatarURL string       `json:"gravatar_url"`
+	IsDisabled  bool         `json:"is_disabled" db:"is_disabled" filter:"is_disabled,boolean"`
+	IsSystem    bool         `json:"is_system,omitempty" db:"is_system"`
+	IsDeleted   bool         `json:"is_deleted,omitempty" db:"is_deleted"`
+	// Expansions
+	Auth         *auth.Model `json:"auth,omitempty" db:"-"`
+	Capabilities []string    `json:"capabilities,omitempty"`
+}
+
+// getByQuery loads the first row returned by the given query into this
+// model and regenerates the gravatar URL (even when the query fails).
+func (m *Model) getByQuery(query string, params []interface{}) error {
+	err := database.GetByQuery(m, query, params)
+	m.generateGravatar()
+	return err
+}
+
+// LoadByID populates this model from the non-deleted user row with the
+// given ID.
+func (m *Model) LoadByID(id int) error {
+	query := fmt.Sprintf("SELECT * FROM `%s` WHERE id = ? AND is_deleted = ? LIMIT 1", tableName)
+	params := []interface{}{id, false}
+	return m.getByQuery(query, params)
+}
+
+// LoadByEmail populates this model from the non-deleted, non-system user
+// with the given email. The email is trimmed and lowercased before
+// matching (emails are stored lowercase; see Save).
+func (m *Model) LoadByEmail(email string) error {
+	query := fmt.Sprintf("SELECT * FROM `%s` WHERE email = ? AND is_deleted = ? AND is_system = ? LIMIT 1", tableName)
+	params := []interface{}{strings.TrimSpace(strings.ToLower(email)), false, false}
+	return m.getByQuery(query, params)
+}
+
+// Touch will update model's timestamp(s)
+func (m *Model) Touch(created bool) {
+ var d types.DBDate
+ d.Time = time.Now()
+ if created {
+ m.CreatedOn = d
+ }
+ m.ModifiedOn = d
+ m.generateGravatar()
+}
+
+// Save inserts (when ID is zero) or updates this user. The email is
+// normalised (trimmed, lowercased) first. System users are read-only and
+// return ErrSystemUserReadonly.
+func (m *Model) Save() error {
+	var err error
+	// Ensure email is nice
+	m.Email = strings.TrimSpace(strings.ToLower(m.Email))
+
+	if m.IsSystem {
+		return errors.ErrSystemUserReadonly
+	}
+
+	if m.ID == 0 {
+		m.ID, err = Create(m)
+	} else {
+		err = Update(m)
+	}
+
+	return err
+}
+
+// Delete soft-deletes this user by setting is_deleted and saving.
+// Returns false on failure; the underlying error is discarded.
+func (m *Model) Delete() bool {
+	m.Touch(false)
+	m.IsDeleted = true
+	if err := m.Save(); err != nil {
+		return false
+	}
+	return true
+}
+
+// SetPermissions will wipe out any existing permissions and add new ones
+// for this user. Each permission name must match a row in the capability
+// table. The user must already be saved (have a non-zero ID).
+// NOTE(review): the delete and inserts are separate statements, not one
+// transaction — a failure mid-way leaves partial permissions; confirm.
+func (m *Model) SetPermissions(permissions []string) error {
+	if m.ID == 0 {
+		return fmt.Errorf("Cannot set permissions without first saving the User")
+	}
+
+	db := database.GetInstance()
+
+	// Wipe out previous permissions
+	query := `DELETE FROM "user_has_capability" WHERE "user_id" = ?`
+	if _, err := db.Exec(query, m.ID); err != nil {
+		logger.Debug("QUERY: %v -- %v", query, m.ID)
+		return err
+	}
+
+	if len(permissions) > 0 {
+		// Add new permissions, resolving each capability name to its ID
+		// via a sub-select.
+		for _, permission := range permissions {
+			query = `INSERT INTO "user_has_capability" (
+				"user_id", "capability_id"
+			) VALUES (
+				?,
+				(SELECT id FROM capability WHERE name = ?)
+			)`
+
+			_, err := db.Exec(query, m.ID, permission)
+			if err != nil {
+				logger.Debug("QUERY: %v -- %v -- %v", query, m.ID, permission)
+				return err
+			}
+		}
+	}
+
+	return nil
+}
+
+// Expand fills in optional expansion properties named in items.
+// Currently only "capabilities" is supported; unknown names are ignored.
+func (m *Model) Expand(items []string) error {
+	var err error
+
+	if util.SliceContainsItem(items, "capabilities") && m.ID > 0 {
+		m.Capabilities, err = GetCapabilities(m.ID)
+	}
+
+	return err
+}
+
+// generateGravatar derives and caches the Gravatar avatar URL for the
+// user's email (128px, "mystery man" fallback, PG rated).
+func (m *Model) generateGravatar() {
+	g := gravatar.New(m.Email)
+	m.GravatarURL = g.Size(128).
+		Default(gravatar.MysteryMan).
+		Rating(gravatar.Pg).
+		AvatarURL()
+}
+
+// SaveCapabilities validates and persists m.Capabilities for this user.
+// The user must already be saved, must have at least one capability, and
+// every capability must exist in the capability table.
+func (m *Model) SaveCapabilities() error {
+	if m.ID == 0 {
+		return fmt.Errorf("Cannot save capabilities on unsaved user")
+	}
+
+	// there must be at least 1 capability
+	if len(m.Capabilities) == 0 {
+		return goerrors.New("At least 1 capability required for a user")
+	}
+
+	db := database.GetInstance()
+
+	// Get a full list of capabilities
+	var capabilities []string
+	query := `SELECT "name" from "capability"`
+	err := db.Select(&capabilities, query)
+	if err != nil {
+		return err
+	}
+
+	// Check that every requested capability exists in the db.
+	// (loop var renamed from `cap`, which shadowed the builtin)
+	for _, wanted := range m.Capabilities {
+		found := false
+		for _, existing := range capabilities {
+			if existing == wanted {
+				found = true
+				break // stop scanning once matched
+			}
+		}
+		if !found {
+			return fmt.Errorf("Capability `%s` is not valid", wanted)
+		}
+	}
+
+	return m.SetPermissions(m.Capabilities)
+}
diff --git a/backend/internal/entity/user/structs.go b/backend/internal/entity/user/structs.go
new file mode 100644
index 00000000..f9f4490e
--- /dev/null
+++ b/backend/internal/entity/user/structs.go
@@ -0,0 +1,15 @@
+package user
+
+import (
+ "npm/internal/model"
+)
+
+// ListResponse is the JSON response for the users list endpoint.
+// Items and Filter are omitted from the JSON output when empty.
+type ListResponse struct {
+	Total  int            `json:"total"`
+	Offset int            `json:"offset"`
+	Limit  int            `json:"limit"`
+	Sort   []model.Sort   `json:"sort"`
+	Filter []model.Filter `json:"filter,omitempty"`
+	Items  []Model        `json:"items,omitempty"`
+}
diff --git a/backend/internal/errors/errors.go b/backend/internal/errors/errors.go
new file mode 100644
index 00000000..39735a1d
--- /dev/null
+++ b/backend/internal/errors/errors.go
@@ -0,0 +1,16 @@
+package errors
+
+import "errors"
+
+// All error messages used by the service package to report problems back
+// to calling clients. Values are stable, slug-style identifiers that
+// callers (and API consumers) may match on, so do not reword them.
+var (
+	ErrDatabaseUnavailable    = errors.New("database-unavailable")
+	ErrDuplicateEmailUser     = errors.New("email-already-exists")
+	ErrInvalidLogin           = errors.New("invalid-login-credentials")
+	ErrUserDisabled           = errors.New("user-disabled")
+	ErrSystemUserReadonly     = errors.New("cannot-save-system-users")
+	ErrValidationFailed       = errors.New("request-failed-validation")
+	ErrCurrentPasswordInvalid = errors.New("current-password-invalid")
+	ErrCABundleDoesNotExist   = errors.New("ca-bundle-does-not-exist")
+)
diff --git a/backend/internal/host.js b/backend/internal/host.js
deleted file mode 100644
index 58e1d09a..00000000
--- a/backend/internal/host.js
+++ /dev/null
@@ -1,235 +0,0 @@
-const _ = require('lodash');
-const proxyHostModel = require('../models/proxy_host');
-const redirectionHostModel = require('../models/redirection_host');
-const deadHostModel = require('../models/dead_host');
-
-const internalHost = {
-
- /**
- * Makes sure that the ssl_* and hsts_* fields play nicely together.
- * ie: if there is no cert, then force_ssl is off.
- * if force_ssl is off, then hsts_enabled is definitely off.
- *
- * @param {object} data
- * @param {object} [existing_data]
- * @returns {object}
- */
- cleanSslHstsData: function (data, existing_data) {
- existing_data = existing_data === undefined ? {} : existing_data;
-
- let combined_data = _.assign({}, existing_data, data);
-
- if (!combined_data.certificate_id) {
- combined_data.ssl_forced = false;
- combined_data.http2_support = false;
- }
-
- if (!combined_data.ssl_forced) {
- combined_data.hsts_enabled = false;
- }
-
- if (!combined_data.hsts_enabled) {
- combined_data.hsts_subdomains = false;
- }
-
- return combined_data;
- },
-
- /**
- * used by the getAll functions of hosts, this removes the certificate meta if present
- *
- * @param {Array} rows
- * @returns {Array}
- */
- cleanAllRowsCertificateMeta: function (rows) {
- rows.map(function (row, idx) {
- if (typeof rows[idx].certificate !== 'undefined' && rows[idx].certificate) {
- rows[idx].certificate.meta = {};
- }
- });
-
- return rows;
- },
-
- /**
- * used by the get/update functions of hosts, this removes the certificate meta if present
- *
- * @param {Object} row
- * @returns {Object}
- */
- cleanRowCertificateMeta: function (row) {
- if (typeof row.certificate !== 'undefined' && row.certificate) {
- row.certificate.meta = {};
- }
-
- return row;
- },
-
- /**
- * This returns all the host types with any domain listed in the provided domain_names array.
- * This is used by the certificates to temporarily disable any host that is using the domain
- *
- * @param {Array} domain_names
- * @returns {Promise}
- */
- getHostsWithDomains: function (domain_names) {
- let promises = [
- proxyHostModel
- .query()
- .where('is_deleted', 0),
- redirectionHostModel
- .query()
- .where('is_deleted', 0),
- deadHostModel
- .query()
- .where('is_deleted', 0)
- ];
-
- return Promise.all(promises)
- .then((promises_results) => {
- let response_object = {
- total_count: 0,
- dead_hosts: [],
- proxy_hosts: [],
- redirection_hosts: []
- };
-
- if (promises_results[0]) {
- // Proxy Hosts
- response_object.proxy_hosts = internalHost._getHostsWithDomains(promises_results[0], domain_names);
- response_object.total_count += response_object.proxy_hosts.length;
- }
-
- if (promises_results[1]) {
- // Redirection Hosts
- response_object.redirection_hosts = internalHost._getHostsWithDomains(promises_results[1], domain_names);
- response_object.total_count += response_object.redirection_hosts.length;
- }
-
- if (promises_results[2]) {
- // Dead Hosts
- response_object.dead_hosts = internalHost._getHostsWithDomains(promises_results[2], domain_names);
- response_object.total_count += response_object.dead_hosts.length;
- }
-
- return response_object;
- });
- },
-
- /**
- * Internal use only, checks to see if the domain is already taken by any other record
- *
- * @param {String} hostname
- * @param {String} [ignore_type] 'proxy', 'redirection', 'dead'
- * @param {Integer} [ignore_id] Must be supplied if type was also supplied
- * @returns {Promise}
- */
- isHostnameTaken: function (hostname, ignore_type, ignore_id) {
- let promises = [
- proxyHostModel
- .query()
- .where('is_deleted', 0)
- .andWhere('domain_names', 'like', '%' + hostname + '%'),
- redirectionHostModel
- .query()
- .where('is_deleted', 0)
- .andWhere('domain_names', 'like', '%' + hostname + '%'),
- deadHostModel
- .query()
- .where('is_deleted', 0)
- .andWhere('domain_names', 'like', '%' + hostname + '%')
- ];
-
- return Promise.all(promises)
- .then((promises_results) => {
- let is_taken = false;
-
- if (promises_results[0]) {
- // Proxy Hosts
- if (internalHost._checkHostnameRecordsTaken(hostname, promises_results[0], ignore_type === 'proxy' && ignore_id ? ignore_id : 0)) {
- is_taken = true;
- }
- }
-
- if (promises_results[1]) {
- // Redirection Hosts
- if (internalHost._checkHostnameRecordsTaken(hostname, promises_results[1], ignore_type === 'redirection' && ignore_id ? ignore_id : 0)) {
- is_taken = true;
- }
- }
-
- if (promises_results[2]) {
- // Dead Hosts
- if (internalHost._checkHostnameRecordsTaken(hostname, promises_results[2], ignore_type === 'dead' && ignore_id ? ignore_id : 0)) {
- is_taken = true;
- }
- }
-
- return {
- hostname: hostname,
- is_taken: is_taken
- };
- });
- },
-
- /**
- * Private call only
- *
- * @param {String} hostname
- * @param {Array} existing_rows
- * @param {Integer} [ignore_id]
- * @returns {Boolean}
- */
- _checkHostnameRecordsTaken: function (hostname, existing_rows, ignore_id) {
- let is_taken = false;
-
- if (existing_rows && existing_rows.length) {
- existing_rows.map(function (existing_row) {
- existing_row.domain_names.map(function (existing_hostname) {
- // Does this domain match?
- if (existing_hostname.toLowerCase() === hostname.toLowerCase()) {
- if (!ignore_id || ignore_id !== existing_row.id) {
- is_taken = true;
- }
- }
- });
- });
- }
-
- return is_taken;
- },
-
- /**
- * Private call only
- *
- * @param {Array} hosts
- * @param {Array} domain_names
- * @returns {Array}
- */
- _getHostsWithDomains: function (hosts, domain_names) {
- let response = [];
-
- if (hosts && hosts.length) {
- hosts.map(function (host) {
- let host_matches = false;
-
- domain_names.map(function (domain_name) {
- host.domain_names.map(function (host_domain_name) {
- if (domain_name.toLowerCase() === host_domain_name.toLowerCase()) {
- host_matches = true;
- }
- });
- });
-
- if (host_matches) {
- response.push(host);
- }
- });
- }
-
- return response;
- }
-
-};
-
-module.exports = internalHost;
diff --git a/backend/internal/ip_ranges.js b/backend/internal/ip_ranges.js
deleted file mode 100644
index 40e63ea4..00000000
--- a/backend/internal/ip_ranges.js
+++ /dev/null
@@ -1,150 +0,0 @@
-const https = require('https');
-const fs = require('fs');
-const logger = require('../logger').ip_ranges;
-const error = require('../lib/error');
-const internalNginx = require('./nginx');
-const { Liquid } = require('liquidjs');
-
-const CLOUDFRONT_URL = 'https://ip-ranges.amazonaws.com/ip-ranges.json';
-const CLOUDFARE_V4_URL = 'https://www.cloudflare.com/ips-v4';
-const CLOUDFARE_V6_URL = 'https://www.cloudflare.com/ips-v6';
-
-const regIpV4 = /^(\d+\.?){4}\/\d+/;
-const regIpV6 = /^(([\da-fA-F]+)?:)+\/\d+/;
-
-const internalIpRanges = {
-
- interval_timeout: 1000 * 60 * 60 * 6, // 6 hours
- interval: null,
- interval_processing: false,
- iteration_count: 0,
-
- initTimer: () => {
- logger.info('IP Ranges Renewal Timer initialized');
- internalIpRanges.interval = setInterval(internalIpRanges.fetch, internalIpRanges.interval_timeout);
- },
-
- fetchUrl: (url) => {
- return new Promise((resolve, reject) => {
- logger.info('Fetching ' + url);
- return https.get(url, (res) => {
- res.setEncoding('utf8');
- let raw_data = '';
- res.on('data', (chunk) => {
- raw_data += chunk;
- });
-
- res.on('end', () => {
- resolve(raw_data);
- });
- }).on('error', (err) => {
- reject(err);
- });
- });
- },
-
- /**
- * Triggered at startup and then later by a timer, this will fetch the ip ranges from services and apply them to nginx.
- */
- fetch: () => {
- if (!internalIpRanges.interval_processing) {
- internalIpRanges.interval_processing = true;
- logger.info('Fetching IP Ranges from online services...');
-
- let ip_ranges = [];
-
- return internalIpRanges.fetchUrl(CLOUDFRONT_URL)
- .then((cloudfront_data) => {
- let data = JSON.parse(cloudfront_data);
-
- if (data && typeof data.prefixes !== 'undefined') {
- data.prefixes.map((item) => {
- if (item.service === 'CLOUDFRONT') {
- ip_ranges.push(item.ip_prefix);
- }
- });
- }
-
- if (data && typeof data.ipv6_prefixes !== 'undefined') {
- data.ipv6_prefixes.map((item) => {
- if (item.service === 'CLOUDFRONT') {
- ip_ranges.push(item.ipv6_prefix);
- }
- });
- }
- })
- .then(() => {
- return internalIpRanges.fetchUrl(CLOUDFARE_V4_URL);
- })
- .then((cloudfare_data) => {
- let items = cloudfare_data.split('\n').filter((line) => regIpV4.test(line));
- ip_ranges = [... ip_ranges, ... items];
- })
- .then(() => {
- return internalIpRanges.fetchUrl(CLOUDFARE_V6_URL);
- })
- .then((cloudfare_data) => {
- let items = cloudfare_data.split('\n').filter((line) => regIpV6.test(line));
- ip_ranges = [... ip_ranges, ... items];
- })
- .then(() => {
- let clean_ip_ranges = [];
- ip_ranges.map((range) => {
- if (range) {
- clean_ip_ranges.push(range);
- }
- });
-
- return internalIpRanges.generateConfig(clean_ip_ranges)
- .then(() => {
- if (internalIpRanges.iteration_count) {
- // Reload nginx
- return internalNginx.reload();
- }
- });
- })
- .then(() => {
- internalIpRanges.interval_processing = false;
- internalIpRanges.iteration_count++;
- })
- .catch((err) => {
- logger.error(err.message);
- internalIpRanges.interval_processing = false;
- });
- }
- },
-
- /**
- * @param {Array} ip_ranges
- * @returns {Promise}
- */
- generateConfig: (ip_ranges) => {
- let renderEngine = new Liquid({
- root: __dirname + '/../templates/'
- });
-
- return new Promise((resolve, reject) => {
- let template = null;
- let filename = '/etc/nginx/conf.d/include/ip_ranges.conf';
- try {
- template = fs.readFileSync(__dirname + '/../templates/ip_ranges.conf', {encoding: 'utf8'});
- } catch (err) {
- reject(new error.ConfigurationError(err.message));
- return;
- }
-
- renderEngine
- .parseAndRender(template, {ip_ranges: ip_ranges})
- .then((config_text) => {
- fs.writeFileSync(filename, config_text, {encoding: 'utf8'});
- resolve(true);
- })
- .catch((err) => {
- logger.warn('Could not write ' + filename + ':', err.message);
- reject(new error.ConfigurationError(err.message));
- });
- });
- }
-};
-
-module.exports = internalIpRanges;
diff --git a/backend/internal/jwt/jwt.go b/backend/internal/jwt/jwt.go
new file mode 100644
index 00000000..9dcb6c7b
--- /dev/null
+++ b/backend/internal/jwt/jwt.go
@@ -0,0 +1,60 @@
+package jwt
+
+import (
+ "fmt"
+ "time"
+
+ "npm/internal/entity/user"
+ "npm/internal/logger"
+
+ "github.com/dgrijalva/jwt-go"
+)
+
+// UserJWTClaims is the structure of a JWT for a User
+type UserJWTClaims struct {
+ UserID int `json:"uid"`
+ Roles []string `json:"roles"`
+ jwt.StandardClaims
+}
+
+// GeneratedResponse is the response of a generated token, usually used in http response
+type GeneratedResponse struct {
+ Expires int64 `json:"expires"`
+ Token string `json:"token"`
+}
+
+// Generate will create a JWT
+func Generate(userObj *user.Model) (GeneratedResponse, error) {
+ var response GeneratedResponse
+
+ key, _ := GetPrivateKey()
+ expires := time.Now().AddDate(0, 0, 1) // 1 day
+
+ // Create the Claims
+ claims := UserJWTClaims{
+ userObj.ID,
+ []string{"user"},
+ jwt.StandardClaims{
+ IssuedAt: time.Now().Unix(),
+ ExpiresAt: expires.Unix(),
+ Issuer: "api",
+ },
+ }
+
+ // Create a new token object, specifying signing method and the claims
+ // you would like it to contain.
+ token := jwt.NewWithClaims(jwt.SigningMethodRS256, claims)
+ var err error
+ token.Signature, err = token.SignedString(key)
+ if err != nil {
+ logger.Error("JWTError", fmt.Errorf("Error signing token: %v", err))
+ return response, err
+ }
+
+ response = GeneratedResponse{
+ Expires: expires.Unix(),
+ Token: token.Signature,
+ }
+
+ return response, nil
+}
diff --git a/backend/internal/jwt/keys.go b/backend/internal/jwt/keys.go
new file mode 100644
index 00000000..e48c334d
--- /dev/null
+++ b/backend/internal/jwt/keys.go
@@ -0,0 +1,86 @@
+package jwt
+
+import (
+ "crypto/rsa"
+ "crypto/x509"
+ "encoding/pem"
+ "errors"
+
+ "npm/internal/config"
+)
+
+var (
+ privateKey *rsa.PrivateKey
+ publicKey *rsa.PublicKey
+)
+
+// GetPrivateKey will load the key from config package and return a usable object
+// It should only load from file once per program execution
+func GetPrivateKey() (*rsa.PrivateKey, error) {
+ if privateKey == nil {
+ var blankKey *rsa.PrivateKey
+
+ if config.PrivateKey == "" {
+ return blankKey, errors.New("Could not get Private Key from configuration")
+ }
+
+ var err error
+ privateKey, err = LoadPemPrivateKey(config.PrivateKey)
+ if err != nil {
+ return blankKey, err
+ }
+ }
+
+ pub, pubErr := GetPublicKey()
+ if pubErr != nil {
+ return privateKey, pubErr
+ }
+
+ privateKey.PublicKey = *pub
+
+ return privateKey, pubErr
+}
+
+// GetPublicKey will load the key from config package and return a usable object
+// It should only load once per program execution
+func GetPublicKey() (*rsa.PublicKey, error) {
+ if publicKey == nil {
+ var blankKey *rsa.PublicKey
+
+ if config.PublicKey == "" {
+ return blankKey, errors.New("Could not get Public Key filename, check environment variables")
+ }
+
+ var err error
+ publicKey, err = LoadPemPublicKey(config.PublicKey)
+ if err != nil {
+ return blankKey, err
+ }
+ }
+
+ return publicKey, nil
+}
+
+// LoadPemPrivateKey reads a key from a PEM encoded string and returns a private key
+func LoadPemPrivateKey(content string) (*rsa.PrivateKey, error) {
+ var key *rsa.PrivateKey
+ data, _ := pem.Decode([]byte(content))
+ var err error
+ key, err = x509.ParsePKCS1PrivateKey(data.Bytes)
+ if err != nil {
+ return key, err
+ }
+ return key, nil
+}
+
+// LoadPemPublicKey reads a key from a PEM encoded string and returns a public key
+func LoadPemPublicKey(content string) (*rsa.PublicKey, error) {
+ var key *rsa.PublicKey
+ data, _ := pem.Decode([]byte(content))
+ publicKeyFileImported, err := x509.ParsePKCS1PublicKey(data.Bytes)
+ if err != nil {
+ return key, err
+ }
+
+ return publicKeyFileImported, nil
+}
diff --git a/backend/internal/logger/config.go b/backend/internal/logger/config.go
new file mode 100644
index 00000000..c0f8a35a
--- /dev/null
+++ b/backend/internal/logger/config.go
@@ -0,0 +1,40 @@
+package logger
+
+import "github.com/getsentry/sentry-go"
+
+// Level type
+type Level int
+
+// Log level definitions
+const (
+ // DebugLevel usually only enabled when debugging. Very verbose logging.
+ DebugLevel Level = 10
+ // InfoLevel general operational entries about what's going on inside the application.
+ InfoLevel Level = 20
+ // WarnLevel non-critical entries that deserve eyes.
+ WarnLevel Level = 30
+ // ErrorLevel used for errors that should definitely be noted.
+ ErrorLevel Level = 40
+)
+
+// Config options for the logger.
+type Config struct {
+ LogThreshold Level
+ Formatter string
+ SentryConfig sentry.ClientOptions
+}
+
+// Interface for a logger
+type Interface interface {
+ GetLogLevel() Level
+ Debug(format string, args ...interface{})
+ Info(format string, args ...interface{})
+ Warn(format string, args ...interface{})
+ Error(errorClass string, err error, args ...interface{})
+ Errorf(errorClass, format string, err error, args ...interface{})
+}
+
+// ConfigurableLogger is an interface for a logger that can be configured
+type ConfigurableLogger interface {
+ Configure(c *Config) error
+}
diff --git a/backend/internal/logger/logger.go b/backend/internal/logger/logger.go
new file mode 100644
index 00000000..f82145ee
--- /dev/null
+++ b/backend/internal/logger/logger.go
@@ -0,0 +1,242 @@
+package logger
+
+import (
+ "encoding/json"
+ "fmt"
+ stdlog "log"
+ "os"
+ "runtime/debug"
+ "sync"
+ "time"
+
+ "github.com/fatih/color"
+ "github.com/getsentry/sentry-go"
+)
+
+var colorReset, colorGray, colorYellow, colorBlue, colorRed, colorMagenta, colorBlack, colorWhite *color.Color
+
+// Log message structure.
+type Log struct {
+ Timestamp string `json:"timestamp"`
+ Level string `json:"level"`
+ Message string `json:"message"`
+ Pid int `json:"pid"`
+ Summary string `json:"summary,omitempty"`
+ Caller string `json:"caller,omitempty"`
+ StackTrace []string `json:"stack_trace,omitempty"`
+}
+
+// Logger instance
+type Logger struct {
+ Config
+ mux sync.Mutex
+}
+
+// global logger instance shared by the package-level helper functions.
+var logger = NewLogger()
+
+// NewLogger creates a new logger instance
+func NewLogger() *Logger {
+ color.NoColor = false
+ colorReset = color.New(color.Reset)
+ colorGray = color.New(color.FgWhite)
+ colorYellow = color.New(color.Bold, color.FgYellow)
+ colorBlue = color.New(color.Bold, color.FgBlue)
+ colorRed = color.New(color.Bold, color.FgRed)
+ colorMagenta = color.New(color.Bold, color.FgMagenta)
+ colorBlack = color.New(color.Bold, color.FgBlack)
+ colorWhite = color.New(color.Bold, color.FgWhite)
+
+ return &Logger{
+ Config: NewConfig(),
+ }
+}
+
+// NewConfig returns the default config
+func NewConfig() Config {
+ return Config{
+ LogThreshold: InfoLevel,
+ Formatter: "json",
+ }
+}
+
+// Configure logger and will return error if missing required fields.
+func Configure(c *Config) error {
+ return logger.Configure(c)
+}
+
+// GetLogLevel currently configured
+func GetLogLevel() Level {
+ return logger.GetLogLevel()
+}
+
+// Debug logs if the log level is set to DebugLevel or below. Arguments are handled in the manner of fmt.Printf.
+func Debug(format string, args ...interface{}) {
+ logger.Debug(format, args...)
+}
+
+// Info logs if the log level is set to InfoLevel or below. Arguments are handled in the manner of fmt.Printf.
+func Info(format string, args ...interface{}) {
+ logger.Info(format, args...)
+}
+
+// Warn logs if the log level is set to WarnLevel or below. Arguments are handled in the manner of fmt.Printf.
+func Warn(format string, args ...interface{}) {
+ logger.Warn(format, args...)
+}
+
+// Error logs error given if the log level is set to ErrorLevel or below. Arguments are not logged.
+// Attempts to report the error to Sentry.
+func Error(errorClass string, err error) {
+ logger.Error(errorClass, err)
+}
+
+// Configure logger and will return error if missing required fields.
+func (l *Logger) Configure(c *Config) error {
+ // ensure updates to the config are atomic
+ l.mux.Lock()
+ defer l.mux.Unlock()
+
+ if c == nil {
+ return fmt.Errorf("a non nil Config is mandatory")
+ }
+
+ if err := c.LogThreshold.validate(); err != nil {
+ return err
+ }
+
+ l.LogThreshold = c.LogThreshold
+ l.Formatter = c.Formatter
+ l.SentryConfig = c.SentryConfig
+
+ if c.SentryConfig.Dsn != "" {
+ if sentryErr := sentry.Init(c.SentryConfig); sentryErr != nil {
+ fmt.Printf("Sentry initialization failed: %v\n", sentryErr)
+ }
+ }
+
+ stdlog.SetFlags(0) // this removes timestamp prefixes from logs
+ return nil
+}
+
+// validate the log level is in the accepted list.
+func (l Level) validate() error {
+ switch l {
+ case DebugLevel, InfoLevel, WarnLevel, ErrorLevel:
+ return nil
+ default:
+ return fmt.Errorf("invalid \"Level\" %d", l)
+ }
+}
+
+var logLevels = map[Level]string{
+ DebugLevel: "DEBUG",
+ InfoLevel: "INFO",
+ WarnLevel: "WARN",
+ ErrorLevel: "ERROR",
+}
+
+func (l *Logger) logLevel(logLevel Level, format string, args ...interface{}) {
+ if logLevel < l.LogThreshold {
+ return
+ }
+
+ errorClass := ""
+ if logLevel == ErrorLevel {
+ // First arg is the errorClass
+ errorClass = args[0].(string)
+ if len(args) > 1 {
+ args = args[1:]
+ } else {
+ args = []interface{}{}
+ }
+ }
+
+ stringMessage := fmt.Sprintf(format, args...)
+
+ if l.Formatter == "json" {
+ // JSON Log Format
+ jsonLog, _ := json.Marshal(
+ Log{
+ Timestamp: time.Now().Format(time.RFC3339Nano),
+ Level: logLevels[logLevel],
+ Message: stringMessage,
+ Pid: os.Getpid(),
+ },
+ )
+
+ stdlog.Println(string(jsonLog))
+ } else {
+ // Nice Log Format
+ var colorLevel *color.Color
+ switch logLevel {
+ case DebugLevel:
+ colorLevel = colorMagenta
+ case InfoLevel:
+ colorLevel = colorBlue
+ case WarnLevel:
+ colorLevel = colorYellow
+ case ErrorLevel:
+ colorLevel = colorRed
+ stringMessage = fmt.Sprintf("%s: %s", errorClass, stringMessage)
+ }
+
+ t := time.Now()
+ stdlog.Println(
+ colorBlack.Sprint("["),
+ colorWhite.Sprint(t.Format("2006-01-02 15:04:05")),
+ colorBlack.Sprint("] "),
+ colorLevel.Sprintf("%-8v", logLevels[logLevel]),
+ colorGray.Sprint(stringMessage),
+ colorReset.Sprint(""),
+ )
+
+ if logLevel == ErrorLevel && l.LogThreshold == DebugLevel {
+ // Print a stack trace too
+ debug.PrintStack()
+ }
+ }
+}
+
+// GetLogLevel currently configured
+func (l *Logger) GetLogLevel() Level {
+ return l.LogThreshold
+}
+
+// Debug logs if the log level is set to DebugLevel or below. Arguments are handled in the manner of fmt.Printf.
+func (l *Logger) Debug(format string, args ...interface{}) {
+ l.logLevel(DebugLevel, format, args...)
+}
+
+// Info logs if the log level is set to InfoLevel or below. Arguments are handled in the manner of fmt.Printf.
+func (l *Logger) Info(format string, args ...interface{}) {
+ l.logLevel(InfoLevel, format, args...)
+}
+
+// Warn logs if the log level is set to WarnLevel or below. Arguments are handled in the manner of fmt.Printf.
+func (l *Logger) Warn(format string, args ...interface{}) {
+ l.logLevel(WarnLevel, format, args...)
+}
+
+// Error logs error given if the log level is set to ErrorLevel or below. Arguments are not logged.
+// Attempts to report the error to Sentry.
+func (l *Logger) Error(errorClass string, err error) {
+ l.logLevel(ErrorLevel, err.Error(), errorClass)
+ l.notifySentry(errorClass, err)
+}
+
+func (l *Logger) notifySentry(errorClass string, err error) {
+ if l.SentryConfig.Dsn != "" && l.SentryConfig.Dsn != "-" {
+
+ sentry.ConfigureScope(func(scope *sentry.Scope) {
+ scope.SetLevel(sentry.LevelError)
+ scope.SetTag("service", "backend")
+ scope.SetTag("error_class", errorClass)
+ })
+
+ sentry.CaptureException(err)
+ // Since sentry emits events in the background we need to make sure
+ // they are sent before we shut down
+ sentry.Flush(time.Second * 5)
+ }
+}
diff --git a/backend/internal/logger/logger_test.go b/backend/internal/logger/logger_test.go
new file mode 100644
index 00000000..e0019696
--- /dev/null
+++ b/backend/internal/logger/logger_test.go
@@ -0,0 +1,168 @@
+package logger
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "io/ioutil"
+ "log"
+ "os"
+ "testing"
+
+ "github.com/getsentry/sentry-go"
+ "github.com/stretchr/testify/assert"
+)
+
+func TestGetLogLevel(t *testing.T) {
+ assert.Equal(t, InfoLevel, GetLogLevel())
+}
+
+func TestThreshold(t *testing.T) {
+ buf := new(bytes.Buffer)
+ log.SetOutput(buf)
+ defer func() {
+ log.SetOutput(os.Stderr)
+ }()
+
+ assert.NoError(t, Configure(&Config{
+ LogThreshold: InfoLevel,
+ }))
+
+ Debug("this should not display")
+ assert.Empty(t, buf.String())
+
+ Info("this should display")
+ assert.NotEmpty(t, buf.String())
+
+ Error("ErrorClass", errors.New("this should display"))
+ assert.NotEmpty(t, buf.String())
+}
+
+func TestDebug(t *testing.T) {
+ buf := new(bytes.Buffer)
+ log.SetOutput(buf)
+ defer func() {
+ log.SetOutput(os.Stderr)
+ }()
+
+ assert.NoError(t, Configure(&Config{
+ LogThreshold: DebugLevel,
+ }))
+
+ Debug("This is a %s message", "test")
+ assert.Contains(t, buf.String(), "DEBUG")
+ assert.Contains(t, buf.String(), "This is a test message")
+}
+
+func TestInfo(t *testing.T) {
+ buf := new(bytes.Buffer)
+ log.SetOutput(buf)
+ defer func() {
+ log.SetOutput(os.Stderr)
+ }()
+
+ assert.NoError(t, Configure(&Config{
+ LogThreshold: InfoLevel,
+ }))
+
+ Info("This is a %s message", "test")
+ assert.Contains(t, buf.String(), "INFO")
+ assert.Contains(t, buf.String(), "This is a test message")
+}
+
+func TestWarn(t *testing.T) {
+ buf := new(bytes.Buffer)
+ log.SetOutput(buf)
+ defer func() {
+ log.SetOutput(os.Stderr)
+ }()
+
+ assert.NoError(t, Configure(&Config{
+ LogThreshold: InfoLevel,
+ }))
+
+ Warn("This is a %s message", "test")
+ assert.Contains(t, buf.String(), "WARN")
+ assert.Contains(t, buf.String(), "This is a test message")
+}
+
+func TestError(t *testing.T) {
+ buf := new(bytes.Buffer)
+ log.SetOutput(buf)
+ defer func() {
+ log.SetOutput(os.Stderr)
+ }()
+
+ assert.NoError(t, Configure(&Config{
+ LogThreshold: ErrorLevel,
+ }))
+
+ Error("TestErrorClass", fmt.Errorf("this is a %s error", "test"))
+ assert.Contains(t, buf.String(), "ERROR")
+ assert.Contains(t, buf.String(), "this is a test error")
+}
+
+func TestConfigure(t *testing.T) {
+ type args struct {
+ c *Config
+ }
+ tests := []struct {
+ name string
+ args args
+ wantErr bool
+ }{
+ {
+ name: "configure",
+ args: args{
+ &Config{
+ LogThreshold: InfoLevel,
+ SentryConfig: sentry.ClientOptions{},
+ },
+ },
+ wantErr: false,
+ },
+ {
+ name: "invalid log level",
+ args: args{
+ &Config{
+ SentryConfig: sentry.ClientOptions{},
+ },
+ },
+ wantErr: true,
+ },
+ }
+ for _, tt := range tests {
+ tt := tt
+ t.Run(tt.name, func(t *testing.T) {
+ if err := Configure(tt.args.c); (err != nil) != tt.wantErr {
+ t.Errorf("Configure() error = %v, wantErr %v", err, tt.wantErr)
+ }
+ })
+ }
+}
+
+func BenchmarkLogLevelBelowThreshold(b *testing.B) {
+ l := NewLogger()
+
+ log.SetOutput(ioutil.Discard)
+ defer func() {
+ log.SetOutput(os.Stderr)
+ }()
+
+ for i := 0; i < b.N; i++ {
+ l.logLevel(DebugLevel, "benchmark %d", i)
+ }
+}
+
+func BenchmarkLogLevelAboveThreshold(b *testing.B) {
+ l := NewLogger()
+
+ log.SetOutput(ioutil.Discard)
+ defer func() {
+ log.SetOutput(os.Stderr)
+ }()
+
+ for i := 0; i < b.N; i++ {
+ l.logLevel(InfoLevel, "benchmark %d", i)
+ }
+}
diff --git a/backend/internal/model/filter.go b/backend/internal/model/filter.go
new file mode 100644
index 00000000..8b88b70c
--- /dev/null
+++ b/backend/internal/model/filter.go
@@ -0,0 +1,8 @@
+package model
+
+// Filter is the structure of a field/modifier/value item
+type Filter struct {
+ Field string `json:"field"`
+ Modifier string `json:"modifier"`
+ Value []string `json:"value"`
+}
diff --git a/backend/internal/model/pageinfo.go b/backend/internal/model/pageinfo.go
new file mode 100644
index 00000000..3f1fd26f
--- /dev/null
+++ b/backend/internal/model/pageinfo.go
@@ -0,0 +1,22 @@
+package model
+
+import (
+ "time"
+)
+
+// PageInfo is the model used by Api Handlers and passed on to other parts
+// of the application
+type PageInfo struct {
+ FromDate time.Time `json:"from_date"`
+ ToDate time.Time `json:"to_date"`
+ Sort []Sort `json:"sort"`
+ Offset int `json:"offset"`
+ Limit int `json:"limit"`
+ Expand []string `json:"expand"`
+}
+
+// Sort holds the sorting data
+type Sort struct {
+ Field string `json:"field"`
+ Direction string `json:"direction"`
+}
diff --git a/backend/internal/nginx.js b/backend/internal/nginx.js
deleted file mode 100644
index 52bdd66d..00000000
--- a/backend/internal/nginx.js
+++ /dev/null
@@ -1,435 +0,0 @@
-const _ = require('lodash');
-const fs = require('fs');
-const logger = require('../logger').nginx;
-const utils = require('../lib/utils');
-const error = require('../lib/error');
-const { Liquid } = require('liquidjs');
-const debug_mode = process.env.NODE_ENV !== 'production' || !!process.env.DEBUG;
-
-const internalNginx = {
-
- /**
- * This will:
- * - test the nginx config first to make sure it's OK
- * - create / recreate the config for the host
- * - test again
- * - IF OK: update the meta with online status
- * - IF BAD: update the meta with offline status and remove the config entirely
- * - then reload nginx
- *
- * @param {Object|String} model
- * @param {String} host_type
- * @param {Object} host
- * @returns {Promise}
- */
- configure: (model, host_type, host) => {
- let combined_meta = {};
-
- return internalNginx.test()
- .then(() => {
- // Nginx is OK
- // We're deleting this config regardless.
- return internalNginx.deleteConfig(host_type, host); // Don't throw errors, as the file may not exist at all
- })
- .then(() => {
- return internalNginx.generateConfig(host_type, host);
- })
- .then(() => {
- // Test nginx again and update meta with result
- return internalNginx.test()
- .then(() => {
- // nginx is ok
- combined_meta = _.assign({}, host.meta, {
- nginx_online: true,
- nginx_err: null
- });
-
- return model
- .query()
- .where('id', host.id)
- .patch({
- meta: combined_meta
- });
- })
- .catch((err) => {
- // Remove the error_log line because it's a docker-ism false positive that doesn't need to be reported.
- // It will always look like this:
- // nginx: [alert] could not open error log file: open() "/var/log/nginx/error.log" failed (6: No such device or address)
-
- let valid_lines = [];
- let err_lines = err.message.split('\n');
- err_lines.map(function (line) {
- if (line.indexOf('/var/log/nginx/error.log') === -1) {
- valid_lines.push(line);
- }
- });
-
- if (debug_mode) {
- logger.error('Nginx test failed:', valid_lines.join('\n'));
- }
-
- // config is bad, update meta and delete config
- combined_meta = _.assign({}, host.meta, {
- nginx_online: false,
- nginx_err: valid_lines.join('\n')
- });
-
- return model
- .query()
- .where('id', host.id)
- .patch({
- meta: combined_meta
- })
- .then(() => {
- return internalNginx.deleteConfig(host_type, host, true);
- });
- });
- })
- .then(() => {
- return internalNginx.reload();
- })
- .then(() => {
- return combined_meta;
- });
- },
-
- /**
- * @returns {Promise}
- */
- test: () => {
- if (debug_mode) {
- logger.info('Testing Nginx configuration');
- }
-
- return utils.exec('/usr/sbin/nginx -t -g "error_log off;"');
- },
-
- /**
- * @returns {Promise}
- */
- reload: () => {
- return internalNginx.test()
- .then(() => {
- logger.info('Reloading Nginx');
- return utils.exec('/usr/sbin/nginx -s reload');
- });
- },
-
- /**
- * @param {String} host_type
- * @param {Integer} host_id
- * @returns {String}
- */
- getConfigName: (host_type, host_id) => {
- host_type = host_type.replace(new RegExp('-', 'g'), '_');
-
- if (host_type === 'default') {
- return '/data/nginx/default_host/site.conf';
- }
-
- return '/data/nginx/' + host_type + '/' + host_id + '.conf';
- },
-
- /**
- * Generates custom locations
- * @param {Object} host
- * @returns {Promise}
- */
- renderLocations: (host) => {
-
- //logger.info('host = ' + JSON.stringify(host, null, 2));
- return new Promise((resolve, reject) => {
- let template;
-
- try {
- template = fs.readFileSync(__dirname + '/../templates/_location.conf', {encoding: 'utf8'});
- } catch (err) {
- reject(new error.ConfigurationError(err.message));
- return;
- }
-
- let renderer = new Liquid({
- root: __dirname + '/../templates/'
- });
- let renderedLocations = '';
-
- const locationRendering = async () => {
- for (let i = 0; i < host.locations.length; i++) {
- let locationCopy = Object.assign({}, {access_list_id: host.access_list_id}, {certificate_id: host.certificate_id},
- {ssl_forced: host.ssl_forced}, {caching_enabled: host.caching_enabled}, {block_exploits: host.block_exploits},
- {allow_websocket_upgrade: host.allow_websocket_upgrade}, {http2_support: host.http2_support},
- {hsts_enabled: host.hsts_enabled}, {hsts_subdomains: host.hsts_subdomains}, {access_list: host.access_list},
- {certificate: host.certificate}, host.locations[i]);
-
- if (locationCopy.forward_host.indexOf('/') > -1) {
- const splitted = locationCopy.forward_host.split('/');
-
- locationCopy.forward_host = splitted.shift();
- locationCopy.forward_path = `/${splitted.join('/')}`;
- }
-
- //logger.info('locationCopy = ' + JSON.stringify(locationCopy, null, 2));
-
- // eslint-disable-next-line
- renderedLocations += await renderer.parseAndRender(template, locationCopy);
- }
-
- };
-
- locationRendering().then(() => resolve(renderedLocations));
-
- });
- },
-
- /**
- * @param {String} host_type
- * @param {Object} host
- * @returns {Promise}
- */
- generateConfig: (host_type, host) => {
- host_type = host_type.replace(new RegExp('-', 'g'), '_');
-
- if (debug_mode) {
- logger.info('Generating ' + host_type + ' Config:', host);
- }
-
- // logger.info('host = ' + JSON.stringify(host, null, 2));
-
- let renderEngine = new Liquid({
- root: __dirname + '/../templates/'
- });
-
- return new Promise((resolve, reject) => {
- let template = null;
- let filename = internalNginx.getConfigName(host_type, host.id);
-
- try {
- template = fs.readFileSync(__dirname + '/../templates/' + host_type + '.conf', {encoding: 'utf8'});
- } catch (err) {
- reject(new error.ConfigurationError(err.message));
- return;
- }
-
- let locationsPromise;
- let origLocations;
-
- // Manipulate the data a bit before sending it to the template
- if (host_type !== 'default') {
- host.use_default_location = true;
- if (typeof host.advanced_config !== 'undefined' && host.advanced_config) {
- host.use_default_location = !internalNginx.advancedConfigHasDefaultLocation(host.advanced_config);
- }
- }
-
- if (host.locations) {
- //logger.info ('host.locations = ' + JSON.stringify(host.locations, null, 2));
- origLocations = [].concat(host.locations);
- locationsPromise = internalNginx.renderLocations(host).then((renderedLocations) => {
- host.locations = renderedLocations;
- });
-
- // Allow someone who is using / custom location path to use it, and skip the default / location
- _.map(host.locations, (location) => {
- if (location.path === '/') {
- host.use_default_location = false;
- }
- });
-
- } else {
- locationsPromise = Promise.resolve();
- }
-
- // Set the IPv6 setting for the host
- host.ipv6 = internalNginx.ipv6Enabled();
-
- locationsPromise.then(() => {
- renderEngine
- .parseAndRender(template, host)
- .then((config_text) => {
- fs.writeFileSync(filename, config_text, {encoding: 'utf8'});
-
- if (debug_mode) {
- logger.success('Wrote config:', filename, config_text);
- }
-
- // Restore locations array
- host.locations = origLocations;
-
- resolve(true);
- })
- .catch((err) => {
- if (debug_mode) {
- logger.warn('Could not write ' + filename + ':', err.message);
- }
-
- reject(new error.ConfigurationError(err.message));
- });
- });
- });
- },
-
- /**
- * This generates a temporary nginx config listening on port 80 for the domain names listed
- * in the certificate setup. It allows the letsencrypt acme challenge to be requested by letsencrypt
- * when requesting a certificate without having a hostname set up already.
- *
- * @param {Object} certificate
- * @returns {Promise}
- */
- generateLetsEncryptRequestConfig: (certificate) => {
- if (debug_mode) {
- logger.info('Generating LetsEncrypt Request Config:', certificate);
- }
-
- let renderEngine = new Liquid({
- root: __dirname + '/../templates/'
- });
-
- return new Promise((resolve, reject) => {
- let template = null;
- let filename = '/data/nginx/temp/letsencrypt_' + certificate.id + '.conf';
-
- try {
- template = fs.readFileSync(__dirname + '/../templates/letsencrypt-request.conf', {encoding: 'utf8'});
- } catch (err) {
- reject(new error.ConfigurationError(err.message));
- return;
- }
-
- certificate.ipv6 = internalNginx.ipv6Enabled();
-
- renderEngine
- .parseAndRender(template, certificate)
- .then((config_text) => {
- fs.writeFileSync(filename, config_text, {encoding: 'utf8'});
-
- if (debug_mode) {
- logger.success('Wrote config:', filename, config_text);
- }
-
- resolve(true);
- })
- .catch((err) => {
- if (debug_mode) {
- logger.warn('Could not write ' + filename + ':', err.message);
- }
-
- reject(new error.ConfigurationError(err.message));
- });
- });
- },
-
- /**
- * This removes the temporary nginx config file generated by `generateLetsEncryptRequestConfig`
- *
- * @param {Object} certificate
- * @param {Boolean} [throw_errors]
- * @returns {Promise}
- */
- deleteLetsEncryptRequestConfig: (certificate, throw_errors) => {
- return new Promise((resolve, reject) => {
- try {
- let config_file = '/data/nginx/temp/letsencrypt_' + certificate.id + '.conf';
-
- if (debug_mode) {
- logger.warn('Deleting nginx config: ' + config_file);
- }
-
- fs.unlinkSync(config_file);
- } catch (err) {
- if (debug_mode) {
- logger.warn('Could not delete config:', err.message);
- }
-
- if (throw_errors) {
- reject(err);
- }
- }
-
- resolve();
- });
- },
-
- /**
- * @param {String} host_type
- * @param {Object} [host]
- * @param {Boolean} [throw_errors]
- * @returns {Promise}
- */
- deleteConfig: (host_type, host, throw_errors) => {
- host_type = host_type.replace(new RegExp('-', 'g'), '_');
-
- return new Promise((resolve, reject) => {
- try {
- let config_file = internalNginx.getConfigName(host_type, typeof host === 'undefined' ? 0 : host.id);
-
- if (debug_mode) {
- logger.warn('Deleting nginx config: ' + config_file);
- }
-
- fs.unlinkSync(config_file);
- } catch (err) {
- if (debug_mode) {
- logger.warn('Could not delete config:', err.message);
- }
-
- if (throw_errors) {
- reject(err);
- }
- }
-
- resolve();
- });
- },
-
- /**
- * @param {String} host_type
- * @param {Array} hosts
- * @returns {Promise}
- */
- bulkGenerateConfigs: (host_type, hosts) => {
- let promises = [];
- hosts.map(function (host) {
- promises.push(internalNginx.generateConfig(host_type, host));
- });
-
- return Promise.all(promises);
- },
-
- /**
- * @param {String} host_type
- * @param {Array} hosts
- * @param {Boolean} [throw_errors]
- * @returns {Promise}
- */
- bulkDeleteConfigs: (host_type, hosts, throw_errors) => {
- let promises = [];
- hosts.map(function (host) {
- promises.push(internalNginx.deleteConfig(host_type, host, throw_errors));
- });
-
- return Promise.all(promises);
- },
-
- /**
- * @param {string} config
- * @returns {boolean}
- */
- advancedConfigHasDefaultLocation: function (config) {
- return !!config.match(/^(?:.*;)?\s*?location\s*?\/\s*?{/im);
- },
-
- /**
- * @returns {boolean}
- */
- ipv6Enabled: function () {
- if (typeof process.env.DISABLE_IPV6 !== 'undefined') {
- const disabled = process.env.DISABLE_IPV6.toLowerCase();
- return !(disabled === 'on' || disabled === 'true' || disabled === '1' || disabled === 'yes');
- }
-
- return true;
- }
-};
-
-module.exports = internalNginx;
diff --git a/backend/internal/nginx/templates.go b/backend/internal/nginx/templates.go
new file mode 100644
index 00000000..3c87b2b2
--- /dev/null
+++ b/backend/internal/nginx/templates.go
@@ -0,0 +1,31 @@
+package nginx
+
+import (
+ "io/fs"
+ "io/ioutil"
+
+ "npm/embed"
+
+ "github.com/aymerick/raymond"
+)
+
+// WriteTemplate will load, parse and write a template file
+func WriteTemplate(templateName, outputFilename string, data map[string]interface{}) error {
+ // get template file content
+ subFs, _ := fs.Sub(embed.NginxFiles, "nginx")
+ template, err := fs.ReadFile(subFs, templateName)
+
+ if err != nil {
+ return err
+ }
+
+ // Render
+ parsedFile, err := raymond.Render(string(template), data)
+ if err != nil {
+ return err
+ }
+
+ // Write it
+ // nolint: gosec
+ return ioutil.WriteFile(outputFilename, []byte(parsedFile), 0644)
+}
diff --git a/backend/internal/proxy-host.js b/backend/internal/proxy-host.js
deleted file mode 100644
index 09b8bca5..00000000
--- a/backend/internal/proxy-host.js
+++ /dev/null
@@ -1,466 +0,0 @@
-const _ = require('lodash');
-const error = require('../lib/error');
-const proxyHostModel = require('../models/proxy_host');
-const internalHost = require('./host');
-const internalNginx = require('./nginx');
-const internalAuditLog = require('./audit-log');
-const internalCertificate = require('./certificate');
-
-function omissions () {
- return ['is_deleted'];
-}
-
-const internalProxyHost = {
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @returns {Promise}
- */
- create: (access, data) => {
- let create_certificate = data.certificate_id === 'new';
-
- if (create_certificate) {
- delete data.certificate_id;
- }
-
- return access.can('proxy_hosts:create', data)
- .then(() => {
- // Get a list of the domain names and check each of them against existing records
- let domain_name_check_promises = [];
-
- data.domain_names.map(function (domain_name) {
- domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name));
- });
-
- return Promise.all(domain_name_check_promises)
- .then((check_results) => {
- check_results.map(function (result) {
- if (result.is_taken) {
- throw new error.ValidationError(result.hostname + ' is already in use');
- }
- });
- });
- })
- .then(() => {
- // At this point the domains should have been checked
- data.owner_user_id = access.token.getUserId(1);
- data = internalHost.cleanSslHstsData(data);
-
- return proxyHostModel
- .query()
- .omit(omissions())
- .insertAndFetch(data);
- })
- .then((row) => {
- if (create_certificate) {
- return internalCertificate.createQuickCertificate(access, data)
- .then((cert) => {
- // update host with cert id
- return internalProxyHost.update(access, {
- id: row.id,
- certificate_id: cert.id
- });
- })
- .then(() => {
- return row;
- });
- } else {
- return row;
- }
- })
- .then((row) => {
- // re-fetch with cert
- return internalProxyHost.get(access, {
- id: row.id,
- expand: ['certificate', 'owner', 'access_list.[clients,items]']
- });
- })
- .then((row) => {
- // Configure nginx
- return internalNginx.configure(proxyHostModel, 'proxy_host', row)
- .then(() => {
- return row;
- });
- })
- .then((row) => {
- // Audit log
- data.meta = _.assign({}, data.meta || {}, row.meta);
-
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'created',
- object_type: 'proxy-host',
- object_id: row.id,
- meta: data
- })
- .then(() => {
- return row;
- });
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @return {Promise}
- */
- update: (access, data) => {
- let create_certificate = data.certificate_id === 'new';
-
- if (create_certificate) {
- delete data.certificate_id;
- }
-
- return access.can('proxy_hosts:update', data.id)
- .then((/*access_data*/) => {
- // Get a list of the domain names and check each of them against existing records
- let domain_name_check_promises = [];
-
- if (typeof data.domain_names !== 'undefined') {
- data.domain_names.map(function (domain_name) {
- domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name, 'proxy', data.id));
- });
-
- return Promise.all(domain_name_check_promises)
- .then((check_results) => {
- check_results.map(function (result) {
- if (result.is_taken) {
- throw new error.ValidationError(result.hostname + ' is already in use');
- }
- });
- });
- }
- })
- .then(() => {
- return internalProxyHost.get(access, {id: data.id});
- })
- .then((row) => {
- if (row.id !== data.id) {
- // Sanity check that something crazy hasn't happened
- throw new error.InternalValidationError('Proxy Host could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
- }
-
- if (create_certificate) {
- return internalCertificate.createQuickCertificate(access, {
- domain_names: data.domain_names || row.domain_names,
- meta: _.assign({}, row.meta, data.meta)
- })
- .then((cert) => {
- // update host with cert id
- data.certificate_id = cert.id;
- })
- .then(() => {
- return row;
- });
- } else {
- return row;
- }
- })
- .then((row) => {
- // Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
- data = _.assign({}, {
- domain_names: row.domain_names
- }, data);
-
- data = internalHost.cleanSslHstsData(data, row);
-
- return proxyHostModel
- .query()
- .where({id: data.id})
- .patch(data)
- .then((saved_row) => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'updated',
- object_type: 'proxy-host',
- object_id: row.id,
- meta: data
- })
- .then(() => {
- return _.omit(saved_row, omissions());
- });
- });
- })
- .then(() => {
- return internalProxyHost.get(access, {
- id: data.id,
- expand: ['owner', 'certificate', 'access_list.[clients,items]']
- })
- .then((row) => {
- if (!row.enabled) {
- // No need to add nginx config if host is disabled
- return row;
- }
- // Configure nginx
- return internalNginx.configure(proxyHostModel, 'proxy_host', row)
- .then((new_meta) => {
- row.meta = new_meta;
- row = internalHost.cleanRowCertificateMeta(row);
- return _.omit(row, omissions());
- });
- });
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {Array} [data.expand]
- * @param {Array} [data.omit]
- * @return {Promise}
- */
- get: (access, data) => {
- if (typeof data === 'undefined') {
- data = {};
- }
-
- return access.can('proxy_hosts:get', data.id)
- .then((access_data) => {
- let query = proxyHostModel
- .query()
- .where('is_deleted', 0)
- .andWhere('id', data.id)
- .allowEager('[owner,access_list,access_list.[clients,items],certificate]')
- .first();
-
- if (access_data.permission_visibility !== 'all') {
- query.andWhere('owner_user_id', access.token.getUserId(1));
- }
-
- // Custom omissions
- if (typeof data.omit !== 'undefined' && data.omit !== null) {
- query.omit(data.omit);
- }
-
- if (typeof data.expand !== 'undefined' && data.expand !== null) {
- query.eager('[' + data.expand.join(', ') + ']');
- }
-
- return query;
- })
- .then((row) => {
- if (row) {
- row = internalHost.cleanRowCertificateMeta(row);
- return _.omit(row, omissions());
- } else {
- throw new error.ItemNotFoundError(data.id);
- }
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {String} [data.reason]
- * @returns {Promise}
- */
- delete: (access, data) => {
- return access.can('proxy_hosts:delete', data.id)
- .then(() => {
- return internalProxyHost.get(access, {id: data.id});
- })
- .then((row) => {
- if (!row) {
- throw new error.ItemNotFoundError(data.id);
- }
-
- return proxyHostModel
- .query()
- .where('id', row.id)
- .patch({
- is_deleted: 1
- })
- .then(() => {
- // Delete Nginx Config
- return internalNginx.deleteConfig('proxy_host', row)
- .then(() => {
- return internalNginx.reload();
- });
- })
- .then(() => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'deleted',
- object_type: 'proxy-host',
- object_id: row.id,
- meta: _.omit(row, omissions())
- });
- });
- })
- .then(() => {
- return true;
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {String} [data.reason]
- * @returns {Promise}
- */
- enable: (access, data) => {
- return access.can('proxy_hosts:update', data.id)
- .then(() => {
- return internalProxyHost.get(access, {
- id: data.id,
- expand: ['certificate', 'owner', 'access_list']
- });
- })
- .then((row) => {
- if (!row) {
- throw new error.ItemNotFoundError(data.id);
- } else if (row.enabled) {
- throw new error.ValidationError('Host is already enabled');
- }
-
- row.enabled = 1;
-
- return proxyHostModel
- .query()
- .where('id', row.id)
- .patch({
- enabled: 1
- })
- .then(() => {
- // Configure nginx
- return internalNginx.configure(proxyHostModel, 'proxy_host', row);
- })
- .then(() => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'enabled',
- object_type: 'proxy-host',
- object_id: row.id,
- meta: _.omit(row, omissions())
- });
- });
- })
- .then(() => {
- return true;
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {String} [data.reason]
- * @returns {Promise}
- */
- disable: (access, data) => {
- return access.can('proxy_hosts:update', data.id)
- .then(() => {
- return internalProxyHost.get(access, {id: data.id});
- })
- .then((row) => {
- if (!row) {
- throw new error.ItemNotFoundError(data.id);
- } else if (!row.enabled) {
- throw new error.ValidationError('Host is already disabled');
- }
-
- row.enabled = 0;
-
- return proxyHostModel
- .query()
- .where('id', row.id)
- .patch({
- enabled: 0
- })
- .then(() => {
- // Delete Nginx Config
- return internalNginx.deleteConfig('proxy_host', row)
- .then(() => {
- return internalNginx.reload();
- });
- })
- .then(() => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'disabled',
- object_type: 'proxy-host',
- object_id: row.id,
- meta: _.omit(row, omissions())
- });
- });
- })
- .then(() => {
- return true;
- });
- },
-
- /**
- * All Hosts
- *
- * @param {Access} access
- * @param {Array} [expand]
- * @param {String} [search_query]
- * @returns {Promise}
- */
- getAll: (access, expand, search_query) => {
- return access.can('proxy_hosts:list')
- .then((access_data) => {
- let query = proxyHostModel
- .query()
- .where('is_deleted', 0)
- .groupBy('id')
- .omit(['is_deleted'])
- .allowEager('[owner,access_list,certificate]')
- .orderBy('domain_names', 'ASC');
-
- if (access_data.permission_visibility !== 'all') {
- query.andWhere('owner_user_id', access.token.getUserId(1));
- }
-
- // Query is used for searching
- if (typeof search_query === 'string') {
- query.where(function () {
- this.where('domain_names', 'like', '%' + search_query + '%');
- });
- }
-
- if (typeof expand !== 'undefined' && expand !== null) {
- query.eager('[' + expand.join(', ') + ']');
- }
-
- return query;
- })
- .then((rows) => {
- if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) {
- return internalHost.cleanAllRowsCertificateMeta(rows);
- }
-
- return rows;
- });
- },
-
- /**
- * Report use
- *
- * @param {Number} user_id
- * @param {String} visibility
- * @returns {Promise}
- */
- getCount: (user_id, visibility) => {
- let query = proxyHostModel
- .query()
- .count('id as count')
- .where('is_deleted', 0);
-
- if (visibility !== 'all') {
- query.andWhere('owner_user_id', user_id);
- }
-
- return query.first()
- .then((row) => {
- return parseInt(row.count, 10);
- });
- }
-};
-
-module.exports = internalProxyHost;
diff --git a/backend/internal/redirection-host.js b/backend/internal/redirection-host.js
deleted file mode 100644
index f22c3668..00000000
--- a/backend/internal/redirection-host.js
+++ /dev/null
@@ -1,461 +0,0 @@
-const _ = require('lodash');
-const error = require('../lib/error');
-const redirectionHostModel = require('../models/redirection_host');
-const internalHost = require('./host');
-const internalNginx = require('./nginx');
-const internalAuditLog = require('./audit-log');
-const internalCertificate = require('./certificate');
-
-function omissions () {
- return ['is_deleted'];
-}
-
-const internalRedirectionHost = {
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @returns {Promise}
- */
- create: (access, data) => {
- let create_certificate = data.certificate_id === 'new';
-
- if (create_certificate) {
- delete data.certificate_id;
- }
-
- return access.can('redirection_hosts:create', data)
- .then((/*access_data*/) => {
- // Get a list of the domain names and check each of them against existing records
- let domain_name_check_promises = [];
-
- data.domain_names.map(function (domain_name) {
- domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name));
- });
-
- return Promise.all(domain_name_check_promises)
- .then((check_results) => {
- check_results.map(function (result) {
- if (result.is_taken) {
- throw new error.ValidationError(result.hostname + ' is already in use');
- }
- });
- });
- })
- .then(() => {
- // At this point the domains should have been checked
- data.owner_user_id = access.token.getUserId(1);
- data = internalHost.cleanSslHstsData(data);
-
- return redirectionHostModel
- .query()
- .omit(omissions())
- .insertAndFetch(data);
- })
- .then((row) => {
- if (create_certificate) {
- return internalCertificate.createQuickCertificate(access, data)
- .then((cert) => {
- // update host with cert id
- return internalRedirectionHost.update(access, {
- id: row.id,
- certificate_id: cert.id
- });
- })
- .then(() => {
- return row;
- });
- } else {
- return row;
- }
- })
- .then((row) => {
- // re-fetch with cert
- return internalRedirectionHost.get(access, {
- id: row.id,
- expand: ['certificate', 'owner']
- });
- })
- .then((row) => {
- // Configure nginx
- return internalNginx.configure(redirectionHostModel, 'redirection_host', row)
- .then(() => {
- return row;
- });
- })
- .then((row) => {
- data.meta = _.assign({}, data.meta || {}, row.meta);
-
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'created',
- object_type: 'redirection-host',
- object_id: row.id,
- meta: data
- })
- .then(() => {
- return row;
- });
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @return {Promise}
- */
- update: (access, data) => {
- let create_certificate = data.certificate_id === 'new';
-
- if (create_certificate) {
- delete data.certificate_id;
- }
-
- return access.can('redirection_hosts:update', data.id)
- .then((/*access_data*/) => {
- // Get a list of the domain names and check each of them against existing records
- let domain_name_check_promises = [];
-
- if (typeof data.domain_names !== 'undefined') {
- data.domain_names.map(function (domain_name) {
- domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name, 'redirection', data.id));
- });
-
- return Promise.all(domain_name_check_promises)
- .then((check_results) => {
- check_results.map(function (result) {
- if (result.is_taken) {
- throw new error.ValidationError(result.hostname + ' is already in use');
- }
- });
- });
- }
- })
- .then(() => {
- return internalRedirectionHost.get(access, {id: data.id});
- })
- .then((row) => {
- if (row.id !== data.id) {
- // Sanity check that something crazy hasn't happened
- throw new error.InternalValidationError('Redirection Host could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
- }
-
- if (create_certificate) {
- return internalCertificate.createQuickCertificate(access, {
- domain_names: data.domain_names || row.domain_names,
- meta: _.assign({}, row.meta, data.meta)
- })
- .then((cert) => {
- // update host with cert id
- data.certificate_id = cert.id;
- })
- .then(() => {
- return row;
- });
- } else {
- return row;
- }
- })
- .then((row) => {
- // Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
- data = _.assign({}, {
- domain_names: row.domain_names
- }, data);
-
- data = internalHost.cleanSslHstsData(data, row);
-
- return redirectionHostModel
- .query()
- .where({id: data.id})
- .patch(data)
- .then((saved_row) => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'updated',
- object_type: 'redirection-host',
- object_id: row.id,
- meta: data
- })
- .then(() => {
- return _.omit(saved_row, omissions());
- });
- });
- })
- .then(() => {
- return internalRedirectionHost.get(access, {
- id: data.id,
- expand: ['owner', 'certificate']
- })
- .then((row) => {
- // Configure nginx
- return internalNginx.configure(redirectionHostModel, 'redirection_host', row)
- .then((new_meta) => {
- row.meta = new_meta;
- row = internalHost.cleanRowCertificateMeta(row);
- return _.omit(row, omissions());
- });
- });
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {Array} [data.expand]
- * @param {Array} [data.omit]
- * @return {Promise}
- */
- get: (access, data) => {
- if (typeof data === 'undefined') {
- data = {};
- }
-
- return access.can('redirection_hosts:get', data.id)
- .then((access_data) => {
- let query = redirectionHostModel
- .query()
- .where('is_deleted', 0)
- .andWhere('id', data.id)
- .allowEager('[owner,certificate]')
- .first();
-
- if (access_data.permission_visibility !== 'all') {
- query.andWhere('owner_user_id', access.token.getUserId(1));
- }
-
- // Custom omissions
- if (typeof data.omit !== 'undefined' && data.omit !== null) {
- query.omit(data.omit);
- }
-
- if (typeof data.expand !== 'undefined' && data.expand !== null) {
- query.eager('[' + data.expand.join(', ') + ']');
- }
-
- return query;
- })
- .then((row) => {
- if (row) {
- row = internalHost.cleanRowCertificateMeta(row);
- return _.omit(row, omissions());
- } else {
- throw new error.ItemNotFoundError(data.id);
- }
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {String} [data.reason]
- * @returns {Promise}
- */
- delete: (access, data) => {
- return access.can('redirection_hosts:delete', data.id)
- .then(() => {
- return internalRedirectionHost.get(access, {id: data.id});
- })
- .then((row) => {
- if (!row) {
- throw new error.ItemNotFoundError(data.id);
- }
-
- return redirectionHostModel
- .query()
- .where('id', row.id)
- .patch({
- is_deleted: 1
- })
- .then(() => {
- // Delete Nginx Config
- return internalNginx.deleteConfig('redirection_host', row)
- .then(() => {
- return internalNginx.reload();
- });
- })
- .then(() => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'deleted',
- object_type: 'redirection-host',
- object_id: row.id,
- meta: _.omit(row, omissions())
- });
- });
- })
- .then(() => {
- return true;
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {String} [data.reason]
- * @returns {Promise}
- */
- enable: (access, data) => {
- return access.can('redirection_hosts:update', data.id)
- .then(() => {
- return internalRedirectionHost.get(access, {
- id: data.id,
- expand: ['certificate', 'owner']
- });
- })
- .then((row) => {
- if (!row) {
- throw new error.ItemNotFoundError(data.id);
- } else if (row.enabled) {
- throw new error.ValidationError('Host is already enabled');
- }
-
- row.enabled = 1;
-
- return redirectionHostModel
- .query()
- .where('id', row.id)
- .patch({
- enabled: 1
- })
- .then(() => {
- // Configure nginx
- return internalNginx.configure(redirectionHostModel, 'redirection_host', row);
- })
- .then(() => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'enabled',
- object_type: 'redirection-host',
- object_id: row.id,
- meta: _.omit(row, omissions())
- });
- });
- })
- .then(() => {
- return true;
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {String} [data.reason]
- * @returns {Promise}
- */
- disable: (access, data) => {
- return access.can('redirection_hosts:update', data.id)
- .then(() => {
- return internalRedirectionHost.get(access, {id: data.id});
- })
- .then((row) => {
- if (!row) {
- throw new error.ItemNotFoundError(data.id);
- } else if (!row.enabled) {
- throw new error.ValidationError('Host is already disabled');
- }
-
- row.enabled = 0;
-
- return redirectionHostModel
- .query()
- .where('id', row.id)
- .patch({
- enabled: 0
- })
- .then(() => {
- // Delete Nginx Config
- return internalNginx.deleteConfig('redirection_host', row)
- .then(() => {
- return internalNginx.reload();
- });
- })
- .then(() => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'disabled',
- object_type: 'redirection-host',
- object_id: row.id,
- meta: _.omit(row, omissions())
- });
- });
- })
- .then(() => {
- return true;
- });
- },
-
- /**
- * All Hosts
- *
- * @param {Access} access
- * @param {Array} [expand]
- * @param {String} [search_query]
- * @returns {Promise}
- */
- getAll: (access, expand, search_query) => {
- return access.can('redirection_hosts:list')
- .then((access_data) => {
- let query = redirectionHostModel
- .query()
- .where('is_deleted', 0)
- .groupBy('id')
- .omit(['is_deleted'])
- .allowEager('[owner,certificate]')
- .orderBy('domain_names', 'ASC');
-
- if (access_data.permission_visibility !== 'all') {
- query.andWhere('owner_user_id', access.token.getUserId(1));
- }
-
- // Query is used for searching
- if (typeof search_query === 'string') {
- query.where(function () {
- this.where('domain_names', 'like', '%' + search_query + '%');
- });
- }
-
- if (typeof expand !== 'undefined' && expand !== null) {
- query.eager('[' + expand.join(', ') + ']');
- }
-
- return query;
- })
- .then((rows) => {
- if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) {
- return internalHost.cleanAllRowsCertificateMeta(rows);
- }
-
- return rows;
- });
- },
-
- /**
- * Report use
- *
- * @param {Number} user_id
- * @param {String} visibility
- * @returns {Promise}
- */
- getCount: (user_id, visibility) => {
- let query = redirectionHostModel
- .query()
- .count('id as count')
- .where('is_deleted', 0);
-
- if (visibility !== 'all') {
- query.andWhere('owner_user_id', user_id);
- }
-
- return query.first()
- .then((row) => {
- return parseInt(row.count, 10);
- });
- }
-};
-
-module.exports = internalRedirectionHost;
diff --git a/backend/internal/report.js b/backend/internal/report.js
deleted file mode 100644
index 4dde659b..00000000
--- a/backend/internal/report.js
+++ /dev/null
@@ -1,38 +0,0 @@
-const internalProxyHost = require('./proxy-host');
-const internalRedirectionHost = require('./redirection-host');
-const internalDeadHost = require('./dead-host');
-const internalStream = require('./stream');
-
-const internalReport = {
-
- /**
- * @param {Access} access
- * @return {Promise}
- */
- getHostsReport: (access) => {
- return access.can('reports:hosts', 1)
- .then((access_data) => {
- let user_id = access.token.getUserId(1);
-
- let promises = [
- internalProxyHost.getCount(user_id, access_data.visibility),
- internalRedirectionHost.getCount(user_id, access_data.visibility),
- internalStream.getCount(user_id, access_data.visibility),
- internalDeadHost.getCount(user_id, access_data.visibility)
- ];
-
- return Promise.all(promises);
- })
- .then((counts) => {
- return {
- proxy: counts.shift(),
- redirection: counts.shift(),
- stream: counts.shift(),
- dead: counts.shift()
- };
- });
-
- }
-};
-
-module.exports = internalReport;
diff --git a/backend/internal/setting.js b/backend/internal/setting.js
deleted file mode 100644
index d4ac67d8..00000000
--- a/backend/internal/setting.js
+++ /dev/null
@@ -1,133 +0,0 @@
-const fs = require('fs');
-const error = require('../lib/error');
-const settingModel = require('../models/setting');
-const internalNginx = require('./nginx');
-
-const internalSetting = {
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {String} data.id
- * @return {Promise}
- */
- update: (access, data) => {
- return access.can('settings:update', data.id)
- .then((/*access_data*/) => {
- return internalSetting.get(access, {id: data.id});
- })
- .then((row) => {
- if (row.id !== data.id) {
- // Sanity check that something crazy hasn't happened
- throw new error.InternalValidationError('Setting could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
- }
-
- return settingModel
- .query()
- .where({id: data.id})
- .patch(data);
- })
- .then(() => {
- return internalSetting.get(access, {
- id: data.id
- });
- })
- .then((row) => {
- if (row.id === 'default-site') {
- // write the html if we need to
- if (row.value === 'html') {
- fs.writeFileSync('/data/nginx/default_www/index.html', row.meta.html, {encoding: 'utf8'});
- }
-
- // Configure nginx
- return internalNginx.deleteConfig('default')
- .then(() => {
- return internalNginx.generateConfig('default', row);
- })
- .then(() => {
- return internalNginx.test();
- })
- .then(() => {
- return internalNginx.reload();
- })
- .then(() => {
- return row;
- })
- .catch((/*err*/) => {
- internalNginx.deleteConfig('default')
- .then(() => {
- return internalNginx.test();
- })
- .then(() => {
- return internalNginx.reload();
- })
- .then(() => {
- // I'm being slack here I know..
- throw new error.ValidationError('Could not reconfigure Nginx. Please check logs.');
- });
- });
- } else {
- return row;
- }
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {String} data.id
- * @return {Promise}
- */
- get: (access, data) => {
- return access.can('settings:get', data.id)
- .then(() => {
- return settingModel
- .query()
- .where('id', data.id)
- .first();
- })
- .then((row) => {
- if (row) {
- return row;
- } else {
- throw new error.ItemNotFoundError(data.id);
- }
- });
- },
-
- /**
- * This will only count the settings
- *
- * @param {Access} access
- * @returns {*}
- */
- getCount: (access) => {
- return access.can('settings:list')
- .then(() => {
- return settingModel
- .query()
- .count('id as count')
- .first();
- })
- .then((row) => {
- return parseInt(row.count, 10);
- });
- },
-
- /**
- * All settings
- *
- * @param {Access} access
- * @returns {Promise}
- */
- getAll: (access) => {
- return access.can('settings:list')
- .then(() => {
- return settingModel
- .query()
- .orderBy('description', 'ASC');
- });
- }
-};
-
-module.exports = internalSetting;
diff --git a/backend/internal/state/state.go b/backend/internal/state/state.go
new file mode 100644
index 00000000..0fa1433f
--- /dev/null
+++ b/backend/internal/state/state.go
@@ -0,0 +1,31 @@
+package state
+
+import (
+ "sync"
+)
+
+// AppState holds pointers to channels and waitGroups
+// shared by all goroutines of the application
+type AppState struct {
+ waitGroup sync.WaitGroup
+ termSig chan bool
+}
+
+// NewState creates a new app state
+func NewState() *AppState {
+ state := &AppState{
+ // buffered channel
+ termSig: make(chan bool, 1),
+ }
+ return state
+}
+
+// GetWaitGroup returns the state's wg
+func (state *AppState) GetWaitGroup() *sync.WaitGroup {
+ return &state.waitGroup
+}
+
+// GetTermSig returns the state's term signal
+func (state *AppState) GetTermSig() chan bool {
+ return state.termSig
+}
diff --git a/backend/internal/stream.js b/backend/internal/stream.js
deleted file mode 100644
index 9c458a10..00000000
--- a/backend/internal/stream.js
+++ /dev/null
@@ -1,348 +0,0 @@
-const _ = require('lodash');
-const error = require('../lib/error');
-const streamModel = require('../models/stream');
-const internalNginx = require('./nginx');
-const internalAuditLog = require('./audit-log');
-
-function omissions () {
- return ['is_deleted'];
-}
-
-const internalStream = {
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @returns {Promise}
- */
- create: (access, data) => {
- return access.can('streams:create', data)
- .then((/*access_data*/) => {
- // TODO: At this point the existing ports should have been checked
- data.owner_user_id = access.token.getUserId(1);
-
- if (typeof data.meta === 'undefined') {
- data.meta = {};
- }
-
- return streamModel
- .query()
- .omit(omissions())
- .insertAndFetch(data);
- })
- .then((row) => {
- // Configure nginx
- return internalNginx.configure(streamModel, 'stream', row)
- .then(() => {
- return internalStream.get(access, {id: row.id, expand: ['owner']});
- });
- })
- .then((row) => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'created',
- object_type: 'stream',
- object_id: row.id,
- meta: data
- })
- .then(() => {
- return row;
- });
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @return {Promise}
- */
- update: (access, data) => {
- return access.can('streams:update', data.id)
- .then((/*access_data*/) => {
- // TODO: at this point the existing streams should have been checked
- return internalStream.get(access, {id: data.id});
- })
- .then((row) => {
- if (row.id !== data.id) {
- // Sanity check that something crazy hasn't happened
- throw new error.InternalValidationError('Stream could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
- }
-
- return streamModel
- .query()
- .omit(omissions())
- .patchAndFetchById(row.id, data)
- .then((saved_row) => {
- return internalNginx.configure(streamModel, 'stream', saved_row)
- .then(() => {
- return internalStream.get(access, {id: row.id, expand: ['owner']});
- });
- })
- .then((saved_row) => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'updated',
- object_type: 'stream',
- object_id: row.id,
- meta: data
- })
- .then(() => {
- return _.omit(saved_row, omissions());
- });
- });
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {Array} [data.expand]
- * @param {Array} [data.omit]
- * @return {Promise}
- */
- get: (access, data) => {
- if (typeof data === 'undefined') {
- data = {};
- }
-
- return access.can('streams:get', data.id)
- .then((access_data) => {
- let query = streamModel
- .query()
- .where('is_deleted', 0)
- .andWhere('id', data.id)
- .allowEager('[owner]')
- .first();
-
- if (access_data.permission_visibility !== 'all') {
- query.andWhere('owner_user_id', access.token.getUserId(1));
- }
-
- // Custom omissions
- if (typeof data.omit !== 'undefined' && data.omit !== null) {
- query.omit(data.omit);
- }
-
- if (typeof data.expand !== 'undefined' && data.expand !== null) {
- query.eager('[' + data.expand.join(', ') + ']');
- }
-
- return query;
- })
- .then((row) => {
- if (row) {
- return _.omit(row, omissions());
- } else {
- throw new error.ItemNotFoundError(data.id);
- }
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {String} [data.reason]
- * @returns {Promise}
- */
- delete: (access, data) => {
- return access.can('streams:delete', data.id)
- .then(() => {
- return internalStream.get(access, {id: data.id});
- })
- .then((row) => {
- if (!row) {
- throw new error.ItemNotFoundError(data.id);
- }
-
- return streamModel
- .query()
- .where('id', row.id)
- .patch({
- is_deleted: 1
- })
- .then(() => {
- // Delete Nginx Config
- return internalNginx.deleteConfig('stream', row)
- .then(() => {
- return internalNginx.reload();
- });
- })
- .then(() => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'deleted',
- object_type: 'stream',
- object_id: row.id,
- meta: _.omit(row, omissions())
- });
- });
- })
- .then(() => {
- return true;
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {String} [data.reason]
- * @returns {Promise}
- */
- enable: (access, data) => {
- return access.can('streams:update', data.id)
- .then(() => {
- return internalStream.get(access, {
- id: data.id,
- expand: ['owner']
- });
- })
- .then((row) => {
- if (!row) {
- throw new error.ItemNotFoundError(data.id);
- } else if (row.enabled) {
- throw new error.ValidationError('Host is already enabled');
- }
-
- row.enabled = 1;
-
- return streamModel
- .query()
- .where('id', row.id)
- .patch({
- enabled: 1
- })
- .then(() => {
- // Configure nginx
- return internalNginx.configure(streamModel, 'stream', row);
- })
- .then(() => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'enabled',
- object_type: 'stream',
- object_id: row.id,
- meta: _.omit(row, omissions())
- });
- });
- })
- .then(() => {
- return true;
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {String} [data.reason]
- * @returns {Promise}
- */
- disable: (access, data) => {
- return access.can('streams:update', data.id)
- .then(() => {
- return internalStream.get(access, {id: data.id});
- })
- .then((row) => {
- if (!row) {
- throw new error.ItemNotFoundError(data.id);
- } else if (!row.enabled) {
- throw new error.ValidationError('Host is already disabled');
- }
-
- row.enabled = 0;
-
- return streamModel
- .query()
- .where('id', row.id)
- .patch({
- enabled: 0
- })
- .then(() => {
- // Delete Nginx Config
- return internalNginx.deleteConfig('stream', row)
- .then(() => {
- return internalNginx.reload();
- });
- })
- .then(() => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'disabled',
- object_type: 'stream-host',
- object_id: row.id,
- meta: _.omit(row, omissions())
- });
- });
- })
- .then(() => {
- return true;
- });
- },
-
- /**
- * All Streams
- *
- * @param {Access} access
- * @param {Array} [expand]
- * @param {String} [search_query]
- * @returns {Promise}
- */
- getAll: (access, expand, search_query) => {
- return access.can('streams:list')
- .then((access_data) => {
- let query = streamModel
- .query()
- .where('is_deleted', 0)
- .groupBy('id')
- .omit(['is_deleted'])
- .allowEager('[owner]')
- .orderBy('incoming_port', 'ASC');
-
- if (access_data.permission_visibility !== 'all') {
- query.andWhere('owner_user_id', access.token.getUserId(1));
- }
-
- // Query is used for searching
- if (typeof search_query === 'string') {
- query.where(function () {
- this.where('incoming_port', 'like', '%' + search_query + '%');
- });
- }
-
- if (typeof expand !== 'undefined' && expand !== null) {
- query.eager('[' + expand.join(', ') + ']');
- }
-
- return query;
- });
- },
-
- /**
- * Report use
- *
- * @param {Number} user_id
- * @param {String} visibility
- * @returns {Promise}
- */
- getCount: (user_id, visibility) => {
- let query = streamModel
- .query()
- .count('id as count')
- .where('is_deleted', 0);
-
- if (visibility !== 'all') {
- query.andWhere('owner_user_id', user_id);
- }
-
- return query.first()
- .then((row) => {
- return parseInt(row.count, 10);
- });
- }
-};
-
-module.exports = internalStream;
diff --git a/backend/internal/token.js b/backend/internal/token.js
deleted file mode 100644
index a64b9010..00000000
--- a/backend/internal/token.js
+++ /dev/null
@@ -1,162 +0,0 @@
-const _ = require('lodash');
-const error = require('../lib/error');
-const userModel = require('../models/user');
-const authModel = require('../models/auth');
-const helpers = require('../lib/helpers');
-const TokenModel = require('../models/token');
-
-module.exports = {
-
- /**
- * @param {Object} data
- * @param {String} data.identity
- * @param {String} data.secret
- * @param {String} [data.scope]
- * @param {String} [data.expiry]
- * @param {String} [issuer]
- * @returns {Promise}
- */
- getTokenFromEmail: (data, issuer) => {
- let Token = new TokenModel();
-
- data.scope = data.scope || 'user';
- data.expiry = data.expiry || '1d';
-
- return userModel
- .query()
- .where('email', data.identity)
- .andWhere('is_deleted', 0)
- .andWhere('is_disabled', 0)
- .first()
- .then((user) => {
- if (user) {
- // Get auth
- return authModel
- .query()
- .where('user_id', '=', user.id)
- .where('type', '=', 'password')
- .first()
- .then((auth) => {
- if (auth) {
- return auth.verifyPassword(data.secret)
- .then((valid) => {
- if (valid) {
-
- if (data.scope !== 'user' && _.indexOf(user.roles, data.scope) === -1) {
- // The scope requested doesn't exist as a role against the user,
- // you shall not pass.
- throw new error.AuthError('Invalid scope: ' + data.scope);
- }
-
- // Create a moment of the expiry expression
- let expiry = helpers.parseDatePeriod(data.expiry);
- if (expiry === null) {
- throw new error.AuthError('Invalid expiry time: ' + data.expiry);
- }
-
- return Token.create({
- iss: issuer || 'api',
- attrs: {
- id: user.id
- },
- scope: [data.scope],
- expiresIn: data.expiry
- })
- .then((signed) => {
- return {
- token: signed.token,
- expires: expiry.toISOString()
- };
- });
- } else {
- throw new error.AuthError('Invalid password');
- }
- });
- } else {
- throw new error.AuthError('No password auth for user');
- }
- });
- } else {
- throw new error.AuthError('No relevant user found');
- }
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} [data]
- * @param {String} [data.expiry]
- * @param {String} [data.scope] Only considered if existing token scope is admin
- * @returns {Promise}
- */
- getFreshToken: (access, data) => {
- let Token = new TokenModel();
-
- data = data || {};
- data.expiry = data.expiry || '1d';
-
- if (access && access.token.getUserId(0)) {
-
- // Create a moment of the expiry expression
- let expiry = helpers.parseDatePeriod(data.expiry);
- if (expiry === null) {
- throw new error.AuthError('Invalid expiry time: ' + data.expiry);
- }
-
- let token_attrs = {
- id: access.token.getUserId(0)
- };
-
- // Only admins can request otherwise scoped tokens
- let scope = access.token.get('scope');
- if (data.scope && access.token.hasScope('admin')) {
- scope = [data.scope];
-
- if (data.scope === 'job-board' || data.scope === 'worker') {
- token_attrs.id = 0;
- }
- }
-
- return Token.create({
- iss: 'api',
- scope: scope,
- attrs: token_attrs,
- expiresIn: data.expiry
- })
- .then((signed) => {
- return {
- token: signed.token,
- expires: expiry.toISOString()
- };
- });
- } else {
- throw new error.AssertionFailedError('Existing token contained invalid user data');
- }
- },
-
- /**
- * @param {Object} user
- * @returns {Promise}
- */
- getTokenFromUser: (user) => {
- const expire = '1d';
- const Token = new TokenModel();
- const expiry = helpers.parseDatePeriod(expire);
-
- return Token.create({
- iss: 'api',
- attrs: {
- id: user.id
- },
- scope: ['user'],
- expiresIn: expire
- })
- .then((signed) => {
- return {
- token: signed.token,
- expires: expiry.toISOString(),
- user: user
- };
- });
- }
-};
diff --git a/backend/internal/types/db_date.go b/backend/internal/types/db_date.go
new file mode 100644
index 00000000..9339868e
--- /dev/null
+++ b/backend/internal/types/db_date.go
@@ -0,0 +1,39 @@
+package types
+
+import (
+ "database/sql/driver"
+ "encoding/json"
+ "time"
+)
+
+// DBDate is a date time
+// type DBDate time.Time
+type DBDate struct {
+ Time time.Time
+}
+
+// Value encodes the type ready for the database
+func (d DBDate) Value() (driver.Value, error) {
+ return driver.Value(d.Time.Unix()), nil
+}
+
+// Scan takes data from the database and modifies it for Go Types
+func (d *DBDate) Scan(src interface{}) error {
+ d.Time = time.Unix(src.(int64), 0)
+ return nil
+}
+
+// UnmarshalJSON will unmarshal both database and post given values
+func (d *DBDate) UnmarshalJSON(data []byte) error {
+ var u int64
+ if err := json.Unmarshal(data, &u); err != nil {
+ return err
+ }
+ d.Time = time.Unix(u, 0)
+ return nil
+}
+
+// MarshalJSON will marshal for output in api responses
+func (d DBDate) MarshalJSON() ([]byte, error) {
+ return json.Marshal(d.Time.Unix())
+}
diff --git a/backend/internal/types/jsonb.go b/backend/internal/types/jsonb.go
new file mode 100644
index 00000000..ff3f7930
--- /dev/null
+++ b/backend/internal/types/jsonb.go
@@ -0,0 +1,71 @@
+package types
+
+import (
+ "database/sql/driver"
+ "encoding/json"
+ "fmt"
+)
+
+// JSONB can be anything
+type JSONB struct {
+	Encoded string `json:"encoded"`
+	Decoded interface{} `json:"decoded"`
+}
+
+// Value encodes the type ready for the database
+func (j JSONB) Value() (driver.Value, error) {
+ json, err := json.Marshal(j.Decoded)
+ return driver.Value(string(json)), err
+}
+
+// Scan takes data from the database and modifies it for Go Types
+func (j *JSONB) Scan(src interface{}) error {
+ var jsonb JSONB
+ var srcString string
+ switch v := src.(type) {
+ case string:
+ srcString = src.(string)
+ case []uint8:
+ srcString = string(src.([]uint8))
+ default:
+		return fmt.Errorf("incompatible type for JSONB: %v", v)
+ }
+
+ jsonb.Encoded = srcString
+
+ if err := json.Unmarshal([]byte(srcString), &jsonb.Decoded); err != nil {
+ return err
+ }
+
+ *j = jsonb
+ return nil
+}
+
+// UnmarshalJSON will unmarshal both database and post given values
+func (j *JSONB) UnmarshalJSON(data []byte) error {
+ var jsonb JSONB
+ jsonb.Encoded = string(data)
+ if err := json.Unmarshal(data, &jsonb.Decoded); err != nil {
+ return err
+ }
+ *j = jsonb
+ return nil
+}
+
+// MarshalJSON will marshal for output in api responses
+func (j JSONB) MarshalJSON() ([]byte, error) {
+ return json.Marshal(j.Decoded)
+}
+
+// AsStringArray will attempt to return as []string
+func (j JSONB) AsStringArray() ([]string, error) {
+ var strs []string
+
+ // Encode then Decode onto this type
+ b, _ := j.MarshalJSON()
+ if err := json.Unmarshal(b, &strs); err != nil {
+ return strs, err
+ }
+
+ return strs, nil
+}
diff --git a/backend/internal/types/nullable_db_date.go b/backend/internal/types/nullable_db_date.go
new file mode 100644
index 00000000..9a0022c2
--- /dev/null
+++ b/backend/internal/types/nullable_db_date.go
@@ -0,0 +1,54 @@
+package types
+
+import (
+ "database/sql/driver"
+ "encoding/json"
+ "time"
+)
+
+// NullableDBDate is a date time that can be null in the db
+// type DBDate time.Time
+type NullableDBDate struct {
+ Time *time.Time
+}
+
+// Value encodes the type ready for the database
+func (d NullableDBDate) Value() (driver.Value, error) {
+ if d.Time == nil {
+ return nil, nil
+ }
+ return driver.Value(d.Time.Unix()), nil
+}
+
+// Scan takes data from the database and modifies it for Go Types
+func (d *NullableDBDate) Scan(src interface{}) error {
+ var tme time.Time
+ if src != nil {
+ tme = time.Unix(src.(int64), 0)
+ }
+
+ d.Time = &tme
+ return nil
+}
+
+// UnmarshalJSON will unmarshal both database and post given values
+func (d *NullableDBDate) UnmarshalJSON(data []byte) error {
+ var t time.Time
+ var u int64
+ if err := json.Unmarshal(data, &u); err != nil {
+ d.Time = &t
+ return nil
+ }
+ t = time.Unix(u, 0)
+ d.Time = &t
+ return nil
+}
+
+// MarshalJSON will marshal for output in api responses
+func (d NullableDBDate) MarshalJSON() ([]byte, error) {
+ if d.Time == nil || d.Time.IsZero() {
+ return json.Marshal(nil)
+ }
+
+ return json.Marshal(d.Time.Unix())
+}
diff --git a/backend/internal/user.js b/backend/internal/user.js
deleted file mode 100644
index 2e2d8abf..00000000
--- a/backend/internal/user.js
+++ /dev/null
@@ -1,518 +0,0 @@
-const _ = require('lodash');
-const error = require('../lib/error');
-const userModel = require('../models/user');
-const userPermissionModel = require('../models/user_permission');
-const authModel = require('../models/auth');
-const gravatar = require('gravatar');
-const internalToken = require('./token');
-const internalAuditLog = require('./audit-log');
-
-function omissions () {
- return ['is_deleted'];
-}
-
-const internalUser = {
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @returns {Promise}
- */
- create: (access, data) => {
- let auth = data.auth || null;
- delete data.auth;
-
- data.avatar = data.avatar || '';
- data.roles = data.roles || [];
-
- if (typeof data.is_disabled !== 'undefined') {
- data.is_disabled = data.is_disabled ? 1 : 0;
- }
-
- return access.can('users:create', data)
- .then(() => {
- data.avatar = gravatar.url(data.email, {default: 'mm'});
-
- return userModel
- .query()
- .omit(omissions())
- .insertAndFetch(data);
- })
- .then((user) => {
- if (auth) {
- return authModel
- .query()
- .insert({
- user_id: user.id,
- type: auth.type,
- secret: auth.secret,
- meta: {}
- })
- .then(() => {
- return user;
- });
- } else {
- return user;
- }
- })
- .then((user) => {
- // Create permissions row as well
- let is_admin = data.roles.indexOf('admin') !== -1;
-
- return userPermissionModel
- .query()
- .insert({
- user_id: user.id,
- visibility: is_admin ? 'all' : 'user',
- proxy_hosts: 'manage',
- redirection_hosts: 'manage',
- dead_hosts: 'manage',
- streams: 'manage',
- access_lists: 'manage',
- certificates: 'manage'
- })
- .then(() => {
- return internalUser.get(access, {id: user.id, expand: ['permissions']});
- });
- })
- .then((user) => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'created',
- object_type: 'user',
- object_id: user.id,
- meta: user
- })
- .then(() => {
- return user;
- });
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Integer} data.id
- * @param {String} [data.email]
- * @param {String} [data.name]
- * @return {Promise}
- */
- update: (access, data) => {
- if (typeof data.is_disabled !== 'undefined') {
- data.is_disabled = data.is_disabled ? 1 : 0;
- }
-
- return access.can('users:update', data.id)
- .then(() => {
-
- // Make sure that the user being updated doesn't change their email to another user that is already using it
- // 1. get user we want to update
- return internalUser.get(access, {id: data.id})
- .then((user) => {
-
- // 2. if email is to be changed, find other users with that email
- if (typeof data.email !== 'undefined') {
- data.email = data.email.toLowerCase().trim();
-
- if (user.email !== data.email) {
- return internalUser.isEmailAvailable(data.email, data.id)
- .then((available) => {
- if (!available) {
- throw new error.ValidationError('Email address already in use - ' + data.email);
- }
-
- return user;
- });
- }
- }
-
- // No change to email:
- return user;
- });
- })
- .then((user) => {
- if (user.id !== data.id) {
- // Sanity check that something crazy hasn't happened
- throw new error.InternalValidationError('User could not be updated, IDs do not match: ' + user.id + ' !== ' + data.id);
- }
-
- data.avatar = gravatar.url(data.email || user.email, {default: 'mm'});
-
- return userModel
- .query()
- .omit(omissions())
- .patchAndFetchById(user.id, data)
- .then((saved_user) => {
- return _.omit(saved_user, omissions());
- });
- })
- .then(() => {
- return internalUser.get(access, {id: data.id});
- })
- .then((user) => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'updated',
- object_type: 'user',
- object_id: user.id,
- meta: data
- })
- .then(() => {
- return user;
- });
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} [data]
- * @param {Integer} [data.id] Defaults to the token user
- * @param {Array} [data.expand]
- * @param {Array} [data.omit]
- * @return {Promise}
- */
- get: (access, data) => {
- if (typeof data === 'undefined') {
- data = {};
- }
-
- if (typeof data.id === 'undefined' || !data.id) {
- data.id = access.token.getUserId(0);
- }
-
- return access.can('users:get', data.id)
- .then(() => {
- let query = userModel
- .query()
- .where('is_deleted', 0)
- .andWhere('id', data.id)
- .allowEager('[permissions]')
- .first();
-
- // Custom omissions
- if (typeof data.omit !== 'undefined' && data.omit !== null) {
- query.omit(data.omit);
- }
-
- if (typeof data.expand !== 'undefined' && data.expand !== null) {
- query.eager('[' + data.expand.join(', ') + ']');
- }
-
- return query;
- })
- .then((row) => {
- if (row) {
- return _.omit(row, omissions());
- } else {
- throw new error.ItemNotFoundError(data.id);
- }
- });
- },
-
- /**
- * Checks if an email address is available, but if a user_id is supplied, it will ignore checking
- * against that user.
- *
- * @param email
- * @param user_id
- */
- isEmailAvailable: (email, user_id) => {
- let query = userModel
- .query()
- .where('email', '=', email.toLowerCase().trim())
- .where('is_deleted', 0)
- .first();
-
- if (typeof user_id !== 'undefined') {
- query.where('id', '!=', user_id);
- }
-
- return query
- .then((user) => {
- return !user;
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Integer} data.id
- * @param {String} [data.reason]
- * @returns {Promise}
- */
- delete: (access, data) => {
- return access.can('users:delete', data.id)
- .then(() => {
- return internalUser.get(access, {id: data.id});
- })
- .then((user) => {
- if (!user) {
- throw new error.ItemNotFoundError(data.id);
- }
-
- // Make sure user can't delete themselves
- if (user.id === access.token.getUserId(0)) {
- throw new error.PermissionError('You cannot delete yourself.');
- }
-
- return userModel
- .query()
- .where('id', user.id)
- .patch({
- is_deleted: 1
- })
- .then(() => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'deleted',
- object_type: 'user',
- object_id: user.id,
- meta: _.omit(user, omissions())
- });
- });
- })
- .then(() => {
- return true;
- });
- },
-
- /**
- * This will only count the users
- *
- * @param {Access} access
- * @param {String} [search_query]
- * @returns {*}
- */
- getCount: (access, search_query) => {
- return access.can('users:list')
- .then(() => {
- let query = userModel
- .query()
- .count('id as count')
- .where('is_deleted', 0)
- .first();
-
- // Query is used for searching
- if (typeof search_query === 'string') {
- query.where(function () {
- this.where('user.name', 'like', '%' + search_query + '%')
- .orWhere('user.email', 'like', '%' + search_query + '%');
- });
- }
-
- return query;
- })
- .then((row) => {
- return parseInt(row.count, 10);
- });
- },
-
- /**
- * All users
- *
- * @param {Access} access
- * @param {Array} [expand]
- * @param {String} [search_query]
- * @returns {Promise}
- */
- getAll: (access, expand, search_query) => {
- return access.can('users:list')
- .then(() => {
- let query = userModel
- .query()
- .where('is_deleted', 0)
- .groupBy('id')
- .omit(['is_deleted'])
- .allowEager('[permissions]')
- .orderBy('name', 'ASC');
-
- // Query is used for searching
- if (typeof search_query === 'string') {
- query.where(function () {
- this.where('name', 'like', '%' + search_query + '%')
- .orWhere('email', 'like', '%' + search_query + '%');
- });
- }
-
- if (typeof expand !== 'undefined' && expand !== null) {
- query.eager('[' + expand.join(', ') + ']');
- }
-
- return query;
- });
- },
-
- /**
- * @param {Access} access
- * @param {Integer} [id_requested]
- * @returns {[String]}
- */
- getUserOmisionsByAccess: (access, id_requested) => {
- let response = []; // Admin response
-
- if (!access.token.hasScope('admin') && access.token.getUserId(0) !== id_requested) {
- response = ['roles', 'is_deleted']; // Restricted response
- }
-
- return response;
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Integer} data.id
- * @param {String} data.type
- * @param {String} data.secret
- * @return {Promise}
- */
- setPassword: (access, data) => {
- return access.can('users:password', data.id)
- .then(() => {
- return internalUser.get(access, {id: data.id});
- })
- .then((user) => {
- if (user.id !== data.id) {
- // Sanity check that something crazy hasn't happened
- throw new error.InternalValidationError('User could not be updated, IDs do not match: ' + user.id + ' !== ' + data.id);
- }
-
- if (user.id === access.token.getUserId(0)) {
- // they're setting their own password. Make sure their current password is correct
- if (typeof data.current === 'undefined' || !data.current) {
- throw new error.ValidationError('Current password was not supplied');
- }
-
- return internalToken.getTokenFromEmail({
- identity: user.email,
- secret: data.current
- })
- .then(() => {
- return user;
- });
- }
-
- return user;
- })
- .then((user) => {
- // Get auth, patch if it exists
- return authModel
- .query()
- .where('user_id', user.id)
- .andWhere('type', data.type)
- .first()
- .then((existing_auth) => {
- if (existing_auth) {
- // patch
- return authModel
- .query()
- .where('user_id', user.id)
- .andWhere('type', data.type)
- .patch({
- type: data.type, // This is required for the model to encrypt on save
- secret: data.secret
- });
- } else {
- // insert
- return authModel
- .query()
- .insert({
- user_id: user.id,
- type: data.type,
- secret: data.secret,
- meta: {}
- });
- }
- })
- .then(() => {
- // Add to Audit Log
- return internalAuditLog.add(access, {
- action: 'updated',
- object_type: 'user',
- object_id: user.id,
- meta: {
- name: user.name,
- password_changed: true,
- auth_type: data.type
- }
- });
- });
- })
- .then(() => {
- return true;
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @return {Promise}
- */
- setPermissions: (access, data) => {
- return access.can('users:permissions', data.id)
- .then(() => {
- return internalUser.get(access, {id: data.id});
- })
- .then((user) => {
- if (user.id !== data.id) {
- // Sanity check that something crazy hasn't happened
- throw new error.InternalValidationError('User could not be updated, IDs do not match: ' + user.id + ' !== ' + data.id);
- }
-
- return user;
- })
- .then((user) => {
- // Get perms row, patch if it exists
- return userPermissionModel
- .query()
- .where('user_id', user.id)
- .first()
- .then((existing_auth) => {
- if (existing_auth) {
- // patch
- return userPermissionModel
- .query()
- .where('user_id', user.id)
- .patchAndFetchById(existing_auth.id, _.assign({user_id: user.id}, data));
- } else {
- // insert
- return userPermissionModel
- .query()
- .insertAndFetch(_.assign({user_id: user.id}, data));
- }
- })
- .then((permissions) => {
- // Add to Audit Log
- return internalAuditLog.add(access, {
- action: 'updated',
- object_type: 'user',
- object_id: user.id,
- meta: {
- name: user.name,
- permissions: permissions
- }
- });
-
- });
- })
- .then(() => {
- return true;
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Integer} data.id
- */
- loginAs: (access, data) => {
- return access.can('users:loginas', data.id)
- .then(() => {
- return internalUser.get(access, data);
- })
- .then((user) => {
- return internalToken.getTokenFromUser(user);
- });
- }
-};
-
-module.exports = internalUser;
diff --git a/backend/internal/util/interfaces.go b/backend/internal/util/interfaces.go
new file mode 100644
index 00000000..ebb6ffa3
--- /dev/null
+++ b/backend/internal/util/interfaces.go
@@ -0,0 +1,36 @@
+package util
+
+// FindItemInInterface Find key in interface (recursively) and return value as interface
+func FindItemInInterface(key string, obj interface{}) (interface{}, bool) {
+ // if the argument is not a map, ignore it
+ mobj, ok := obj.(map[string]interface{})
+ if !ok {
+ return nil, false
+ }
+
+ for k, v := range mobj {
+ // key match, return value
+ if k == key {
+ return v, true
+ }
+
+ // if the value is a map, search recursively
+ if m, ok := v.(map[string]interface{}); ok {
+ if res, ok := FindItemInInterface(key, m); ok {
+ return res, true
+ }
+ }
+ // if the value is an array, search recursively
+ // from each element
+ if va, ok := v.([]interface{}); ok {
+ for _, a := range va {
+ if res, ok := FindItemInInterface(key, a); ok {
+ return res, true
+ }
+ }
+ }
+ }
+
+ // element not found
+ return nil, false
+}
diff --git a/backend/internal/util/maps.go b/backend/internal/util/maps.go
new file mode 100644
index 00000000..1ff211ec
--- /dev/null
+++ b/backend/internal/util/maps.go
@@ -0,0 +1,9 @@
+package util
+
+// MapContainsKey is fairly self explanatory
+func MapContainsKey(dict map[string]interface{}, key string) bool {
+ if _, ok := dict[key]; ok {
+ return true
+ }
+ return false
+}
diff --git a/backend/internal/util/maps_test.go b/backend/internal/util/maps_test.go
new file mode 100644
index 00000000..fdb5d2ff
--- /dev/null
+++ b/backend/internal/util/maps_test.go
@@ -0,0 +1,45 @@
+package util
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+type rect struct {
+ width int
+ height int
+}
+
+func TestMapContainsKey(t *testing.T) {
+ var r rect
+ r.width = 5
+ r.height = 5
+ m := map[string]interface{}{
+ "rect_width": r.width,
+ "rect_height": r.height,
+ }
+ tests := []struct {
+ name string
+ pass string
+ want bool
+ }{
+ {
+ name: "exists",
+ pass: "rect_width",
+ want: true,
+ },
+ {
+ name: "Does not exist",
+ pass: "rect_perimeter",
+ want: false,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ result := MapContainsKey(m, tt.pass)
+
+ assert.Equal(t, result, tt.want)
+ })
+ }
+}
diff --git a/backend/internal/util/slices.go b/backend/internal/util/slices.go
new file mode 100644
index 00000000..9dfdcb8d
--- /dev/null
+++ b/backend/internal/util/slices.go
@@ -0,0 +1,44 @@
+package util
+
+import (
+ "strconv"
+ "strings"
+)
+
+// SliceContainsItem returns whether the slice given contains the item given
+func SliceContainsItem(slice []string, item string) bool {
+ for _, a := range slice {
+ if a == item {
+ return true
+ }
+ }
+ return false
+}
+
+// SliceContainsInt returns whether the slice given contains the item given
+func SliceContainsInt(slice []int, item int) bool {
+ for _, a := range slice {
+ if a == item {
+ return true
+ }
+ }
+ return false
+}
+
+// ConvertIntSliceToString returns a comma separated string of all items in the slice
+func ConvertIntSliceToString(slice []int) string {
+ strs := []string{}
+ for _, item := range slice {
+ strs = append(strs, strconv.Itoa(item))
+ }
+ return strings.Join(strs, ",")
+}
+
+// ConvertStringSliceToInterface is required in some special cases
+func ConvertStringSliceToInterface(slice []string) []interface{} {
+ res := make([]interface{}, len(slice))
+ for i := range slice {
+ res[i] = slice[i]
+ }
+ return res
+}
diff --git a/backend/internal/util/slices_test.go b/backend/internal/util/slices_test.go
new file mode 100644
index 00000000..f2f18714
--- /dev/null
+++ b/backend/internal/util/slices_test.go
@@ -0,0 +1,92 @@
+package util
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestSliceContainsItem(t *testing.T) {
+ type want struct {
+ result bool
+ }
+ tests := []struct {
+ name string
+ inputString string
+ inputArray []string
+ want want
+ }{
+ {
+ name: "In array",
+ inputString: "test",
+ inputArray: []string{"no", "more", "tests", "test"},
+ want: want{
+ result: true,
+ },
+ },
+ {
+ name: "Not in array",
+ inputString: "test",
+ inputArray: []string{"no", "more", "tests"},
+ want: want{
+ result: false,
+ },
+ },
+ {
+ name: "Case sensitive",
+ inputString: "test",
+ inputArray: []string{"no", "TEST", "more"},
+ want: want{
+ result: false,
+ },
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ got := SliceContainsItem(tt.inputArray, tt.inputString)
+ assert.Equal(t, tt.want.result, got)
+ })
+ }
+}
+
+func TestSliceContainsInt(t *testing.T) {
+ type want struct {
+ result bool
+ }
+ tests := []struct {
+ name string
+ inputInt int
+ inputArray []int
+ want want
+ }{
+ {
+ name: "In array",
+ inputInt: 1,
+ inputArray: []int{1, 2, 3, 4},
+ want: want{
+ result: true,
+ },
+ },
+ {
+ name: "Not in array",
+ inputInt: 1,
+ inputArray: []int{10, 2, 3, 4},
+ want: want{
+ result: false,
+ },
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ got := SliceContainsInt(tt.inputArray, tt.inputInt)
+ assert.Equal(t, tt.want.result, got)
+ })
+ }
+}
+
+func TestConvertIntSliceToString(t *testing.T) {
+ items := []int{1, 2, 3, 4, 5, 6, 7}
+ expectedStr := "1,2,3,4,5,6,7"
+ str := ConvertIntSliceToString(items)
+ assert.Equal(t, expectedStr, str)
+}
diff --git a/backend/internal/validator/hosts.go b/backend/internal/validator/hosts.go
new file mode 100644
index 00000000..f830d2de
--- /dev/null
+++ b/backend/internal/validator/hosts.go
@@ -0,0 +1,33 @@
+package validator
+
+import (
+ "fmt"
+
+ "npm/internal/entity/certificate"
+ "npm/internal/entity/host"
+ "npm/internal/entity/hosttemplate"
+)
+
+// ValidateHost will check if associated objects exist and other checks
+// will return a nil error if things are OK
+func ValidateHost(h host.Model) error {
+ if h.CertificateID > 0 {
+ // Check certificate exists and is valid
+ // This will not determine if the certificate is Ready to use,
+ // as this validation only cares that the row exists.
+ if _, cErr := certificate.GetByID(h.CertificateID); cErr != nil {
+ return fmt.Errorf("Certificate #%d does not exist", h.CertificateID)
+ }
+ }
+
+ // Check the host template exists and has the same type.
+ hostTemplate, tErr := hosttemplate.GetByID(h.HostTemplateID)
+ if tErr != nil {
+ return fmt.Errorf("Host Template #%d does not exist", h.HostTemplateID)
+ }
+ if hostTemplate.Type != h.Type {
+ return fmt.Errorf("Host Template #%d is not valid for this host type", h.HostTemplateID)
+ }
+
+ return nil
+}
diff --git a/backend/internal/worker/certificate.go b/backend/internal/worker/certificate.go
new file mode 100644
index 00000000..a108010f
--- /dev/null
+++ b/backend/internal/worker/certificate.go
@@ -0,0 +1,63 @@
+package worker
+
+import (
+ "time"
+
+ "npm/internal/entity/certificate"
+ "npm/internal/logger"
+ "npm/internal/state"
+)
+
+type certificateWorker struct {
+ state *state.AppState
+}
+
+// StartCertificateWorker starts the CertificateWorker
+func StartCertificateWorker(state *state.AppState) {
+ worker := newCertificateWorker(state)
+ logger.Info("CertificateWorker Started")
+ worker.Run()
+}
+
+func newCertificateWorker(state *state.AppState) *certificateWorker {
+ return &certificateWorker{
+ state: state,
+ }
+}
+
+// Run the CertificateWorker
+func (w *certificateWorker) Run() {
+ // global wait group
+ gwg := w.state.GetWaitGroup()
+ gwg.Add(1)
+
+ ticker := time.NewTicker(15 * time.Second)
+mainLoop:
+ for {
+ select {
+ case _, more := <-w.state.GetTermSig():
+ if !more {
+ logger.Info("Terminating CertificateWorker ... ")
+ break mainLoop
+ }
+ case <-ticker.C:
+ // Can confirm that this will wait for completion before the next loop
+ requestCertificates()
+ }
+ }
+}
+
+func requestCertificates() {
+ // logger.Debug("requestCertificates fired")
+ rows, err := certificate.GetByStatus(certificate.StatusReady)
+ if err != nil {
+ logger.Error("requestCertificatesError", err)
+ return
+ }
+
+ for _, row := range rows {
+ if err := row.Request(); err != nil {
+ logger.Error("CertificateRequestError", err)
+ }
+ }
+}
diff --git a/backend/knexfile.js b/backend/knexfile.js
deleted file mode 100644
index 391ca005..00000000
--- a/backend/knexfile.js
+++ /dev/null
@@ -1,19 +0,0 @@
-module.exports = {
- development: {
- client: 'mysql',
- migrations: {
- tableName: 'migrations',
- stub: 'lib/migrate_template.js',
- directory: 'migrations'
- }
- },
-
- production: {
- client: 'mysql',
- migrations: {
- tableName: 'migrations',
- stub: 'lib/migrate_template.js',
- directory: 'migrations'
- }
- }
-};
diff --git a/backend/lib/access.js b/backend/lib/access.js
deleted file mode 100644
index 9d7329d9..00000000
--- a/backend/lib/access.js
+++ /dev/null
@@ -1,314 +0,0 @@
-/**
- * Some Notes: This is a friggin complicated piece of code.
- *
- * "scope" in this file means "where did this token come from and what is using it", so 99% of the time
- * the "scope" is going to be "user" because it would be a user token. This is not to be confused with
- * the "role" which could be "user" or "admin". The scope in fact, could be "worker" or anything else.
- *
- *
- */
-
-const _ = require('lodash');
-const logger = require('../logger').access;
-const validator = require('ajv');
-const error = require('./error');
-const userModel = require('../models/user');
-const proxyHostModel = require('../models/proxy_host');
-const TokenModel = require('../models/token');
-const roleSchema = require('./access/roles.json');
-const permsSchema = require('./access/permissions.json');
-
-module.exports = function (token_string) {
- let Token = new TokenModel();
- let token_data = null;
- let initialised = false;
- let object_cache = {};
- let allow_internal_access = false;
- let user_roles = [];
- let permissions = {};
-
- /**
- * Loads the Token object from the token string
- *
- * @returns {Promise}
- */
- this.init = () => {
- return new Promise((resolve, reject) => {
- if (initialised) {
- resolve();
- } else if (!token_string) {
- reject(new error.PermissionError('Permission Denied'));
- } else {
- resolve(Token.load(token_string)
- .then((data) => {
- token_data = data;
-
- // At this point we need to load the user from the DB and make sure they:
- // - exist (and not soft deleted)
- // - still have the appropriate scopes for this token
- // This is only required when the User ID is supplied or if the token scope has `user`
-
- if (token_data.attrs.id || (typeof token_data.scope !== 'undefined' && _.indexOf(token_data.scope, 'user') !== -1)) {
- // Has token user id or token user scope
- return userModel
- .query()
- .where('id', token_data.attrs.id)
- .andWhere('is_deleted', 0)
- .andWhere('is_disabled', 0)
- .allowEager('[permissions]')
- .eager('[permissions]')
- .first()
- .then((user) => {
- if (user) {
- // make sure user has all scopes of the token
- // The `user` role is not added against the user row, so we have to just add it here to get past this check.
- user.roles.push('user');
-
- let is_ok = true;
- _.forEach(token_data.scope, (scope_item) => {
- if (_.indexOf(user.roles, scope_item) === -1) {
- is_ok = false;
- }
- });
-
- if (!is_ok) {
- throw new error.AuthError('Invalid token scope for User');
- } else {
- initialised = true;
- user_roles = user.roles;
- permissions = user.permissions;
- }
-
- } else {
- throw new error.AuthError('User cannot be loaded for Token');
- }
- });
- } else {
- initialised = true;
- }
- }));
- }
- });
- };
-
- /**
- * Fetches the object ids from the database, only once per object type, for this token.
- * This only applies to USER token scopes, as all other tokens are not really bound
- * by object scopes
- *
- * @param {String} object_type
- * @returns {Promise}
- */
- this.loadObjects = (object_type) => {
- return new Promise((resolve, reject) => {
- if (Token.hasScope('user')) {
- if (typeof token_data.attrs.id === 'undefined' || !token_data.attrs.id) {
- reject(new error.AuthError('User Token supplied without a User ID'));
- } else {
- let token_user_id = token_data.attrs.id ? token_data.attrs.id : 0;
- let query;
-
- if (typeof object_cache[object_type] === 'undefined') {
- switch (object_type) {
-
- // USERS - should only return yourself
- case 'users':
- resolve(token_user_id ? [token_user_id] : []);
- break;
-
- // Proxy Hosts
- case 'proxy_hosts':
- query = proxyHostModel
- .query()
- .select('id')
- .andWhere('is_deleted', 0);
-
- if (permissions.visibility === 'user') {
- query.andWhere('owner_user_id', token_user_id);
- }
-
- resolve(query
- .then((rows) => {
- let result = [];
- _.forEach(rows, (rule_row) => {
- result.push(rule_row.id);
- });
-
- // enum should not have less than 1 item
- if (!result.length) {
- result.push(0);
- }
-
- return result;
- })
- );
- break;
-
- // DEFAULT: null
- default:
- resolve(null);
- break;
- }
- } else {
- resolve(object_cache[object_type]);
- }
- }
- } else {
- resolve(null);
- }
- })
- .then((objects) => {
- object_cache[object_type] = objects;
- return objects;
- });
- };
-
- /**
- * Creates a schema object on the fly with the IDs and other values required to be checked against the permissionSchema
- *
- * @param {String} permission_label
- * @returns {Object}
- */
- this.getObjectSchema = (permission_label) => {
- let base_object_type = permission_label.split(':').shift();
-
- let schema = {
- $id: 'objects',
- $schema: 'http://json-schema.org/draft-07/schema#',
- description: 'Actor Properties',
- type: 'object',
- additionalProperties: false,
- properties: {
- user_id: {
- anyOf: [
- {
- type: 'number',
- enum: [Token.get('attrs').id]
- }
- ]
- },
- scope: {
- type: 'string',
- pattern: '^' + Token.get('scope') + '$'
- }
- }
- };
-
- return this.loadObjects(base_object_type)
- .then((object_result) => {
- if (typeof object_result === 'object' && object_result !== null) {
- schema.properties[base_object_type] = {
- type: 'number',
- enum: object_result,
- minimum: 1
- };
- } else {
- schema.properties[base_object_type] = {
- type: 'number',
- minimum: 1
- };
- }
-
- return schema;
- });
- };
-
- return {
-
- token: Token,
-
- /**
- *
- * @param {Boolean} [allow_internal]
- * @returns {Promise}
- */
- load: (allow_internal) => {
- return new Promise(function (resolve/*, reject*/) {
- if (token_string) {
- resolve(Token.load(token_string));
- } else {
- allow_internal_access = allow_internal;
- resolve(allow_internal_access || null);
- }
- });
- },
-
- reloadObjects: this.loadObjects,
-
- /**
- *
- * @param {String} permission
- * @param {*} [data]
- * @returns {Promise}
- */
- can: (permission, data) => {
- if (allow_internal_access === true) {
- return Promise.resolve(true);
- //return true;
- } else {
- return this.init()
- .then(() => {
- // Initialised, token decoded ok
- return this.getObjectSchema(permission)
- .then((objectSchema) => {
- let data_schema = {
- [permission]: {
- data: data,
- scope: Token.get('scope'),
- roles: user_roles,
- permission_visibility: permissions.visibility,
- permission_proxy_hosts: permissions.proxy_hosts,
- permission_redirection_hosts: permissions.redirection_hosts,
- permission_dead_hosts: permissions.dead_hosts,
- permission_streams: permissions.streams,
- permission_access_lists: permissions.access_lists,
- permission_certificates: permissions.certificates
- }
- };
-
- let permissionSchema = {
- $schema: 'http://json-schema.org/draft-07/schema#',
- $async: true,
- $id: 'permissions',
- additionalProperties: false,
- properties: {}
- };
-
- permissionSchema.properties[permission] = require('./access/' + permission.replace(/:/gim, '-') + '.json');
-
- // logger.info('objectSchema', JSON.stringify(objectSchema, null, 2));
- // logger.info('permissionSchema', JSON.stringify(permissionSchema, null, 2));
- // logger.info('data_schema', JSON.stringify(data_schema, null, 2));
-
- let ajv = validator({
- verbose: true,
- allErrors: true,
- format: 'full',
- missingRefs: 'fail',
- breakOnError: true,
- coerceTypes: true,
- schemas: [
- roleSchema,
- permsSchema,
- objectSchema,
- permissionSchema
- ]
- });
-
- return ajv.validate('permissions', data_schema)
- .then(() => {
- return data_schema[permission];
- });
- });
- })
- .catch((err) => {
- err.permission = permission;
- err.permission_data = data;
- logger.error(permission, data, err.message);
-
- throw new error.PermissionError('Permission Denied', err);
- });
- }
- }
- };
-};
diff --git a/backend/lib/access/access_lists-create.json b/backend/lib/access/access_lists-create.json
deleted file mode 100644
index 5a16a864..00000000
--- a/backend/lib/access/access_lists-create.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_access_lists", "roles"],
- "properties": {
- "permission_access_lists": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/access_lists-delete.json b/backend/lib/access/access_lists-delete.json
deleted file mode 100644
index 5a16a864..00000000
--- a/backend/lib/access/access_lists-delete.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_access_lists", "roles"],
- "properties": {
- "permission_access_lists": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/access_lists-get.json b/backend/lib/access/access_lists-get.json
deleted file mode 100644
index 8f6dd8cc..00000000
--- a/backend/lib/access/access_lists-get.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_access_lists", "roles"],
- "properties": {
- "permission_access_lists": {
- "$ref": "perms#/definitions/view"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/access_lists-list.json b/backend/lib/access/access_lists-list.json
deleted file mode 100644
index 8f6dd8cc..00000000
--- a/backend/lib/access/access_lists-list.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_access_lists", "roles"],
- "properties": {
- "permission_access_lists": {
- "$ref": "perms#/definitions/view"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/access_lists-update.json b/backend/lib/access/access_lists-update.json
deleted file mode 100644
index 5a16a864..00000000
--- a/backend/lib/access/access_lists-update.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_access_lists", "roles"],
- "properties": {
- "permission_access_lists": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/auditlog-list.json b/backend/lib/access/auditlog-list.json
deleted file mode 100644
index aeadc94b..00000000
--- a/backend/lib/access/auditlog-list.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- }
- ]
-}
diff --git a/backend/lib/access/certificates-create.json b/backend/lib/access/certificates-create.json
deleted file mode 100644
index bcdf6674..00000000
--- a/backend/lib/access/certificates-create.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_certificates", "roles"],
- "properties": {
- "permission_certificates": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/certificates-delete.json b/backend/lib/access/certificates-delete.json
deleted file mode 100644
index bcdf6674..00000000
--- a/backend/lib/access/certificates-delete.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_certificates", "roles"],
- "properties": {
- "permission_certificates": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/certificates-get.json b/backend/lib/access/certificates-get.json
deleted file mode 100644
index 9ccfa4f1..00000000
--- a/backend/lib/access/certificates-get.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_certificates", "roles"],
- "properties": {
- "permission_certificates": {
- "$ref": "perms#/definitions/view"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/certificates-list.json b/backend/lib/access/certificates-list.json
deleted file mode 100644
index 9ccfa4f1..00000000
--- a/backend/lib/access/certificates-list.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_certificates", "roles"],
- "properties": {
- "permission_certificates": {
- "$ref": "perms#/definitions/view"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/certificates-update.json b/backend/lib/access/certificates-update.json
deleted file mode 100644
index bcdf6674..00000000
--- a/backend/lib/access/certificates-update.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_certificates", "roles"],
- "properties": {
- "permission_certificates": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/dead_hosts-create.json b/backend/lib/access/dead_hosts-create.json
deleted file mode 100644
index a276c681..00000000
--- a/backend/lib/access/dead_hosts-create.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_dead_hosts", "roles"],
- "properties": {
- "permission_dead_hosts": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/dead_hosts-delete.json b/backend/lib/access/dead_hosts-delete.json
deleted file mode 100644
index a276c681..00000000
--- a/backend/lib/access/dead_hosts-delete.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_dead_hosts", "roles"],
- "properties": {
- "permission_dead_hosts": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/dead_hosts-get.json b/backend/lib/access/dead_hosts-get.json
deleted file mode 100644
index 87aa12e7..00000000
--- a/backend/lib/access/dead_hosts-get.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_dead_hosts", "roles"],
- "properties": {
- "permission_dead_hosts": {
- "$ref": "perms#/definitions/view"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/dead_hosts-list.json b/backend/lib/access/dead_hosts-list.json
deleted file mode 100644
index 87aa12e7..00000000
--- a/backend/lib/access/dead_hosts-list.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_dead_hosts", "roles"],
- "properties": {
- "permission_dead_hosts": {
- "$ref": "perms#/definitions/view"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/dead_hosts-update.json b/backend/lib/access/dead_hosts-update.json
deleted file mode 100644
index a276c681..00000000
--- a/backend/lib/access/dead_hosts-update.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_dead_hosts", "roles"],
- "properties": {
- "permission_dead_hosts": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/permissions.json b/backend/lib/access/permissions.json
deleted file mode 100644
index 8480f9a1..00000000
--- a/backend/lib/access/permissions.json
+++ /dev/null
@@ -1,14 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-07/schema#",
- "$id": "perms",
- "definitions": {
- "view": {
- "type": "string",
- "pattern": "^(view|manage)$"
- },
- "manage": {
- "type": "string",
- "pattern": "^(manage)$"
- }
- }
-}
diff --git a/backend/lib/access/proxy_hosts-create.json b/backend/lib/access/proxy_hosts-create.json
deleted file mode 100644
index 166527a3..00000000
--- a/backend/lib/access/proxy_hosts-create.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_proxy_hosts", "roles"],
- "properties": {
- "permission_proxy_hosts": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/proxy_hosts-delete.json b/backend/lib/access/proxy_hosts-delete.json
deleted file mode 100644
index 166527a3..00000000
--- a/backend/lib/access/proxy_hosts-delete.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_proxy_hosts", "roles"],
- "properties": {
- "permission_proxy_hosts": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/proxy_hosts-get.json b/backend/lib/access/proxy_hosts-get.json
deleted file mode 100644
index d88e4cff..00000000
--- a/backend/lib/access/proxy_hosts-get.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_proxy_hosts", "roles"],
- "properties": {
- "permission_proxy_hosts": {
- "$ref": "perms#/definitions/view"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/proxy_hosts-list.json b/backend/lib/access/proxy_hosts-list.json
deleted file mode 100644
index d88e4cff..00000000
--- a/backend/lib/access/proxy_hosts-list.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_proxy_hosts", "roles"],
- "properties": {
- "permission_proxy_hosts": {
- "$ref": "perms#/definitions/view"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/proxy_hosts-update.json b/backend/lib/access/proxy_hosts-update.json
deleted file mode 100644
index 166527a3..00000000
--- a/backend/lib/access/proxy_hosts-update.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_proxy_hosts", "roles"],
- "properties": {
- "permission_proxy_hosts": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/redirection_hosts-create.json b/backend/lib/access/redirection_hosts-create.json
deleted file mode 100644
index 342babc8..00000000
--- a/backend/lib/access/redirection_hosts-create.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_redirection_hosts", "roles"],
- "properties": {
- "permission_redirection_hosts": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/redirection_hosts-delete.json b/backend/lib/access/redirection_hosts-delete.json
deleted file mode 100644
index 342babc8..00000000
--- a/backend/lib/access/redirection_hosts-delete.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_redirection_hosts", "roles"],
- "properties": {
- "permission_redirection_hosts": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/redirection_hosts-get.json b/backend/lib/access/redirection_hosts-get.json
deleted file mode 100644
index ba229206..00000000
--- a/backend/lib/access/redirection_hosts-get.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_redirection_hosts", "roles"],
- "properties": {
- "permission_redirection_hosts": {
- "$ref": "perms#/definitions/view"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/redirection_hosts-list.json b/backend/lib/access/redirection_hosts-list.json
deleted file mode 100644
index ba229206..00000000
--- a/backend/lib/access/redirection_hosts-list.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_redirection_hosts", "roles"],
- "properties": {
- "permission_redirection_hosts": {
- "$ref": "perms#/definitions/view"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/redirection_hosts-update.json b/backend/lib/access/redirection_hosts-update.json
deleted file mode 100644
index 342babc8..00000000
--- a/backend/lib/access/redirection_hosts-update.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_redirection_hosts", "roles"],
- "properties": {
- "permission_redirection_hosts": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/reports-hosts.json b/backend/lib/access/reports-hosts.json
deleted file mode 100644
index dbc9e0c0..00000000
--- a/backend/lib/access/reports-hosts.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/user"
- }
- ]
-}
diff --git a/backend/lib/access/roles.json b/backend/lib/access/roles.json
deleted file mode 100644
index 16b33b55..00000000
--- a/backend/lib/access/roles.json
+++ /dev/null
@@ -1,39 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-07/schema#",
- "$id": "roles",
- "definitions": {
- "admin": {
- "type": "object",
- "required": ["scope", "roles"],
- "properties": {
- "scope": {
- "type": "array",
- "contains": {
- "type": "string",
- "pattern": "^user$"
- }
- },
- "roles": {
- "type": "array",
- "contains": {
- "type": "string",
- "pattern": "^admin$"
- }
- }
- }
- },
- "user": {
- "type": "object",
- "required": ["scope"],
- "properties": {
- "scope": {
- "type": "array",
- "contains": {
- "type": "string",
- "pattern": "^user$"
- }
- }
- }
- }
- }
-}
diff --git a/backend/lib/access/settings-get.json b/backend/lib/access/settings-get.json
deleted file mode 100644
index aeadc94b..00000000
--- a/backend/lib/access/settings-get.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- }
- ]
-}
diff --git a/backend/lib/access/settings-list.json b/backend/lib/access/settings-list.json
deleted file mode 100644
index aeadc94b..00000000
--- a/backend/lib/access/settings-list.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- }
- ]
-}
diff --git a/backend/lib/access/settings-update.json b/backend/lib/access/settings-update.json
deleted file mode 100644
index aeadc94b..00000000
--- a/backend/lib/access/settings-update.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- }
- ]
-}
diff --git a/backend/lib/access/streams-create.json b/backend/lib/access/streams-create.json
deleted file mode 100644
index fbeb1cc9..00000000
--- a/backend/lib/access/streams-create.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_streams", "roles"],
- "properties": {
- "permission_streams": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/streams-delete.json b/backend/lib/access/streams-delete.json
deleted file mode 100644
index fbeb1cc9..00000000
--- a/backend/lib/access/streams-delete.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_streams", "roles"],
- "properties": {
- "permission_streams": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/streams-get.json b/backend/lib/access/streams-get.json
deleted file mode 100644
index 7e996287..00000000
--- a/backend/lib/access/streams-get.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_streams", "roles"],
- "properties": {
- "permission_streams": {
- "$ref": "perms#/definitions/view"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/streams-list.json b/backend/lib/access/streams-list.json
deleted file mode 100644
index 7e996287..00000000
--- a/backend/lib/access/streams-list.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_streams", "roles"],
- "properties": {
- "permission_streams": {
- "$ref": "perms#/definitions/view"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/streams-update.json b/backend/lib/access/streams-update.json
deleted file mode 100644
index fbeb1cc9..00000000
--- a/backend/lib/access/streams-update.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_streams", "roles"],
- "properties": {
- "permission_streams": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/users-create.json b/backend/lib/access/users-create.json
deleted file mode 100644
index aeadc94b..00000000
--- a/backend/lib/access/users-create.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- }
- ]
-}
diff --git a/backend/lib/access/users-delete.json b/backend/lib/access/users-delete.json
deleted file mode 100644
index aeadc94b..00000000
--- a/backend/lib/access/users-delete.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- }
- ]
-}
diff --git a/backend/lib/access/users-get.json b/backend/lib/access/users-get.json
deleted file mode 100644
index 2a2f0423..00000000
--- a/backend/lib/access/users-get.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["data", "scope"],
- "properties": {
- "data": {
- "$ref": "objects#/properties/users"
- },
- "scope": {
- "type": "array",
- "contains": {
- "type": "string",
- "pattern": "^user$"
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/users-list.json b/backend/lib/access/users-list.json
deleted file mode 100644
index aeadc94b..00000000
--- a/backend/lib/access/users-list.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- }
- ]
-}
diff --git a/backend/lib/access/users-loginas.json b/backend/lib/access/users-loginas.json
deleted file mode 100644
index aeadc94b..00000000
--- a/backend/lib/access/users-loginas.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- }
- ]
-}
diff --git a/backend/lib/access/users-password.json b/backend/lib/access/users-password.json
deleted file mode 100644
index 2a2f0423..00000000
--- a/backend/lib/access/users-password.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["data", "scope"],
- "properties": {
- "data": {
- "$ref": "objects#/properties/users"
- },
- "scope": {
- "type": "array",
- "contains": {
- "type": "string",
- "pattern": "^user$"
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/users-permissions.json b/backend/lib/access/users-permissions.json
deleted file mode 100644
index aeadc94b..00000000
--- a/backend/lib/access/users-permissions.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- }
- ]
-}
diff --git a/backend/lib/access/users-update.json b/backend/lib/access/users-update.json
deleted file mode 100644
index 2a2f0423..00000000
--- a/backend/lib/access/users-update.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["data", "scope"],
- "properties": {
- "data": {
- "$ref": "objects#/properties/users"
- },
- "scope": {
- "type": "array",
- "contains": {
- "type": "string",
- "pattern": "^user$"
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/error.js b/backend/lib/error.js
deleted file mode 100644
index 9e456f05..00000000
--- a/backend/lib/error.js
+++ /dev/null
@@ -1,90 +0,0 @@
-const _ = require('lodash');
-const util = require('util');
-
-module.exports = {
-
- PermissionError: function (message, previous) {
- Error.captureStackTrace(this, this.constructor);
- this.name = this.constructor.name;
- this.previous = previous;
- this.message = 'Permission Denied';
- this.public = true;
- this.status = 403;
- },
-
- ItemNotFoundError: function (id, previous) {
- Error.captureStackTrace(this, this.constructor);
- this.name = this.constructor.name;
- this.previous = previous;
- this.message = 'Item Not Found - ' + id;
- this.public = true;
- this.status = 404;
- },
-
- AuthError: function (message, previous) {
- Error.captureStackTrace(this, this.constructor);
- this.name = this.constructor.name;
- this.previous = previous;
- this.message = message;
- this.public = true;
- this.status = 401;
- },
-
- InternalError: function (message, previous) {
- Error.captureStackTrace(this, this.constructor);
- this.name = this.constructor.name;
- this.previous = previous;
- this.message = message;
- this.status = 500;
- this.public = false;
- },
-
- InternalValidationError: function (message, previous) {
- Error.captureStackTrace(this, this.constructor);
- this.name = this.constructor.name;
- this.previous = previous;
- this.message = message;
- this.status = 400;
- this.public = false;
- },
-
- ConfigurationError: function (message, previous) {
- Error.captureStackTrace(this, this.constructor);
- this.name = this.constructor.name;
- this.previous = previous;
- this.message = message;
- this.status = 400;
- this.public = true;
- },
-
- CacheError: function (message, previous) {
- Error.captureStackTrace(this, this.constructor);
- this.name = this.constructor.name;
- this.message = message;
- this.previous = previous;
- this.status = 500;
- this.public = false;
- },
-
- ValidationError: function (message, previous) {
- Error.captureStackTrace(this, this.constructor);
- this.name = this.constructor.name;
- this.previous = previous;
- this.message = message;
- this.public = true;
- this.status = 400;
- },
-
- AssertionFailedError: function (message, previous) {
- Error.captureStackTrace(this, this.constructor);
- this.name = this.constructor.name;
- this.previous = previous;
- this.message = message;
- this.public = false;
- this.status = 400;
- }
-};
-
-_.forEach(module.exports, function (error) {
- util.inherits(error, Error);
-});
diff --git a/backend/lib/express/cors.js b/backend/lib/express/cors.js
deleted file mode 100644
index c9befeec..00000000
--- a/backend/lib/express/cors.js
+++ /dev/null
@@ -1,40 +0,0 @@
-const validator = require('../validator');
-
-module.exports = function (req, res, next) {
-
- if (req.headers.origin) {
-
- const originSchema = {
- oneOf: [
- {
- type: 'string',
- pattern: '^[a-z\\-]+:\\/\\/(?:[\\w\\-\\.]+(:[0-9]+)?/?)?$'
- },
- {
- type: 'string',
- pattern: '^[a-z\\-]+:\\/\\/(?:\\[([a-z0-9]{0,4}\\:?)+\\])?/?(:[0-9]+)?$'
- }
- ]
- };
-
- // very relaxed validation....
- validator(originSchema, req.headers.origin)
- .then(function () {
- res.set({
- 'Access-Control-Allow-Origin': req.headers.origin,
- 'Access-Control-Allow-Credentials': true,
- 'Access-Control-Allow-Methods': 'OPTIONS, GET, POST',
- 'Access-Control-Allow-Headers': 'Content-Type, Cache-Control, Pragma, Expires, Authorization, X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit',
- 'Access-Control-Max-Age': 5 * 60,
- 'Access-Control-Expose-Headers': 'X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit'
- });
- next();
- })
- .catch(next);
-
- } else {
- // No origin
- next();
- }
-
-};
diff --git a/backend/lib/express/jwt-decode.js b/backend/lib/express/jwt-decode.js
deleted file mode 100644
index 17edccec..00000000
--- a/backend/lib/express/jwt-decode.js
+++ /dev/null
@@ -1,15 +0,0 @@
-const Access = require('../access');
-
-module.exports = () => {
- return function (req, res, next) {
- res.locals.access = null;
- let access = new Access(res.locals.token || null);
- access.load()
- .then(() => {
- res.locals.access = access;
- next();
- })
- .catch(next);
- };
-};
-
diff --git a/backend/lib/express/jwt.js b/backend/lib/express/jwt.js
deleted file mode 100644
index 44aa3693..00000000
--- a/backend/lib/express/jwt.js
+++ /dev/null
@@ -1,13 +0,0 @@
-module.exports = function () {
- return function (req, res, next) {
- if (req.headers.authorization) {
- let parts = req.headers.authorization.split(' ');
-
- if (parts && parts[0] === 'Bearer' && parts[1]) {
- res.locals.token = parts[1];
- }
- }
-
- next();
- };
-};
diff --git a/backend/lib/express/pagination.js b/backend/lib/express/pagination.js
deleted file mode 100644
index 24ffa58d..00000000
--- a/backend/lib/express/pagination.js
+++ /dev/null
@@ -1,55 +0,0 @@
-let _ = require('lodash');
-
-module.exports = function (default_sort, default_offset, default_limit, max_limit) {
-
- /**
- * This will setup the req query params with filtered data and defaults
- *
- * sort will be an array of fields and their direction
- * offset will be an int, defaulting to zero if no other default supplied
- * limit will be an int, defaulting to 50 if no other default supplied, and limited to the max if that was supplied
- *
- */
-
- return function (req, res, next) {
-
- req.query.offset = typeof req.query.limit === 'undefined' ? default_offset || 0 : parseInt(req.query.offset, 10);
- req.query.limit = typeof req.query.limit === 'undefined' ? default_limit || 50 : parseInt(req.query.limit, 10);
-
- if (max_limit && req.query.limit > max_limit) {
- req.query.limit = max_limit;
- }
-
- // Sorting
- let sort = typeof req.query.sort === 'undefined' ? default_sort : req.query.sort;
- let myRegexp = /.*\.(asc|desc)$/ig;
- let sort_array = [];
-
- sort = sort.split(',');
- _.map(sort, function (val) {
- let matches = myRegexp.exec(val);
-
- if (matches !== null) {
- let dir = matches[1];
- sort_array.push({
- field: val.substr(0, val.length - (dir.length + 1)),
- dir: dir.toLowerCase()
- });
- } else {
- sort_array.push({
- field: val,
- dir: 'asc'
- });
- }
- });
-
- // Sort will now be in this format:
- // [
- // { field: 'field1', dir: 'asc' },
- // { field: 'field2', dir: 'desc' }
- // ]
-
- req.query.sort = sort_array;
- next();
- };
-};
diff --git a/backend/lib/express/user-id-from-me.js b/backend/lib/express/user-id-from-me.js
deleted file mode 100644
index 4a37a406..00000000
--- a/backend/lib/express/user-id-from-me.js
+++ /dev/null
@@ -1,9 +0,0 @@
-module.exports = (req, res, next) => {
- if (req.params.user_id === 'me' && res.locals.access) {
- req.params.user_id = res.locals.access.token.get('attrs').id;
- } else {
- req.params.user_id = parseInt(req.params.user_id, 10);
- }
-
- next();
-};
diff --git a/backend/lib/helpers.js b/backend/lib/helpers.js
deleted file mode 100644
index e38be991..00000000
--- a/backend/lib/helpers.js
+++ /dev/null
@@ -1,32 +0,0 @@
-const moment = require('moment');
-
-module.exports = {
-
- /**
- * Takes an expression such as 30d and returns a moment object of that date in future
- *
- * Key Shorthand
- * ==================
- * years y
- * quarters Q
- * months M
- * weeks w
- * days d
- * hours h
- * minutes m
- * seconds s
- * milliseconds ms
- *
- * @param {String} expression
- * @returns {Object}
- */
- parseDatePeriod: function (expression) {
- let matches = expression.match(/^([0-9]+)(y|Q|M|w|d|h|m|s|ms)$/m);
- if (matches) {
- return moment().add(matches[1], matches[2]);
- }
-
- return null;
- }
-
-};
diff --git a/backend/lib/migrate_template.js b/backend/lib/migrate_template.js
deleted file mode 100644
index f75f77ef..00000000
--- a/backend/lib/migrate_template.js
+++ /dev/null
@@ -1,55 +0,0 @@
-const migrate_name = 'identifier_for_migrate';
-const logger = require('../logger').migrate;
-
-/**
- * Migrate
- *
- * @see http://knexjs.org/#Schema
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.up = function (knex, Promise) {
-
- logger.info('[' + migrate_name + '] Migrating Up...');
-
- // Create Table example:
-
- /*return knex.schema.createTable('notification', (table) => {
- table.increments().primary();
- table.string('name').notNull();
- table.string('type').notNull();
- table.integer('created_on').notNull();
- table.integer('modified_on').notNull();
- })
- .then(function () {
- logger.info('[' + migrate_name + '] Notification Table created');
- });*/
-
- logger.info('[' + migrate_name + '] Migrating Up Complete');
-
- return Promise.resolve(true);
-};
-
-/**
- * Undo Migrate
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.down = function (knex, Promise) {
- logger.info('[' + migrate_name + '] Migrating Down...');
-
- // Drop table example:
-
- /*return knex.schema.dropTable('notification')
- .then(() => {
- logger.info('[' + migrate_name + '] Notification Table dropped');
- });*/
-
- logger.info('[' + migrate_name + '] Migrating Down Complete');
-
- return Promise.resolve(true);
-};
diff --git a/backend/lib/utils.js b/backend/lib/utils.js
deleted file mode 100644
index 4c8b62a8..00000000
--- a/backend/lib/utils.js
+++ /dev/null
@@ -1,20 +0,0 @@
-const exec = require('child_process').exec;
-
-module.exports = {
-
- /**
- * @param {String} cmd
- * @returns {Promise}
- */
- exec: function (cmd) {
- return new Promise((resolve, reject) => {
- exec(cmd, function (err, stdout, /*stderr*/) {
- if (err && typeof err === 'object') {
- reject(err);
- } else {
- resolve(stdout.trim());
- }
- });
- });
- }
-};
diff --git a/backend/lib/validator/api.js b/backend/lib/validator/api.js
deleted file mode 100644
index 3f51b596..00000000
--- a/backend/lib/validator/api.js
+++ /dev/null
@@ -1,45 +0,0 @@
-const error = require('../error');
-const path = require('path');
-const parser = require('json-schema-ref-parser');
-
-const ajv = require('ajv')({
- verbose: true,
- validateSchema: true,
- allErrors: false,
- format: 'full',
- coerceTypes: true
-});
-
-/**
- * @param {Object} schema
- * @param {Object} payload
- * @returns {Promise}
- */
-function apiValidator (schema, payload/*, description*/) {
- return new Promise(function Promise_apiValidator (resolve, reject) {
- if (typeof payload === 'undefined') {
- reject(new error.ValidationError('Payload is undefined'));
- }
-
- let validate = ajv.compile(schema);
- let valid = validate(payload);
-
- if (valid && !validate.errors) {
- resolve(payload);
- } else {
- let message = ajv.errorsText(validate.errors);
- let err = new error.ValidationError(message);
- err.debug = [validate.errors, payload];
- reject(err);
- }
- });
-}
-
-apiValidator.loadSchemas = parser
- .dereference(path.resolve('schema/index.json'))
- .then((schema) => {
- ajv.addSchema(schema);
- return schema;
- });
-
-module.exports = apiValidator;
diff --git a/backend/lib/validator/index.js b/backend/lib/validator/index.js
deleted file mode 100644
index fca6f4bf..00000000
--- a/backend/lib/validator/index.js
+++ /dev/null
@@ -1,49 +0,0 @@
-const _ = require('lodash');
-const error = require('../error');
-const definitions = require('../../schema/definitions.json');
-
-RegExp.prototype.toJSON = RegExp.prototype.toString;
-
-const ajv = require('ajv')({
- verbose: true, //process.env.NODE_ENV === 'development',
- allErrors: true,
- format: 'full', // strict regexes for format checks
- coerceTypes: true,
- schemas: [
- definitions
- ]
-});
-
-/**
- *
- * @param {Object} schema
- * @param {Object} payload
- * @returns {Promise}
- */
-function validator (schema, payload) {
- return new Promise(function (resolve, reject) {
- if (!payload) {
- reject(new error.InternalValidationError('Payload is falsy'));
- } else {
- try {
- let validate = ajv.compile(schema);
-
- let valid = validate(payload);
- if (valid && !validate.errors) {
- resolve(_.cloneDeep(payload));
- } else {
- let message = ajv.errorsText(validate.errors);
- reject(new error.InternalValidationError(message));
- }
-
- } catch (err) {
- reject(err);
- }
-
- }
-
- });
-
-}
-
-module.exports = validator;
diff --git a/backend/logger.js b/backend/logger.js
deleted file mode 100644
index 680af6d5..00000000
--- a/backend/logger.js
+++ /dev/null
@@ -1,13 +0,0 @@
-const {Signale} = require('signale');
-
-module.exports = {
- global: new Signale({scope: 'Global '}),
- migrate: new Signale({scope: 'Migrate '}),
- express: new Signale({scope: 'Express '}),
- access: new Signale({scope: 'Access '}),
- nginx: new Signale({scope: 'Nginx '}),
- ssl: new Signale({scope: 'SSL '}),
- import: new Signale({scope: 'Importer '}),
- setup: new Signale({scope: 'Setup '}),
- ip_ranges: new Signale({scope: 'IP Ranges'})
-};
diff --git a/backend/migrate.js b/backend/migrate.js
deleted file mode 100644
index 263c8702..00000000
--- a/backend/migrate.js
+++ /dev/null
@@ -1,15 +0,0 @@
-const db = require('./db');
-const logger = require('./logger').migrate;
-
-module.exports = {
- latest: function () {
- return db.migrate.currentVersion()
- .then((version) => {
- logger.info('Current database version:', version);
- return db.migrate.latest({
- tableName: 'migrations',
- directory: 'migrations'
- });
- });
- }
-};
diff --git a/backend/migrations/20180618015850_initial.js b/backend/migrations/20180618015850_initial.js
deleted file mode 100644
index a112e826..00000000
--- a/backend/migrations/20180618015850_initial.js
+++ /dev/null
@@ -1,205 +0,0 @@
-const migrate_name = 'initial-schema';
-const logger = require('../logger').migrate;
-
-/**
- * Migrate
- *
- * @see http://knexjs.org/#Schema
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.up = function (knex/*, Promise*/) {
- logger.info('[' + migrate_name + '] Migrating Up...');
-
- return knex.schema.createTable('auth', (table) => {
- table.increments().primary();
- table.dateTime('created_on').notNull();
- table.dateTime('modified_on').notNull();
- table.integer('user_id').notNull().unsigned();
- table.string('type', 30).notNull();
- table.string('secret').notNull();
- table.json('meta').notNull();
- table.integer('is_deleted').notNull().unsigned().defaultTo(0);
- })
- .then(() => {
- logger.info('[' + migrate_name + '] auth Table created');
-
- return knex.schema.createTable('user', (table) => {
- table.increments().primary();
- table.dateTime('created_on').notNull();
- table.dateTime('modified_on').notNull();
- table.integer('is_deleted').notNull().unsigned().defaultTo(0);
- table.integer('is_disabled').notNull().unsigned().defaultTo(0);
- table.string('email').notNull();
- table.string('name').notNull();
- table.string('nickname').notNull();
- table.string('avatar').notNull();
- table.json('roles').notNull();
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] user Table created');
-
- return knex.schema.createTable('user_permission', (table) => {
- table.increments().primary();
- table.dateTime('created_on').notNull();
- table.dateTime('modified_on').notNull();
- table.integer('user_id').notNull().unsigned();
- table.string('visibility').notNull();
- table.string('proxy_hosts').notNull();
- table.string('redirection_hosts').notNull();
- table.string('dead_hosts').notNull();
- table.string('streams').notNull();
- table.string('access_lists').notNull();
- table.string('certificates').notNull();
- table.unique('user_id');
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] user_permission Table created');
-
- return knex.schema.createTable('proxy_host', (table) => {
- table.increments().primary();
- table.dateTime('created_on').notNull();
- table.dateTime('modified_on').notNull();
- table.integer('owner_user_id').notNull().unsigned();
- table.integer('is_deleted').notNull().unsigned().defaultTo(0);
- table.json('domain_names').notNull();
- table.string('forward_ip').notNull();
- table.integer('forward_port').notNull().unsigned();
- table.integer('access_list_id').notNull().unsigned().defaultTo(0);
- table.integer('certificate_id').notNull().unsigned().defaultTo(0);
- table.integer('ssl_forced').notNull().unsigned().defaultTo(0);
- table.integer('caching_enabled').notNull().unsigned().defaultTo(0);
- table.integer('block_exploits').notNull().unsigned().defaultTo(0);
- table.text('advanced_config').notNull().defaultTo('');
- table.json('meta').notNull();
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] proxy_host Table created');
-
- return knex.schema.createTable('redirection_host', (table) => {
- table.increments().primary();
- table.dateTime('created_on').notNull();
- table.dateTime('modified_on').notNull();
- table.integer('owner_user_id').notNull().unsigned();
- table.integer('is_deleted').notNull().unsigned().defaultTo(0);
- table.json('domain_names').notNull();
- table.string('forward_domain_name').notNull();
- table.integer('preserve_path').notNull().unsigned().defaultTo(0);
- table.integer('certificate_id').notNull().unsigned().defaultTo(0);
- table.integer('ssl_forced').notNull().unsigned().defaultTo(0);
- table.integer('block_exploits').notNull().unsigned().defaultTo(0);
- table.text('advanced_config').notNull().defaultTo('');
- table.json('meta').notNull();
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] redirection_host Table created');
-
- return knex.schema.createTable('dead_host', (table) => {
- table.increments().primary();
- table.dateTime('created_on').notNull();
- table.dateTime('modified_on').notNull();
- table.integer('owner_user_id').notNull().unsigned();
- table.integer('is_deleted').notNull().unsigned().defaultTo(0);
- table.json('domain_names').notNull();
- table.integer('certificate_id').notNull().unsigned().defaultTo(0);
- table.integer('ssl_forced').notNull().unsigned().defaultTo(0);
- table.text('advanced_config').notNull().defaultTo('');
- table.json('meta').notNull();
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] dead_host Table created');
-
- return knex.schema.createTable('stream', (table) => {
- table.increments().primary();
- table.dateTime('created_on').notNull();
- table.dateTime('modified_on').notNull();
- table.integer('owner_user_id').notNull().unsigned();
- table.integer('is_deleted').notNull().unsigned().defaultTo(0);
- table.integer('incoming_port').notNull().unsigned();
- table.string('forward_ip').notNull();
- table.integer('forwarding_port').notNull().unsigned();
- table.integer('tcp_forwarding').notNull().unsigned().defaultTo(0);
- table.integer('udp_forwarding').notNull().unsigned().defaultTo(0);
- table.json('meta').notNull();
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] stream Table created');
-
- return knex.schema.createTable('access_list', (table) => {
- table.increments().primary();
- table.dateTime('created_on').notNull();
- table.dateTime('modified_on').notNull();
- table.integer('owner_user_id').notNull().unsigned();
- table.integer('is_deleted').notNull().unsigned().defaultTo(0);
- table.string('name').notNull();
- table.json('meta').notNull();
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] access_list Table created');
-
- return knex.schema.createTable('certificate', (table) => {
- table.increments().primary();
- table.dateTime('created_on').notNull();
- table.dateTime('modified_on').notNull();
- table.integer('owner_user_id').notNull().unsigned();
- table.integer('is_deleted').notNull().unsigned().defaultTo(0);
- table.string('provider').notNull();
- table.string('nice_name').notNull().defaultTo('');
- table.json('domain_names').notNull();
- table.dateTime('expires_on').notNull();
- table.json('meta').notNull();
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] certificate Table created');
-
- return knex.schema.createTable('access_list_auth', (table) => {
- table.increments().primary();
- table.dateTime('created_on').notNull();
- table.dateTime('modified_on').notNull();
- table.integer('access_list_id').notNull().unsigned();
- table.string('username').notNull();
- table.string('password').notNull();
- table.json('meta').notNull();
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] access_list_auth Table created');
-
- return knex.schema.createTable('audit_log', (table) => {
- table.increments().primary();
- table.dateTime('created_on').notNull();
- table.dateTime('modified_on').notNull();
- table.integer('user_id').notNull().unsigned();
- table.string('object_type').notNull().defaultTo('');
- table.integer('object_id').notNull().unsigned().defaultTo(0);
- table.string('action').notNull();
- table.json('meta').notNull();
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] audit_log Table created');
- });
-
-};
-
-/**
- * Undo Migrate
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.down = function (knex, Promise) {
- logger.warn('[' + migrate_name + '] You can\'t migrate down the initial data.');
- return Promise.resolve(true);
-};
diff --git a/backend/migrations/20180929054513_websockets.js b/backend/migrations/20180929054513_websockets.js
deleted file mode 100644
index 06054850..00000000
--- a/backend/migrations/20180929054513_websockets.js
+++ /dev/null
@@ -1,35 +0,0 @@
-const migrate_name = 'websockets';
-const logger = require('../logger').migrate;
-
-/**
- * Migrate
- *
- * @see http://knexjs.org/#Schema
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.up = function (knex/*, Promise*/) {
- logger.info('[' + migrate_name + '] Migrating Up...');
-
- return knex.schema.table('proxy_host', function (proxy_host) {
- proxy_host.integer('allow_websocket_upgrade').notNull().unsigned().defaultTo(0);
- })
- .then(() => {
- logger.info('[' + migrate_name + '] proxy_host Table altered');
- });
-
-};
-
-/**
- * Undo Migrate
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.down = function (knex, Promise) {
- logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
- return Promise.resolve(true);
-};
\ No newline at end of file
diff --git a/backend/migrations/20181019052346_forward_host.js b/backend/migrations/20181019052346_forward_host.js
deleted file mode 100644
index 05c27739..00000000
--- a/backend/migrations/20181019052346_forward_host.js
+++ /dev/null
@@ -1,34 +0,0 @@
-const migrate_name = 'forward_host';
-const logger = require('../logger').migrate;
-
-/**
- * Migrate
- *
- * @see http://knexjs.org/#Schema
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.up = function (knex/*, Promise*/) {
- logger.info('[' + migrate_name + '] Migrating Up...');
-
- return knex.schema.table('proxy_host', function (proxy_host) {
- proxy_host.renameColumn('forward_ip', 'forward_host');
- })
- .then(() => {
- logger.info('[' + migrate_name + '] proxy_host Table altered');
- });
-};
-
-/**
- * Undo Migrate
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.down = function (knex, Promise) {
- logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
- return Promise.resolve(true);
-};
\ No newline at end of file
diff --git a/backend/migrations/20181113041458_http2_support.js b/backend/migrations/20181113041458_http2_support.js
deleted file mode 100644
index 9f6b4336..00000000
--- a/backend/migrations/20181113041458_http2_support.js
+++ /dev/null
@@ -1,49 +0,0 @@
-const migrate_name = 'http2_support';
-const logger = require('../logger').migrate;
-
-/**
- * Migrate
- *
- * @see http://knexjs.org/#Schema
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.up = function (knex/*, Promise*/) {
- logger.info('[' + migrate_name + '] Migrating Up...');
-
- return knex.schema.table('proxy_host', function (proxy_host) {
- proxy_host.integer('http2_support').notNull().unsigned().defaultTo(0);
- })
- .then(() => {
- logger.info('[' + migrate_name + '] proxy_host Table altered');
-
- return knex.schema.table('redirection_host', function (redirection_host) {
- redirection_host.integer('http2_support').notNull().unsigned().defaultTo(0);
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] redirection_host Table altered');
-
- return knex.schema.table('dead_host', function (dead_host) {
- dead_host.integer('http2_support').notNull().unsigned().defaultTo(0);
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] dead_host Table altered');
- });
-};
-
-/**
- * Undo Migrate
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.down = function (knex, Promise) {
- logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
- return Promise.resolve(true);
-};
-
diff --git a/backend/migrations/20181213013211_forward_scheme.js b/backend/migrations/20181213013211_forward_scheme.js
deleted file mode 100644
index 22ae619e..00000000
--- a/backend/migrations/20181213013211_forward_scheme.js
+++ /dev/null
@@ -1,34 +0,0 @@
-const migrate_name = 'forward_scheme';
-const logger = require('../logger').migrate;
-
-/**
- * Migrate
- *
- * @see http://knexjs.org/#Schema
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.up = function (knex/*, Promise*/) {
- logger.info('[' + migrate_name + '] Migrating Up...');
-
- return knex.schema.table('proxy_host', function (proxy_host) {
- proxy_host.string('forward_scheme').notNull().defaultTo('http');
- })
- .then(() => {
- logger.info('[' + migrate_name + '] proxy_host Table altered');
- });
-};
-
-/**
- * Undo Migrate
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.down = function (knex, Promise) {
- logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
- return Promise.resolve(true);
-};
diff --git a/backend/migrations/20190104035154_disabled.js b/backend/migrations/20190104035154_disabled.js
deleted file mode 100644
index 2780c4df..00000000
--- a/backend/migrations/20190104035154_disabled.js
+++ /dev/null
@@ -1,55 +0,0 @@
-const migrate_name = 'disabled';
-const logger = require('../logger').migrate;
-
-/**
- * Migrate
- *
- * @see http://knexjs.org/#Schema
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.up = function (knex/*, Promise*/) {
- logger.info('[' + migrate_name + '] Migrating Up...');
-
- return knex.schema.table('proxy_host', function (proxy_host) {
- proxy_host.integer('enabled').notNull().unsigned().defaultTo(1);
- })
- .then(() => {
- logger.info('[' + migrate_name + '] proxy_host Table altered');
-
- return knex.schema.table('redirection_host', function (redirection_host) {
- redirection_host.integer('enabled').notNull().unsigned().defaultTo(1);
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] redirection_host Table altered');
-
- return knex.schema.table('dead_host', function (dead_host) {
- dead_host.integer('enabled').notNull().unsigned().defaultTo(1);
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] dead_host Table altered');
-
- return knex.schema.table('stream', function (stream) {
- stream.integer('enabled').notNull().unsigned().defaultTo(1);
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] stream Table altered');
- });
-};
-
-/**
- * Undo Migrate
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.down = function (knex, Promise) {
- logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
- return Promise.resolve(true);
-};
diff --git a/backend/migrations/20190215115310_customlocations.js b/backend/migrations/20190215115310_customlocations.js
deleted file mode 100644
index 4bcfd51a..00000000
--- a/backend/migrations/20190215115310_customlocations.js
+++ /dev/null
@@ -1,35 +0,0 @@
-const migrate_name = 'custom_locations';
-const logger = require('../logger').migrate;
-
-/**
- * Migrate
- * Extends proxy_host table with locations field
- *
- * @see http://knexjs.org/#Schema
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.up = function (knex/*, Promise*/) {
- logger.info('[' + migrate_name + '] Migrating Up...');
-
- return knex.schema.table('proxy_host', function (proxy_host) {
- proxy_host.json('locations');
- })
- .then(() => {
- logger.info('[' + migrate_name + '] proxy_host Table altered');
- });
-};
-
-/**
- * Undo Migrate
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.down = function (knex, Promise) {
- logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
- return Promise.resolve(true);
-};
diff --git a/backend/migrations/20190218060101_hsts.js b/backend/migrations/20190218060101_hsts.js
deleted file mode 100644
index 648b162a..00000000
--- a/backend/migrations/20190218060101_hsts.js
+++ /dev/null
@@ -1,51 +0,0 @@
-const migrate_name = 'hsts';
-const logger = require('../logger').migrate;
-
-/**
- * Migrate
- *
- * @see http://knexjs.org/#Schema
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.up = function (knex/*, Promise*/) {
- logger.info('[' + migrate_name + '] Migrating Up...');
-
- return knex.schema.table('proxy_host', function (proxy_host) {
- proxy_host.integer('hsts_enabled').notNull().unsigned().defaultTo(0);
- proxy_host.integer('hsts_subdomains').notNull().unsigned().defaultTo(0);
- })
- .then(() => {
- logger.info('[' + migrate_name + '] proxy_host Table altered');
-
- return knex.schema.table('redirection_host', function (redirection_host) {
- redirection_host.integer('hsts_enabled').notNull().unsigned().defaultTo(0);
- redirection_host.integer('hsts_subdomains').notNull().unsigned().defaultTo(0);
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] redirection_host Table altered');
-
- return knex.schema.table('dead_host', function (dead_host) {
- dead_host.integer('hsts_enabled').notNull().unsigned().defaultTo(0);
- dead_host.integer('hsts_subdomains').notNull().unsigned().defaultTo(0);
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] dead_host Table altered');
- });
-};
-
-/**
- * Undo Migrate
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.down = function (knex, Promise) {
- logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
- return Promise.resolve(true);
-};
diff --git a/backend/migrations/20190227065017_settings.js b/backend/migrations/20190227065017_settings.js
deleted file mode 100644
index 7dc9c192..00000000
--- a/backend/migrations/20190227065017_settings.js
+++ /dev/null
@@ -1,38 +0,0 @@
-const migrate_name = 'settings';
-const logger = require('../logger').migrate;
-
-/**
- * Migrate
- *
- * @see http://knexjs.org/#Schema
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.up = function (knex/*, Promise*/) {
- logger.info('[' + migrate_name + '] Migrating Up...');
-
- return knex.schema.createTable('setting', (table) => {
- table.string('id').notNull().primary();
- table.string('name', 100).notNull();
- table.string('description', 255).notNull();
- table.string('value', 255).notNull();
- table.json('meta').notNull();
- })
- .then(() => {
- logger.info('[' + migrate_name + '] setting Table created');
- });
-};
-
-/**
- * Undo Migrate
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.down = function (knex, Promise) {
- logger.warn('[' + migrate_name + '] You can\'t migrate down the initial data.');
- return Promise.resolve(true);
-};
diff --git a/backend/migrations/20200410143839_access_list_client.js b/backend/migrations/20200410143839_access_list_client.js
deleted file mode 100644
index 3511e35b..00000000
--- a/backend/migrations/20200410143839_access_list_client.js
+++ /dev/null
@@ -1,53 +0,0 @@
-const migrate_name = 'access_list_client';
-const logger = require('../logger').migrate;
-
-/**
- * Migrate
- *
- * @see http://knexjs.org/#Schema
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.up = function (knex/*, Promise*/) {
-
- logger.info('[' + migrate_name + '] Migrating Up...');
-
- return knex.schema.createTable('access_list_client', (table) => {
- table.increments().primary();
- table.dateTime('created_on').notNull();
- table.dateTime('modified_on').notNull();
- table.integer('access_list_id').notNull().unsigned();
- table.string('address').notNull();
- table.string('directive').notNull();
- table.json('meta').notNull();
-
- })
- .then(function () {
- logger.info('[' + migrate_name + '] access_list_client Table created');
-
- return knex.schema.table('access_list', function (access_list) {
- access_list.integer('satify_any').notNull().defaultTo(0);
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] access_list Table altered');
- });
-};
-
-/**
- * Undo Migrate
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.down = function (knex/*, Promise*/) {
- logger.info('[' + migrate_name + '] Migrating Down...');
-
- return knex.schema.dropTable('access_list_client')
- .then(() => {
- logger.info('[' + migrate_name + '] access_list_client Table dropped');
- });
-};
diff --git a/backend/migrations/20200410143840_access_list_client_fix.js b/backend/migrations/20200410143840_access_list_client_fix.js
deleted file mode 100644
index ee0f0906..00000000
--- a/backend/migrations/20200410143840_access_list_client_fix.js
+++ /dev/null
@@ -1,34 +0,0 @@
-const migrate_name = 'access_list_client_fix';
-const logger = require('../logger').migrate;
-
-/**
- * Migrate
- *
- * @see http://knexjs.org/#Schema
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.up = function (knex/*, Promise*/) {
- logger.info('[' + migrate_name + '] Migrating Up...');
-
- return knex.schema.table('access_list', function (access_list) {
- access_list.renameColumn('satify_any', 'satisfy_any');
- })
- .then(() => {
- logger.info('[' + migrate_name + '] access_list Table altered');
- });
-};
-
-/**
- * Undo Migrate
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.down = function (knex, Promise) {
- logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
- return Promise.resolve(true);
-};
diff --git a/backend/migrations/20201014143841_pass_auth.js b/backend/migrations/20201014143841_pass_auth.js
deleted file mode 100644
index a7767eb1..00000000
--- a/backend/migrations/20201014143841_pass_auth.js
+++ /dev/null
@@ -1,41 +0,0 @@
-const migrate_name = 'pass_auth';
-const logger = require('../logger').migrate;
-
-/**
- * Migrate
- *
- * @see http://knexjs.org/#Schema
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.up = function (knex/*, Promise*/) {
-
- logger.info('[' + migrate_name + '] Migrating Up...');
-
- return knex.schema.table('access_list', function (access_list) {
- access_list.integer('pass_auth').notNull().defaultTo(1);
- })
- .then(() => {
- logger.info('[' + migrate_name + '] access_list Table altered');
- });
-};
-
-/**
- * Undo Migrate
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.down = function (knex/*, Promise*/) {
- logger.info('[' + migrate_name + '] Migrating Down...');
-
- return knex.schema.table('access_list', function (access_list) {
- access_list.dropColumn('pass_auth');
- })
- .then(() => {
- logger.info('[' + migrate_name + '] access_list pass_auth Column dropped');
- });
-};
diff --git a/backend/migrations/20210210154702_redirection_scheme.js b/backend/migrations/20210210154702_redirection_scheme.js
deleted file mode 100644
index 0dad4876..00000000
--- a/backend/migrations/20210210154702_redirection_scheme.js
+++ /dev/null
@@ -1,41 +0,0 @@
-const migrate_name = 'redirection_scheme';
-const logger = require('../logger').migrate;
-
-/**
- * Migrate
- *
- * @see http://knexjs.org/#Schema
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.up = function (knex/*, Promise*/) {
-
- logger.info('[' + migrate_name + '] Migrating Up...');
-
- return knex.schema.table('redirection_host', (table) => {
- table.string('forward_scheme').notNull().defaultTo('$scheme');
- })
- .then(function () {
- logger.info('[' + migrate_name + '] redirection_host Table altered');
- });
-};
-
-/**
- * Undo Migrate
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.down = function (knex/*, Promise*/) {
- logger.info('[' + migrate_name + '] Migrating Down...');
-
- return knex.schema.table('redirection_host', (table) => {
- table.dropColumn('forward_scheme');
- })
- .then(function () {
- logger.info('[' + migrate_name + '] redirection_host Table altered');
- });
-};
diff --git a/backend/migrations/20210210154703_redirection_status_code.js b/backend/migrations/20210210154703_redirection_status_code.js
deleted file mode 100644
index b9bea0b9..00000000
--- a/backend/migrations/20210210154703_redirection_status_code.js
+++ /dev/null
@@ -1,41 +0,0 @@
-const migrate_name = 'redirection_status_code';
-const logger = require('../logger').migrate;
-
-/**
- * Migrate
- *
- * @see http://knexjs.org/#Schema
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.up = function (knex/*, Promise*/) {
-
- logger.info('[' + migrate_name + '] Migrating Up...');
-
- return knex.schema.table('redirection_host', (table) => {
- table.integer('forward_http_code').notNull().unsigned().defaultTo(302);
- })
- .then(function () {
- logger.info('[' + migrate_name + '] redirection_host Table altered');
- });
-};
-
-/**
- * Undo Migrate
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.down = function (knex/*, Promise*/) {
- logger.info('[' + migrate_name + '] Migrating Down...');
-
- return knex.schema.table('redirection_host', (table) => {
- table.dropColumn('forward_http_code');
- })
- .then(function () {
- logger.info('[' + migrate_name + '] redirection_host Table altered');
- });
-};
diff --git a/backend/migrations/20210423103500_stream_domain.js b/backend/migrations/20210423103500_stream_domain.js
deleted file mode 100644
index a894ca5e..00000000
--- a/backend/migrations/20210423103500_stream_domain.js
+++ /dev/null
@@ -1,40 +0,0 @@
-const migrate_name = 'stream_domain';
-const logger = require('../logger').migrate;
-
-/**
- * Migrate
- *
- * @see http://knexjs.org/#Schema
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.up = function (knex/*, Promise*/) {
- logger.info('[' + migrate_name + '] Migrating Up...');
-
- return knex.schema.table('stream', (table) => {
- table.renameColumn('forward_ip', 'forwarding_host');
- })
- .then(function () {
- logger.info('[' + migrate_name + '] stream Table altered');
- });
-};
-
-/**
- * Undo Migrate
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.down = function (knex/*, Promise*/) {
- logger.info('[' + migrate_name + '] Migrating Down...');
-
- return knex.schema.table('stream', (table) => {
- table.renameColumn('forwarding_host', 'forward_ip');
- })
- .then(function () {
- logger.info('[' + migrate_name + '] stream Table altered');
- });
-};
diff --git a/backend/migrations/20211108145214_regenerate_default_host.js b/backend/migrations/20211108145214_regenerate_default_host.js
deleted file mode 100644
index 4c50941f..00000000
--- a/backend/migrations/20211108145214_regenerate_default_host.js
+++ /dev/null
@@ -1,50 +0,0 @@
-const migrate_name = 'stream_domain';
-const logger = require('../logger').migrate;
-const internalNginx = require('../internal/nginx');
-
-async function regenerateDefaultHost(knex) {
- const row = await knex('setting').select('*').where('id', 'default-site').first();
-
- if (!row) {
- return Promise.resolve();
- }
-
- return internalNginx.deleteConfig('default')
- .then(() => {
- return internalNginx.generateConfig('default', row);
- })
- .then(() => {
- return internalNginx.test();
- })
- .then(() => {
- return internalNginx.reload();
- });
-}
-
-/**
- * Migrate
- *
- * @see http://knexjs.org/#Schema
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.up = function (knex) {
- logger.info('[' + migrate_name + '] Migrating Up...');
-
- return regenerateDefaultHost(knex);
-};
-
-/**
- * Undo Migrate
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.down = function (knex) {
- logger.info('[' + migrate_name + '] Migrating Down...');
-
- return regenerateDefaultHost(knex);
-};
\ No newline at end of file
diff --git a/backend/models/access_list.js b/backend/models/access_list.js
deleted file mode 100644
index 01974e86..00000000
--- a/backend/models/access_list.js
+++ /dev/null
@@ -1,102 +0,0 @@
-// Objection Docs:
-// http://vincit.github.io/objection.js/
-
-const db = require('../db');
-const Model = require('objection').Model;
-const User = require('./user');
-const AccessListAuth = require('./access_list_auth');
-const AccessListClient = require('./access_list_client');
-const now = require('./now_helper');
-
-Model.knex(db);
-
-class AccessList extends Model {
- $beforeInsert () {
- this.created_on = now();
- this.modified_on = now();
-
- // Default for meta
- if (typeof this.meta === 'undefined') {
- this.meta = {};
- }
- }
-
- $beforeUpdate () {
- this.modified_on = now();
- }
-
- static get name () {
- return 'AccessList';
- }
-
- static get tableName () {
- return 'access_list';
- }
-
- static get jsonAttributes () {
- return ['meta'];
- }
-
- static get relationMappings () {
- const ProxyHost = require('./proxy_host');
-
- return {
- owner: {
- relation: Model.HasOneRelation,
- modelClass: User,
- join: {
- from: 'access_list.owner_user_id',
- to: 'user.id'
- },
- modify: function (qb) {
- qb.where('user.is_deleted', 0);
- qb.omit(['id', 'created_on', 'modified_on', 'is_deleted', 'email', 'roles']);
- }
- },
- items: {
- relation: Model.HasManyRelation,
- modelClass: AccessListAuth,
- join: {
- from: 'access_list.id',
- to: 'access_list_auth.access_list_id'
- },
- modify: function (qb) {
- qb.omit(['id', 'created_on', 'modified_on', 'access_list_id', 'meta']);
- }
- },
- clients: {
- relation: Model.HasManyRelation,
- modelClass: AccessListClient,
- join: {
- from: 'access_list.id',
- to: 'access_list_client.access_list_id'
- },
- modify: function (qb) {
- qb.omit(['id', 'created_on', 'modified_on', 'access_list_id', 'meta']);
- }
- },
- proxy_hosts: {
- relation: Model.HasManyRelation,
- modelClass: ProxyHost,
- join: {
- from: 'access_list.id',
- to: 'proxy_host.access_list_id'
- },
- modify: function (qb) {
- qb.where('proxy_host.is_deleted', 0);
- qb.omit(['is_deleted', 'meta']);
- }
- }
- };
- }
-
- get satisfy() {
- return this.satisfy_any ? 'satisfy any' : 'satisfy all';
- }
-
- get passauth() {
- return this.pass_auth ? '' : 'proxy_set_header Authorization "";';
- }
-}
-
-module.exports = AccessList;
diff --git a/backend/models/access_list_auth.js b/backend/models/access_list_auth.js
deleted file mode 100644
index 932371f3..00000000
--- a/backend/models/access_list_auth.js
+++ /dev/null
@@ -1,55 +0,0 @@
-// Objection Docs:
-// http://vincit.github.io/objection.js/
-
-const db = require('../db');
-const Model = require('objection').Model;
-const now = require('./now_helper');
-
-Model.knex(db);
-
-class AccessListAuth extends Model {
- $beforeInsert () {
- this.created_on = now();
- this.modified_on = now();
-
- // Default for meta
- if (typeof this.meta === 'undefined') {
- this.meta = {};
- }
- }
-
- $beforeUpdate () {
- this.modified_on = now();
- }
-
- static get name () {
- return 'AccessListAuth';
- }
-
- static get tableName () {
- return 'access_list_auth';
- }
-
- static get jsonAttributes () {
- return ['meta'];
- }
-
- static get relationMappings () {
- return {
- access_list: {
- relation: Model.HasOneRelation,
- modelClass: require('./access_list'),
- join: {
- from: 'access_list_auth.access_list_id',
- to: 'access_list.id'
- },
- modify: function (qb) {
- qb.where('access_list.is_deleted', 0);
- qb.omit(['created_on', 'modified_on', 'is_deleted', 'access_list_id']);
- }
- }
- };
- }
-}
-
-module.exports = AccessListAuth;
diff --git a/backend/models/access_list_client.js b/backend/models/access_list_client.js
deleted file mode 100644
index e257213a..00000000
--- a/backend/models/access_list_client.js
+++ /dev/null
@@ -1,59 +0,0 @@
-// Objection Docs:
-// http://vincit.github.io/objection.js/
-
-const db = require('../db');
-const Model = require('objection').Model;
-const now = require('./now_helper');
-
-Model.knex(db);
-
-class AccessListClient extends Model {
- $beforeInsert () {
- this.created_on = now();
- this.modified_on = now();
-
- // Default for meta
- if (typeof this.meta === 'undefined') {
- this.meta = {};
- }
- }
-
- $beforeUpdate () {
- this.modified_on = now();
- }
-
- static get name () {
- return 'AccessListClient';
- }
-
- static get tableName () {
- return 'access_list_client';
- }
-
- static get jsonAttributes () {
- return ['meta'];
- }
-
- static get relationMappings () {
- return {
- access_list: {
- relation: Model.HasOneRelation,
- modelClass: require('./access_list'),
- join: {
- from: 'access_list_client.access_list_id',
- to: 'access_list.id'
- },
- modify: function (qb) {
- qb.where('access_list.is_deleted', 0);
- qb.omit(['created_on', 'modified_on', 'is_deleted', 'access_list_id']);
- }
- }
- };
- }
-
- get rule() {
- return `${this.directive} ${this.address}`;
- }
-}
-
-module.exports = AccessListClient;
diff --git a/backend/models/audit-log.js b/backend/models/audit-log.js
deleted file mode 100644
index a3a318c8..00000000
--- a/backend/models/audit-log.js
+++ /dev/null
@@ -1,55 +0,0 @@
-// Objection Docs:
-// http://vincit.github.io/objection.js/
-
-const db = require('../db');
-const Model = require('objection').Model;
-const User = require('./user');
-const now = require('./now_helper');
-
-Model.knex(db);
-
-class AuditLog extends Model {
- $beforeInsert () {
- this.created_on = now();
- this.modified_on = now();
-
- // Default for meta
- if (typeof this.meta === 'undefined') {
- this.meta = {};
- }
- }
-
- $beforeUpdate () {
- this.modified_on = now();
- }
-
- static get name () {
- return 'AuditLog';
- }
-
- static get tableName () {
- return 'audit_log';
- }
-
- static get jsonAttributes () {
- return ['meta'];
- }
-
- static get relationMappings () {
- return {
- user: {
- relation: Model.HasOneRelation,
- modelClass: User,
- join: {
- from: 'audit_log.user_id',
- to: 'user.id'
- },
- modify: function (qb) {
- qb.omit(['id', 'created_on', 'modified_on', 'roles']);
- }
- }
- };
- }
-}
-
-module.exports = AuditLog;
diff --git a/backend/models/auth.js b/backend/models/auth.js
deleted file mode 100644
index 5ba5f380..00000000
--- a/backend/models/auth.js
+++ /dev/null
@@ -1,86 +0,0 @@
-// Objection Docs:
-// http://vincit.github.io/objection.js/
-
-const bcrypt = require('bcrypt');
-const db = require('../db');
-const Model = require('objection').Model;
-const User = require('./user');
-const now = require('./now_helper');
-
-Model.knex(db);
-
-function encryptPassword () {
- /* jshint -W040 */
- let _this = this;
-
- if (_this.type === 'password' && _this.secret) {
- return bcrypt.hash(_this.secret, 13)
- .then(function (hash) {
- _this.secret = hash;
- });
- }
-
- return null;
-}
-
-class Auth extends Model {
- $beforeInsert (queryContext) {
- this.created_on = now();
- this.modified_on = now();
-
- // Default for meta
- if (typeof this.meta === 'undefined') {
- this.meta = {};
- }
-
- return encryptPassword.apply(this, queryContext);
- }
-
- $beforeUpdate (queryContext) {
- this.modified_on = now();
- return encryptPassword.apply(this, queryContext);
- }
-
- /**
- * Verify a plain password against the encrypted password
- *
- * @param {String} password
- * @returns {Promise}
- */
- verifyPassword (password) {
- return bcrypt.compare(password, this.secret);
- }
-
- static get name () {
- return 'Auth';
- }
-
- static get tableName () {
- return 'auth';
- }
-
- static get jsonAttributes () {
- return ['meta'];
- }
-
- static get relationMappings () {
- return {
- user: {
- relation: Model.HasOneRelation,
- modelClass: User,
- join: {
- from: 'auth.user_id',
- to: 'user.id'
- },
- filter: {
- is_deleted: 0
- },
- modify: function (qb) {
- qb.omit(['is_deleted']);
- }
- }
- };
- }
-}
-
-module.exports = Auth;
diff --git a/backend/models/certificate.js b/backend/models/certificate.js
deleted file mode 100644
index 6084a995..00000000
--- a/backend/models/certificate.js
+++ /dev/null
@@ -1,73 +0,0 @@
-// Objection Docs:
-// http://vincit.github.io/objection.js/
-
-const db = require('../db');
-const Model = require('objection').Model;
-const User = require('./user');
-const now = require('./now_helper');
-
-Model.knex(db);
-
-class Certificate extends Model {
- $beforeInsert () {
- this.created_on = now();
- this.modified_on = now();
-
- // Default for expires_on
- if (typeof this.expires_on === 'undefined') {
- this.expires_on = now();
- }
-
- // Default for domain_names
- if (typeof this.domain_names === 'undefined') {
- this.domain_names = [];
- }
-
- // Default for meta
- if (typeof this.meta === 'undefined') {
- this.meta = {};
- }
-
- this.domain_names.sort();
- }
-
- $beforeUpdate () {
- this.modified_on = now();
-
- // Sort domain_names
- if (typeof this.domain_names !== 'undefined') {
- this.domain_names.sort();
- }
- }
-
- static get name () {
- return 'Certificate';
- }
-
- static get tableName () {
- return 'certificate';
- }
-
- static get jsonAttributes () {
- return ['domain_names', 'meta'];
- }
-
- static get relationMappings () {
- return {
- owner: {
- relation: Model.HasOneRelation,
- modelClass: User,
- join: {
- from: 'certificate.owner_user_id',
- to: 'user.id'
- },
- modify: function (qb) {
- qb.where('user.is_deleted', 0);
- qb.omit(['id', 'created_on', 'modified_on', 'is_deleted', 'email', 'roles']);
- }
- }
- };
- }
-}
-
-module.exports = Certificate;
diff --git a/backend/models/dead_host.js b/backend/models/dead_host.js
deleted file mode 100644
index 6de42a33..00000000
--- a/backend/models/dead_host.js
+++ /dev/null
@@ -1,81 +0,0 @@
-// Objection Docs:
-// http://vincit.github.io/objection.js/
-
-const db = require('../db');
-const Model = require('objection').Model;
-const User = require('./user');
-const Certificate = require('./certificate');
-const now = require('./now_helper');
-
-Model.knex(db);
-
-class DeadHost extends Model {
- $beforeInsert () {
- this.created_on = now();
- this.modified_on = now();
-
- // Default for domain_names
- if (typeof this.domain_names === 'undefined') {
- this.domain_names = [];
- }
-
- // Default for meta
- if (typeof this.meta === 'undefined') {
- this.meta = {};
- }
-
- this.domain_names.sort();
- }
-
- $beforeUpdate () {
- this.modified_on = now();
-
- // Sort domain_names
- if (typeof this.domain_names !== 'undefined') {
- this.domain_names.sort();
- }
- }
-
- static get name () {
- return 'DeadHost';
- }
-
- static get tableName () {
- return 'dead_host';
- }
-
- static get jsonAttributes () {
- return ['domain_names', 'meta'];
- }
-
- static get relationMappings () {
- return {
- owner: {
- relation: Model.HasOneRelation,
- modelClass: User,
- join: {
- from: 'dead_host.owner_user_id',
- to: 'user.id'
- },
- modify: function (qb) {
- qb.where('user.is_deleted', 0);
- qb.omit(['id', 'created_on', 'modified_on', 'is_deleted', 'email', 'roles']);
- }
- },
- certificate: {
- relation: Model.HasOneRelation,
- modelClass: Certificate,
- join: {
- from: 'dead_host.certificate_id',
- to: 'certificate.id'
- },
- modify: function (qb) {
- qb.where('certificate.is_deleted', 0);
- qb.omit(['id', 'created_on', 'modified_on', 'is_deleted']);
- }
- }
- };
- }
-}
-
-module.exports = DeadHost;
diff --git a/backend/models/now_helper.js b/backend/models/now_helper.js
deleted file mode 100644
index def16d08..00000000
--- a/backend/models/now_helper.js
+++ /dev/null
@@ -1,13 +0,0 @@
-const db = require('../db');
-const config = require('config');
-const Model = require('objection').Model;
-
-Model.knex(db);
-
-module.exports = function () {
- if (config.database.knex && config.database.knex.client === 'sqlite3') {
- return Model.raw('datetime(\'now\',\'localtime\')');
- } else {
- return Model.raw('NOW()');
- }
-};
diff --git a/backend/models/proxy_host.js b/backend/models/proxy_host.js
deleted file mode 100644
index a7583088..00000000
--- a/backend/models/proxy_host.js
+++ /dev/null
@@ -1,94 +0,0 @@
-// Objection Docs:
-// http://vincit.github.io/objection.js/
-
-const db = require('../db');
-const Model = require('objection').Model;
-const User = require('./user');
-const AccessList = require('./access_list');
-const Certificate = require('./certificate');
-const now = require('./now_helper');
-
-Model.knex(db);
-
-class ProxyHost extends Model {
- $beforeInsert () {
- this.created_on = now();
- this.modified_on = now();
-
- // Default for domain_names
- if (typeof this.domain_names === 'undefined') {
- this.domain_names = [];
- }
-
- // Default for meta
- if (typeof this.meta === 'undefined') {
- this.meta = {};
- }
-
- this.domain_names.sort();
- }
-
- $beforeUpdate () {
- this.modified_on = now();
-
- // Sort domain_names
- if (typeof this.domain_names !== 'undefined') {
- this.domain_names.sort();
- }
- }
-
- static get name () {
- return 'ProxyHost';
- }
-
- static get tableName () {
- return 'proxy_host';
- }
-
- static get jsonAttributes () {
- return ['domain_names', 'meta', 'locations'];
- }
-
- static get relationMappings () {
- return {
- owner: {
- relation: Model.HasOneRelation,
- modelClass: User,
- join: {
- from: 'proxy_host.owner_user_id',
- to: 'user.id'
- },
- modify: function (qb) {
- qb.where('user.is_deleted', 0);
- qb.omit(['id', 'created_on', 'modified_on', 'is_deleted', 'email', 'roles']);
- }
- },
- access_list: {
- relation: Model.HasOneRelation,
- modelClass: AccessList,
- join: {
- from: 'proxy_host.access_list_id',
- to: 'access_list.id'
- },
- modify: function (qb) {
- qb.where('access_list.is_deleted', 0);
- qb.omit(['id', 'created_on', 'modified_on', 'is_deleted']);
- }
- },
- certificate: {
- relation: Model.HasOneRelation,
- modelClass: Certificate,
- join: {
- from: 'proxy_host.certificate_id',
- to: 'certificate.id'
- },
- modify: function (qb) {
- qb.where('certificate.is_deleted', 0);
- qb.omit(['id', 'created_on', 'modified_on', 'is_deleted']);
- }
- }
- };
- }
-}
-
-module.exports = ProxyHost;
diff --git a/backend/models/redirection_host.js b/backend/models/redirection_host.js
deleted file mode 100644
index dd149b76..00000000
--- a/backend/models/redirection_host.js
+++ /dev/null
@@ -1,81 +0,0 @@
-// Objection Docs:
-// http://vincit.github.io/objection.js/
-
-const db = require('../db');
-const Model = require('objection').Model;
-const User = require('./user');
-const Certificate = require('./certificate');
-const now = require('./now_helper');
-
-Model.knex(db);
-
-class RedirectionHost extends Model {
- $beforeInsert () {
- this.created_on = now();
- this.modified_on = now();
-
- // Default for domain_names
- if (typeof this.domain_names === 'undefined') {
- this.domain_names = [];
- }
-
- // Default for meta
- if (typeof this.meta === 'undefined') {
- this.meta = {};
- }
-
- this.domain_names.sort();
- }
-
- $beforeUpdate () {
- this.modified_on = now();
-
- // Sort domain_names
- if (typeof this.domain_names !== 'undefined') {
- this.domain_names.sort();
- }
- }
-
- static get name () {
- return 'RedirectionHost';
- }
-
- static get tableName () {
- return 'redirection_host';
- }
-
- static get jsonAttributes () {
- return ['domain_names', 'meta'];
- }
-
- static get relationMappings () {
- return {
- owner: {
- relation: Model.HasOneRelation,
- modelClass: User,
- join: {
- from: 'redirection_host.owner_user_id',
- to: 'user.id'
- },
- modify: function (qb) {
- qb.where('user.is_deleted', 0);
- qb.omit(['id', 'created_on', 'modified_on', 'is_deleted', 'email', 'roles']);
- }
- },
- certificate: {
- relation: Model.HasOneRelation,
- modelClass: Certificate,
- join: {
- from: 'redirection_host.certificate_id',
- to: 'certificate.id'
- },
- modify: function (qb) {
- qb.where('certificate.is_deleted', 0);
- qb.omit(['id', 'created_on', 'modified_on', 'is_deleted']);
- }
- }
- };
- }
-}
-
-module.exports = RedirectionHost;
diff --git a/backend/models/setting.js b/backend/models/setting.js
deleted file mode 100644
index 75aa9007..00000000
--- a/backend/models/setting.js
+++ /dev/null
@@ -1,30 +0,0 @@
-// Objection Docs:
-// http://vincit.github.io/objection.js/
-
-const db = require('../db');
-const Model = require('objection').Model;
-
-Model.knex(db);
-
-class Setting extends Model {
- $beforeInsert () {
- // Default for meta
- if (typeof this.meta === 'undefined') {
- this.meta = {};
- }
- }
-
- static get name () {
- return 'Setting';
- }
-
- static get tableName () {
- return 'setting';
- }
-
- static get jsonAttributes () {
- return ['meta'];
- }
-}
-
-module.exports = Setting;
diff --git a/backend/models/stream.js b/backend/models/stream.js
deleted file mode 100644
index ed65de0f..00000000
--- a/backend/models/stream.js
+++ /dev/null
@@ -1,56 +0,0 @@
-// Objection Docs:
-// http://vincit.github.io/objection.js/
-
-const db = require('../db');
-const Model = require('objection').Model;
-const User = require('./user');
-const now = require('./now_helper');
-
-Model.knex(db);
-
-class Stream extends Model {
- $beforeInsert () {
- this.created_on = now();
- this.modified_on = now();
-
- // Default for meta
- if (typeof this.meta === 'undefined') {
- this.meta = {};
- }
- }
-
- $beforeUpdate () {
- this.modified_on = now();
- }
-
- static get name () {
- return 'Stream';
- }
-
- static get tableName () {
- return 'stream';
- }
-
- static get jsonAttributes () {
- return ['meta'];
- }
-
- static get relationMappings () {
- return {
- owner: {
- relation: Model.HasOneRelation,
- modelClass: User,
- join: {
- from: 'stream.owner_user_id',
- to: 'user.id'
- },
- modify: function (qb) {
- qb.where('user.is_deleted', 0);
- qb.omit(['id', 'created_on', 'modified_on', 'is_deleted', 'email', 'roles']);
- }
- }
- };
- }
-}
-
-module.exports = Stream;
diff --git a/backend/models/token.js b/backend/models/token.js
deleted file mode 100644
index 4e1b1826..00000000
--- a/backend/models/token.js
+++ /dev/null
@@ -1,147 +0,0 @@
-/**
- NOTE: This is not a database table, this is a model of a Token object that can be created/loaded
- and then has abilities after that.
- */
-
-const _ = require('lodash');
-const jwt = require('jsonwebtoken');
-const crypto = require('crypto');
-const error = require('../lib/error');
-const ALGO = 'RS256';
-
-let public_key = null;
-let private_key = null;
-
-function checkJWTKeyPair() {
- if (!public_key || !private_key) {
- let config = require('config');
- public_key = config.get('jwt.pub');
- private_key = config.get('jwt.key');
- }
-}
-
-module.exports = function () {
-
- let token_data = {};
-
- let self = {
- /**
- * @param {Object} payload
- * @returns {Promise}
- */
- create: (payload) => {
- // sign with RSA SHA256
- let options = {
- algorithm: ALGO,
- expiresIn: payload.expiresIn || '1d'
- };
-
- payload.jti = crypto.randomBytes(12)
- .toString('base64')
- .substr(-8);
-
- checkJWTKeyPair();
-
- return new Promise((resolve, reject) => {
- jwt.sign(payload, private_key, options, (err, token) => {
- if (err) {
- reject(err);
- } else {
- token_data = payload;
- resolve({
- token: token,
- payload: payload
- });
- }
- });
- });
- },
-
- /**
- * @param {String} token
- * @returns {Promise}
- */
- load: function (token) {
- return new Promise((resolve, reject) => {
- checkJWTKeyPair();
- try {
- if (!token || token === null || token === 'null') {
- reject(new error.AuthError('Empty token'));
- } else {
- jwt.verify(token, public_key, {ignoreExpiration: false, algorithms: [ALGO]}, (err, result) => {
- if (err) {
-
- if (err.name === 'TokenExpiredError') {
- reject(new error.AuthError('Token has expired', err));
- } else {
- reject(err);
- }
-
- } else {
- token_data = result;
-
- // Hack: some tokens out in the wild have a scope of 'all' instead of 'user'.
- // For 30 days at least, we need to replace 'all' with user.
- if ((typeof token_data.scope !== 'undefined' && _.indexOf(token_data.scope, 'all') !== -1)) {
- //console.log('Warning! Replacing "all" scope with "user"');
-
- token_data.scope = ['user'];
- }
-
- resolve(token_data);
- }
- });
- }
- } catch (err) {
- reject(err);
- }
- });
-
- },
-
- /**
- * Does the token have the specified scope?
- *
- * @param {String} scope
- * @returns {Boolean}
- */
- hasScope: function (scope) {
- return typeof token_data.scope !== 'undefined' && _.indexOf(token_data.scope, scope) !== -1;
- },
-
- /**
- * @param {String} key
- * @return {*}
- */
- get: function (key) {
- if (typeof token_data[key] !== 'undefined') {
- return token_data[key];
- }
-
- return null;
- },
-
- /**
- * @param {String} key
- * @param {*} value
- */
- set: function (key, value) {
- token_data[key] = value;
- },
-
- /**
- * @param [default_value]
- * @returns {Integer}
- */
- getUserId: (default_value) => {
- let attrs = self.get('attrs');
- if (attrs && typeof attrs.id !== 'undefined' && attrs.id) {
- return attrs.id;
- }
-
- return default_value || 0;
- }
- };
-
- return self;
-};
diff --git a/backend/models/user.js b/backend/models/user.js
deleted file mode 100644
index c76f7dbf..00000000
--- a/backend/models/user.js
+++ /dev/null
@@ -1,56 +0,0 @@
-// Objection Docs:
-// http://vincit.github.io/objection.js/
-
-const db = require('../db');
-const Model = require('objection').Model;
-const UserPermission = require('./user_permission');
-const now = require('./now_helper');
-
-Model.knex(db);
-
-class User extends Model {
- $beforeInsert () {
- this.created_on = now();
- this.modified_on = now();
-
- // Default for roles
- if (typeof this.roles === 'undefined') {
- this.roles = [];
- }
- }
-
- $beforeUpdate () {
- this.modified_on = now();
- }
-
- static get name () {
- return 'User';
- }
-
- static get tableName () {
- return 'user';
- }
-
- static get jsonAttributes () {
- return ['roles'];
- }
-
- static get relationMappings () {
- return {
- permissions: {
- relation: Model.HasOneRelation,
- modelClass: UserPermission,
- join: {
- from: 'user.id',
- to: 'user_permission.user_id'
- },
- modify: function (qb) {
- qb.omit(['id', 'created_on', 'modified_on', 'user_id']);
- }
- }
- };
- }
-
-}
-
-module.exports = User;
diff --git a/backend/models/user_permission.js b/backend/models/user_permission.js
deleted file mode 100644
index bb87d5dc..00000000
--- a/backend/models/user_permission.js
+++ /dev/null
@@ -1,29 +0,0 @@
-// Objection Docs:
-// http://vincit.github.io/objection.js/
-
-const db = require('../db');
-const Model = require('objection').Model;
-const now = require('./now_helper');
-
-Model.knex(db);
-
-class UserPermission extends Model {
- $beforeInsert () {
- this.created_on = now();
- this.modified_on = now();
- }
-
- $beforeUpdate () {
- this.modified_on = now();
- }
-
- static get name () {
- return 'UserPermission';
- }
-
- static get tableName () {
- return 'user_permission';
- }
-}
-
-module.exports = UserPermission;
diff --git a/backend/nodemon.json b/backend/nodemon.json
deleted file mode 100644
index 3d6d1342..00000000
--- a/backend/nodemon.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "verbose": false,
- "ignore": [
- "data"
- ],
- "ext": "js json ejs"
-}
diff --git a/backend/package.json b/backend/package.json
deleted file mode 100644
index 28b6f178..00000000
--- a/backend/package.json
+++ /dev/null
@@ -1,43 +0,0 @@
-{
- "name": "nginx-proxy-manager",
- "version": "0.0.0",
- "description": "A beautiful interface for creating Nginx endpoints",
- "main": "js/index.js",
- "dependencies": {
- "ajv": "^6.12.0",
- "archiver": "^5.3.0",
- "batchflow": "^0.4.0",
- "bcrypt": "^5.0.0",
- "body-parser": "^1.19.0",
- "compression": "^1.7.4",
- "config": "^3.3.1",
- "express": "^4.17.1",
- "express-fileupload": "^1.1.9",
- "gravatar": "^1.8.0",
- "json-schema-ref-parser": "^8.0.0",
- "jsonwebtoken": "^8.5.1",
- "knex": "^0.20.13",
- "liquidjs": "^9.11.10",
- "lodash": "^4.17.21",
- "moment": "^2.24.0",
- "mysql": "^2.18.1",
- "node-rsa": "^1.0.8",
- "nodemon": "^2.0.2",
- "objection": "^2.2.16",
- "path": "^0.12.7",
- "signale": "^1.4.0",
- "sqlite3": "^4.1.1",
- "temp-write": "^4.0.0"
- },
- "signale": {
- "displayDate": true,
- "displayTimestamp": true
- },
- "author": "Jamie Curnow ",
- "license": "MIT",
- "devDependencies": {
- "eslint": "^6.8.0",
- "eslint-plugin-align-assignments": "^1.1.2",
- "prettier": "^2.0.4"
- }
-}
diff --git a/backend/routes/api/audit-log.js b/backend/routes/api/audit-log.js
deleted file mode 100644
index 8a2490c3..00000000
--- a/backend/routes/api/audit-log.js
+++ /dev/null
@@ -1,52 +0,0 @@
-const express = require('express');
-const validator = require('../../lib/validator');
-const jwtdecode = require('../../lib/express/jwt-decode');
-const internalAuditLog = require('../../internal/audit-log');
-
-let router = express.Router({
- caseSensitive: true,
- strict: true,
- mergeParams: true
-});
-
-/**
- * /api/audit-log
- */
-router
- .route('/')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * GET /api/audit-log
- *
- * Retrieve all logs
- */
- .get((req, res, next) => {
- validator({
- additionalProperties: false,
- properties: {
- expand: {
- $ref: 'definitions#/definitions/expand'
- },
- query: {
- $ref: 'definitions#/definitions/query'
- }
- }
- }, {
- expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
- query: (typeof req.query.query === 'string' ? req.query.query : null)
- })
- .then((data) => {
- return internalAuditLog.getAll(res.locals.access, data.expand, data.query);
- })
- .then((rows) => {
- res.status(200)
- .send(rows);
- })
- .catch(next);
- });
-
-module.exports = router;
diff --git a/backend/routes/api/main.js b/backend/routes/api/main.js
deleted file mode 100644
index 33cbbc21..00000000
--- a/backend/routes/api/main.js
+++ /dev/null
@@ -1,51 +0,0 @@
-const express = require('express');
-const pjson = require('../../package.json');
-const error = require('../../lib/error');
-
-let router = express.Router({
- caseSensitive: true,
- strict: true,
- mergeParams: true
-});
-
-/**
- * Health Check
- * GET /api
- */
-router.get('/', (req, res/*, next*/) => {
- let version = pjson.version.split('-').shift().split('.');
-
- res.status(200).send({
- status: 'OK',
- version: {
- major: parseInt(version.shift(), 10),
- minor: parseInt(version.shift(), 10),
- revision: parseInt(version.shift(), 10)
- }
- });
-});
-
-router.use('/schema', require('./schema'));
-router.use('/tokens', require('./tokens'));
-router.use('/users', require('./users'));
-router.use('/audit-log', require('./audit-log'));
-router.use('/reports', require('./reports'));
-router.use('/settings', require('./settings'));
-router.use('/nginx/proxy-hosts', require('./nginx/proxy_hosts'));
-router.use('/nginx/redirection-hosts', require('./nginx/redirection_hosts'));
-router.use('/nginx/dead-hosts', require('./nginx/dead_hosts'));
-router.use('/nginx/streams', require('./nginx/streams'));
-router.use('/nginx/access-lists', require('./nginx/access_lists'));
-router.use('/nginx/certificates', require('./nginx/certificates'));
-
-/**
- * API 404 for all other routes
- *
- * ALL /api/*
- */
-router.all(/(.+)/, function (req, res, next) {
- req.params.page = req.params['0'];
- next(new error.ItemNotFoundError(req.params.page));
-});
-
-module.exports = router;
diff --git a/backend/routes/api/nginx/access_lists.js b/backend/routes/api/nginx/access_lists.js
deleted file mode 100644
index d55c3ae1..00000000
--- a/backend/routes/api/nginx/access_lists.js
+++ /dev/null
@@ -1,148 +0,0 @@
-const express = require('express');
-const validator = require('../../../lib/validator');
-const jwtdecode = require('../../../lib/express/jwt-decode');
-const internalAccessList = require('../../../internal/access-list');
-const apiValidator = require('../../../lib/validator/api');
-
-let router = express.Router({
- caseSensitive: true,
- strict: true,
- mergeParams: true
-});
-
-/**
- * /api/nginx/access-lists
- */
-router
- .route('/')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * GET /api/nginx/access-lists
- *
- * Retrieve all access-lists
- */
- .get((req, res, next) => {
- validator({
- additionalProperties: false,
- properties: {
- expand: {
- $ref: 'definitions#/definitions/expand'
- },
- query: {
- $ref: 'definitions#/definitions/query'
- }
- }
- }, {
- expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
- query: (typeof req.query.query === 'string' ? req.query.query : null)
- })
- .then((data) => {
- return internalAccessList.getAll(res.locals.access, data.expand, data.query);
- })
- .then((rows) => {
- res.status(200)
- .send(rows);
- })
- .catch(next);
- })
-
- /**
- * POST /api/nginx/access-lists
- *
- * Create a new access-list
- */
- .post((req, res, next) => {
- apiValidator({$ref: 'endpoints/access-lists#/links/1/schema'}, req.body)
- .then((payload) => {
- return internalAccessList.create(res.locals.access, payload);
- })
- .then((result) => {
- res.status(201)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Specific access-list
- *
- * /api/nginx/access-lists/123
- */
-router
- .route('/:list_id')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * GET /api/nginx/access-lists/123
- *
- * Retrieve a specific access-list
- */
- .get((req, res, next) => {
- validator({
- required: ['list_id'],
- additionalProperties: false,
- properties: {
- list_id: {
- $ref: 'definitions#/definitions/id'
- },
- expand: {
- $ref: 'definitions#/definitions/expand'
- }
- }
- }, {
- list_id: req.params.list_id,
- expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
- })
- .then((data) => {
- return internalAccessList.get(res.locals.access, {
- id: parseInt(data.list_id, 10),
- expand: data.expand
- });
- })
- .then((row) => {
- res.status(200)
- .send(row);
- })
- .catch(next);
- })
-
- /**
- * PUT /api/nginx/access-lists/123
- *
- * Update and existing access-list
- */
- .put((req, res, next) => {
- apiValidator({$ref: 'endpoints/access-lists#/links/2/schema'}, req.body)
- .then((payload) => {
- payload.id = parseInt(req.params.list_id, 10);
- return internalAccessList.update(res.locals.access, payload);
- })
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- })
-
- /**
- * DELETE /api/nginx/access-lists/123
- *
- * Delete and existing access-list
- */
- .delete((req, res, next) => {
- internalAccessList.delete(res.locals.access, {id: parseInt(req.params.list_id, 10)})
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-module.exports = router;
diff --git a/backend/routes/api/nginx/certificates.js b/backend/routes/api/nginx/certificates.js
deleted file mode 100644
index ffdfb515..00000000
--- a/backend/routes/api/nginx/certificates.js
+++ /dev/null
@@ -1,299 +0,0 @@
-const express = require('express');
-const validator = require('../../../lib/validator');
-const jwtdecode = require('../../../lib/express/jwt-decode');
-const internalCertificate = require('../../../internal/certificate');
-const apiValidator = require('../../../lib/validator/api');
-
-let router = express.Router({
- caseSensitive: true,
- strict: true,
- mergeParams: true
-});
-
-/**
- * /api/nginx/certificates
- */
-router
- .route('/')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * GET /api/nginx/certificates
- *
- * Retrieve all certificates
- */
- .get((req, res, next) => {
- validator({
- additionalProperties: false,
- properties: {
- expand: {
- $ref: 'definitions#/definitions/expand'
- },
- query: {
- $ref: 'definitions#/definitions/query'
- }
- }
- }, {
- expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
- query: (typeof req.query.query === 'string' ? req.query.query : null)
- })
- .then((data) => {
- return internalCertificate.getAll(res.locals.access, data.expand, data.query);
- })
- .then((rows) => {
- res.status(200)
- .send(rows);
- })
- .catch(next);
- })
-
- /**
- * POST /api/nginx/certificates
- *
- * Create a new certificate
- */
- .post((req, res, next) => {
- apiValidator({$ref: 'endpoints/certificates#/links/1/schema'}, req.body)
- .then((payload) => {
- req.setTimeout(900000); // 15 minutes timeout
- return internalCertificate.create(res.locals.access, payload);
- })
- .then((result) => {
- res.status(201)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Test HTTP challenge for domains
- *
- * /api/nginx/certificates/test-http
- */
-router
- .route('/test-http')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
-/**
- * GET /api/nginx/certificates/test-http
- *
- * Test HTTP challenge for domains
- */
- .get((req, res, next) => {
- internalCertificate.testHttpsChallenge(res.locals.access, JSON.parse(req.query.domains))
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Specific certificate
- *
- * /api/nginx/certificates/123
- */
-router
- .route('/:certificate_id')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * GET /api/nginx/certificates/123
- *
- * Retrieve a specific certificate
- */
- .get((req, res, next) => {
- validator({
- required: ['certificate_id'],
- additionalProperties: false,
- properties: {
- certificate_id: {
- $ref: 'definitions#/definitions/id'
- },
- expand: {
- $ref: 'definitions#/definitions/expand'
- }
- }
- }, {
- certificate_id: req.params.certificate_id,
- expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
- })
- .then((data) => {
- return internalCertificate.get(res.locals.access, {
- id: parseInt(data.certificate_id, 10),
- expand: data.expand
- });
- })
- .then((row) => {
- res.status(200)
- .send(row);
- })
- .catch(next);
- })
-
- /**
- * PUT /api/nginx/certificates/123
- *
- * Update and existing certificate
- */
- .put((req, res, next) => {
- apiValidator({$ref: 'endpoints/certificates#/links/2/schema'}, req.body)
- .then((payload) => {
- payload.id = parseInt(req.params.certificate_id, 10);
- return internalCertificate.update(res.locals.access, payload);
- })
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- })
-
- /**
- * DELETE /api/nginx/certificates/123
- *
- * Update and existing certificate
- */
- .delete((req, res, next) => {
- internalCertificate.delete(res.locals.access, {id: parseInt(req.params.certificate_id, 10)})
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Upload Certs
- *
- * /api/nginx/certificates/123/upload
- */
-router
- .route('/:certificate_id/upload')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * POST /api/nginx/certificates/123/upload
- *
- * Upload certificates
- */
- .post((req, res, next) => {
- if (!req.files) {
- res.status(400)
- .send({error: 'No files were uploaded'});
- } else {
- internalCertificate.upload(res.locals.access, {
- id: parseInt(req.params.certificate_id, 10),
- files: req.files
- })
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- }
- });
-
-/**
- * Renew LE Certs
- *
- * /api/nginx/certificates/123/renew
- */
-router
- .route('/:certificate_id/renew')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * POST /api/nginx/certificates/123/renew
- *
- * Renew certificate
- */
- .post((req, res, next) => {
- req.setTimeout(900000); // 15 minutes timeout
- internalCertificate.renew(res.locals.access, {
- id: parseInt(req.params.certificate_id, 10)
- })
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Download LE Certs
- *
- * /api/nginx/certificates/123/download
- */
-router
- .route('/:certificate_id/download')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * GET /api/nginx/certificates/123/download
- *
- * Renew certificate
- */
- .get((req, res, next) => {
- internalCertificate.download(res.locals.access, {
- id: parseInt(req.params.certificate_id, 10)
- })
- .then((result) => {
- res.status(200)
- .download(result.fileName);
- })
- .catch(next);
- });
-
-/**
- * Validate Certs before saving
- *
- * /api/nginx/certificates/validate
- */
-router
- .route('/validate')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * POST /api/nginx/certificates/validate
- *
- * Validate certificates
- */
- .post((req, res, next) => {
- if (!req.files) {
- res.status(400)
- .send({error: 'No files were uploaded'});
- } else {
- internalCertificate.validate({
- files: req.files
- })
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- }
- });
-
-module.exports = router;
diff --git a/backend/routes/api/nginx/dead_hosts.js b/backend/routes/api/nginx/dead_hosts.js
deleted file mode 100644
index 08b58f2d..00000000
--- a/backend/routes/api/nginx/dead_hosts.js
+++ /dev/null
@@ -1,196 +0,0 @@
-const express = require('express');
-const validator = require('../../../lib/validator');
-const jwtdecode = require('../../../lib/express/jwt-decode');
-const internalDeadHost = require('../../../internal/dead-host');
-const apiValidator = require('../../../lib/validator/api');
-
-let router = express.Router({
- caseSensitive: true,
- strict: true,
- mergeParams: true
-});
-
-/**
- * /api/nginx/dead-hosts
- */
-router
- .route('/')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * GET /api/nginx/dead-hosts
- *
- * Retrieve all dead-hosts
- */
- .get((req, res, next) => {
- validator({
- additionalProperties: false,
- properties: {
- expand: {
- $ref: 'definitions#/definitions/expand'
- },
- query: {
- $ref: 'definitions#/definitions/query'
- }
- }
- }, {
- expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
- query: (typeof req.query.query === 'string' ? req.query.query : null)
- })
- .then((data) => {
- return internalDeadHost.getAll(res.locals.access, data.expand, data.query);
- })
- .then((rows) => {
- res.status(200)
- .send(rows);
- })
- .catch(next);
- })
-
- /**
- * POST /api/nginx/dead-hosts
- *
- * Create a new dead-host
- */
- .post((req, res, next) => {
- apiValidator({$ref: 'endpoints/dead-hosts#/links/1/schema'}, req.body)
- .then((payload) => {
- return internalDeadHost.create(res.locals.access, payload);
- })
- .then((result) => {
- res.status(201)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Specific dead-host
- *
- * /api/nginx/dead-hosts/123
- */
-router
- .route('/:host_id')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * GET /api/nginx/dead-hosts/123
- *
- * Retrieve a specific dead-host
- */
- .get((req, res, next) => {
- validator({
- required: ['host_id'],
- additionalProperties: false,
- properties: {
- host_id: {
- $ref: 'definitions#/definitions/id'
- },
- expand: {
- $ref: 'definitions#/definitions/expand'
- }
- }
- }, {
- host_id: req.params.host_id,
- expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
- })
- .then((data) => {
- return internalDeadHost.get(res.locals.access, {
- id: parseInt(data.host_id, 10),
- expand: data.expand
- });
- })
- .then((row) => {
- res.status(200)
- .send(row);
- })
- .catch(next);
- })
-
- /**
- * PUT /api/nginx/dead-hosts/123
- *
- * Update and existing dead-host
- */
- .put((req, res, next) => {
- apiValidator({$ref: 'endpoints/dead-hosts#/links/2/schema'}, req.body)
- .then((payload) => {
- payload.id = parseInt(req.params.host_id, 10);
- return internalDeadHost.update(res.locals.access, payload);
- })
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- })
-
- /**
- * DELETE /api/nginx/dead-hosts/123
- *
- * Update and existing dead-host
- */
- .delete((req, res, next) => {
- internalDeadHost.delete(res.locals.access, {id: parseInt(req.params.host_id, 10)})
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Enable dead-host
- *
- * /api/nginx/dead-hosts/123/enable
- */
-router
- .route('/:host_id/enable')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * POST /api/nginx/dead-hosts/123/enable
- */
- .post((req, res, next) => {
- internalDeadHost.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Disable dead-host
- *
- * /api/nginx/dead-hosts/123/disable
- */
-router
- .route('/:host_id/disable')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * POST /api/nginx/dead-hosts/123/disable
- */
- .post((req, res, next) => {
- internalDeadHost.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-module.exports = router;
diff --git a/backend/routes/api/nginx/proxy_hosts.js b/backend/routes/api/nginx/proxy_hosts.js
deleted file mode 100644
index 6f933c3d..00000000
--- a/backend/routes/api/nginx/proxy_hosts.js
+++ /dev/null
@@ -1,196 +0,0 @@
-const express = require('express');
-const validator = require('../../../lib/validator');
-const jwtdecode = require('../../../lib/express/jwt-decode');
-const internalProxyHost = require('../../../internal/proxy-host');
-const apiValidator = require('../../../lib/validator/api');
-
-let router = express.Router({
- caseSensitive: true,
- strict: true,
- mergeParams: true
-});
-
-/**
- * /api/nginx/proxy-hosts
- */
-router
- .route('/')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * GET /api/nginx/proxy-hosts
- *
- * Retrieve all proxy-hosts
- */
- .get((req, res, next) => {
- validator({
- additionalProperties: false,
- properties: {
- expand: {
- $ref: 'definitions#/definitions/expand'
- },
- query: {
- $ref: 'definitions#/definitions/query'
- }
- }
- }, {
- expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
- query: (typeof req.query.query === 'string' ? req.query.query : null)
- })
- .then((data) => {
- return internalProxyHost.getAll(res.locals.access, data.expand, data.query);
- })
- .then((rows) => {
- res.status(200)
- .send(rows);
- })
- .catch(next);
- })
-
- /**
- * POST /api/nginx/proxy-hosts
- *
- * Create a new proxy-host
- */
- .post((req, res, next) => {
- apiValidator({$ref: 'endpoints/proxy-hosts#/links/1/schema'}, req.body)
- .then((payload) => {
- return internalProxyHost.create(res.locals.access, payload);
- })
- .then((result) => {
- res.status(201)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Specific proxy-host
- *
- * /api/nginx/proxy-hosts/123
- */
-router
- .route('/:host_id')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * GET /api/nginx/proxy-hosts/123
- *
- * Retrieve a specific proxy-host
- */
- .get((req, res, next) => {
- validator({
- required: ['host_id'],
- additionalProperties: false,
- properties: {
- host_id: {
- $ref: 'definitions#/definitions/id'
- },
- expand: {
- $ref: 'definitions#/definitions/expand'
- }
- }
- }, {
- host_id: req.params.host_id,
- expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
- })
- .then((data) => {
- return internalProxyHost.get(res.locals.access, {
- id: parseInt(data.host_id, 10),
- expand: data.expand
- });
- })
- .then((row) => {
- res.status(200)
- .send(row);
- })
- .catch(next);
- })
-
- /**
- * PUT /api/nginx/proxy-hosts/123
- *
- * Update and existing proxy-host
- */
- .put((req, res, next) => {
- apiValidator({$ref: 'endpoints/proxy-hosts#/links/2/schema'}, req.body)
- .then((payload) => {
- payload.id = parseInt(req.params.host_id, 10);
- return internalProxyHost.update(res.locals.access, payload);
- })
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- })
-
- /**
- * DELETE /api/nginx/proxy-hosts/123
- *
- * Update and existing proxy-host
- */
- .delete((req, res, next) => {
- internalProxyHost.delete(res.locals.access, {id: parseInt(req.params.host_id, 10)})
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Enable proxy-host
- *
- * /api/nginx/proxy-hosts/123/enable
- */
-router
- .route('/:host_id/enable')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * POST /api/nginx/proxy-hosts/123/enable
- */
- .post((req, res, next) => {
- internalProxyHost.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Disable proxy-host
- *
- * /api/nginx/proxy-hosts/123/disable
- */
-router
- .route('/:host_id/disable')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * POST /api/nginx/proxy-hosts/123/disable
- */
- .post((req, res, next) => {
- internalProxyHost.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-module.exports = router;
diff --git a/backend/routes/api/nginx/redirection_hosts.js b/backend/routes/api/nginx/redirection_hosts.js
deleted file mode 100644
index 4d44c112..00000000
--- a/backend/routes/api/nginx/redirection_hosts.js
+++ /dev/null
@@ -1,196 +0,0 @@
-const express = require('express');
-const validator = require('../../../lib/validator');
-const jwtdecode = require('../../../lib/express/jwt-decode');
-const internalRedirectionHost = require('../../../internal/redirection-host');
-const apiValidator = require('../../../lib/validator/api');
-
-let router = express.Router({
- caseSensitive: true,
- strict: true,
- mergeParams: true
-});
-
-/**
- * /api/nginx/redirection-hosts
- */
-router
- .route('/')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * GET /api/nginx/redirection-hosts
- *
- * Retrieve all redirection-hosts
- */
- .get((req, res, next) => {
- validator({
- additionalProperties: false,
- properties: {
- expand: {
- $ref: 'definitions#/definitions/expand'
- },
- query: {
- $ref: 'definitions#/definitions/query'
- }
- }
- }, {
- expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
- query: (typeof req.query.query === 'string' ? req.query.query : null)
- })
- .then((data) => {
- return internalRedirectionHost.getAll(res.locals.access, data.expand, data.query);
- })
- .then((rows) => {
- res.status(200)
- .send(rows);
- })
- .catch(next);
- })
-
- /**
- * POST /api/nginx/redirection-hosts
- *
- * Create a new redirection-host
- */
- .post((req, res, next) => {
- apiValidator({$ref: 'endpoints/redirection-hosts#/links/1/schema'}, req.body)
- .then((payload) => {
- return internalRedirectionHost.create(res.locals.access, payload);
- })
- .then((result) => {
- res.status(201)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Specific redirection-host
- *
- * /api/nginx/redirection-hosts/123
- */
-router
- .route('/:host_id')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * GET /api/nginx/redirection-hosts/123
- *
- * Retrieve a specific redirection-host
- */
- .get((req, res, next) => {
- validator({
- required: ['host_id'],
- additionalProperties: false,
- properties: {
- host_id: {
- $ref: 'definitions#/definitions/id'
- },
- expand: {
- $ref: 'definitions#/definitions/expand'
- }
- }
- }, {
- host_id: req.params.host_id,
- expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
- })
- .then((data) => {
- return internalRedirectionHost.get(res.locals.access, {
- id: parseInt(data.host_id, 10),
- expand: data.expand
- });
- })
- .then((row) => {
- res.status(200)
- .send(row);
- })
- .catch(next);
- })
-
- /**
- * PUT /api/nginx/redirection-hosts/123
- *
- * Update and existing redirection-host
- */
- .put((req, res, next) => {
- apiValidator({$ref: 'endpoints/redirection-hosts#/links/2/schema'}, req.body)
- .then((payload) => {
- payload.id = parseInt(req.params.host_id, 10);
- return internalRedirectionHost.update(res.locals.access, payload);
- })
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- })
-
- /**
- * DELETE /api/nginx/redirection-hosts/123
- *
- * Update and existing redirection-host
- */
- .delete((req, res, next) => {
- internalRedirectionHost.delete(res.locals.access, {id: parseInt(req.params.host_id, 10)})
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Enable redirection-host
- *
- * /api/nginx/redirection-hosts/123/enable
- */
-router
- .route('/:host_id/enable')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * POST /api/nginx/redirection-hosts/123/enable
- */
- .post((req, res, next) => {
- internalRedirectionHost.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Disable redirection-host
- *
- * /api/nginx/redirection-hosts/123/disable
- */
-router
- .route('/:host_id/disable')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * POST /api/nginx/redirection-hosts/123/disable
- */
- .post((req, res, next) => {
- internalRedirectionHost.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-module.exports = router;
diff --git a/backend/routes/api/nginx/streams.js b/backend/routes/api/nginx/streams.js
deleted file mode 100644
index 5e3fc28f..00000000
--- a/backend/routes/api/nginx/streams.js
+++ /dev/null
@@ -1,196 +0,0 @@
-const express = require('express');
-const validator = require('../../../lib/validator');
-const jwtdecode = require('../../../lib/express/jwt-decode');
-const internalStream = require('../../../internal/stream');
-const apiValidator = require('../../../lib/validator/api');
-
-let router = express.Router({
- caseSensitive: true,
- strict: true,
- mergeParams: true
-});
-
-/**
- * /api/nginx/streams
- */
-router
- .route('/')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode()) // preferred so it doesn't apply to nonexistent routes
-
- /**
- * GET /api/nginx/streams
- *
- * Retrieve all streams
- */
- .get((req, res, next) => {
- validator({
- additionalProperties: false,
- properties: {
- expand: {
- $ref: 'definitions#/definitions/expand'
- },
- query: {
- $ref: 'definitions#/definitions/query'
- }
- }
- }, {
- expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
- query: (typeof req.query.query === 'string' ? req.query.query : null)
- })
- .then((data) => {
- return internalStream.getAll(res.locals.access, data.expand, data.query);
- })
- .then((rows) => {
- res.status(200)
- .send(rows);
- })
- .catch(next);
- })
-
- /**
- * POST /api/nginx/streams
- *
- * Create a new stream
- */
- .post((req, res, next) => {
- apiValidator({$ref: 'endpoints/streams#/links/1/schema'}, req.body)
- .then((payload) => {
- return internalStream.create(res.locals.access, payload);
- })
- .then((result) => {
- res.status(201)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Specific stream
- *
- * /api/nginx/streams/123
- */
-router
- .route('/:stream_id')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode()) // preferred so it doesn't apply to nonexistent routes
-
- /**
- * GET /api/nginx/streams/123
- *
- * Retrieve a specific stream
- */
- .get((req, res, next) => {
- validator({
- required: ['stream_id'],
- additionalProperties: false,
- properties: {
- stream_id: {
- $ref: 'definitions#/definitions/id'
- },
- expand: {
- $ref: 'definitions#/definitions/expand'
- }
- }
- }, {
- stream_id: req.params.stream_id,
- expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
- })
- .then((data) => {
- return internalStream.get(res.locals.access, {
- id: parseInt(data.stream_id, 10),
- expand: data.expand
- });
- })
- .then((row) => {
- res.status(200)
- .send(row);
- })
- .catch(next);
- })
-
- /**
- * PUT /api/nginx/streams/123
- *
- * Update and existing stream
- */
- .put((req, res, next) => {
- apiValidator({$ref: 'endpoints/streams#/links/2/schema'}, req.body)
- .then((payload) => {
- payload.id = parseInt(req.params.stream_id, 10);
- return internalStream.update(res.locals.access, payload);
- })
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- })
-
- /**
- * DELETE /api/nginx/streams/123
- *
- * Update and existing stream
- */
- .delete((req, res, next) => {
- internalStream.delete(res.locals.access, {id: parseInt(req.params.stream_id, 10)})
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Enable stream
- *
- * /api/nginx/streams/123/enable
- */
-router
- .route('/:host_id/enable')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * POST /api/nginx/streams/123/enable
- */
- .post((req, res, next) => {
- internalStream.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Disable stream
- *
- * /api/nginx/streams/123/disable
- */
-router
- .route('/:host_id/disable')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * POST /api/nginx/streams/123/disable
- */
- .post((req, res, next) => {
- internalStream.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-module.exports = router;
diff --git a/backend/routes/api/reports.js b/backend/routes/api/reports.js
deleted file mode 100644
index 9e2c98c8..00000000
--- a/backend/routes/api/reports.js
+++ /dev/null
@@ -1,29 +0,0 @@
-const express = require('express');
-const jwtdecode = require('../../lib/express/jwt-decode');
-const internalReport = require('../../internal/report');
-
-let router = express.Router({
- caseSensitive: true,
- strict: true,
- mergeParams: true
-});
-
-router
- .route('/hosts')
- .options((req, res) => {
- res.sendStatus(204);
- })
-
- /**
- * GET /reports/hosts
- */
- .get(jwtdecode(), (req, res, next) => {
- internalReport.getHostsReport(res.locals.access)
- .then((data) => {
- res.status(200)
- .send(data);
- })
- .catch(next);
- });
-
-module.exports = router;
diff --git a/backend/routes/api/schema.js b/backend/routes/api/schema.js
deleted file mode 100644
index fc6bd5bd..00000000
--- a/backend/routes/api/schema.js
+++ /dev/null
@@ -1,36 +0,0 @@
-const express = require('express');
-const swaggerJSON = require('../../doc/api.swagger.json');
-const PACKAGE = require('../../package.json');
-
-let router = express.Router({
- caseSensitive: true,
- strict: true,
- mergeParams: true
-});
-
-router
- .route('/')
- .options((req, res) => {
- res.sendStatus(204);
- })
-
- /**
- * GET /schema
- */
- .get((req, res/*, next*/) => {
- let proto = req.protocol;
- if (typeof req.headers['x-forwarded-proto'] !== 'undefined' && req.headers['x-forwarded-proto']) {
- proto = req.headers['x-forwarded-proto'];
- }
-
- let origin = proto + '://' + req.hostname;
- if (typeof req.headers.origin !== 'undefined' && req.headers.origin) {
- origin = req.headers.origin;
- }
-
- swaggerJSON.info.version = PACKAGE.version;
- swaggerJSON.servers[0].url = origin + '/api';
- res.status(200).send(swaggerJSON);
- });
-
-module.exports = router;
diff --git a/backend/routes/api/settings.js b/backend/routes/api/settings.js
deleted file mode 100644
index d08b2bf5..00000000
--- a/backend/routes/api/settings.js
+++ /dev/null
@@ -1,96 +0,0 @@
-const express = require('express');
-const validator = require('../../lib/validator');
-const jwtdecode = require('../../lib/express/jwt-decode');
-const internalSetting = require('../../internal/setting');
-const apiValidator = require('../../lib/validator/api');
-
-let router = express.Router({
- caseSensitive: true,
- strict: true,
- mergeParams: true
-});
-
-/**
- * /api/settings
- */
-router
- .route('/')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * GET /api/settings
- *
- * Retrieve all settings
- */
- .get((req, res, next) => {
- internalSetting.getAll(res.locals.access)
- .then((rows) => {
- res.status(200)
- .send(rows);
- })
- .catch(next);
- });
-
-/**
- * Specific setting
- *
- * /api/settings/something
- */
-router
- .route('/:setting_id')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * GET /settings/something
- *
- * Retrieve a specific setting
- */
- .get((req, res, next) => {
- validator({
- required: ['setting_id'],
- additionalProperties: false,
- properties: {
- setting_id: {
- $ref: 'definitions#/definitions/setting_id'
- }
- }
- }, {
- setting_id: req.params.setting_id
- })
- .then((data) => {
- return internalSetting.get(res.locals.access, {
- id: data.setting_id
- });
- })
- .then((row) => {
- res.status(200)
- .send(row);
- })
- .catch(next);
- })
-
- /**
- * PUT /api/settings/something
- *
- * Update and existing setting
- */
- .put((req, res, next) => {
- apiValidator({$ref: 'endpoints/settings#/links/1/schema'}, req.body)
- .then((payload) => {
- payload.id = req.params.setting_id;
- return internalSetting.update(res.locals.access, payload);
- })
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-module.exports = router;
diff --git a/backend/routes/api/tokens.js b/backend/routes/api/tokens.js
deleted file mode 100644
index a21f998a..00000000
--- a/backend/routes/api/tokens.js
+++ /dev/null
@@ -1,54 +0,0 @@
-const express = require('express');
-const jwtdecode = require('../../lib/express/jwt-decode');
-const internalToken = require('../../internal/token');
-const apiValidator = require('../../lib/validator/api');
-
-let router = express.Router({
- caseSensitive: true,
- strict: true,
- mergeParams: true
-});
-
-router
- .route('/')
- .options((req, res) => {
- res.sendStatus(204);
- })
-
- /**
- * GET /tokens
- *
- * Get a new Token, given they already have a token they want to refresh
- * We also piggy back on to this method, allowing admins to get tokens
- * for services like Job board and Worker.
- */
- .get(jwtdecode(), (req, res, next) => {
- internalToken.getFreshToken(res.locals.access, {
- expiry: (typeof req.query.expiry !== 'undefined' ? req.query.expiry : null),
- scope: (typeof req.query.scope !== 'undefined' ? req.query.scope : null)
- })
- .then((data) => {
- res.status(200)
- .send(data);
- })
- .catch(next);
- })
-
- /**
- * POST /tokens
- *
- * Create a new Token
- */
- .post((req, res, next) => {
- apiValidator({$ref: 'endpoints/tokens#/links/0/schema'}, req.body)
- .then((payload) => {
- return internalToken.getTokenFromEmail(payload);
- })
- .then((data) => {
- res.status(200)
- .send(data);
- })
- .catch(next);
- });
-
-module.exports = router;
diff --git a/backend/routes/api/users.js b/backend/routes/api/users.js
deleted file mode 100644
index 1c6bd0ad..00000000
--- a/backend/routes/api/users.js
+++ /dev/null
@@ -1,239 +0,0 @@
-const express = require('express');
-const validator = require('../../lib/validator');
-const jwtdecode = require('../../lib/express/jwt-decode');
-const userIdFromMe = require('../../lib/express/user-id-from-me');
-const internalUser = require('../../internal/user');
-const apiValidator = require('../../lib/validator/api');
-
-let router = express.Router({
- caseSensitive: true,
- strict: true,
- mergeParams: true
-});
-
-/**
- * /api/users
- */
-router
- .route('/')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * GET /api/users
- *
- * Retrieve all users
- */
- .get((req, res, next) => {
- validator({
- additionalProperties: false,
- properties: {
- expand: {
- $ref: 'definitions#/definitions/expand'
- },
- query: {
- $ref: 'definitions#/definitions/query'
- }
- }
- }, {
- expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
- query: (typeof req.query.query === 'string' ? req.query.query : null)
- })
- .then((data) => {
- return internalUser.getAll(res.locals.access, data.expand, data.query);
- })
- .then((users) => {
- res.status(200)
- .send(users);
- })
- .catch(next);
- })
-
- /**
- * POST /api/users
- *
- * Create a new User
- */
- .post((req, res, next) => {
- apiValidator({$ref: 'endpoints/users#/links/1/schema'}, req.body)
- .then((payload) => {
- return internalUser.create(res.locals.access, payload);
- })
- .then((result) => {
- res.status(201)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Specific user
- *
- * /api/users/123
- */
-router
- .route('/:user_id')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
- .all(userIdFromMe)
-
- /**
- * GET /users/123 or /users/me
- *
- * Retrieve a specific user
- */
- .get((req, res, next) => {
- validator({
- required: ['user_id'],
- additionalProperties: false,
- properties: {
- user_id: {
- $ref: 'definitions#/definitions/id'
- },
- expand: {
- $ref: 'definitions#/definitions/expand'
- }
- }
- }, {
- user_id: req.params.user_id,
- expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
- })
- .then((data) => {
- return internalUser.get(res.locals.access, {
- id: data.user_id,
- expand: data.expand,
- omit: internalUser.getUserOmisionsByAccess(res.locals.access, data.user_id)
- });
- })
- .then((user) => {
- res.status(200)
- .send(user);
- })
- .catch(next);
- })
-
- /**
- * PUT /api/users/123
- *
- * Update and existing user
- */
- .put((req, res, next) => {
- apiValidator({$ref: 'endpoints/users#/links/2/schema'}, req.body)
- .then((payload) => {
- payload.id = req.params.user_id;
- return internalUser.update(res.locals.access, payload);
- })
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- })
-
- /**
- * DELETE /api/users/123
- *
- * Update and existing user
- */
- .delete((req, res, next) => {
- internalUser.delete(res.locals.access, {id: req.params.user_id})
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Specific user auth
- *
- * /api/users/123/auth
- */
-router
- .route('/:user_id/auth')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
- .all(userIdFromMe)
-
- /**
- * PUT /api/users/123/auth
- *
- * Update password for a user
- */
- .put((req, res, next) => {
- apiValidator({$ref: 'endpoints/users#/links/4/schema'}, req.body)
- .then((payload) => {
- payload.id = req.params.user_id;
- return internalUser.setPassword(res.locals.access, payload);
- })
- .then((result) => {
- res.status(201)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Specific user permissions
- *
- * /api/users/123/permissions
- */
-router
- .route('/:user_id/permissions')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
- .all(userIdFromMe)
-
- /**
- * PUT /api/users/123/permissions
- *
- * Set some or all permissions for a user
- */
- .put((req, res, next) => {
- apiValidator({$ref: 'endpoints/users#/links/5/schema'}, req.body)
- .then((payload) => {
- payload.id = req.params.user_id;
- return internalUser.setPermissions(res.locals.access, payload);
- })
- .then((result) => {
- res.status(201)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Specific user login as
- *
- * /api/users/123/login
- */
-router
- .route('/:user_id/login')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * POST /api/users/123/login
- *
- * Log in as a user
- */
- .post((req, res, next) => {
- internalUser.loginAs(res.locals.access, {id: parseInt(req.params.user_id, 10)})
- .then((result) => {
- res.status(201)
- .send(result);
- })
- .catch(next);
- });
-
-module.exports = router;
diff --git a/backend/schema/definitions.json b/backend/schema/definitions.json
deleted file mode 100644
index 4b4f3405..00000000
--- a/backend/schema/definitions.json
+++ /dev/null
@@ -1,240 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-07/schema#",
- "$id": "definitions",
- "definitions": {
- "id": {
- "description": "Unique identifier",
- "example": 123456,
- "readOnly": true,
- "type": "integer",
- "minimum": 1
- },
- "setting_id": {
- "description": "Unique identifier for a Setting",
- "example": "default-site",
- "readOnly": true,
- "type": "string",
- "minLength": 2
- },
- "token": {
- "type": "string",
- "minLength": 10
- },
- "expand": {
- "anyOf": [
- {
- "type": "null"
- },
- {
- "type": "array",
- "minItems": 1,
- "items": {
- "type": "string"
- }
- }
- ]
- },
- "sort": {
- "type": "array",
- "minItems": 1,
- "items": {
- "type": "object",
- "required": [
- "field",
- "dir"
- ],
- "additionalProperties": false,
- "properties": {
- "field": {
- "type": "string"
- },
- "dir": {
- "type": "string",
- "pattern": "^(asc|desc)$"
- }
- }
- }
- },
- "query": {
- "anyOf": [
- {
- "type": "null"
- },
- {
- "type": "string",
- "minLength": 1,
- "maxLength": 255
- }
- ]
- },
- "criteria": {
- "anyOf": [
- {
- "type": "null"
- },
- {
- "type": "object"
- }
- ]
- },
- "fields": {
- "anyOf": [
- {
- "type": "null"
- },
- {
- "type": "array",
- "minItems": 1,
- "items": {
- "type": "string"
- }
- }
- ]
- },
- "omit": {
- "anyOf": [
- {
- "type": "null"
- },
- {
- "type": "array",
- "minItems": 1,
- "items": {
- "type": "string"
- }
- }
- ]
- },
- "created_on": {
- "description": "Date and time of creation",
- "format": "date-time",
- "readOnly": true,
- "type": "string"
- },
- "modified_on": {
- "description": "Date and time of last update",
- "format": "date-time",
- "readOnly": true,
- "type": "string"
- },
- "user_id": {
- "description": "User ID",
- "example": 1234,
- "type": "integer",
- "minimum": 1
- },
- "certificate_id": {
- "description": "Certificate ID",
- "example": 1234,
- "anyOf": [
- {
- "type": "integer",
- "minimum": 0
- },
- {
- "type": "string",
- "pattern": "^new$"
- }
- ]
- },
- "access_list_id": {
- "description": "Access List ID",
- "example": 1234,
- "type": "integer",
- "minimum": 0
- },
- "name": {
- "type": "string",
- "minLength": 1,
- "maxLength": 255
- },
- "email": {
- "description": "Email Address",
- "example": "john@example.com",
- "format": "email",
- "type": "string",
- "minLength": 6,
- "maxLength": 100
- },
- "password": {
- "description": "Password",
- "type": "string",
- "minLength": 8,
- "maxLength": 255
- },
- "domain_name": {
- "description": "Domain Name",
- "example": "jc21.com",
- "type": "string",
- "pattern": "^(?:[^.*]+\\.?)+[^.]$"
- },
- "domain_names": {
- "description": "Domain Names separated by a comma",
- "example": "*.jc21.com,blog.jc21.com",
- "type": "array",
- "maxItems": 15,
- "uniqueItems": true,
- "items": {
- "type": "string",
- "pattern": "^(?:\\*\\.)?(?:[^.*]+\\.?)+[^.]$"
- }
- },
- "http_code": {
- "description": "Redirect HTTP Status Code",
- "example": 302,
- "type": "integer",
- "minimum": 300,
- "maximum": 308
- },
- "scheme": {
- "description": "RFC Protocol",
- "example": "HTTPS or $scheme",
- "type": "string",
- "minLength": 4
- },
- "enabled": {
- "description": "Is Enabled",
- "example": true,
- "type": "boolean"
- },
- "ssl_enabled": {
- "description": "Is SSL Enabled",
- "example": true,
- "type": "boolean"
- },
- "ssl_forced": {
- "description": "Is SSL Forced",
- "example": false,
- "type": "boolean"
- },
- "hsts_enabled": {
- "description": "Is HSTS Enabled",
- "example": false,
- "type": "boolean"
- },
- "hsts_subdomains": {
- "description": "Is HSTS applicable to all subdomains",
- "example": false,
- "type": "boolean"
- },
- "ssl_provider": {
- "type": "string",
- "pattern": "^(letsencrypt|other)$"
- },
- "http2_support": {
- "description": "HTTP2 Protocol Support",
- "example": false,
- "type": "boolean"
- },
- "block_exploits": {
- "description": "Should we block common exploits",
- "example": true,
- "type": "boolean"
- },
- "caching_enabled": {
- "description": "Should we cache assets",
- "example": true,
- "type": "boolean"
- }
- }
-}
diff --git a/backend/schema/endpoints/access-lists.json b/backend/schema/endpoints/access-lists.json
deleted file mode 100644
index 404e3237..00000000
--- a/backend/schema/endpoints/access-lists.json
+++ /dev/null
@@ -1,236 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-07/schema#",
- "$id": "endpoints/access-lists",
- "title": "Access Lists",
- "description": "Endpoints relating to Access Lists",
- "stability": "stable",
- "type": "object",
- "definitions": {
- "id": {
- "$ref": "../definitions.json#/definitions/id"
- },
- "created_on": {
- "$ref": "../definitions.json#/definitions/created_on"
- },
- "modified_on": {
- "$ref": "../definitions.json#/definitions/modified_on"
- },
- "name": {
- "type": "string",
- "description": "Name of the Access List"
- },
- "directive": {
- "type": "string",
- "enum": ["allow", "deny"]
- },
- "address": {
- "oneOf": [
- {
- "type": "string",
- "pattern": "^([0-9]{1,3}\\.){3}[0-9]{1,3}(/([0-9]|[1-2][0-9]|3[0-2]))?$"
- },
- {
- "type": "string",
- "pattern": "^s*((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3}))|:)))(%.+)?s*(/([0-9]|[1-9][0-9]|1[0-1][0-9]|12[0-8]))?$"
- },
- {
- "type": "string",
- "pattern": "^all$"
- }
- ]
- },
- "satisfy_any": {
- "type": "boolean"
- },
- "pass_auth": {
- "type": "boolean"
- },
- "meta": {
- "type": "object"
- }
- },
- "properties": {
- "id": {
- "$ref": "#/definitions/id"
- },
- "created_on": {
- "$ref": "#/definitions/created_on"
- },
- "modified_on": {
- "$ref": "#/definitions/modified_on"
- },
- "name": {
- "$ref": "#/definitions/name"
- },
- "meta": {
- "$ref": "#/definitions/meta"
- }
- },
- "links": [
- {
- "title": "List",
- "description": "Returns a list of Access Lists",
- "href": "/nginx/access-lists",
- "access": "private",
- "method": "GET",
- "rel": "self",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "array",
- "items": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Create",
- "description": "Creates a new Access List",
- "href": "/nginx/access-list",
- "access": "private",
- "method": "POST",
- "rel": "create",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "schema": {
- "type": "object",
- "additionalProperties": false,
- "required": ["name"],
- "properties": {
- "name": {
- "$ref": "#/definitions/name"
- },
- "satisfy_any": {
- "$ref": "#/definitions/satisfy_any"
- },
- "pass_auth": {
- "$ref": "#/definitions/pass_auth"
- },
- "items": {
- "type": "array",
- "minItems": 0,
- "items": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "username": {
- "type": "string",
- "minLength": 1
- },
- "password": {
- "type": "string",
- "minLength": 1
- }
- }
- }
- },
- "clients": {
- "type": "array",
- "minItems": 0,
- "items": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "address": {
- "$ref": "#/definitions/address"
- },
- "directive": {
- "$ref": "#/definitions/directive"
- }
- }
- }
- },
- "meta": {
- "$ref": "#/definitions/meta"
- }
- }
- },
- "targetSchema": {
- "properties": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Update",
- "description": "Updates a existing Access List",
- "href": "/nginx/access-list/{definitions.identity.example}",
- "access": "private",
- "method": "PUT",
- "rel": "update",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "schema": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "name": {
- "$ref": "#/definitions/name"
- },
- "satisfy_any": {
- "$ref": "#/definitions/satisfy_any"
- },
- "pass_auth": {
- "$ref": "#/definitions/pass_auth"
- },
- "items": {
- "type": "array",
- "minItems": 0,
- "items": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "username": {
- "type": "string",
- "minLength": 1
- },
- "password": {
- "type": "string",
- "minLength": 0
- }
- }
- }
- },
- "clients": {
- "type": "array",
- "minItems": 0,
- "items": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "address": {
- "$ref": "#/definitions/address"
- },
- "directive": {
- "$ref": "#/definitions/directive"
- }
- }
- }
- }
- }
- },
- "targetSchema": {
- "properties": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Delete",
- "description": "Deletes a existing Access List",
- "href": "/nginx/access-list/{definitions.identity.example}",
- "access": "private",
- "method": "DELETE",
- "rel": "delete",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "boolean"
- }
- }
- ]
-}
diff --git a/backend/schema/endpoints/certificates.json b/backend/schema/endpoints/certificates.json
deleted file mode 100644
index 955ca75c..00000000
--- a/backend/schema/endpoints/certificates.json
+++ /dev/null
@@ -1,173 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-07/schema#",
- "$id": "endpoints/certificates",
- "title": "Certificates",
- "description": "Endpoints relating to Certificates",
- "stability": "stable",
- "type": "object",
- "definitions": {
- "id": {
- "$ref": "../definitions.json#/definitions/id"
- },
- "created_on": {
- "$ref": "../definitions.json#/definitions/created_on"
- },
- "modified_on": {
- "$ref": "../definitions.json#/definitions/modified_on"
- },
- "provider": {
- "$ref": "../definitions.json#/definitions/ssl_provider"
- },
- "nice_name": {
- "type": "string",
- "description": "Nice Name for the custom certificate"
- },
- "domain_names": {
- "$ref": "../definitions.json#/definitions/domain_names"
- },
- "expires_on": {
- "description": "Date and time of expiration",
- "format": "date-time",
- "readOnly": true,
- "type": "string"
- },
- "meta": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "letsencrypt_email": {
- "type": "string",
- "format": "email"
- },
- "letsencrypt_agree": {
- "type": "boolean"
- },
- "dns_challenge": {
- "type": "boolean"
- },
- "dns_provider": {
- "type": "string"
- },
- "dns_provider_credentials": {
- "type": "string"
- },
- "propagation_seconds": {
- "anyOf": [
- {
- "type": "integer",
- "minimum": 0
- }
- ]
-
- }
- }
- }
- },
- "properties": {
- "id": {
- "$ref": "#/definitions/id"
- },
- "created_on": {
- "$ref": "#/definitions/created_on"
- },
- "modified_on": {
- "$ref": "#/definitions/modified_on"
- },
- "provider": {
- "$ref": "#/definitions/provider"
- },
- "nice_name": {
- "$ref": "#/definitions/nice_name"
- },
- "domain_names": {
- "$ref": "#/definitions/domain_names"
- },
- "expires_on": {
- "$ref": "#/definitions/expires_on"
- },
- "meta": {
- "$ref": "#/definitions/meta"
- }
- },
- "links": [
- {
- "title": "List",
- "description": "Returns a list of Certificates",
- "href": "/nginx/certificates",
- "access": "private",
- "method": "GET",
- "rel": "self",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "array",
- "items": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Create",
- "description": "Creates a new Certificate",
- "href": "/nginx/certificates",
- "access": "private",
- "method": "POST",
- "rel": "create",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "schema": {
- "type": "object",
- "additionalProperties": false,
- "required": [
- "provider"
- ],
- "properties": {
- "provider": {
- "$ref": "#/definitions/provider"
- },
- "nice_name": {
- "$ref": "#/definitions/nice_name"
- },
- "domain_names": {
- "$ref": "#/definitions/domain_names"
- },
- "meta": {
- "$ref": "#/definitions/meta"
- }
- }
- },
- "targetSchema": {
- "properties": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Delete",
- "description": "Deletes a existing Certificate",
- "href": "/nginx/certificates/{definitions.identity.example}",
- "access": "private",
- "method": "DELETE",
- "rel": "delete",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "boolean"
- }
- },
- {
- "title": "Test HTTP Challenge",
- "description": "Tests whether the HTTP challenge should work",
- "href": "/nginx/certificates/{definitions.identity.example}/test-http",
- "access": "private",
- "method": "GET",
- "rel": "info",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- }
- }
- ]
-}
diff --git a/backend/schema/endpoints/dead-hosts.json b/backend/schema/endpoints/dead-hosts.json
deleted file mode 100644
index 0c73c3be..00000000
--- a/backend/schema/endpoints/dead-hosts.json
+++ /dev/null
@@ -1,240 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-07/schema#",
- "$id": "endpoints/dead-hosts",
- "title": "404 Hosts",
- "description": "Endpoints relating to 404 Hosts",
- "stability": "stable",
- "type": "object",
- "definitions": {
- "id": {
- "$ref": "../definitions.json#/definitions/id"
- },
- "created_on": {
- "$ref": "../definitions.json#/definitions/created_on"
- },
- "modified_on": {
- "$ref": "../definitions.json#/definitions/modified_on"
- },
- "domain_names": {
- "$ref": "../definitions.json#/definitions/domain_names"
- },
- "certificate_id": {
- "$ref": "../definitions.json#/definitions/certificate_id"
- },
- "ssl_forced": {
- "$ref": "../definitions.json#/definitions/ssl_forced"
- },
- "hsts_enabled": {
- "$ref": "../definitions.json#/definitions/hsts_enabled"
- },
- "hsts_subdomains": {
- "$ref": "../definitions.json#/definitions/hsts_subdomains"
- },
- "http2_support": {
- "$ref": "../definitions.json#/definitions/http2_support"
- },
- "advanced_config": {
- "type": "string"
- },
- "enabled": {
- "$ref": "../definitions.json#/definitions/enabled"
- },
- "meta": {
- "type": "object"
- }
- },
- "properties": {
- "id": {
- "$ref": "#/definitions/id"
- },
- "created_on": {
- "$ref": "#/definitions/created_on"
- },
- "modified_on": {
- "$ref": "#/definitions/modified_on"
- },
- "domain_names": {
- "$ref": "#/definitions/domain_names"
- },
- "certificate_id": {
- "$ref": "#/definitions/certificate_id"
- },
- "ssl_forced": {
- "$ref": "#/definitions/ssl_forced"
- },
- "hsts_enabled": {
- "$ref": "#/definitions/hsts_enabled"
- },
- "hsts_subdomains": {
- "$ref": "#/definitions/hsts_subdomains"
- },
- "http2_support": {
- "$ref": "#/definitions/http2_support"
- },
- "advanced_config": {
- "$ref": "#/definitions/advanced_config"
- },
- "enabled": {
- "$ref": "#/definitions/enabled"
- },
- "meta": {
- "$ref": "#/definitions/meta"
- }
- },
- "links": [
- {
- "title": "List",
- "description": "Returns a list of 404 Hosts",
- "href": "/nginx/dead-hosts",
- "access": "private",
- "method": "GET",
- "rel": "self",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "array",
- "items": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Create",
- "description": "Creates a new 404 Host",
- "href": "/nginx/dead-hosts",
- "access": "private",
- "method": "POST",
- "rel": "create",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "schema": {
- "type": "object",
- "additionalProperties": false,
- "required": [
- "domain_names"
- ],
- "properties": {
- "domain_names": {
- "$ref": "#/definitions/domain_names"
- },
- "certificate_id": {
- "$ref": "#/definitions/certificate_id"
- },
- "ssl_forced": {
- "$ref": "#/definitions/ssl_forced"
- },
- "hsts_enabled": {
- "$ref": "#/definitions/hsts_enabled"
- },
- "hsts_subdomains": {
- "$ref": "#/definitions/hsts_enabled"
- },
- "http2_support": {
- "$ref": "#/definitions/http2_support"
- },
- "advanced_config": {
- "$ref": "#/definitions/advanced_config"
- },
- "meta": {
- "$ref": "#/definitions/meta"
- }
- }
- },
- "targetSchema": {
- "properties": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Update",
- "description": "Updates a existing 404 Host",
- "href": "/nginx/dead-hosts/{definitions.identity.example}",
- "access": "private",
- "method": "PUT",
- "rel": "update",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "schema": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "domain_names": {
- "$ref": "#/definitions/domain_names"
- },
- "certificate_id": {
- "$ref": "#/definitions/certificate_id"
- },
- "ssl_forced": {
- "$ref": "#/definitions/ssl_forced"
- },
- "hsts_enabled": {
- "$ref": "#/definitions/hsts_enabled"
- },
- "hsts_subdomains": {
- "$ref": "#/definitions/hsts_enabled"
- },
- "http2_support": {
- "$ref": "#/definitions/http2_support"
- },
- "advanced_config": {
- "$ref": "#/definitions/advanced_config"
- },
- "meta": {
- "$ref": "#/definitions/meta"
- }
- }
- },
- "targetSchema": {
- "properties": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Delete",
- "description": "Deletes a existing 404 Host",
- "href": "/nginx/dead-hosts/{definitions.identity.example}",
- "access": "private",
- "method": "DELETE",
- "rel": "delete",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "boolean"
- }
- },
- {
- "title": "Enable",
- "description": "Enables a existing 404 Host",
- "href": "/nginx/dead-hosts/{definitions.identity.example}/enable",
- "access": "private",
- "method": "POST",
- "rel": "update",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "boolean"
- }
- },
- {
- "title": "Disable",
- "description": "Disables a existing 404 Host",
- "href": "/nginx/dead-hosts/{definitions.identity.example}/disable",
- "access": "private",
- "method": "POST",
- "rel": "update",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "boolean"
- }
- }
- ]
-}
diff --git a/backend/schema/endpoints/proxy-hosts.json b/backend/schema/endpoints/proxy-hosts.json
deleted file mode 100644
index 9a3fff2f..00000000
--- a/backend/schema/endpoints/proxy-hosts.json
+++ /dev/null
@@ -1,387 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-07/schema#",
- "$id": "endpoints/proxy-hosts",
- "title": "Proxy Hosts",
- "description": "Endpoints relating to Proxy Hosts",
- "stability": "stable",
- "type": "object",
- "definitions": {
- "id": {
- "$ref": "../definitions.json#/definitions/id"
- },
- "created_on": {
- "$ref": "../definitions.json#/definitions/created_on"
- },
- "modified_on": {
- "$ref": "../definitions.json#/definitions/modified_on"
- },
- "domain_names": {
- "$ref": "../definitions.json#/definitions/domain_names"
- },
- "forward_scheme": {
- "type": "string",
- "enum": ["http", "https"]
- },
- "forward_host": {
- "type": "string",
- "minLength": 1,
- "maxLength": 255
- },
- "forward_port": {
- "type": "integer",
- "minimum": 1,
- "maximum": 65535
- },
- "certificate_id": {
- "$ref": "../definitions.json#/definitions/certificate_id"
- },
- "ssl_forced": {
- "$ref": "../definitions.json#/definitions/ssl_forced"
- },
- "hsts_enabled": {
- "$ref": "../definitions.json#/definitions/hsts_enabled"
- },
- "hsts_subdomains": {
- "$ref": "../definitions.json#/definitions/hsts_subdomains"
- },
- "http2_support": {
- "$ref": "../definitions.json#/definitions/http2_support"
- },
- "block_exploits": {
- "$ref": "../definitions.json#/definitions/block_exploits"
- },
- "caching_enabled": {
- "$ref": "../definitions.json#/definitions/caching_enabled"
- },
- "allow_websocket_upgrade": {
- "description": "Allow Websocket Upgrade for all paths",
- "example": true,
- "type": "boolean"
- },
- "access_list_id": {
- "$ref": "../definitions.json#/definitions/access_list_id"
- },
- "advanced_config": {
- "type": "string"
- },
- "enabled": {
- "$ref": "../definitions.json#/definitions/enabled"
- },
- "meta": {
- "type": "object"
- },
- "locations": {
- "type": "array",
- "minItems": 0,
- "items": {
- "type": "object",
- "required": [
- "forward_scheme",
- "forward_host",
- "forward_port",
- "path"
- ],
- "additionalProperties": false,
- "properties": {
- "id": {
- "type": ["integer", "null"]
- },
- "path": {
- "type": "string",
- "minLength": 1
- },
- "forward_scheme": {
- "$ref": "#/definitions/forward_scheme"
- },
- "forward_host": {
- "$ref": "#/definitions/forward_host"
- },
- "forward_port": {
- "$ref": "#/definitions/forward_port"
- },
- "forward_path": {
- "type": "string"
- },
- "advanced_config": {
- "type": "string"
- }
- }
- }
- }
- },
- "properties": {
- "id": {
- "$ref": "#/definitions/id"
- },
- "created_on": {
- "$ref": "#/definitions/created_on"
- },
- "modified_on": {
- "$ref": "#/definitions/modified_on"
- },
- "domain_names": {
- "$ref": "#/definitions/domain_names"
- },
- "forward_scheme": {
- "$ref": "#/definitions/forward_scheme"
- },
- "forward_host": {
- "$ref": "#/definitions/forward_host"
- },
- "forward_port": {
- "$ref": "#/definitions/forward_port"
- },
- "certificate_id": {
- "$ref": "#/definitions/certificate_id"
- },
- "ssl_forced": {
- "$ref": "#/definitions/ssl_forced"
- },
- "hsts_enabled": {
- "$ref": "#/definitions/hsts_enabled"
- },
- "hsts_subdomains": {
- "$ref": "#/definitions/hsts_subdomains"
- },
- "http2_support": {
- "$ref": "#/definitions/http2_support"
- },
- "block_exploits": {
- "$ref": "#/definitions/block_exploits"
- },
- "caching_enabled": {
- "$ref": "#/definitions/caching_enabled"
- },
- "allow_websocket_upgrade": {
- "$ref": "#/definitions/allow_websocket_upgrade"
- },
- "access_list_id": {
- "$ref": "#/definitions/access_list_id"
- },
- "advanced_config": {
- "$ref": "#/definitions/advanced_config"
- },
- "enabled": {
- "$ref": "#/definitions/enabled"
- },
- "meta": {
- "$ref": "#/definitions/meta"
- },
- "locations": {
- "$ref": "#/definitions/locations"
- }
- },
- "links": [
- {
- "title": "List",
- "description": "Returns a list of Proxy Hosts",
- "href": "/nginx/proxy-hosts",
- "access": "private",
- "method": "GET",
- "rel": "self",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "array",
- "items": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Create",
- "description": "Creates a new Proxy Host",
- "href": "/nginx/proxy-hosts",
- "access": "private",
- "method": "POST",
- "rel": "create",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "schema": {
- "type": "object",
- "additionalProperties": false,
- "required": [
- "domain_names",
- "forward_scheme",
- "forward_host",
- "forward_port"
- ],
- "properties": {
- "domain_names": {
- "$ref": "#/definitions/domain_names"
- },
- "forward_scheme": {
- "$ref": "#/definitions/forward_scheme"
- },
- "forward_host": {
- "$ref": "#/definitions/forward_host"
- },
- "forward_port": {
- "$ref": "#/definitions/forward_port"
- },
- "certificate_id": {
- "$ref": "#/definitions/certificate_id"
- },
- "ssl_forced": {
- "$ref": "#/definitions/ssl_forced"
- },
- "hsts_enabled": {
- "$ref": "#/definitions/hsts_enabled"
- },
- "hsts_subdomains": {
- "$ref": "#/definitions/hsts_enabled"
- },
- "http2_support": {
- "$ref": "#/definitions/http2_support"
- },
- "block_exploits": {
- "$ref": "#/definitions/block_exploits"
- },
- "caching_enabled": {
- "$ref": "#/definitions/caching_enabled"
- },
- "allow_websocket_upgrade": {
- "$ref": "#/definitions/allow_websocket_upgrade"
- },
- "access_list_id": {
- "$ref": "#/definitions/access_list_id"
- },
- "advanced_config": {
- "$ref": "#/definitions/advanced_config"
- },
- "enabled": {
- "$ref": "#/definitions/enabled"
- },
- "meta": {
- "$ref": "#/definitions/meta"
- },
- "locations": {
- "$ref": "#/definitions/locations"
- }
- }
- },
- "targetSchema": {
- "properties": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Update",
- "description": "Updates a existing Proxy Host",
- "href": "/nginx/proxy-hosts/{definitions.identity.example}",
- "access": "private",
- "method": "PUT",
- "rel": "update",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "schema": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "domain_names": {
- "$ref": "#/definitions/domain_names"
- },
- "forward_scheme": {
- "$ref": "#/definitions/forward_scheme"
- },
- "forward_host": {
- "$ref": "#/definitions/forward_host"
- },
- "forward_port": {
- "$ref": "#/definitions/forward_port"
- },
- "certificate_id": {
- "$ref": "#/definitions/certificate_id"
- },
- "ssl_forced": {
- "$ref": "#/definitions/ssl_forced"
- },
- "hsts_enabled": {
- "$ref": "#/definitions/hsts_enabled"
- },
- "hsts_subdomains": {
- "$ref": "#/definitions/hsts_enabled"
- },
- "http2_support": {
- "$ref": "#/definitions/http2_support"
- },
- "block_exploits": {
- "$ref": "#/definitions/block_exploits"
- },
- "caching_enabled": {
- "$ref": "#/definitions/caching_enabled"
- },
- "allow_websocket_upgrade": {
- "$ref": "#/definitions/allow_websocket_upgrade"
- },
- "access_list_id": {
- "$ref": "#/definitions/access_list_id"
- },
- "advanced_config": {
- "$ref": "#/definitions/advanced_config"
- },
- "enabled": {
- "$ref": "#/definitions/enabled"
- },
- "meta": {
- "$ref": "#/definitions/meta"
- },
- "locations": {
- "$ref": "#/definitions/locations"
- }
- }
- },
- "targetSchema": {
- "properties": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Delete",
- "description": "Deletes a existing Proxy Host",
- "href": "/nginx/proxy-hosts/{definitions.identity.example}",
- "access": "private",
- "method": "DELETE",
- "rel": "delete",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "boolean"
- }
- },
- {
- "title": "Enable",
- "description": "Enables a existing Proxy Host",
- "href": "/nginx/proxy-hosts/{definitions.identity.example}/enable",
- "access": "private",
- "method": "POST",
- "rel": "update",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "boolean"
- }
- },
- {
- "title": "Disable",
- "description": "Disables a existing Proxy Host",
- "href": "/nginx/proxy-hosts/{definitions.identity.example}/disable",
- "access": "private",
- "method": "POST",
- "rel": "update",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "boolean"
- }
- }
- ]
-}
diff --git a/backend/schema/endpoints/redirection-hosts.json b/backend/schema/endpoints/redirection-hosts.json
deleted file mode 100644
index 14a46998..00000000
--- a/backend/schema/endpoints/redirection-hosts.json
+++ /dev/null
@@ -1,305 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-07/schema#",
- "$id": "endpoints/redirection-hosts",
- "title": "Redirection Hosts",
- "description": "Endpoints relating to Redirection Hosts",
- "stability": "stable",
- "type": "object",
- "definitions": {
- "id": {
- "$ref": "../definitions.json#/definitions/id"
- },
- "created_on": {
- "$ref": "../definitions.json#/definitions/created_on"
- },
- "modified_on": {
- "$ref": "../definitions.json#/definitions/modified_on"
- },
- "domain_names": {
- "$ref": "../definitions.json#/definitions/domain_names"
- },
- "forward_http_code": {
- "$ref": "../definitions.json#/definitions/http_code"
- },
- "forward_scheme": {
- "$ref": "../definitions.json#/definitions/scheme"
- },
- "forward_domain_name": {
- "$ref": "../definitions.json#/definitions/domain_name"
- },
- "preserve_path": {
- "description": "Should the path be preserved",
- "example": true,
- "type": "boolean"
- },
- "certificate_id": {
- "$ref": "../definitions.json#/definitions/certificate_id"
- },
- "ssl_forced": {
- "$ref": "../definitions.json#/definitions/ssl_forced"
- },
- "hsts_enabled": {
- "$ref": "../definitions.json#/definitions/hsts_enabled"
- },
- "hsts_subdomains": {
- "$ref": "../definitions.json#/definitions/hsts_subdomains"
- },
- "http2_support": {
- "$ref": "../definitions.json#/definitions/http2_support"
- },
- "block_exploits": {
- "$ref": "../definitions.json#/definitions/block_exploits"
- },
- "advanced_config": {
- "type": "string"
- },
- "enabled": {
- "$ref": "../definitions.json#/definitions/enabled"
- },
- "meta": {
- "type": "object"
- }
- },
- "properties": {
- "id": {
- "$ref": "#/definitions/id"
- },
- "created_on": {
- "$ref": "#/definitions/created_on"
- },
- "modified_on": {
- "$ref": "#/definitions/modified_on"
- },
- "domain_names": {
- "$ref": "#/definitions/domain_names"
- },
- "forward_http_code": {
- "$ref": "#/definitions/forward_http_code"
- },
- "forward_scheme": {
- "$ref": "#/definitions/forward_scheme"
- },
- "forward_domain_name": {
- "$ref": "#/definitions/forward_domain_name"
- },
- "preserve_path": {
- "$ref": "#/definitions/preserve_path"
- },
- "certificate_id": {
- "$ref": "#/definitions/certificate_id"
- },
- "ssl_forced": {
- "$ref": "#/definitions/ssl_forced"
- },
- "hsts_enabled": {
- "$ref": "#/definitions/hsts_enabled"
- },
- "hsts_subdomains": {
- "$ref": "#/definitions/hsts_subdomains"
- },
- "http2_support": {
- "$ref": "#/definitions/http2_support"
- },
- "block_exploits": {
- "$ref": "#/definitions/block_exploits"
- },
- "advanced_config": {
- "$ref": "#/definitions/advanced_config"
- },
- "enabled": {
- "$ref": "#/definitions/enabled"
- },
- "meta": {
- "$ref": "#/definitions/meta"
- }
- },
- "links": [
- {
- "title": "List",
- "description": "Returns a list of Redirection Hosts",
- "href": "/nginx/redirection-hosts",
- "access": "private",
- "method": "GET",
- "rel": "self",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "array",
- "items": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Create",
- "description": "Creates a new Redirection Host",
- "href": "/nginx/redirection-hosts",
- "access": "private",
- "method": "POST",
- "rel": "create",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "schema": {
- "type": "object",
- "additionalProperties": false,
- "required": [
- "domain_names",
- "forward_scheme",
- "forward_http_code",
- "forward_domain_name"
- ],
- "properties": {
- "domain_names": {
- "$ref": "#/definitions/domain_names"
- },
- "forward_http_code": {
- "$ref": "#/definitions/forward_http_code"
- },
- "forward_scheme": {
- "$ref": "#/definitions/forward_scheme"
- },
- "forward_domain_name": {
- "$ref": "#/definitions/forward_domain_name"
- },
- "preserve_path": {
- "$ref": "#/definitions/preserve_path"
- },
- "certificate_id": {
- "$ref": "#/definitions/certificate_id"
- },
- "ssl_forced": {
- "$ref": "#/definitions/ssl_forced"
- },
- "hsts_enabled": {
- "$ref": "#/definitions/hsts_enabled"
- },
- "hsts_subdomains": {
- "$ref": "#/definitions/hsts_enabled"
- },
- "http2_support": {
- "$ref": "#/definitions/http2_support"
- },
- "block_exploits": {
- "$ref": "#/definitions/block_exploits"
- },
- "advanced_config": {
- "$ref": "#/definitions/advanced_config"
- },
- "meta": {
- "$ref": "#/definitions/meta"
- }
- }
- },
- "targetSchema": {
- "properties": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Update",
- "description": "Updates a existing Redirection Host",
- "href": "/nginx/redirection-hosts/{definitions.identity.example}",
- "access": "private",
- "method": "PUT",
- "rel": "update",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "schema": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "domain_names": {
- "$ref": "#/definitions/domain_names"
- },
- "forward_http_code": {
- "$ref": "#/definitions/forward_http_code"
- },
- "forward_scheme": {
- "$ref": "#/definitions/forward_scheme"
- },
- "forward_domain_name": {
- "$ref": "#/definitions/forward_domain_name"
- },
- "preserve_path": {
- "$ref": "#/definitions/preserve_path"
- },
- "certificate_id": {
- "$ref": "#/definitions/certificate_id"
- },
- "ssl_forced": {
- "$ref": "#/definitions/ssl_forced"
- },
- "hsts_enabled": {
- "$ref": "#/definitions/hsts_enabled"
- },
- "hsts_subdomains": {
- "$ref": "#/definitions/hsts_enabled"
- },
- "http2_support": {
- "$ref": "#/definitions/http2_support"
- },
- "block_exploits": {
- "$ref": "#/definitions/block_exploits"
- },
- "advanced_config": {
- "$ref": "#/definitions/advanced_config"
- },
- "meta": {
- "$ref": "#/definitions/meta"
- }
- }
- },
- "targetSchema": {
- "properties": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Delete",
- "description": "Deletes a existing Redirection Host",
- "href": "/nginx/redirection-hosts/{definitions.identity.example}",
- "access": "private",
- "method": "DELETE",
- "rel": "delete",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "boolean"
- }
- },
- {
- "title": "Enable",
- "description": "Enables a existing Redirection Host",
- "href": "/nginx/redirection-hosts/{definitions.identity.example}/enable",
- "access": "private",
- "method": "POST",
- "rel": "update",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "boolean"
- }
- },
- {
- "title": "Disable",
- "description": "Disables a existing Redirection Host",
- "href": "/nginx/redirection-hosts/{definitions.identity.example}/disable",
- "access": "private",
- "method": "POST",
- "rel": "update",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "boolean"
- }
- }
- ]
-}
diff --git a/backend/schema/endpoints/settings.json b/backend/schema/endpoints/settings.json
deleted file mode 100644
index 29e2865a..00000000
--- a/backend/schema/endpoints/settings.json
+++ /dev/null
@@ -1,99 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-07/schema#",
- "$id": "endpoints/settings",
- "title": "Settings",
- "description": "Endpoints relating to Settings",
- "stability": "stable",
- "type": "object",
- "definitions": {
- "id": {
- "$ref": "../definitions.json#/definitions/setting_id"
- },
- "name": {
- "description": "Name",
- "example": "Default Site",
- "type": "string",
- "minLength": 2,
- "maxLength": 100
- },
- "description": {
- "description": "Description",
- "example": "Default Site",
- "type": "string",
- "minLength": 2,
- "maxLength": 255
- },
- "value": {
- "description": "Value",
- "example": "404",
- "type": "string",
- "maxLength": 255
- },
- "meta": {
- "type": "object"
- }
- },
- "links": [
- {
- "title": "List",
- "description": "Returns a list of Settings",
- "href": "/settings",
- "access": "private",
- "method": "GET",
- "rel": "self",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "array",
- "items": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Update",
- "description": "Updates a existing Setting",
- "href": "/settings/{definitions.identity.example}",
- "access": "private",
- "method": "PUT",
- "rel": "update",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "schema": {
- "type": "object",
- "properties": {
- "value": {
- "$ref": "#/definitions/value"
- },
- "meta": {
- "$ref": "#/definitions/meta"
- }
- }
- },
- "targetSchema": {
- "properties": {
- "$ref": "#/properties"
- }
- }
- }
- ],
- "properties": {
- "id": {
- "$ref": "#/definitions/id"
- },
- "name": {
- "$ref": "#/definitions/description"
- },
- "description": {
- "$ref": "#/definitions/description"
- },
- "value": {
- "$ref": "#/definitions/value"
- },
- "meta": {
- "$ref": "#/definitions/meta"
- }
- }
-}
diff --git a/backend/schema/endpoints/streams.json b/backend/schema/endpoints/streams.json
deleted file mode 100644
index 159c8036..00000000
--- a/backend/schema/endpoints/streams.json
+++ /dev/null
@@ -1,234 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-07/schema#",
- "$id": "endpoints/streams",
- "title": "Streams",
- "description": "Endpoints relating to Streams",
- "stability": "stable",
- "type": "object",
- "definitions": {
- "id": {
- "$ref": "../definitions.json#/definitions/id"
- },
- "created_on": {
- "$ref": "../definitions.json#/definitions/created_on"
- },
- "modified_on": {
- "$ref": "../definitions.json#/definitions/modified_on"
- },
- "incoming_port": {
- "type": "integer",
- "minimum": 1,
- "maximum": 65535
- },
- "forwarding_host": {
- "anyOf": [
- {
- "$ref": "../definitions.json#/definitions/domain_name"
- },
- {
- "type": "string",
- "format": "ipv4"
- },
- {
- "type": "string",
- "format": "ipv6"
- }
- ]
- },
- "forwarding_port": {
- "type": "integer",
- "minimum": 1,
- "maximum": 65535
- },
- "tcp_forwarding": {
- "type": "boolean"
- },
- "udp_forwarding": {
- "type": "boolean"
- },
- "enabled": {
- "$ref": "../definitions.json#/definitions/enabled"
- },
- "meta": {
- "type": "object"
- }
- },
- "properties": {
- "id": {
- "$ref": "#/definitions/id"
- },
- "created_on": {
- "$ref": "#/definitions/created_on"
- },
- "modified_on": {
- "$ref": "#/definitions/modified_on"
- },
- "incoming_port": {
- "$ref": "#/definitions/incoming_port"
- },
- "forwarding_host": {
- "$ref": "#/definitions/forwarding_host"
- },
- "forwarding_port": {
- "$ref": "#/definitions/forwarding_port"
- },
- "tcp_forwarding": {
- "$ref": "#/definitions/tcp_forwarding"
- },
- "udp_forwarding": {
- "$ref": "#/definitions/udp_forwarding"
- },
- "enabled": {
- "$ref": "#/definitions/enabled"
- },
- "meta": {
- "$ref": "#/definitions/meta"
- }
- },
- "links": [
- {
- "title": "List",
- "description": "Returns a list of Steams",
- "href": "/nginx/streams",
- "access": "private",
- "method": "GET",
- "rel": "self",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "array",
- "items": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Create",
- "description": "Creates a new Stream",
- "href": "/nginx/streams",
- "access": "private",
- "method": "POST",
- "rel": "create",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "schema": {
- "type": "object",
- "additionalProperties": false,
- "required": [
- "incoming_port",
- "forwarding_host",
- "forwarding_port"
- ],
- "properties": {
- "incoming_port": {
- "$ref": "#/definitions/incoming_port"
- },
- "forwarding_host": {
- "$ref": "#/definitions/forwarding_host"
- },
- "forwarding_port": {
- "$ref": "#/definitions/forwarding_port"
- },
- "tcp_forwarding": {
- "$ref": "#/definitions/tcp_forwarding"
- },
- "udp_forwarding": {
- "$ref": "#/definitions/udp_forwarding"
- },
- "meta": {
- "$ref": "#/definitions/meta"
- }
- }
- },
- "targetSchema": {
- "properties": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Update",
- "description": "Updates a existing Stream",
- "href": "/nginx/streams/{definitions.identity.example}",
- "access": "private",
- "method": "PUT",
- "rel": "update",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "schema": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "incoming_port": {
- "$ref": "#/definitions/incoming_port"
- },
- "forwarding_host": {
- "$ref": "#/definitions/forwarding_host"
- },
- "forwarding_port": {
- "$ref": "#/definitions/forwarding_port"
- },
- "tcp_forwarding": {
- "$ref": "#/definitions/tcp_forwarding"
- },
- "udp_forwarding": {
- "$ref": "#/definitions/udp_forwarding"
- },
- "meta": {
- "$ref": "#/definitions/meta"
- }
- }
- },
- "targetSchema": {
- "properties": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Delete",
- "description": "Deletes a existing Stream",
- "href": "/nginx/streams/{definitions.identity.example}",
- "access": "private",
- "method": "DELETE",
- "rel": "delete",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "boolean"
- }
- },
- {
- "title": "Enable",
- "description": "Enables a existing Stream",
- "href": "/nginx/streams/{definitions.identity.example}/enable",
- "access": "private",
- "method": "POST",
- "rel": "update",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "boolean"
- }
- },
- {
- "title": "Disable",
- "description": "Disables a existing Stream",
- "href": "/nginx/streams/{definitions.identity.example}/disable",
- "access": "private",
- "method": "POST",
- "rel": "update",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "boolean"
- }
- }
- ]
-}
diff --git a/backend/schema/endpoints/tokens.json b/backend/schema/endpoints/tokens.json
deleted file mode 100644
index 920af63f..00000000
--- a/backend/schema/endpoints/tokens.json
+++ /dev/null
@@ -1,100 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-07/schema#",
- "$id": "endpoints/tokens",
- "title": "Token",
- "description": "Tokens are required to authenticate against the API",
- "stability": "stable",
- "type": "object",
- "definitions": {
- "identity": {
- "description": "Email Address or other 3rd party providers identifier",
- "example": "john@example.com",
- "type": "string"
- },
- "secret": {
- "description": "A password or key",
- "example": "correct horse battery staple",
- "type": "string"
- },
- "token": {
- "description": "JWT",
- "example": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.e30.O_frfYM8RzmRsUNigHtu0_jZ_utSejyr1axMGa8rlsk",
- "type": "string"
- },
- "expires": {
- "description": "Token expiry time",
- "format": "date-time",
- "type": "string"
- },
- "scope": {
- "description": "Scope of the Token, defaults to 'user'",
- "example": "user",
- "type": "string"
- }
- },
- "links": [
- {
- "title": "Create",
- "description": "Creates a new token.",
- "href": "/tokens",
- "access": "public",
- "method": "POST",
- "rel": "create",
- "schema": {
- "type": "object",
- "required": [
- "identity",
- "secret"
- ],
- "properties": {
- "identity": {
- "$ref": "#/definitions/identity"
- },
- "secret": {
- "$ref": "#/definitions/secret"
- },
- "scope": {
- "$ref": "#/definitions/scope"
- }
- }
- },
- "targetSchema": {
- "type": "object",
- "properties": {
- "token": {
- "$ref": "#/definitions/token"
- },
- "expires": {
- "$ref": "#/definitions/expires"
- }
- }
- }
- },
- {
- "title": "Refresh",
- "description": "Returns a new token.",
- "href": "/tokens",
- "access": "private",
- "method": "GET",
- "rel": "self",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "schema": {},
- "targetSchema": {
- "type": "object",
- "properties": {
- "token": {
- "$ref": "#/definitions/token"
- },
- "expires": {
- "$ref": "#/definitions/expires"
- },
- "scope": {
- "$ref": "#/definitions/scope"
- }
- }
- }
- }
- ]
-}
diff --git a/backend/schema/endpoints/users.json b/backend/schema/endpoints/users.json
deleted file mode 100644
index 42f44eac..00000000
--- a/backend/schema/endpoints/users.json
+++ /dev/null
@@ -1,287 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-07/schema#",
- "$id": "endpoints/users",
- "title": "Users",
- "description": "Endpoints relating to Users",
- "stability": "stable",
- "type": "object",
- "definitions": {
- "id": {
- "$ref": "../definitions.json#/definitions/id"
- },
- "created_on": {
- "$ref": "../definitions.json#/definitions/created_on"
- },
- "modified_on": {
- "$ref": "../definitions.json#/definitions/modified_on"
- },
- "name": {
- "description": "Name",
- "example": "Jamie Curnow",
- "type": "string",
- "minLength": 2,
- "maxLength": 100
- },
- "nickname": {
- "description": "Nickname",
- "example": "Jamie",
- "type": "string",
- "minLength": 2,
- "maxLength": 50
- },
- "email": {
- "$ref": "../definitions.json#/definitions/email"
- },
- "avatar": {
- "description": "Avatar",
- "example": "http://somewhere.jpg",
- "type": "string",
- "minLength": 2,
- "maxLength": 150,
- "readOnly": true
- },
- "roles": {
- "description": "Roles",
- "example": [
- "admin"
- ],
- "type": "array"
- },
- "is_disabled": {
- "description": "Is Disabled",
- "example": false,
- "type": "boolean"
- }
- },
- "links": [
- {
- "title": "List",
- "description": "Returns a list of Users",
- "href": "/users",
- "access": "private",
- "method": "GET",
- "rel": "self",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "array",
- "items": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Create",
- "description": "Creates a new User",
- "href": "/users",
- "access": "private",
- "method": "POST",
- "rel": "create",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "schema": {
- "type": "object",
- "required": [
- "name",
- "nickname",
- "email"
- ],
- "properties": {
- "name": {
- "$ref": "#/definitions/name"
- },
- "nickname": {
- "$ref": "#/definitions/nickname"
- },
- "email": {
- "$ref": "#/definitions/email"
- },
- "roles": {
- "$ref": "#/definitions/roles"
- },
- "is_disabled": {
- "$ref": "#/definitions/is_disabled"
- },
- "auth": {
- "type": "object",
- "description": "Auth Credentials",
- "example": {
- "type": "password",
- "secret": "bigredhorsebanana"
- }
- }
- }
- },
- "targetSchema": {
- "properties": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Update",
- "description": "Updates a existing User",
- "href": "/users/{definitions.identity.example}",
- "access": "private",
- "method": "PUT",
- "rel": "update",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "schema": {
- "type": "object",
- "properties": {
- "name": {
- "$ref": "#/definitions/name"
- },
- "nickname": {
- "$ref": "#/definitions/nickname"
- },
- "email": {
- "$ref": "#/definitions/email"
- },
- "roles": {
- "$ref": "#/definitions/roles"
- },
- "is_disabled": {
- "$ref": "#/definitions/is_disabled"
- }
- }
- },
- "targetSchema": {
- "properties": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Delete",
- "description": "Deletes a existing User",
- "href": "/users/{definitions.identity.example}",
- "access": "private",
- "method": "DELETE",
- "rel": "delete",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "boolean"
- }
- },
- {
- "title": "Set Password",
- "description": "Sets a password for an existing User",
- "href": "/users/{definitions.identity.example}/auth",
- "access": "private",
- "method": "PUT",
- "rel": "update",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "schema": {
- "type": "object",
- "required": [
- "type",
- "secret"
- ],
- "properties": {
- "type": {
- "type": "string",
- "pattern": "^password$"
- },
- "current": {
- "type": "string",
- "minLength": 1,
- "maxLength": 64
- },
- "secret": {
- "type": "string",
- "minLength": 8,
- "maxLength": 64
- }
- }
- },
- "targetSchema": {
- "type": "boolean"
- }
- },
- {
- "title": "Set Permissions",
- "description": "Sets Permissions for a User",
- "href": "/users/{definitions.identity.example}/permissions",
- "access": "private",
- "method": "PUT",
- "rel": "update",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "schema": {
- "type": "object",
- "properties": {
- "visibility": {
- "type": "string",
- "pattern": "^(all|user)$"
- },
- "access_lists": {
- "type": "string",
- "pattern": "^(hidden|view|manage)$"
- },
- "dead_hosts": {
- "type": "string",
- "pattern": "^(hidden|view|manage)$"
- },
- "proxy_hosts": {
- "type": "string",
- "pattern": "^(hidden|view|manage)$"
- },
- "redirection_hosts": {
- "type": "string",
- "pattern": "^(hidden|view|manage)$"
- },
- "streams": {
- "type": "string",
- "pattern": "^(hidden|view|manage)$"
- },
- "certificates": {
- "type": "string",
- "pattern": "^(hidden|view|manage)$"
- }
- }
- },
- "targetSchema": {
- "type": "boolean"
- }
- }
- ],
- "properties": {
- "id": {
- "$ref": "#/definitions/id"
- },
- "created_on": {
- "$ref": "#/definitions/created_on"
- },
- "modified_on": {
- "$ref": "#/definitions/modified_on"
- },
- "name": {
- "$ref": "#/definitions/name"
- },
- "nickname": {
- "$ref": "#/definitions/nickname"
- },
- "email": {
- "$ref": "#/definitions/email"
- },
- "avatar": {
- "$ref": "#/definitions/avatar"
- },
- "roles": {
- "$ref": "#/definitions/roles"
- },
- "is_disabled": {
- "$ref": "#/definitions/is_disabled"
- }
- }
-}
diff --git a/backend/schema/examples.json b/backend/schema/examples.json
deleted file mode 100644
index 37bc6c4d..00000000
--- a/backend/schema/examples.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-07/schema#",
- "$id": "examples",
- "type": "object",
- "definitions": {
- "name": {
- "description": "Name",
- "example": "John Smith",
- "type": "string",
- "minLength": 1,
- "maxLength": 255
- },
- "auth_header": {
- "Authorization": "Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.e30.O_frfYM8RzmRsUNigHtu0_jZ_utSejyr1axMGa8rlsk",
- "X-API-Version": "next"
- },
- "token": {
- "type": "string",
- "description": "JWT",
- "example": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.e30.O_frfYM8RzmRsUNigHtu0_jZ_utSejyr1axMGa8rlsk"
- }
- }
-}
diff --git a/backend/schema/index.json b/backend/schema/index.json
deleted file mode 100644
index 6e7d1c8a..00000000
--- a/backend/schema/index.json
+++ /dev/null
@@ -1,42 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-07/schema#",
- "$id": "root",
- "title": "Nginx Proxy Manager REST API",
- "description": "This is the Nginx Proxy Manager REST API",
- "version": "2.0.0",
- "links": [
- {
- "href": "http://npm.example.com/api",
- "rel": "self"
- }
- ],
- "properties": {
- "tokens": {
- "$ref": "endpoints/tokens.json"
- },
- "users": {
- "$ref": "endpoints/users.json"
- },
- "proxy-hosts": {
- "$ref": "endpoints/proxy-hosts.json"
- },
- "redirection-hosts": {
- "$ref": "endpoints/redirection-hosts.json"
- },
- "dead-hosts": {
- "$ref": "endpoints/dead-hosts.json"
- },
- "streams": {
- "$ref": "endpoints/streams.json"
- },
- "certificates": {
- "$ref": "endpoints/certificates.json"
- },
- "access-lists": {
- "$ref": "endpoints/access-lists.json"
- },
- "settings": {
- "$ref": "endpoints/settings.json"
- }
- }
-}
diff --git a/backend/scripts/lint.sh b/backend/scripts/lint.sh
new file mode 100755
index 00000000..bf6bf4c8
--- /dev/null
+++ b/backend/scripts/lint.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+
+BLUE='\E[1;34m'
+YELLOW='\E[1;33m'
+RESET='\E[0m'
+RESULT=0
+
+# go files: incomplete comment check
+INCOMPLETE_COMMENTS=$(find . -iname "*.go*" | grep -v " " | xargs grep --colour -H -n -E "^\s*\/\/\s*[A-Z]\w+ \.{3}" 2>/dev/null)
+if [[ -n "$INCOMPLETE_COMMENTS" ]]; then
+ echo -e "${BLUE}❯ ${YELLOW}WARN: Please fix incomplete exported comments:${RESET}"
+ echo -e "${RED}${INCOMPLETE_COMMENTS}${RESET}"
+ echo
+ # RESULT=1
+fi
+
+if ! golangci-lint run -E goimports -E maligned ./...; then
+ exit 1
+fi
+
+exit "$RESULT"
diff --git a/backend/scripts/test.sh b/backend/scripts/test.sh
new file mode 100755
index 00000000..92a0a02f
--- /dev/null
+++ b/backend/scripts/test.sh
@@ -0,0 +1,5 @@
+#!/bin/bash -e
+
+export RICHGO_FORCE_COLOR=1
+
+richgo test -bench=. -cover -v ./internal/...
diff --git a/backend/setup.js b/backend/setup.js
deleted file mode 100644
index 47fd1e7b..00000000
--- a/backend/setup.js
+++ /dev/null
@@ -1,233 +0,0 @@
-const fs = require('fs');
-const NodeRSA = require('node-rsa');
-const config = require('config');
-const logger = require('./logger').setup;
-const certificateModel = require('./models/certificate');
-const userModel = require('./models/user');
-const userPermissionModel = require('./models/user_permission');
-const utils = require('./lib/utils');
-const authModel = require('./models/auth');
-const settingModel = require('./models/setting');
-const dns_plugins = require('./global/certbot-dns-plugins');
-const debug_mode = process.env.NODE_ENV !== 'production' || !!process.env.DEBUG;
-
-/**
- * Creates a new JWT RSA Keypair if not alread set on the config
- *
- * @returns {Promise}
- */
-const setupJwt = () => {
- return new Promise((resolve, reject) => {
- // Now go and check if the jwt gpg keys have been created and if not, create them
- if (!config.has('jwt') || !config.has('jwt.key') || !config.has('jwt.pub')) {
- logger.info('Creating a new JWT key pair...');
-
- // jwt keys are not configured properly
- const filename = config.util.getEnv('NODE_CONFIG_DIR') + '/' + (config.util.getEnv('NODE_ENV') || 'default') + '.json';
- let config_data = {};
-
- try {
- config_data = require(filename);
- } catch (err) {
- // do nothing
- if (debug_mode) {
- logger.debug(filename + ' config file could not be required');
- }
- }
-
- // Now create the keys and save them in the config.
- let key = new NodeRSA({ b: 2048 });
- key.generateKeyPair();
-
- config_data.jwt = {
- key: key.exportKey('private').toString(),
- pub: key.exportKey('public').toString(),
- };
-
- // Write config
- fs.writeFile(filename, JSON.stringify(config_data, null, 2), (err) => {
- if (err) {
- logger.error('Could not write JWT key pair to config file: ' + filename);
- reject(err);
- } else {
- logger.info('Wrote JWT key pair to config file: ' + filename);
- delete require.cache[require.resolve('config')];
- resolve();
- }
- });
- } else {
- // JWT key pair exists
- if (debug_mode) {
- logger.debug('JWT Keypair already exists');
- }
-
- resolve();
- }
- });
-};
-
-/**
- * Creates a default admin users if one doesn't already exist in the database
- *
- * @returns {Promise}
- */
-const setupDefaultUser = () => {
- return userModel
- .query()
- .select(userModel.raw('COUNT(`id`) as `count`'))
- .where('is_deleted', 0)
- .first()
- .then((row) => {
- if (!row.count) {
- // Create a new user and set password
- logger.info('Creating a new user: admin@example.com with password: changeme');
-
- let data = {
- is_deleted: 0,
- email: 'admin@example.com',
- name: 'Administrator',
- nickname: 'Admin',
- avatar: '',
- roles: ['admin'],
- };
-
- return userModel
- .query()
- .insertAndFetch(data)
- .then((user) => {
- return authModel
- .query()
- .insert({
- user_id: user.id,
- type: 'password',
- secret: 'changeme',
- meta: {},
- })
- .then(() => {
- return userPermissionModel.query().insert({
- user_id: user.id,
- visibility: 'all',
- proxy_hosts: 'manage',
- redirection_hosts: 'manage',
- dead_hosts: 'manage',
- streams: 'manage',
- access_lists: 'manage',
- certificates: 'manage',
- });
- });
- })
- .then(() => {
- logger.info('Initial admin setup completed');
- });
- } else if (debug_mode) {
- logger.debug('Admin user setup not required');
- }
- });
-};
-
-/**
- * Creates default settings if they don't already exist in the database
- *
- * @returns {Promise}
- */
-const setupDefaultSettings = () => {
- return settingModel
- .query()
- .select(settingModel.raw('COUNT(`id`) as `count`'))
- .where({id: 'default-site'})
- .first()
- .then((row) => {
- if (!row.count) {
- settingModel
- .query()
- .insert({
- id: 'default-site',
- name: 'Default Site',
- description: 'What to show when Nginx is hit with an unknown Host',
- value: 'congratulations',
- meta: {},
- })
- .then(() => {
- logger.info('Default settings added');
- });
- }
- if (debug_mode) {
- logger.debug('Default setting setup not required');
- }
- });
-};
-
-/**
- * Installs all Certbot plugins which are required for an installed certificate
- *
- * @returns {Promise}
- */
-const setupCertbotPlugins = () => {
- return certificateModel
- .query()
- .where('is_deleted', 0)
- .andWhere('provider', 'letsencrypt')
- .then((certificates) => {
- if (certificates && certificates.length) {
- let plugins = [];
- let promises = [];
-
- certificates.map(function (certificate) {
- if (certificate.meta && certificate.meta.dns_challenge === true) {
- const dns_plugin = dns_plugins[certificate.meta.dns_provider];
- const packages_to_install = `${dns_plugin.package_name}${dns_plugin.version_requirement || ''} ${dns_plugin.dependencies}`;
-
- if (plugins.indexOf(packages_to_install) === -1) plugins.push(packages_to_install);
-
- // Make sure credentials file exists
- const credentials_loc = '/etc/letsencrypt/credentials/credentials-' + certificate.id;
- // Escape single quotes and backslashes
- const escapedCredentials = certificate.meta.dns_provider_credentials.replaceAll('\'', '\\\'').replaceAll('\\', '\\\\');
- const credentials_cmd = '[ -f \'' + credentials_loc + '\' ] || { mkdir -p /etc/letsencrypt/credentials 2> /dev/null; echo \'' + escapedCredentials + '\' > \'' + credentials_loc + '\' && chmod 600 \'' + credentials_loc + '\'; }';
- promises.push(utils.exec(credentials_cmd));
- }
- });
-
- if (plugins.length) {
- const install_cmd = 'pip install ' + plugins.join(' ');
- promises.push(utils.exec(install_cmd));
- }
-
- if (promises.length) {
- return Promise.all(promises)
- .then(() => {
- logger.info('Added Certbot plugins ' + plugins.join(', '));
- });
- }
- }
- });
-};
-
-
-/**
- * Starts a timer to call run the logrotation binary every two days
- * @returns {Promise}
- */
-const setupLogrotation = () => {
- const intervalTimeout = 1000 * 60 * 60 * 24 * 2; // 2 days
-
- const runLogrotate = async () => {
- try {
- await utils.exec('logrotate /etc/logrotate.d/nginx-proxy-manager');
- logger.info('Logrotate completed.');
- } catch (e) { logger.warn(e); }
- };
-
- logger.info('Logrotate Timer initialized');
- setInterval(runLogrotate, intervalTimeout);
- // And do this now as well
- return runLogrotate();
-};
-
-module.exports = function () {
- return setupJwt()
- .then(setupDefaultUser)
- .then(setupDefaultSettings)
- .then(setupCertbotPlugins)
- .then(setupLogrotation);
-};
diff --git a/backend/templates/_assets.conf b/backend/templates/_assets.conf
deleted file mode 100644
index dcb183c5..00000000
--- a/backend/templates/_assets.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-{% if caching_enabled == 1 or caching_enabled == true -%}
- # Asset Caching
- include conf.d/include/assets.conf;
-{% endif %}
\ No newline at end of file
diff --git a/backend/templates/_certificates.conf b/backend/templates/_certificates.conf
deleted file mode 100644
index 06ca7bb8..00000000
--- a/backend/templates/_certificates.conf
+++ /dev/null
@@ -1,14 +0,0 @@
-{% if certificate and certificate_id > 0 -%}
-{% if certificate.provider == "letsencrypt" %}
- # Let's Encrypt SSL
- include conf.d/include/letsencrypt-acme-challenge.conf;
- include conf.d/include/ssl-ciphers.conf;
- ssl_certificate /etc/letsencrypt/live/npm-{{ certificate_id }}/fullchain.pem;
- ssl_certificate_key /etc/letsencrypt/live/npm-{{ certificate_id }}/privkey.pem;
-{% else %}
- # Custom SSL
- ssl_certificate /data/custom_ssl/npm-{{ certificate_id }}/fullchain.pem;
- ssl_certificate_key /data/custom_ssl/npm-{{ certificate_id }}/privkey.pem;
-{% endif %}
-{% endif %}
-
diff --git a/backend/templates/_exploits.conf b/backend/templates/_exploits.conf
deleted file mode 100644
index 002970d5..00000000
--- a/backend/templates/_exploits.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-{% if block_exploits == 1 or block_exploits == true %}
- # Block Exploits
- include conf.d/include/block-exploits.conf;
-{% endif %}
\ No newline at end of file
diff --git a/backend/templates/_forced_ssl.conf b/backend/templates/_forced_ssl.conf
deleted file mode 100644
index 7fade20c..00000000
--- a/backend/templates/_forced_ssl.conf
+++ /dev/null
@@ -1,6 +0,0 @@
-{% if certificate and certificate_id > 0 -%}
-{% if ssl_forced == 1 or ssl_forced == true %}
- # Force SSL
- include conf.d/include/force-ssl.conf;
-{% endif %}
-{% endif %}
\ No newline at end of file
diff --git a/backend/templates/_header_comment.conf b/backend/templates/_header_comment.conf
deleted file mode 100644
index 8f996d34..00000000
--- a/backend/templates/_header_comment.conf
+++ /dev/null
@@ -1,3 +0,0 @@
-# ------------------------------------------------------------
-# {{ domain_names | join: ", " }}
-# ------------------------------------------------------------
\ No newline at end of file
diff --git a/backend/templates/_hsts.conf b/backend/templates/_hsts.conf
deleted file mode 100644
index 11aecf24..00000000
--- a/backend/templates/_hsts.conf
+++ /dev/null
@@ -1,8 +0,0 @@
-{% if certificate and certificate_id > 0 -%}
-{% if ssl_forced == 1 or ssl_forced == true %}
-{% if hsts_enabled == 1 or hsts_enabled == true %}
- # HSTS (ngx_http_headers_module is required) (63072000 seconds = 2 years)
- add_header Strict-Transport-Security "max-age=63072000;{% if hsts_subdomains == 1 or hsts_subdomains == true -%} includeSubDomains;{% endif %} preload" always;
-{% endif %}
-{% endif %}
-{% endif %}
diff --git a/backend/templates/_listen.conf b/backend/templates/_listen.conf
deleted file mode 100644
index 730f3a7c..00000000
--- a/backend/templates/_listen.conf
+++ /dev/null
@@ -1,15 +0,0 @@
- listen 80;
-{% if ipv6 -%}
- listen [::]:80;
-{% else -%}
- #listen [::]:80;
-{% endif %}
-{% if certificate -%}
- listen 443 ssl{% if http2_support %} http2{% endif %};
-{% if ipv6 -%}
- listen [::]:443 ssl{% if http2_support %} http2{% endif %};
-{% else -%}
- #listen [::]:443;
-{% endif %}
-{% endif %}
- server_name {{ domain_names | join: " " }};
diff --git a/backend/templates/_location.conf b/backend/templates/_location.conf
deleted file mode 100644
index 5a7a6abe..00000000
--- a/backend/templates/_location.conf
+++ /dev/null
@@ -1,45 +0,0 @@
- location {{ path }} {
- proxy_set_header Host $host;
- proxy_set_header X-Forwarded-Scheme $scheme;
- proxy_set_header X-Forwarded-Proto $scheme;
- proxy_set_header X-Forwarded-For $remote_addr;
- proxy_set_header X-Real-IP $remote_addr;
- proxy_pass {{ forward_scheme }}://{{ forward_host }}:{{ forward_port }}{{ forward_path }};
-
- {% if access_list_id > 0 %}
- {% if access_list.items.length > 0 %}
- # Authorization
- auth_basic "Authorization required";
- auth_basic_user_file /data/access/{{ access_list_id }};
-
- {{ access_list.passauth }}
- {% endif %}
-
- # Access Rules
- {% for client in access_list.clients %}
- {{- client.rule -}};
- {% endfor %}deny all;
-
- # Access checks must...
- {% if access_list.satisfy %}
- {{ access_list.satisfy }};
- {% endif %}
-
- {% endif %}
-
- {% include "_assets.conf" %}
- {% include "_exploits.conf" %}
-
- {% include "_forced_ssl.conf" %}
- {% include "_hsts.conf" %}
-
- {% if allow_websocket_upgrade == 1 or allow_websocket_upgrade == true %}
- proxy_set_header Upgrade $http_upgrade;
- proxy_set_header Connection $http_connection;
- proxy_http_version 1.1;
- {% endif %}
-
-
- {{ advanced_config }}
- }
-
diff --git a/backend/templates/dead_host.conf b/backend/templates/dead_host.conf
deleted file mode 100644
index d94dff57..00000000
--- a/backend/templates/dead_host.conf
+++ /dev/null
@@ -1,23 +0,0 @@
-{% include "_header_comment.conf" %}
-
-{% if enabled %}
-server {
-{% include "_listen.conf" %}
-{% include "_certificates.conf" %}
-{% include "_hsts.conf" %}
-{% include "_forced_ssl.conf" %}
-
- access_log /data/logs/dead-host-{{ id }}_access.log standard;
- error_log /data/logs/dead-host-{{ id }}_error.log warn;
-
-{{ advanced_config }}
-
-{% if use_default_location %}
- location / {
-{% include "_hsts.conf" %}
- return 404;
- }
-{% endif %}
-
-}
-{% endif %}
diff --git a/backend/templates/default.conf b/backend/templates/default.conf
deleted file mode 100644
index ec68530c..00000000
--- a/backend/templates/default.conf
+++ /dev/null
@@ -1,40 +0,0 @@
-# ------------------------------------------------------------
-# Default Site
-# ------------------------------------------------------------
-{% if value == "congratulations" %}
-# Skipping output, congratulations page configration is baked in.
-{%- else %}
-server {
- listen 80 default;
-{% if ipv6 -%}
- listen [::]:80 default;
-{% else -%}
- #listen [::]:80 default;
-{% endif %}
- server_name default-host.localhost;
- access_log /data/logs/default-host_access.log combined;
- error_log /data/logs/default-host_error.log warn;
-{% include "_exploits.conf" %}
-
- include conf.d/include/letsencrypt-acme-challenge.conf;
-
-{%- if value == "404" %}
- location / {
- return 404;
- }
-{% endif %}
-
-{%- if value == "redirect" %}
- location / {
- return 301 {{ meta.redirect }};
- }
-{%- endif %}
-
-{%- if value == "html" %}
- root /data/nginx/default_www;
- location / {
- try_files $uri /index.html;
- }
-{%- endif %}
-}
-{% endif %}
diff --git a/backend/templates/ip_ranges.conf b/backend/templates/ip_ranges.conf
deleted file mode 100644
index 8ede2bd9..00000000
--- a/backend/templates/ip_ranges.conf
+++ /dev/null
@@ -1,3 +0,0 @@
-{% for range in ip_ranges %}
-set_real_ip_from {{ range }};
-{% endfor %}
\ No newline at end of file
diff --git a/backend/templates/letsencrypt-request.conf b/backend/templates/letsencrypt-request.conf
deleted file mode 100644
index 676c8a60..00000000
--- a/backend/templates/letsencrypt-request.conf
+++ /dev/null
@@ -1,19 +0,0 @@
-{% include "_header_comment.conf" %}
-
-server {
- listen 80;
-{% if ipv6 -%}
- listen [::]:80;
-{% endif %}
-
- server_name {{ domain_names | join: " " }};
-
- access_log /data/logs/letsencrypt-requests_access.log standard;
- error_log /data/logs/letsencrypt-requests_error.log warn;
-
- include conf.d/include/letsencrypt-acme-challenge.conf;
-
- location / {
- return 404;
- }
-}
diff --git a/backend/templates/proxy_host.conf b/backend/templates/proxy_host.conf
deleted file mode 100644
index ec30cca0..00000000
--- a/backend/templates/proxy_host.conf
+++ /dev/null
@@ -1,70 +0,0 @@
-{% include "_header_comment.conf" %}
-
-{% if enabled %}
-server {
- set $forward_scheme {{ forward_scheme }};
- set $server "{{ forward_host }}";
- set $port {{ forward_port }};
-
-{% include "_listen.conf" %}
-{% include "_certificates.conf" %}
-{% include "_assets.conf" %}
-{% include "_exploits.conf" %}
-{% include "_hsts.conf" %}
-{% include "_forced_ssl.conf" %}
-
-{% if allow_websocket_upgrade == 1 or allow_websocket_upgrade == true %}
-proxy_set_header Upgrade $http_upgrade;
-proxy_set_header Connection $http_connection;
-proxy_http_version 1.1;
-{% endif %}
-
- access_log /data/logs/proxy-host-{{ id }}_access.log proxy;
- error_log /data/logs/proxy-host-{{ id }}_error.log warn;
-
-{{ advanced_config }}
-
-{{ locations }}
-
-{% if use_default_location %}
-
- location / {
-
- {% if access_list_id > 0 %}
- {% if access_list.items.length > 0 %}
- # Authorization
- auth_basic "Authorization required";
- auth_basic_user_file /data/access/{{ access_list_id }};
-
- {{ access_list.passauth }}
- {% endif %}
-
- # Access Rules
- {% for client in access_list.clients %}
- {{- client.rule -}};
- {% endfor %}deny all;
-
- # Access checks must...
- {% if access_list.satisfy %}
- {{ access_list.satisfy }};
- {% endif %}
-
- {% endif %}
-
-{% include "_hsts.conf" %}
-
- {% if allow_websocket_upgrade == 1 or allow_websocket_upgrade == true %}
- proxy_set_header Upgrade $http_upgrade;
- proxy_set_header Connection $http_connection;
- proxy_http_version 1.1;
- {% endif %}
-
- # Proxy!
- include conf.d/include/proxy.conf;
- }
-{% endif %}
-
- # Custom
- include /data/nginx/custom/server_proxy[.]conf;
-}
-{% endif %}
diff --git a/backend/templates/redirection_host.conf b/backend/templates/redirection_host.conf
deleted file mode 100644
index 339fe72e..00000000
--- a/backend/templates/redirection_host.conf
+++ /dev/null
@@ -1,32 +0,0 @@
-{% include "_header_comment.conf" %}
-
-{% if enabled %}
-server {
-{% include "_listen.conf" %}
-{% include "_certificates.conf" %}
-{% include "_assets.conf" %}
-{% include "_exploits.conf" %}
-{% include "_hsts.conf" %}
-{% include "_forced_ssl.conf" %}
-
- access_log /data/logs/redirection-host-{{ id }}_access.log standard;
- error_log /data/logs/redirection-host-{{ id }}_error.log warn;
-
-{{ advanced_config }}
-
-{% if use_default_location %}
- location / {
-{% include "_hsts.conf" %}
-
- {% if preserve_path == 1 or preserve_path == true %}
- return {{ forward_http_code }} {{ forward_scheme }}://{{ forward_domain_name }}$request_uri;
- {% else %}
- return {{ forward_http_code }} {{ forward_scheme }}://{{ forward_domain_name }};
- {% endif %}
- }
-{% endif %}
-
- # Custom
- include /data/nginx/custom/server_redirect[.]conf;
-}
-{% endif %}
diff --git a/backend/templates/stream.conf b/backend/templates/stream.conf
deleted file mode 100644
index 76159a64..00000000
--- a/backend/templates/stream.conf
+++ /dev/null
@@ -1,37 +0,0 @@
-# ------------------------------------------------------------
-# {{ incoming_port }} TCP: {{ tcp_forwarding }} UDP: {{ udp_forwarding }}
-# ------------------------------------------------------------
-
-{% if enabled %}
-{% if tcp_forwarding == 1 or tcp_forwarding == true -%}
-server {
- listen {{ incoming_port }};
-{% if ipv6 -%}
- listen [::]:{{ incoming_port }};
-{% else -%}
- #listen [::]:{{ incoming_port }};
-{% endif %}
-
- proxy_pass {{ forwarding_host }}:{{ forwarding_port }};
-
- # Custom
- include /data/nginx/custom/server_stream[.]conf;
- include /data/nginx/custom/server_stream_tcp[.]conf;
-}
-{% endif %}
-{% if udp_forwarding == 1 or udp_forwarding == true %}
-server {
- listen {{ incoming_port }} udp;
-{% if ipv6 -%}
- listen [::]:{{ incoming_port }} udp;
-{% else -%}
- #listen [::]:{{ incoming_port }} udp;
-{% endif %}
- proxy_pass {{ forwarding_host }}:{{ forwarding_port }};
-
- # Custom
- include /data/nginx/custom/server_stream[.]conf;
- include /data/nginx/custom/server_stream_udp[.]conf;
-}
-{% endif %}
-{% endif %}
\ No newline at end of file
diff --git a/backend/yarn.lock b/backend/yarn.lock
deleted file mode 100644
index 96883182..00000000
--- a/backend/yarn.lock
+++ /dev/null
@@ -1,3754 +0,0 @@
-# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
-# yarn lockfile v1
-
-
-"@apidevtools/json-schema-ref-parser@8.0.0":
- version "8.0.0"
- resolved "https://registry.yarnpkg.com/@apidevtools/json-schema-ref-parser/-/json-schema-ref-parser-8.0.0.tgz#9eb749499b3f8d919e90bb141e4b6f67aee4692d"
- integrity sha512-n4YBtwQhdpLto1BaUCyAeflizmIbaloGShsPyRtFf5qdFJxfssj+GgLavczgKJFa3Bq+3St2CKcpRJdjtB4EBw==
- dependencies:
- "@jsdevtools/ono" "^7.1.0"
- call-me-maybe "^1.0.1"
- js-yaml "^3.13.1"
-
-"@babel/code-frame@^7.0.0":
- version "7.10.4"
- resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.10.4.tgz#168da1a36e90da68ae8d49c0f1b48c7c6249213a"
- integrity sha512-vG6SvB6oYEhvgisZNFRmRCUkLz11c7rp+tbNTynGqc6mS1d5ATd/sGyV6W0KZZnXRKMTzZDRgQT3Ou9jhpAfUg==
- dependencies:
- "@babel/highlight" "^7.10.4"
-
-"@babel/helper-validator-identifier@^7.10.4":
- version "7.10.4"
- resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.10.4.tgz#a78c7a7251e01f616512d31b10adcf52ada5e0d2"
- integrity sha512-3U9y+43hz7ZM+rzG24Qe2mufW5KhvFg/NhnNph+i9mgCtdTCtMJuI1TMkrIUiK7Ix4PYlRF9I5dhqaLYA/ADXw==
-
-"@babel/highlight@^7.10.4":
- version "7.10.4"
- resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.10.4.tgz#7d1bdfd65753538fabe6c38596cdb76d9ac60143"
- integrity sha512-i6rgnR/YgPEQzZZnbTHHuZdlE8qyoBNalD6F+q4vAFlcMEcqmkoG+mPqJYJCo63qPf74+Y1UZsl3l6f7/RIkmA==
- dependencies:
- "@babel/helper-validator-identifier" "^7.10.4"
- chalk "^2.0.0"
- js-tokens "^4.0.0"
-
-"@jsdevtools/ono@^7.1.0":
- version "7.1.3"
- resolved "https://registry.yarnpkg.com/@jsdevtools/ono/-/ono-7.1.3.tgz#9df03bbd7c696a5c58885c34aa06da41c8543796"
- integrity sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg==
-
-"@sindresorhus/is@^0.14.0":
- version "0.14.0"
- resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-0.14.0.tgz#9fb3a3cf3132328151f353de4632e01e52102bea"
- integrity sha512-9NET910DNaIPngYnLLPeg+Ogzqsi9uM4mSboU5y6p8S5DzMTVEsJZrawi+BoDNUVBa2DhJqQYUFvMDfgU062LQ==
-
-"@szmarczak/http-timer@^1.1.2":
- version "1.1.2"
- resolved "https://registry.yarnpkg.com/@szmarczak/http-timer/-/http-timer-1.1.2.tgz#b1665e2c461a2cd92f4c1bbf50d5454de0d4b421"
- integrity sha512-XIB2XbzHTN6ieIjfIMV9hlVcfPU26s2vafYWQcZHWXHOxiaRZYEDKEwdl129Zyg50+foYV2jCgtrqSA6qNuNSA==
- dependencies:
- defer-to-connect "^1.0.1"
-
-"@types/color-name@^1.1.1":
- version "1.1.1"
- resolved "https://registry.yarnpkg.com/@types/color-name/-/color-name-1.1.1.tgz#1c1261bbeaa10a8055bbc5d8ab84b7b2afc846a0"
- integrity sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ==
-
-abbrev@1:
- version "1.1.1"
- resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8"
- integrity sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==
-
-accepts@~1.3.5, accepts@~1.3.7:
- version "1.3.7"
- resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.7.tgz#531bc726517a3b2b41f850021c6cc15eaab507cd"
- integrity sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA==
- dependencies:
- mime-types "~2.1.24"
- negotiator "0.6.2"
-
-acorn-jsx@^5.2.0:
- version "5.2.0"
- resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.2.0.tgz#4c66069173d6fdd68ed85239fc256226182b2ebe"
- integrity sha512-HiUX/+K2YpkpJ+SzBffkM/AQ2YE03S0U1kjTLVpoJdhZMOWy8qvXVN9JdLqv2QsaQ6MPYQIuNmwD8zOiYUofLQ==
-
-acorn@^7.1.1:
- version "7.4.0"
- resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.0.tgz#e1ad486e6c54501634c6c397c5c121daa383607c"
- integrity sha512-+G7P8jJmCHr+S+cLfQxygbWhXy+8YTVGzAkpEbcLo2mLoL7tij/VG41QSHACSf5QgYRhMZYHuNc6drJaO0Da+w==
-
-ajv@^6.10.0, ajv@^6.10.2, ajv@^6.12.0, ajv@^6.12.6:
- version "6.12.6"
- resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4"
- integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==
- dependencies:
- fast-deep-equal "^3.1.1"
- fast-json-stable-stringify "^2.0.0"
- json-schema-traverse "^0.4.1"
- uri-js "^4.2.2"
-
-ansi-align@^3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/ansi-align/-/ansi-align-3.0.0.tgz#b536b371cf687caaef236c18d3e21fe3797467cb"
- integrity sha512-ZpClVKqXN3RGBmKibdfWzqCY4lnjEuoNzU5T0oEFpfd/z5qJHVarukridD4juLO2FXMiwUQxr9WqQtaYa8XRYw==
- dependencies:
- string-width "^3.0.0"
-
-ansi-escapes@^4.2.1:
- version "4.3.1"
- resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.1.tgz#a5c47cc43181f1f38ffd7076837700d395522a61"
- integrity sha512-JWF7ocqNrp8u9oqpgV+wH5ftbt+cfvv+PTjOvKLT3AdYly/LmORARfEVT1iyjwN+4MqE5UmVKoAdIBqeoCHgLA==
- dependencies:
- type-fest "^0.11.0"
-
-ansi-regex@^2.0.0:
- version "2.1.1"
- resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df"
- integrity sha1-w7M6te42DYbg5ijwRorn7yfWVN8=
-
-ansi-regex@^3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998"
- integrity sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=
-
-ansi-regex@^4.1.0:
- version "4.1.0"
- resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.1.0.tgz#8b9f8f08cf1acb843756a839ca8c7e3168c51997"
- integrity sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==
-
-ansi-regex@^5.0.0:
- version "5.0.0"
- resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.0.tgz#388539f55179bf39339c81af30a654d69f87cb75"
- integrity sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==
-
-ansi-styles@^3.2.0, ansi-styles@^3.2.1:
- version "3.2.1"
- resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d"
- integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==
- dependencies:
- color-convert "^1.9.0"
-
-ansi-styles@^4.0.0, ansi-styles@^4.1.0:
- version "4.2.1"
- resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.2.1.tgz#90ae75c424d008d2624c5bf29ead3177ebfcf359"
- integrity sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==
- dependencies:
- "@types/color-name" "^1.1.1"
- color-convert "^2.0.1"
-
-anymatch@~3.1.1:
- version "3.1.1"
- resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.1.tgz#c55ecf02185e2469259399310c173ce31233b142"
- integrity sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg==
- dependencies:
- normalize-path "^3.0.0"
- picomatch "^2.0.4"
-
-aproba@^1.0.3:
- version "1.2.0"
- resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a"
- integrity sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==
-
-archiver-utils@^2.1.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/archiver-utils/-/archiver-utils-2.1.0.tgz#e8a460e94b693c3e3da182a098ca6285ba9249e2"
- integrity sha512-bEL/yUb/fNNiNTuUz979Z0Yg5L+LzLxGJz8x79lYmR54fmTIb6ob/hNQgkQnIUDWIFjZVQwl9Xs356I6BAMHfw==
- dependencies:
- glob "^7.1.4"
- graceful-fs "^4.2.0"
- lazystream "^1.0.0"
- lodash.defaults "^4.2.0"
- lodash.difference "^4.5.0"
- lodash.flatten "^4.4.0"
- lodash.isplainobject "^4.0.6"
- lodash.union "^4.6.0"
- normalize-path "^3.0.0"
- readable-stream "^2.0.0"
-
-archiver@^5.3.0:
- version "5.3.0"
- resolved "https://registry.yarnpkg.com/archiver/-/archiver-5.3.0.tgz#dd3e097624481741df626267564f7dd8640a45ba"
- integrity sha512-iUw+oDwK0fgNpvveEsdQ0Ase6IIKztBJU2U0E9MzszMfmVVUyv1QJhS2ITW9ZCqx8dktAxVAjWWkKehuZE8OPg==
- dependencies:
- archiver-utils "^2.1.0"
- async "^3.2.0"
- buffer-crc32 "^0.2.1"
- readable-stream "^3.6.0"
- readdir-glob "^1.0.0"
- tar-stream "^2.2.0"
- zip-stream "^4.1.0"
-
-are-we-there-yet@~1.1.2:
- version "1.1.5"
- resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz#4b35c2944f062a8bfcda66410760350fe9ddfc21"
- integrity sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w==
- dependencies:
- delegates "^1.0.0"
- readable-stream "^2.0.6"
-
-argparse@^1.0.7:
- version "1.0.10"
- resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911"
- integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==
- dependencies:
- sprintf-js "~1.0.2"
-
-arr-diff@^4.0.0:
- version "4.0.0"
- resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520"
- integrity sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA=
-
-arr-flatten@^1.1.0:
- version "1.1.0"
- resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1"
- integrity sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg==
-
-arr-union@^3.1.0:
- version "3.1.0"
- resolved "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4"
- integrity sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ=
-
-array-each@^1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/array-each/-/array-each-1.0.1.tgz#a794af0c05ab1752846ee753a1f211a05ba0c44f"
- integrity sha1-p5SvDAWrF1KEbudTofIRoFugxE8=
-
-array-flatten@1.1.1:
- version "1.1.1"
- resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2"
- integrity sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=
-
-array-slice@^1.0.0:
- version "1.1.0"
- resolved "https://registry.yarnpkg.com/array-slice/-/array-slice-1.1.0.tgz#e368ea15f89bc7069f7ffb89aec3a6c7d4ac22d4"
- integrity sha512-B1qMD3RBP7O8o0H2KbrXDyB0IccejMF15+87Lvlor12ONPRHP6gTjXMNkt/d3ZuOGbAe66hFmaCfECI24Ufp6w==
-
-array-unique@^0.3.2:
- version "0.3.2"
- resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428"
- integrity sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg=
-
-asn1@^0.2.4:
- version "0.2.4"
- resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.4.tgz#8d2475dfab553bb33e77b54e59e880bb8ce23136"
- integrity sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==
- dependencies:
- safer-buffer "~2.1.0"
-
-assign-symbols@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367"
- integrity sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c=
-
-astral-regex@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-1.0.0.tgz#6c8c3fb827dd43ee3918f27b82782ab7658a6fd9"
- integrity sha512-+Ryf6g3BKoRc7jfp7ad8tM4TtMiaWvbF/1/sQcZPkkS7ag3D5nMBCe2UfOTONtAkaG0tO0ij3C5Lwmf1EiyjHg==
-
-async@^3.2.0:
- version "3.2.1"
- resolved "https://registry.yarnpkg.com/async/-/async-3.2.1.tgz#d3274ec66d107a47476a4c49136aacdb00665fc8"
- integrity sha512-XdD5lRO/87udXCMC9meWdYiR+Nq6ZjUfXidViUZGu2F1MO4T3XwZ1et0hb2++BgLfhyJwy44BGB/yx80ABx8hg==
-
-atob@^2.1.2:
- version "2.1.2"
- resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9"
- integrity sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg==
-
-balanced-match@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767"
- integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c=
-
-base64-js@^1.3.1:
- version "1.5.1"
- resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a"
- integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==
-
-base@^0.11.1:
- version "0.11.2"
- resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f"
- integrity sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg==
- dependencies:
- cache-base "^1.0.1"
- class-utils "^0.3.5"
- component-emitter "^1.2.1"
- define-property "^1.0.0"
- isobject "^3.0.1"
- mixin-deep "^1.2.0"
- pascalcase "^0.1.1"
-
-batchflow@^0.4.0:
- version "0.4.0"
- resolved "https://registry.yarnpkg.com/batchflow/-/batchflow-0.4.0.tgz#7d419df79b6b7587b06f9ea34f96ccef6f74e5b5"
- integrity sha1-fUGd95trdYewb56jT5bM72905bU=
-
-bcrypt@^5.0.0:
- version "5.0.0"
- resolved "https://registry.yarnpkg.com/bcrypt/-/bcrypt-5.0.0.tgz#051407c7cd5ffbfb773d541ca3760ea0754e37e2"
- integrity sha512-jB0yCBl4W/kVHM2whjfyqnxTmOHkCX4kHEa5nYKSoGeYe8YrjTYTc87/6bwt1g8cmV0QrbhKriETg9jWtcREhg==
- dependencies:
- node-addon-api "^3.0.0"
- node-pre-gyp "0.15.0"
-
-bignumber.js@9.0.0:
- version "9.0.0"
- resolved "https://registry.yarnpkg.com/bignumber.js/-/bignumber.js-9.0.0.tgz#805880f84a329b5eac6e7cb6f8274b6d82bdf075"
- integrity sha512-t/OYhhJ2SD+YGBQcjY8GzzDHEk9f3nerxjtfa6tlMXfe7frs/WozhvCNoGvpM0P3bNf3Gq5ZRMlGr5f3r4/N8A==
-
-binary-extensions@^2.0.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.1.0.tgz#30fa40c9e7fe07dbc895678cd287024dea241dd9"
- integrity sha512-1Yj8h9Q+QDF5FzhMs/c9+6UntbD5MkRfRwac8DoEm9ZfUBZ7tZ55YcGVAzEe4bXsdQHEk+s9S5wsOKVdZrw0tQ==
-
-bl@^4.0.3:
- version "4.1.0"
- resolved "https://registry.yarnpkg.com/bl/-/bl-4.1.0.tgz#451535264182bec2fbbc83a62ab98cf11d9f7b3a"
- integrity sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==
- dependencies:
- buffer "^5.5.0"
- inherits "^2.0.4"
- readable-stream "^3.4.0"
-
-blueimp-md5@^2.16.0:
- version "2.17.0"
- resolved "https://registry.yarnpkg.com/blueimp-md5/-/blueimp-md5-2.17.0.tgz#f4fcac088b115f7b4045f19f5da59e9d01b1bb96"
- integrity sha512-x5PKJHY5rHQYaADj6NwPUR2QRCUVSggPzrUKkeENpj871o9l9IefJbO2jkT5UvYykeOK9dx0VmkIo6dZ+vThYw==
-
-body-parser@1.19.0, body-parser@^1.19.0:
- version "1.19.0"
- resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.19.0.tgz#96b2709e57c9c4e09a6fd66a8fd979844f69f08a"
- integrity sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw==
- dependencies:
- bytes "3.1.0"
- content-type "~1.0.4"
- debug "2.6.9"
- depd "~1.1.2"
- http-errors "1.7.2"
- iconv-lite "0.4.24"
- on-finished "~2.3.0"
- qs "6.7.0"
- raw-body "2.4.0"
- type-is "~1.6.17"
-
-boxen@^4.2.0:
- version "4.2.0"
- resolved "https://registry.yarnpkg.com/boxen/-/boxen-4.2.0.tgz#e411b62357d6d6d36587c8ac3d5d974daa070e64"
- integrity sha512-eB4uT9RGzg2odpER62bBwSLvUeGC+WbRjjyyFhGsKnc8wp/m0+hQsMUvUe3H2V0D5vw0nBdO1hCJoZo5mKeuIQ==
- dependencies:
- ansi-align "^3.0.0"
- camelcase "^5.3.1"
- chalk "^3.0.0"
- cli-boxes "^2.2.0"
- string-width "^4.1.0"
- term-size "^2.1.0"
- type-fest "^0.8.1"
- widest-line "^3.1.0"
-
-brace-expansion@^1.1.7:
- version "1.1.11"
- resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd"
- integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==
- dependencies:
- balanced-match "^1.0.0"
- concat-map "0.0.1"
-
-braces@^2.3.1:
- version "2.3.2"
- resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729"
- integrity sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==
- dependencies:
- arr-flatten "^1.1.0"
- array-unique "^0.3.2"
- extend-shallow "^2.0.1"
- fill-range "^4.0.0"
- isobject "^3.0.1"
- repeat-element "^1.1.2"
- snapdragon "^0.8.1"
- snapdragon-node "^2.0.1"
- split-string "^3.0.2"
- to-regex "^3.0.1"
-
-braces@~3.0.2:
- version "3.0.2"
- resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107"
- integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==
- dependencies:
- fill-range "^7.0.1"
-
-buffer-crc32@^0.2.1, buffer-crc32@^0.2.13:
- version "0.2.13"
- resolved "https://registry.yarnpkg.com/buffer-crc32/-/buffer-crc32-0.2.13.tgz#0d333e3f00eac50aa1454abd30ef8c2a5d9a7242"
- integrity sha1-DTM+PwDqxQqhRUq9MO+MKl2ackI=
-
-buffer-equal-constant-time@1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz#f8e71132f7ffe6e01a5c9697a4c6f3e48d5cc819"
- integrity sha1-+OcRMvf/5uAaXJaXpMbz5I1cyBk=
-
-buffer@^5.5.0:
- version "5.7.1"
- resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0"
- integrity sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==
- dependencies:
- base64-js "^1.3.1"
- ieee754 "^1.1.13"
-
-busboy@^0.3.1:
- version "0.3.1"
- resolved "https://registry.yarnpkg.com/busboy/-/busboy-0.3.1.tgz#170899274c5bf38aae27d5c62b71268cd585fd1b"
- integrity sha512-y7tTxhGKXcyBxRKAni+awqx8uqaJKrSFSNFSeRG5CsWNdmy2BIK+6VGWEW7TZnIO/533mtMEA4rOevQV815YJw==
- dependencies:
- dicer "0.3.0"
-
-bytes@3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048"
- integrity sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg=
-
-bytes@3.1.0:
- version "3.1.0"
- resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.0.tgz#f6cf7933a360e0588fa9fde85651cdc7f805d1f6"
- integrity sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==
-
-cache-base@^1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2"
- integrity sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ==
- dependencies:
- collection-visit "^1.0.0"
- component-emitter "^1.2.1"
- get-value "^2.0.6"
- has-value "^1.0.0"
- isobject "^3.0.1"
- set-value "^2.0.0"
- to-object-path "^0.3.0"
- union-value "^1.0.0"
- unset-value "^1.0.0"
-
-cacheable-request@^6.0.0:
- version "6.1.0"
- resolved "https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-6.1.0.tgz#20ffb8bd162ba4be11e9567d823db651052ca912"
- integrity sha512-Oj3cAGPCqOZX7Rz64Uny2GYAZNliQSqfbePrgAQ1wKAihYmCUnraBtJtKcGR4xz7wF+LoJC+ssFZvv5BgF9Igg==
- dependencies:
- clone-response "^1.0.2"
- get-stream "^5.1.0"
- http-cache-semantics "^4.0.0"
- keyv "^3.0.0"
- lowercase-keys "^2.0.0"
- normalize-url "^4.1.0"
- responselike "^1.0.2"
-
-call-me-maybe@^1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/call-me-maybe/-/call-me-maybe-1.0.1.tgz#26d208ea89e37b5cbde60250a15f031c16a4d66b"
- integrity sha1-JtII6onje1y95gJQoV8DHBak1ms=
-
-callsites@^3.0.0:
- version "3.1.0"
- resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73"
- integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==
-
-camelcase@^5.0.0, camelcase@^5.3.1:
- version "5.3.1"
- resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320"
- integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==
-
-chalk@^2.0.0, chalk@^2.1.0, chalk@^2.3.2:
- version "2.4.2"
- resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424"
- integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==
- dependencies:
- ansi-styles "^3.2.1"
- escape-string-regexp "^1.0.5"
- supports-color "^5.3.0"
-
-chalk@^3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/chalk/-/chalk-3.0.0.tgz#3f73c2bf526591f574cc492c51e2456349f844e4"
- integrity sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==
- dependencies:
- ansi-styles "^4.1.0"
- supports-color "^7.1.0"
-
-chalk@^4.1.0:
- version "4.1.0"
- resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.0.tgz#4e14870a618d9e2edd97dd8345fd9d9dc315646a"
- integrity sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==
- dependencies:
- ansi-styles "^4.1.0"
- supports-color "^7.1.0"
-
-chardet@^0.7.0:
- version "0.7.0"
- resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.7.0.tgz#90094849f0937f2eedc2425d0d28a9e5f0cbad9e"
- integrity sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==
-
-chokidar@^3.2.2:
- version "3.4.1"
- resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.4.1.tgz#e905bdecf10eaa0a0b1db0c664481cc4cbc22ba1"
- integrity sha512-TQTJyr2stihpC4Sya9hs2Xh+O2wf+igjL36Y75xx2WdHuiICcn/XJza46Jwt0eT5hVpQOzo3FpY3cj3RVYLX0g==
- dependencies:
- anymatch "~3.1.1"
- braces "~3.0.2"
- glob-parent "~5.1.0"
- is-binary-path "~2.1.0"
- is-glob "~4.0.1"
- normalize-path "~3.0.0"
- readdirp "~3.4.0"
- optionalDependencies:
- fsevents "~2.1.2"
-
-chownr@^1.1.4:
- version "1.1.4"
- resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b"
- integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==
-
-ci-info@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-2.0.0.tgz#67a9e964be31a51e15e5010d58e6f12834002f46"
- integrity sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==
-
-class-utils@^0.3.5:
- version "0.3.6"
- resolved "https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463"
- integrity sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg==
- dependencies:
- arr-union "^3.1.0"
- define-property "^0.2.5"
- isobject "^3.0.0"
- static-extend "^0.1.1"
-
-cli-boxes@^2.2.0:
- version "2.2.0"
- resolved "https://registry.yarnpkg.com/cli-boxes/-/cli-boxes-2.2.0.tgz#538ecae8f9c6ca508e3c3c95b453fe93cb4c168d"
- integrity sha512-gpaBrMAizVEANOpfZp/EEUixTXDyGt7DFzdK5hU+UbWt/J0lB0w20ncZj59Z9a93xHb9u12zF5BS6i9RKbtg4w==
-
-cli-cursor@^3.1.0:
- version "3.1.0"
- resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-3.1.0.tgz#264305a7ae490d1d03bf0c9ba7c925d1753af307"
- integrity sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==
- dependencies:
- restore-cursor "^3.1.0"
-
-cli-width@^3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-3.0.0.tgz#a2f48437a2caa9a22436e794bf071ec9e61cedf6"
- integrity sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw==
-
-cliui@^6.0.0:
- version "6.0.0"
- resolved "https://registry.yarnpkg.com/cliui/-/cliui-6.0.0.tgz#511d702c0c4e41ca156d7d0e96021f23e13225b1"
- integrity sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==
- dependencies:
- string-width "^4.2.0"
- strip-ansi "^6.0.0"
- wrap-ansi "^6.2.0"
-
-clone-response@^1.0.2:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/clone-response/-/clone-response-1.0.2.tgz#d1dc973920314df67fbeb94223b4ee350239e96b"
- integrity sha1-0dyXOSAxTfZ/vrlCI7TuNQI56Ws=
- dependencies:
- mimic-response "^1.0.0"
-
-code-point-at@^1.0.0:
- version "1.1.0"
- resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77"
- integrity sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=
-
-collection-visit@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0"
- integrity sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA=
- dependencies:
- map-visit "^1.0.0"
- object-visit "^1.0.0"
-
-color-convert@^1.9.0:
- version "1.9.3"
- resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8"
- integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==
- dependencies:
- color-name "1.1.3"
-
-color-convert@^2.0.1:
- version "2.0.1"
- resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3"
- integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==
- dependencies:
- color-name "~1.1.4"
-
-color-name@1.1.3:
- version "1.1.3"
- resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25"
- integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=
-
-color-name@~1.1.4:
- version "1.1.4"
- resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2"
- integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==
-
-colorette@1.1.0:
- version "1.1.0"
- resolved "https://registry.yarnpkg.com/colorette/-/colorette-1.1.0.tgz#1f943e5a357fac10b4e0f5aaef3b14cdc1af6ec7"
- integrity sha512-6S062WDQUXi6hOfkO/sBPVwE5ASXY4G2+b4atvhJfSsuUUhIaUKlkjLe9692Ipyt5/a+IPF5aVTu3V5gvXq5cg==
-
-commander@^4.1.1:
- version "4.1.1"
- resolved "https://registry.yarnpkg.com/commander/-/commander-4.1.1.tgz#9fd602bd936294e9e9ef46a3f4d6964044b18068"
- integrity sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==
-
-component-emitter@^1.2.1:
- version "1.3.0"
- resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0"
- integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==
-
-compress-commons@^4.1.0:
- version "4.1.1"
- resolved "https://registry.yarnpkg.com/compress-commons/-/compress-commons-4.1.1.tgz#df2a09a7ed17447642bad10a85cc9a19e5c42a7d"
- integrity sha512-QLdDLCKNV2dtoTorqgxngQCMA+gWXkM/Nwu7FpeBhk/RdkzimqC3jueb/FDmaZeXh+uby1jkBqE3xArsLBE5wQ==
- dependencies:
- buffer-crc32 "^0.2.13"
- crc32-stream "^4.0.2"
- normalize-path "^3.0.0"
- readable-stream "^3.6.0"
-
-compressible@~2.0.16:
- version "2.0.18"
- resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.18.tgz#af53cca6b070d4c3c0750fbd77286a6d7cc46fba"
- integrity sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==
- dependencies:
- mime-db ">= 1.43.0 < 2"
-
-compression@^1.7.4:
- version "1.7.4"
- resolved "https://registry.yarnpkg.com/compression/-/compression-1.7.4.tgz#95523eff170ca57c29a0ca41e6fe131f41e5bb8f"
- integrity sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ==
- dependencies:
- accepts "~1.3.5"
- bytes "3.0.0"
- compressible "~2.0.16"
- debug "2.6.9"
- on-headers "~1.0.2"
- safe-buffer "5.1.2"
- vary "~1.1.2"
-
-concat-map@0.0.1:
- version "0.0.1"
- resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b"
- integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=
-
-config@^3.3.1:
- version "3.3.1"
- resolved "https://registry.yarnpkg.com/config/-/config-3.3.1.tgz#b6a70e2908a43b98ed20be7e367edf0cc8ed5a19"
- integrity sha512-+2/KaaaAzdwUBE3jgZON11L1ggLLhpf2FsGrfqYFHZW22ySGv/HqYIXrBwKKvn+XZh1UBUjHwAcrfsSkSygT+Q==
- dependencies:
- json5 "^2.1.1"
-
-configstore@^5.0.1:
- version "5.0.1"
- resolved "https://registry.yarnpkg.com/configstore/-/configstore-5.0.1.tgz#d365021b5df4b98cdd187d6a3b0e3f6a7cc5ed96"
- integrity sha512-aMKprgk5YhBNyH25hj8wGt2+D52Sw1DRRIzqBwLp2Ya9mFmY8KPvvtvmna8SxVR9JMZ4kzMD68N22vlaRpkeFA==
- dependencies:
- dot-prop "^5.2.0"
- graceful-fs "^4.1.2"
- make-dir "^3.0.0"
- unique-string "^2.0.0"
- write-file-atomic "^3.0.0"
- xdg-basedir "^4.0.0"
-
-console-control-strings@^1.0.0, console-control-strings@~1.1.0:
- version "1.1.0"
- resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e"
- integrity sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4=
-
-content-disposition@0.5.3:
- version "0.5.3"
- resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.3.tgz#e130caf7e7279087c5616c2007d0485698984fbd"
- integrity sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g==
- dependencies:
- safe-buffer "5.1.2"
-
-content-type@~1.0.4:
- version "1.0.4"
- resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b"
- integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==
-
-cookie-signature@1.0.6:
- version "1.0.6"
- resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c"
- integrity sha1-4wOogrNCzD7oylE6eZmXNNqzriw=
-
-cookie@0.4.0:
- version "0.4.0"
- resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.4.0.tgz#beb437e7022b3b6d49019d088665303ebe9c14ba"
- integrity sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg==
-
-copy-descriptor@^0.1.0:
- version "0.1.1"
- resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d"
- integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40=
-
-core-util-is@~1.0.0:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7"
- integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=
-
-crc-32@^1.2.0:
- version "1.2.0"
- resolved "https://registry.yarnpkg.com/crc-32/-/crc-32-1.2.0.tgz#cb2db6e29b88508e32d9dd0ec1693e7b41a18208"
- integrity sha512-1uBwHxF+Y/4yF5G48fwnKq6QsIXheor3ZLPT80yGBV1oEUwpPojlEhQbWKVw1VwcTQyMGHK1/XMmTjmlsmTTGA==
- dependencies:
- exit-on-epipe "~1.0.1"
- printj "~1.1.0"
-
-crc32-stream@^4.0.2:
- version "4.0.2"
- resolved "https://registry.yarnpkg.com/crc32-stream/-/crc32-stream-4.0.2.tgz#c922ad22b38395abe9d3870f02fa8134ed709007"
- integrity sha512-DxFZ/Hk473b/muq1VJ///PMNLj0ZMnzye9thBpmjpJKCc5eMgB95aK8zCGrGfQ90cWo561Te6HK9D+j4KPdM6w==
- dependencies:
- crc-32 "^1.2.0"
- readable-stream "^3.4.0"
-
-cross-spawn@^6.0.5:
- version "6.0.5"
- resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4"
- integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==
- dependencies:
- nice-try "^1.0.4"
- path-key "^2.0.1"
- semver "^5.5.0"
- shebang-command "^1.2.0"
- which "^1.2.9"
-
-crypto-random-string@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-2.0.0.tgz#ef2a7a966ec11083388369baa02ebead229b30d5"
- integrity sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA==
-
-db-errors@^0.2.3:
- version "0.2.3"
- resolved "https://registry.yarnpkg.com/db-errors/-/db-errors-0.2.3.tgz#a6a38952e00b20e790f2695a6446b3c65497ffa2"
- integrity sha512-OOgqgDuCavHXjYSJoV2yGhv6SeG8nk42aoCSoyXLZUH7VwFG27rxbavU1z+VrZbZjphw5UkDQwUlD21MwZpUng==
-
-debug@2.6.9, debug@^2.2.0, debug@^2.3.3:
- version "2.6.9"
- resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f"
- integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==
- dependencies:
- ms "2.0.0"
-
-debug@4.1.1, debug@^4.0.1:
- version "4.1.1"
- resolved "https://registry.yarnpkg.com/debug/-/debug-4.1.1.tgz#3b72260255109c6b589cee050f1d516139664791"
- integrity sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==
- dependencies:
- ms "^2.1.1"
-
-debug@^3.2.6:
- version "3.2.6"
- resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b"
- integrity sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==
- dependencies:
- ms "^2.1.1"
-
-decamelize@^1.2.0:
- version "1.2.0"
- resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290"
- integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=
-
-decode-uri-component@^0.2.0:
- version "0.2.0"
- resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545"
- integrity sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU=
-
-decompress-response@^3.3.0:
- version "3.3.0"
- resolved "https://registry.yarnpkg.com/decompress-response/-/decompress-response-3.3.0.tgz#80a4dd323748384bfa248083622aedec982adff3"
- integrity sha1-gKTdMjdIOEv6JICDYirt7Jgq3/M=
- dependencies:
- mimic-response "^1.0.0"
-
-deep-extend@^0.6.0:
- version "0.6.0"
- resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac"
- integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==
-
-deep-is@~0.1.3:
- version "0.1.3"
- resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34"
- integrity sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ=
-
-defer-to-connect@^1.0.1:
- version "1.1.3"
- resolved "https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-1.1.3.tgz#331ae050c08dcf789f8c83a7b81f0ed94f4ac591"
- integrity sha512-0ISdNousHvZT2EiFlZeZAHBUvSxmKswVCEf8hW7KWgG4a8MVEu/3Vb6uWYozkjylyCxe0JBIiRB1jV45S70WVQ==
-
-define-property@^0.2.5:
- version "0.2.5"
- resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116"
- integrity sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=
- dependencies:
- is-descriptor "^0.1.0"
-
-define-property@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6"
- integrity sha1-dp66rz9KY6rTr56NMEybvnm/sOY=
- dependencies:
- is-descriptor "^1.0.0"
-
-define-property@^2.0.2:
- version "2.0.2"
- resolved "https://registry.yarnpkg.com/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d"
- integrity sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ==
- dependencies:
- is-descriptor "^1.0.2"
- isobject "^3.0.1"
-
-delegates@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a"
- integrity sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o=
-
-depd@~1.1.2:
- version "1.1.2"
- resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9"
- integrity sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=
-
-destroy@~1.0.4:
- version "1.0.4"
- resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.0.4.tgz#978857442c44749e4206613e37946205826abd80"
- integrity sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA=
-
-detect-file@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/detect-file/-/detect-file-1.0.0.tgz#f0d66d03672a825cb1b73bdb3fe62310c8e552b7"
- integrity sha1-8NZtA2cqglyxtzvbP+YjEMjlUrc=
-
-detect-libc@^1.0.2:
- version "1.0.3"
- resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b"
- integrity sha1-+hN8S9aY7fVc1c0CrFWfkaTEups=
-
-dicer@0.3.0:
- version "0.3.0"
- resolved "https://registry.yarnpkg.com/dicer/-/dicer-0.3.0.tgz#eacd98b3bfbf92e8ab5c2fdb71aaac44bb06b872"
- integrity sha512-MdceRRWqltEG2dZqO769g27N/3PXfcKl04VhYnBlo2YhH7zPi88VebsjTKclaOyiuMaGU72hTfw3VkUitGcVCA==
- dependencies:
- streamsearch "0.1.2"
-
-doctrine@^3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961"
- integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==
- dependencies:
- esutils "^2.0.2"
-
-dot-prop@^5.2.0:
- version "5.2.0"
- resolved "https://registry.yarnpkg.com/dot-prop/-/dot-prop-5.2.0.tgz#c34ecc29556dc45f1f4c22697b6f4904e0cc4fcb"
- integrity sha512-uEUyaDKoSQ1M4Oq8l45hSE26SnTxL6snNnqvK/VWx5wJhmff5z0FUVJDKDanor/6w3kzE3i7XZOk+7wC0EXr1A==
- dependencies:
- is-obj "^2.0.0"
-
-duplexer3@^0.1.4:
- version "0.1.4"
- resolved "https://registry.yarnpkg.com/duplexer3/-/duplexer3-0.1.4.tgz#ee01dd1cac0ed3cbc7fdbea37dc0a8f1ce002ce2"
- integrity sha1-7gHdHKwO08vH/b6jfcCo8c4ALOI=
-
-ecdsa-sig-formatter@1.0.11:
- version "1.0.11"
- resolved "https://registry.yarnpkg.com/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz#ae0f0fa2d85045ef14a817daa3ce9acd0489e5bf"
- integrity sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==
- dependencies:
- safe-buffer "^5.0.1"
-
-ee-first@1.1.1:
- version "1.1.1"
- resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d"
- integrity sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=
-
-email-validator@^2.0.4:
- version "2.0.4"
- resolved "https://registry.yarnpkg.com/email-validator/-/email-validator-2.0.4.tgz#b8dfaa5d0dae28f1b03c95881d904d4e40bfe7ed"
- integrity sha512-gYCwo7kh5S3IDyZPLZf6hSS0MnZT8QmJFqYvbqlDZSbwdZlY6QZWxJ4i/6UhITOJ4XzyI647Bm2MXKCLqnJ4nQ==
-
-emoji-regex@^7.0.1:
- version "7.0.3"
- resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-7.0.3.tgz#933a04052860c85e83c122479c4748a8e4c72156"
- integrity sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==
-
-emoji-regex@^8.0.0:
- version "8.0.0"
- resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37"
- integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==
-
-encodeurl@~1.0.2:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59"
- integrity sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k=
-
-end-of-stream@^1.1.0, end-of-stream@^1.4.1:
- version "1.4.4"
- resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0"
- integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==
- dependencies:
- once "^1.4.0"
-
-error-ex@^1.3.1:
- version "1.3.2"
- resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf"
- integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==
- dependencies:
- is-arrayish "^0.2.1"
-
-escape-goat@^2.0.0:
- version "2.1.1"
- resolved "https://registry.yarnpkg.com/escape-goat/-/escape-goat-2.1.1.tgz#1b2dc77003676c457ec760b2dc68edb648188675"
- integrity sha512-8/uIhbG12Csjy2JEW7D9pHbreaVaS/OpN3ycnyvElTdwM5n6GY6W6e2IPemfvGZeUMqZ9A/3GqIZMgKnBhAw/Q==
-
-escape-html@~1.0.3:
- version "1.0.3"
- resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988"
- integrity sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg=
-
-escape-string-regexp@^1.0.5:
- version "1.0.5"
- resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4"
- integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=
-
-eslint-plugin-align-assignments@^1.1.2:
- version "1.1.2"
- resolved "https://registry.yarnpkg.com/eslint-plugin-align-assignments/-/eslint-plugin-align-assignments-1.1.2.tgz#83e1a8a826d4adf29e82b52d0bb39c88b301b576"
- integrity sha512-I1ZJgk9EjHfGVU9M2Ex8UkVkkjLL5Y9BS6VNnQHq79eHj2H4/Cgxf36lQSUTLgm2ntB03A2NtF+zg9fyi5vChg==
-
-eslint-scope@^5.0.0:
- version "5.1.0"
- resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-5.1.0.tgz#d0f971dfe59c69e0cada684b23d49dbf82600ce5"
- integrity sha512-iiGRvtxWqgtx5m8EyQUJihBloE4EnYeGE/bz1wSPwJE6tZuJUtHlhqDM4Xj2ukE8Dyy1+HCZ4hE0fzIVMzb58w==
- dependencies:
- esrecurse "^4.1.0"
- estraverse "^4.1.1"
-
-eslint-utils@^1.4.3:
- version "1.4.3"
- resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-1.4.3.tgz#74fec7c54d0776b6f67e0251040b5806564e981f"
- integrity sha512-fbBN5W2xdY45KulGXmLHZ3c3FHfVYmKg0IrAKGOkT/464PQsx2UeIzfz1RmEci+KLm1bBaAzZAh8+/E+XAeZ8Q==
- dependencies:
- eslint-visitor-keys "^1.1.0"
-
-eslint-visitor-keys@^1.1.0:
- version "1.3.0"
- resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz#30ebd1ef7c2fdff01c3a4f151044af25fab0523e"
- integrity sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==
-
-eslint@^6.8.0:
- version "6.8.0"
- resolved "https://registry.yarnpkg.com/eslint/-/eslint-6.8.0.tgz#62262d6729739f9275723824302fb227c8c93ffb"
- integrity sha512-K+Iayyo2LtyYhDSYwz5D5QdWw0hCacNzyq1Y821Xna2xSJj7cijoLLYmLxTQgcgZ9mC61nryMy9S7GRbYpI5Ig==
- dependencies:
- "@babel/code-frame" "^7.0.0"
- ajv "^6.10.0"
- chalk "^2.1.0"
- cross-spawn "^6.0.5"
- debug "^4.0.1"
- doctrine "^3.0.0"
- eslint-scope "^5.0.0"
- eslint-utils "^1.4.3"
- eslint-visitor-keys "^1.1.0"
- espree "^6.1.2"
- esquery "^1.0.1"
- esutils "^2.0.2"
- file-entry-cache "^5.0.1"
- functional-red-black-tree "^1.0.1"
- glob-parent "^5.0.0"
- globals "^12.1.0"
- ignore "^4.0.6"
- import-fresh "^3.0.0"
- imurmurhash "^0.1.4"
- inquirer "^7.0.0"
- is-glob "^4.0.0"
- js-yaml "^3.13.1"
- json-stable-stringify-without-jsonify "^1.0.1"
- levn "^0.3.0"
- lodash "^4.17.14"
- minimatch "^3.0.4"
- mkdirp "^0.5.1"
- natural-compare "^1.4.0"
- optionator "^0.8.3"
- progress "^2.0.0"
- regexpp "^2.0.1"
- semver "^6.1.2"
- strip-ansi "^5.2.0"
- strip-json-comments "^3.0.1"
- table "^5.2.3"
- text-table "^0.2.0"
- v8-compile-cache "^2.0.3"
-
-esm@^3.2.25:
- version "3.2.25"
- resolved "https://registry.yarnpkg.com/esm/-/esm-3.2.25.tgz#342c18c29d56157688ba5ce31f8431fbb795cc10"
- integrity sha512-U1suiZ2oDVWv4zPO56S0NcR5QriEahGtdN2OR6FiOG4WJvcjBVFB0qI4+eKoWFH483PKGuLuu6V8Z4T5g63UVA==
-
-espree@^6.1.2:
- version "6.2.1"
- resolved "https://registry.yarnpkg.com/espree/-/espree-6.2.1.tgz#77fc72e1fd744a2052c20f38a5b575832e82734a"
- integrity sha512-ysCxRQY3WaXJz9tdbWOwuWr5Y/XrPTGX9Kiz3yoUXwW0VZ4w30HTkQLaGx/+ttFjF8i+ACbArnB4ce68a9m5hw==
- dependencies:
- acorn "^7.1.1"
- acorn-jsx "^5.2.0"
- eslint-visitor-keys "^1.1.0"
-
-esprima@^4.0.0:
- version "4.0.1"
- resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71"
- integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==
-
-esquery@^1.0.1:
- version "1.3.1"
- resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.3.1.tgz#b78b5828aa8e214e29fb74c4d5b752e1c033da57"
- integrity sha512-olpvt9QG0vniUBZspVRN6lwB7hOZoTRtT+jzR+tS4ffYx2mzbw+z0XCOk44aaLYKApNX5nMm+E+P6o25ip/DHQ==
- dependencies:
- estraverse "^5.1.0"
-
-esrecurse@^4.1.0:
- version "4.2.1"
- resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.2.1.tgz#007a3b9fdbc2b3bb87e4879ea19c92fdbd3942cf"
- integrity sha512-64RBB++fIOAXPw3P9cy89qfMlvZEXZkqqJkjqqXIvzP5ezRZjW+lPWjw35UX/3EhUPFYbg5ER4JYgDw4007/DQ==
- dependencies:
- estraverse "^4.1.0"
-
-estraverse@^4.1.0, estraverse@^4.1.1:
- version "4.3.0"
- resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d"
- integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==
-
-estraverse@^5.1.0:
- version "5.2.0"
- resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.2.0.tgz#307df42547e6cc7324d3cf03c155d5cdb8c53880"
- integrity sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ==
-
-esutils@^2.0.2:
- version "2.0.3"
- resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64"
- integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==
-
-etag@~1.8.1:
- version "1.8.1"
- resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887"
- integrity sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc=
-
-exit-on-epipe@~1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/exit-on-epipe/-/exit-on-epipe-1.0.1.tgz#0bdd92e87d5285d267daa8171d0eb06159689692"
- integrity sha512-h2z5mrROTxce56S+pnvAV890uu7ls7f1kEvVGJbw1OlFH3/mlJ5bkXu0KRyW94v37zzHPiUd55iLn3DA7TjWpw==
-
-expand-brackets@^2.1.4:
- version "2.1.4"
- resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622"
- integrity sha1-t3c14xXOMPa27/D4OwQVGiJEliI=
- dependencies:
- debug "^2.3.3"
- define-property "^0.2.5"
- extend-shallow "^2.0.1"
- posix-character-classes "^0.1.0"
- regex-not "^1.0.0"
- snapdragon "^0.8.1"
- to-regex "^3.0.1"
-
-expand-tilde@^2.0.0, expand-tilde@^2.0.2:
- version "2.0.2"
- resolved "https://registry.yarnpkg.com/expand-tilde/-/expand-tilde-2.0.2.tgz#97e801aa052df02454de46b02bf621642cdc8502"
- integrity sha1-l+gBqgUt8CRU3kawK/YhZCzchQI=
- dependencies:
- homedir-polyfill "^1.0.1"
-
-express-fileupload@^1.1.9:
- version "1.1.9"
- resolved "https://registry.yarnpkg.com/express-fileupload/-/express-fileupload-1.1.9.tgz#e798e9318394ed5083e56217ad6cda576da465d2"
- integrity sha512-f2w0aoe7lj3NeD8a4MXmYQsqir3Z66I08l9AKq04QbFUAjeZNmPwTlR5Lx2NGwSu/PslsAjGC38MWzo5tTjoBg==
- dependencies:
- busboy "^0.3.1"
-
-express@^4.17.1:
- version "4.17.1"
- resolved "https://registry.yarnpkg.com/express/-/express-4.17.1.tgz#4491fc38605cf51f8629d39c2b5d026f98a4c134"
- integrity sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g==
- dependencies:
- accepts "~1.3.7"
- array-flatten "1.1.1"
- body-parser "1.19.0"
- content-disposition "0.5.3"
- content-type "~1.0.4"
- cookie "0.4.0"
- cookie-signature "1.0.6"
- debug "2.6.9"
- depd "~1.1.2"
- encodeurl "~1.0.2"
- escape-html "~1.0.3"
- etag "~1.8.1"
- finalhandler "~1.1.2"
- fresh "0.5.2"
- merge-descriptors "1.0.1"
- methods "~1.1.2"
- on-finished "~2.3.0"
- parseurl "~1.3.3"
- path-to-regexp "0.1.7"
- proxy-addr "~2.0.5"
- qs "6.7.0"
- range-parser "~1.2.1"
- safe-buffer "5.1.2"
- send "0.17.1"
- serve-static "1.14.1"
- setprototypeof "1.1.1"
- statuses "~1.5.0"
- type-is "~1.6.18"
- utils-merge "1.0.1"
- vary "~1.1.2"
-
-extend-shallow@^2.0.1:
- version "2.0.1"
- resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f"
- integrity sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=
- dependencies:
- is-extendable "^0.1.0"
-
-extend-shallow@^3.0.0, extend-shallow@^3.0.2:
- version "3.0.2"
- resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8"
- integrity sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg=
- dependencies:
- assign-symbols "^1.0.0"
- is-extendable "^1.0.1"
-
-extend@^3.0.0:
- version "3.0.2"
- resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa"
- integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==
-
-external-editor@^3.0.3:
- version "3.1.0"
- resolved "https://registry.yarnpkg.com/external-editor/-/external-editor-3.1.0.tgz#cb03f740befae03ea4d283caed2741a83f335495"
- integrity sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==
- dependencies:
- chardet "^0.7.0"
- iconv-lite "^0.4.24"
- tmp "^0.0.33"
-
-extglob@^2.0.4:
- version "2.0.4"
- resolved "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543"
- integrity sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw==
- dependencies:
- array-unique "^0.3.2"
- define-property "^1.0.0"
- expand-brackets "^2.1.4"
- extend-shallow "^2.0.1"
- fragment-cache "^0.2.1"
- regex-not "^1.0.0"
- snapdragon "^0.8.1"
- to-regex "^3.0.1"
-
-fast-deep-equal@^3.1.1:
- version "3.1.3"
- resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525"
- integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==
-
-fast-json-stable-stringify@^2.0.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633"
- integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==
-
-fast-levenshtein@~2.0.6:
- version "2.0.6"
- resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917"
- integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=
-
-figures@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/figures/-/figures-2.0.0.tgz#3ab1a2d2a62c8bfb431a0c94cb797a2fce27c962"
- integrity sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI=
- dependencies:
- escape-string-regexp "^1.0.5"
-
-figures@^3.0.0:
- version "3.2.0"
- resolved "https://registry.yarnpkg.com/figures/-/figures-3.2.0.tgz#625c18bd293c604dc4a8ddb2febf0c88341746af"
- integrity sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==
- dependencies:
- escape-string-regexp "^1.0.5"
-
-file-entry-cache@^5.0.1:
- version "5.0.1"
- resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-5.0.1.tgz#ca0f6efa6dd3d561333fb14515065c2fafdf439c"
- integrity sha512-bCg29ictuBaKUwwArK4ouCaqDgLZcysCFLmM/Yn/FDoqndh/9vNuQfXRDvTuXKLxfD/JtZQGKFT8MGcJBK644g==
- dependencies:
- flat-cache "^2.0.1"
-
-fill-range@^4.0.0:
- version "4.0.0"
- resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7"
- integrity sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc=
- dependencies:
- extend-shallow "^2.0.1"
- is-number "^3.0.0"
- repeat-string "^1.6.1"
- to-regex-range "^2.1.0"
-
-fill-range@^7.0.1:
- version "7.0.1"
- resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40"
- integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==
- dependencies:
- to-regex-range "^5.0.1"
-
-finalhandler@~1.1.2:
- version "1.1.2"
- resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.1.2.tgz#b7e7d000ffd11938d0fdb053506f6ebabe9f587d"
- integrity sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA==
- dependencies:
- debug "2.6.9"
- encodeurl "~1.0.2"
- escape-html "~1.0.3"
- on-finished "~2.3.0"
- parseurl "~1.3.3"
- statuses "~1.5.0"
- unpipe "~1.0.0"
-
-find-up@^2.0.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7"
- integrity sha1-RdG35QbHF93UgndaK3eSCjwMV6c=
- dependencies:
- locate-path "^2.0.0"
-
-find-up@^4.1.0:
- version "4.1.0"
- resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19"
- integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==
- dependencies:
- locate-path "^5.0.0"
- path-exists "^4.0.0"
-
-findup-sync@^3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/findup-sync/-/findup-sync-3.0.0.tgz#17b108f9ee512dfb7a5c7f3c8b27ea9e1a9c08d1"
- integrity sha512-YbffarhcicEhOrm4CtrwdKBdCuz576RLdhJDsIfvNtxUuhdRet1qZcsMjqbePtAseKdAnDyM/IyXbu7PRPRLYg==
- dependencies:
- detect-file "^1.0.0"
- is-glob "^4.0.0"
- micromatch "^3.0.4"
- resolve-dir "^1.0.1"
-
-fined@^1.0.1:
- version "1.2.0"
- resolved "https://registry.yarnpkg.com/fined/-/fined-1.2.0.tgz#d00beccf1aa2b475d16d423b0238b713a2c4a37b"
- integrity sha512-ZYDqPLGxDkDhDZBjZBb+oD1+j0rA4E0pXY50eplAAOPg2N/gUBSSk5IM1/QhPfyVo19lJ+CvXpqfvk+b2p/8Ng==
- dependencies:
- expand-tilde "^2.0.2"
- is-plain-object "^2.0.3"
- object.defaults "^1.1.0"
- object.pick "^1.2.0"
- parse-filepath "^1.0.1"
-
-flagged-respawn@^1.0.0:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/flagged-respawn/-/flagged-respawn-1.0.1.tgz#e7de6f1279ddd9ca9aac8a5971d618606b3aab41"
- integrity sha512-lNaHNVymajmk0OJMBn8fVUAU1BtDeKIqKoVhk4xAALB57aALg6b4W0MfJ/cUE0g9YBXy5XhSlPIpYIJ7HaY/3Q==
-
-flat-cache@^2.0.1:
- version "2.0.1"
- resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-2.0.1.tgz#5d296d6f04bda44a4630a301413bdbc2ec085ec0"
- integrity sha512-LoQe6yDuUMDzQAEH8sgmh4Md6oZnc/7PjtwjNFSzveXqSHt6ka9fPBuso7IGf9Rz4uqnSnWiFH2B/zj24a5ReA==
- dependencies:
- flatted "^2.0.0"
- rimraf "2.6.3"
- write "1.0.3"
-
-flatted@^2.0.0:
- version "2.0.2"
- resolved "https://registry.yarnpkg.com/flatted/-/flatted-2.0.2.tgz#4575b21e2bcee7434aa9be662f4b7b5f9c2b5138"
- integrity sha512-r5wGx7YeOwNWNlCA0wQ86zKyDLMQr+/RB8xy74M4hTphfmjlijTSSXGuH8rnvKZnfT9i+75zmd8jcKdMR4O6jA==
-
-for-in@^1.0.1, for-in@^1.0.2:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80"
- integrity sha1-gQaNKVqBQuwKxybG4iAMMPttXoA=
-
-for-own@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/for-own/-/for-own-1.0.0.tgz#c63332f415cedc4b04dbfe70cf836494c53cb44b"
- integrity sha1-xjMy9BXO3EsE2/5wz4NklMU8tEs=
- dependencies:
- for-in "^1.0.1"
-
-forwarded@~0.1.2:
- version "0.1.2"
- resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.1.2.tgz#98c23dab1175657b8c0573e8ceccd91b0ff18c84"
- integrity sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ=
-
-fragment-cache@^0.2.1:
- version "0.2.1"
- resolved "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19"
- integrity sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk=
- dependencies:
- map-cache "^0.2.2"
-
-fresh@0.5.2:
- version "0.5.2"
- resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7"
- integrity sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=
-
-fs-constants@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad"
- integrity sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==
-
-fs-minipass@^1.2.7:
- version "1.2.7"
- resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-1.2.7.tgz#ccff8570841e7fe4265693da88936c55aed7f7c7"
- integrity sha512-GWSSJGFy4e9GUeCcbIkED+bgAoFyj7XF1mV8rma3QW4NIqX9Kyx79N/PF61H5udOV3aY1IaMLs6pGbH71nlCTA==
- dependencies:
- minipass "^2.6.0"
-
-fs.realpath@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f"
- integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8=
-
-fsevents@~2.1.2:
- version "2.1.3"
- resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.1.3.tgz#fb738703ae8d2f9fe900c33836ddebee8b97f23e"
- integrity sha512-Auw9a4AxqWpa9GUfj370BMPzzyncfBABW8Mab7BGWBYDj4Isgq+cDKtx0i6u9jcX9pQDnswsaaOTgTmA5pEjuQ==
-
-functional-red-black-tree@^1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327"
- integrity sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=
-
-gauge@~2.7.3:
- version "2.7.4"
- resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7"
- integrity sha1-LANAXHU4w51+s3sxcCLjJfsBi/c=
- dependencies:
- aproba "^1.0.3"
- console-control-strings "^1.0.0"
- has-unicode "^2.0.0"
- object-assign "^4.1.0"
- signal-exit "^3.0.0"
- string-width "^1.0.1"
- strip-ansi "^3.0.1"
- wide-align "^1.1.0"
-
-get-caller-file@^2.0.1:
- version "2.0.5"
- resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e"
- integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==
-
-get-stream@^4.1.0:
- version "4.1.0"
- resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5"
- integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==
- dependencies:
- pump "^3.0.0"
-
-get-stream@^5.1.0:
- version "5.1.0"
- resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-5.1.0.tgz#01203cdc92597f9b909067c3e656cc1f4d3c4dc9"
- integrity sha512-EXr1FOzrzTfGeL0gQdeFEvOMm2mzMOglyiOXSTpPC+iAjAKftbr3jpCMWynogwYnM+eSj9sHGc6wjIcDvYiygw==
- dependencies:
- pump "^3.0.0"
-
-get-value@^2.0.3, get-value@^2.0.6:
- version "2.0.6"
- resolved "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28"
- integrity sha1-3BXKHGcjh8p2vTesCjlbogQqLCg=
-
-getopts@2.2.5:
- version "2.2.5"
- resolved "https://registry.yarnpkg.com/getopts/-/getopts-2.2.5.tgz#67a0fe471cacb9c687d817cab6450b96dde8313b"
- integrity sha512-9jb7AW5p3in+IiJWhQiZmmwkpLaR/ccTWdWQCtZM66HJcHHLegowh4q4tSD7gouUyeNvFWRavfK9GXosQHDpFA==
-
-glob-parent@^5.0.0, glob-parent@~5.1.0:
- version "5.1.1"
- resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.1.tgz#b6c1ef417c4e5663ea498f1c45afac6916bbc229"
- integrity sha512-FnI+VGOpnlGHWZxthPGR+QhR78fuiK0sNLkHQv+bL9fQi57lNNdquIbna/WrfROrolq8GK5Ek6BiMwqL/voRYQ==
- dependencies:
- is-glob "^4.0.1"
-
-glob@^7.1.3:
- version "7.1.6"
- resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6"
- integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==
- dependencies:
- fs.realpath "^1.0.0"
- inflight "^1.0.4"
- inherits "2"
- minimatch "^3.0.4"
- once "^1.3.0"
- path-is-absolute "^1.0.0"
-
-glob@^7.1.4:
- version "7.1.7"
- resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.7.tgz#3b193e9233f01d42d0b3f78294bbeeb418f94a90"
- integrity sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==
- dependencies:
- fs.realpath "^1.0.0"
- inflight "^1.0.4"
- inherits "2"
- minimatch "^3.0.4"
- once "^1.3.0"
- path-is-absolute "^1.0.0"
-
-global-dirs@^2.0.1:
- version "2.0.1"
- resolved "https://registry.yarnpkg.com/global-dirs/-/global-dirs-2.0.1.tgz#acdf3bb6685bcd55cb35e8a052266569e9469201"
- integrity sha512-5HqUqdhkEovj2Of/ms3IeS/EekcO54ytHRLV4PEY2rhRwrHXLQjeVEES0Lhka0xwNDtGYn58wyC4s5+MHsOO6A==
- dependencies:
- ini "^1.3.5"
-
-global-modules@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-1.0.0.tgz#6d770f0eb523ac78164d72b5e71a8877265cc3ea"
- integrity sha512-sKzpEkf11GpOFuw0Zzjzmt4B4UZwjOcG757PPvrfhxcLFbq0wpsgpOqxpxtxFiCG4DtG93M6XRVbF2oGdev7bg==
- dependencies:
- global-prefix "^1.0.1"
- is-windows "^1.0.1"
- resolve-dir "^1.0.0"
-
-global-prefix@^1.0.1:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-1.0.2.tgz#dbf743c6c14992593c655568cb66ed32c0122ebe"
- integrity sha1-2/dDxsFJklk8ZVVoy2btMsASLr4=
- dependencies:
- expand-tilde "^2.0.2"
- homedir-polyfill "^1.0.1"
- ini "^1.3.4"
- is-windows "^1.0.1"
- which "^1.2.14"
-
-globals@^12.1.0:
- version "12.4.0"
- resolved "https://registry.yarnpkg.com/globals/-/globals-12.4.0.tgz#a18813576a41b00a24a97e7f815918c2e19925f8"
- integrity sha512-BWICuzzDvDoH54NHKCseDanAhE3CeDorgDL5MT6LMXXj2WCnd9UC2szdk4AWLfjdgNBCXLUanXYcpBBKOSWGwg==
- dependencies:
- type-fest "^0.8.1"
-
-got@^9.6.0:
- version "9.6.0"
- resolved "https://registry.yarnpkg.com/got/-/got-9.6.0.tgz#edf45e7d67f99545705de1f7bbeeeb121765ed85"
- integrity sha512-R7eWptXuGYxwijs0eV+v3o6+XH1IqVK8dJOEecQfTmkncw9AV4dcw/Dhxi8MdlqPthxxpZyizMzyg8RTmEsG+Q==
- dependencies:
- "@sindresorhus/is" "^0.14.0"
- "@szmarczak/http-timer" "^1.1.2"
- cacheable-request "^6.0.0"
- decompress-response "^3.3.0"
- duplexer3 "^0.1.4"
- get-stream "^4.1.0"
- lowercase-keys "^1.0.1"
- mimic-response "^1.0.1"
- p-cancelable "^1.0.0"
- to-readable-stream "^1.0.0"
- url-parse-lax "^3.0.0"
-
-graceful-fs@^4.1.15, graceful-fs@^4.1.2:
- version "4.2.4"
- resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.4.tgz#2256bde14d3632958c465ebc96dc467ca07a29fb"
- integrity sha512-WjKPNJF79dtJAVniUlGGWHYGz2jWxT6VhN/4m1NdkbZ2nOsEF+cI1Edgql5zCRhs/VsQYRvrXctxktVXZUkixw==
-
-graceful-fs@^4.2.0:
- version "4.2.8"
- resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.8.tgz#e412b8d33f5e006593cbd3cee6df9f2cebbe802a"
- integrity sha512-qkIilPUYcNhJpd33n0GBXTB1MMPp14TxEsEs0pTrsSVucApsYzW5V+Q8Qxhik6KU3evy+qkAAowTByymK0avdg==
-
-gravatar@^1.8.0:
- version "1.8.1"
- resolved "https://registry.yarnpkg.com/gravatar/-/gravatar-1.8.1.tgz#743bbdf3185c3433172e00e0e6ff5f6b30c58997"
- integrity sha512-18frnfVp4kRYkM/eQW32Mfwlsh/KMbwd3S6nkescBZHioobflFEFHsvM71qZAkUSLNifyi2uoI+TuGxJAnQIOA==
- dependencies:
- blueimp-md5 "^2.16.0"
- email-validator "^2.0.4"
- querystring "0.2.0"
- yargs "^15.4.1"
-
-has-flag@^3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd"
- integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0=
-
-has-flag@^4.0.0:
- version "4.0.0"
- resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b"
- integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==
-
-has-unicode@^2.0.0:
- version "2.0.1"
- resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9"
- integrity sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk=
-
-has-value@^0.3.1:
- version "0.3.1"
- resolved "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f"
- integrity sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8=
- dependencies:
- get-value "^2.0.3"
- has-values "^0.1.4"
- isobject "^2.0.0"
-
-has-value@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177"
- integrity sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc=
- dependencies:
- get-value "^2.0.6"
- has-values "^1.0.0"
- isobject "^3.0.0"
-
-has-values@^0.1.4:
- version "0.1.4"
- resolved "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771"
- integrity sha1-bWHeldkd/Km5oCCJrThL/49it3E=
-
-has-values@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f"
- integrity sha1-lbC2P+whRmGab+V/51Yo1aOe/k8=
- dependencies:
- is-number "^3.0.0"
- kind-of "^4.0.0"
-
-has-yarn@^2.1.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/has-yarn/-/has-yarn-2.1.0.tgz#137e11354a7b5bf11aa5cb649cf0c6f3ff2b2e77"
- integrity sha512-UqBRqi4ju7T+TqGNdqAO0PaSVGsDGJUBQvk9eUWNGRY1CFGDzYhLWoM7JQEemnlvVcv/YEmc2wNW8BC24EnUsw==
-
-homedir-polyfill@^1.0.1:
- version "1.0.3"
- resolved "https://registry.yarnpkg.com/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz#743298cef4e5af3e194161fbadcc2151d3a058e8"
- integrity sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA==
- dependencies:
- parse-passwd "^1.0.0"
-
-http-cache-semantics@^4.0.0:
- version "4.1.0"
- resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.1.0.tgz#49e91c5cbf36c9b94bcfcd71c23d5249ec74e390"
- integrity sha512-carPklcUh7ROWRK7Cv27RPtdhYhUsela/ue5/jKzjegVvXDqM2ILE9Q2BGn9JZJh1g87cp56su/FgQSzcWS8cQ==
-
-http-errors@1.7.2:
- version "1.7.2"
- resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.2.tgz#4f5029cf13239f31036e5b2e55292bcfbcc85c8f"
- integrity sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg==
- dependencies:
- depd "~1.1.2"
- inherits "2.0.3"
- setprototypeof "1.1.1"
- statuses ">= 1.5.0 < 2"
- toidentifier "1.0.0"
-
-http-errors@~1.7.2:
- version "1.7.3"
- resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.3.tgz#6c619e4f9c60308c38519498c14fbb10aacebb06"
- integrity sha512-ZTTX0MWrsQ2ZAhA1cejAwDLycFsd7I7nVtnkT3Ol0aqodaKW+0CTZDQ1uBv5whptCnc8e8HeRRJxRs0kmm/Qfw==
- dependencies:
- depd "~1.1.2"
- inherits "2.0.4"
- setprototypeof "1.1.1"
- statuses ">= 1.5.0 < 2"
- toidentifier "1.0.0"
-
-iconv-lite@0.4.24, iconv-lite@^0.4.24, iconv-lite@^0.4.4:
- version "0.4.24"
- resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b"
- integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==
- dependencies:
- safer-buffer ">= 2.1.2 < 3"
-
-ieee754@^1.1.13:
- version "1.2.1"
- resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352"
- integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==
-
-ignore-by-default@^1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/ignore-by-default/-/ignore-by-default-1.0.1.tgz#48ca6d72f6c6a3af00a9ad4ae6876be3889e2b09"
- integrity sha1-SMptcvbGo68Aqa1K5odr44ieKwk=
-
-ignore-walk@^3.0.1:
- version "3.0.3"
- resolved "https://registry.yarnpkg.com/ignore-walk/-/ignore-walk-3.0.3.tgz#017e2447184bfeade7c238e4aefdd1e8f95b1e37"
- integrity sha512-m7o6xuOaT1aqheYHKf8W6J5pYH85ZI9w077erOzLje3JsB1gkafkAhHHY19dqjulgIZHFm32Cp5uNZgcQqdJKw==
- dependencies:
- minimatch "^3.0.4"
-
-ignore@^4.0.6:
- version "4.0.6"
- resolved "https://registry.yarnpkg.com/ignore/-/ignore-4.0.6.tgz#750e3db5862087b4737ebac8207ffd1ef27b25fc"
- integrity sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==
-
-import-fresh@^3.0.0:
- version "3.2.1"
- resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.2.1.tgz#633ff618506e793af5ac91bf48b72677e15cbe66"
- integrity sha512-6e1q1cnWP2RXD9/keSkxHScg508CdXqXWgWBaETNhyuBFz+kUZlKboh+ISK+bU++DmbHimVBrOz/zzPe0sZ3sQ==
- dependencies:
- parent-module "^1.0.0"
- resolve-from "^4.0.0"
-
-import-lazy@^2.1.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/import-lazy/-/import-lazy-2.1.0.tgz#05698e3d45c88e8d7e9d92cb0584e77f096f3e43"
- integrity sha1-BWmOPUXIjo1+nZLLBYTnfwlvPkM=
-
-imurmurhash@^0.1.4:
- version "0.1.4"
- resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea"
- integrity sha1-khi5srkoojixPcT7a21XbyMUU+o=
-
-inflight@^1.0.4:
- version "1.0.6"
- resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9"
- integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=
- dependencies:
- once "^1.3.0"
- wrappy "1"
-
-inherits@2, inherits@2.0.4, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.3, inherits@~2.0.4:
- version "2.0.4"
- resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
- integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
-
-inherits@2.0.3:
- version "2.0.3"
- resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de"
- integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=
-
-ini@^1.3.4, ini@^1.3.5, ini@~1.3.0:
- version "1.3.8"
- resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c"
- integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==
-
-inquirer@^7.0.0:
- version "7.3.3"
- resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-7.3.3.tgz#04d176b2af04afc157a83fd7c100e98ee0aad003"
- integrity sha512-JG3eIAj5V9CwcGvuOmoo6LB9kbAYT8HXffUl6memuszlwDC/qvFAJw49XJ5NROSFNPxp3iQg1GqkFhaY/CR0IA==
- dependencies:
- ansi-escapes "^4.2.1"
- chalk "^4.1.0"
- cli-cursor "^3.1.0"
- cli-width "^3.0.0"
- external-editor "^3.0.3"
- figures "^3.0.0"
- lodash "^4.17.19"
- mute-stream "0.0.8"
- run-async "^2.4.0"
- rxjs "^6.6.0"
- string-width "^4.1.0"
- strip-ansi "^6.0.0"
- through "^2.3.6"
-
-interpret@^2.0.0:
- version "2.2.0"
- resolved "https://registry.yarnpkg.com/interpret/-/interpret-2.2.0.tgz#1a78a0b5965c40a5416d007ad6f50ad27c417df9"
- integrity sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==
-
-ipaddr.js@1.9.1:
- version "1.9.1"
- resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3"
- integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==
-
-is-absolute@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/is-absolute/-/is-absolute-1.0.0.tgz#395e1ae84b11f26ad1795e73c17378e48a301576"
- integrity sha512-dOWoqflvcydARa360Gvv18DZ/gRuHKi2NU/wU5X1ZFzdYfH29nkiNZsF3mp4OJ3H4yo9Mx8A/uAGNzpzPN3yBA==
- dependencies:
- is-relative "^1.0.0"
- is-windows "^1.0.1"
-
-is-accessor-descriptor@^0.1.6:
- version "0.1.6"
- resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6"
- integrity sha1-qeEss66Nh2cn7u84Q/igiXtcmNY=
- dependencies:
- kind-of "^3.0.2"
-
-is-accessor-descriptor@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656"
- integrity sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==
- dependencies:
- kind-of "^6.0.0"
-
-is-arrayish@^0.2.1:
- version "0.2.1"
- resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d"
- integrity sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=
-
-is-binary-path@~2.1.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09"
- integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==
- dependencies:
- binary-extensions "^2.0.0"
-
-is-buffer@^1.1.5:
- version "1.1.6"
- resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be"
- integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==
-
-is-ci@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/is-ci/-/is-ci-2.0.0.tgz#6bc6334181810e04b5c22b3d589fdca55026404c"
- integrity sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w==
- dependencies:
- ci-info "^2.0.0"
-
-is-data-descriptor@^0.1.4:
- version "0.1.4"
- resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56"
- integrity sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y=
- dependencies:
- kind-of "^3.0.2"
-
-is-data-descriptor@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7"
- integrity sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==
- dependencies:
- kind-of "^6.0.0"
-
-is-descriptor@^0.1.0:
- version "0.1.6"
- resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca"
- integrity sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg==
- dependencies:
- is-accessor-descriptor "^0.1.6"
- is-data-descriptor "^0.1.4"
- kind-of "^5.0.0"
-
-is-descriptor@^1.0.0, is-descriptor@^1.0.2:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec"
- integrity sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==
- dependencies:
- is-accessor-descriptor "^1.0.0"
- is-data-descriptor "^1.0.0"
- kind-of "^6.0.2"
-
-is-extendable@^0.1.0, is-extendable@^0.1.1:
- version "0.1.1"
- resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89"
- integrity sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik=
-
-is-extendable@^1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4"
- integrity sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==
- dependencies:
- is-plain-object "^2.0.4"
-
-is-extglob@^2.1.1:
- version "2.1.1"
- resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2"
- integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=
-
-is-fullwidth-code-point@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb"
- integrity sha1-754xOG8DGn8NZDr4L95QxFfvAMs=
- dependencies:
- number-is-nan "^1.0.0"
-
-is-fullwidth-code-point@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f"
- integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=
-
-is-fullwidth-code-point@^3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d"
- integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==
-
-is-glob@^4.0.0, is-glob@^4.0.1, is-glob@~4.0.1:
- version "4.0.1"
- resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc"
- integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==
- dependencies:
- is-extglob "^2.1.1"
-
-is-installed-globally@^0.3.1:
- version "0.3.2"
- resolved "https://registry.yarnpkg.com/is-installed-globally/-/is-installed-globally-0.3.2.tgz#fd3efa79ee670d1187233182d5b0a1dd00313141"
- integrity sha512-wZ8x1js7Ia0kecP/CHM/3ABkAmujX7WPvQk6uu3Fly/Mk44pySulQpnHG46OMjHGXApINnV4QhY3SWnECO2z5g==
- dependencies:
- global-dirs "^2.0.1"
- is-path-inside "^3.0.1"
-
-is-npm@^4.0.0:
- version "4.0.0"
- resolved "https://registry.yarnpkg.com/is-npm/-/is-npm-4.0.0.tgz#c90dd8380696df87a7a6d823c20d0b12bbe3c84d"
- integrity sha512-96ECIfh9xtDDlPylNPXhzjsykHsMJZ18ASpaWzQyBr4YRTcVjUvzaHayDAES2oU/3KpljhHUjtSRNiDwi0F0ig==
-
-is-number@^3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195"
- integrity sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU=
- dependencies:
- kind-of "^3.0.2"
-
-is-number@^7.0.0:
- version "7.0.0"
- resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b"
- integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==
-
-is-obj@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-2.0.0.tgz#473fb05d973705e3fd9620545018ca8e22ef4982"
- integrity sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==
-
-is-path-inside@^3.0.1:
- version "3.0.2"
- resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.2.tgz#f5220fc82a3e233757291dddc9c5877f2a1f3017"
- integrity sha512-/2UGPSgmtqwo1ktx8NDHjuPwZWmHhO+gj0f93EkhLB5RgW9RZevWYYlIkS6zePc6U2WpOdQYIwHe9YC4DWEBVg==
-
-is-plain-object@^2.0.3, is-plain-object@^2.0.4:
- version "2.0.4"
- resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677"
- integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==
- dependencies:
- isobject "^3.0.1"
-
-is-relative@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/is-relative/-/is-relative-1.0.0.tgz#a1bb6935ce8c5dba1e8b9754b9b2dcc020e2260d"
- integrity sha512-Kw/ReK0iqwKeu0MITLFuj0jbPAmEiOsIwyIXvvbfa6QfmN9pkD1M+8pdk7Rl/dTKbH34/XBFMbgD4iMJhLQbGA==
- dependencies:
- is-unc-path "^1.0.0"
-
-is-stream@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.0.tgz#bde9c32680d6fae04129d6ac9d921ce7815f78e3"
- integrity sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw==
-
-is-typedarray@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a"
- integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=
-
-is-unc-path@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/is-unc-path/-/is-unc-path-1.0.0.tgz#d731e8898ed090a12c352ad2eaed5095ad322c9d"
- integrity sha512-mrGpVd0fs7WWLfVsStvgF6iEJnbjDFZh9/emhRDcGWTduTfNHd9CHeUwH3gYIjdbwo4On6hunkztwOaAw0yllQ==
- dependencies:
- unc-path-regex "^0.1.2"
-
-is-windows@^1.0.1, is-windows@^1.0.2:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d"
- integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==
-
-is-yarn-global@^0.3.0:
- version "0.3.0"
- resolved "https://registry.yarnpkg.com/is-yarn-global/-/is-yarn-global-0.3.0.tgz#d502d3382590ea3004893746754c89139973e232"
- integrity sha512-VjSeb/lHmkoyd8ryPVIKvOCn4D1koMqY+vqyjjUfc3xyKtP4dYOxM44sZrnqQSzSds3xyOrUTLTC9LVCVgLngw==
-
-isarray@1.0.0, isarray@~1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11"
- integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=
-
-isexe@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
- integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=
-
-isobject@^2.0.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89"
- integrity sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk=
- dependencies:
- isarray "1.0.0"
-
-isobject@^3.0.0, isobject@^3.0.1:
- version "3.0.1"
- resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df"
- integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8=
-
-js-tokens@^4.0.0:
- version "4.0.0"
- resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499"
- integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==
-
-js-yaml@^3.13.1:
- version "3.14.0"
- resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.0.tgz#a7a34170f26a21bb162424d8adacb4113a69e482"
- integrity sha512-/4IbIeHcD9VMHFqDR/gQ7EdZdLimOvW2DdcxFjdyyZ9NsbS+ccrXqVWDtab/lRl5AlUqmpBx8EhPaWR+OtY17A==
- dependencies:
- argparse "^1.0.7"
- esprima "^4.0.0"
-
-json-buffer@3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.0.tgz#5b1f397afc75d677bde8bcfc0e47e1f9a3d9a898"
- integrity sha1-Wx85evx11ne96Lz8Dkfh+aPZqJg=
-
-json-parse-better-errors@^1.0.1:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9"
- integrity sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==
-
-json-schema-ref-parser@^8.0.0:
- version "8.0.0"
- resolved "https://registry.yarnpkg.com/json-schema-ref-parser/-/json-schema-ref-parser-8.0.0.tgz#7c758fac2cf822c05e837abd0a13f8fa2c15ffd4"
- integrity sha512-2P4icmNkZLrBr6oa5gSZaDSol/oaBHYkoP/8dsw63E54NnHGRhhiFuy9yFoxPuSm+uHKmeGxAAWMDF16SCHhcQ==
- dependencies:
- "@apidevtools/json-schema-ref-parser" "8.0.0"
-
-json-schema-traverse@^0.4.1:
- version "0.4.1"
- resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660"
- integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==
-
-json-stable-stringify-without-jsonify@^1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651"
- integrity sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=
-
-json5@^2.1.1:
- version "2.1.3"
- resolved "https://registry.yarnpkg.com/json5/-/json5-2.1.3.tgz#c9b0f7fa9233bfe5807fe66fcf3a5617ed597d43"
- integrity sha512-KXPvOm8K9IJKFM0bmdn8QXh7udDh1g/giieX0NLCaMnb4hEiVFqnop2ImTXCc5e0/oHz3LTqmHGtExn5hfMkOA==
- dependencies:
- minimist "^1.2.5"
-
-jsonwebtoken@^8.5.1:
- version "8.5.1"
- resolved "https://registry.yarnpkg.com/jsonwebtoken/-/jsonwebtoken-8.5.1.tgz#00e71e0b8df54c2121a1f26137df2280673bcc0d"
- integrity sha512-XjwVfRS6jTMsqYs0EsuJ4LGxXV14zQybNd4L2r0UvbVnSF9Af8x7p5MzbJ90Ioz/9TI41/hTCvznF/loiSzn8w==
- dependencies:
- jws "^3.2.2"
- lodash.includes "^4.3.0"
- lodash.isboolean "^3.0.3"
- lodash.isinteger "^4.0.4"
- lodash.isnumber "^3.0.3"
- lodash.isplainobject "^4.0.6"
- lodash.isstring "^4.0.1"
- lodash.once "^4.0.0"
- ms "^2.1.1"
- semver "^5.6.0"
-
-jwa@^1.4.1:
- version "1.4.1"
- resolved "https://registry.yarnpkg.com/jwa/-/jwa-1.4.1.tgz#743c32985cb9e98655530d53641b66c8645b039a"
- integrity sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==
- dependencies:
- buffer-equal-constant-time "1.0.1"
- ecdsa-sig-formatter "1.0.11"
- safe-buffer "^5.0.1"
-
-jws@^3.2.2:
- version "3.2.2"
- resolved "https://registry.yarnpkg.com/jws/-/jws-3.2.2.tgz#001099f3639468c9414000e99995fa52fb478304"
- integrity sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==
- dependencies:
- jwa "^1.4.1"
- safe-buffer "^5.0.1"
-
-keyv@^3.0.0:
- version "3.1.0"
- resolved "https://registry.yarnpkg.com/keyv/-/keyv-3.1.0.tgz#ecc228486f69991e49e9476485a5be1e8fc5c4d9"
- integrity sha512-9ykJ/46SN/9KPM/sichzQ7OvXyGDYKGTaDlKMGCAlg2UK8KRy4jb0d8sFc+0Tt0YYnThq8X2RZgCg74RPxgcVA==
- dependencies:
- json-buffer "3.0.0"
-
-kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0:
- version "3.2.2"
- resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64"
- integrity sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=
- dependencies:
- is-buffer "^1.1.5"
-
-kind-of@^4.0.0:
- version "4.0.0"
- resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57"
- integrity sha1-IIE989cSkosgc3hpGkUGb65y3Vc=
- dependencies:
- is-buffer "^1.1.5"
-
-kind-of@^5.0.0:
- version "5.1.0"
- resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d"
- integrity sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw==
-
-kind-of@^6.0.0, kind-of@^6.0.2:
- version "6.0.3"
- resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd"
- integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==
-
-knex@^0.20.13:
- version "0.20.15"
- resolved "https://registry.yarnpkg.com/knex/-/knex-0.20.15.tgz#b7e9e1efd9cf35d214440d9439ed21153574679d"
- integrity sha512-WHmvgfQfxA5v8pyb9zbskxCS1L1WmYgUbwBhHojlkmdouUOazvroUWlCr6KIKMQ8anXZh1NXOOtIUMnxENZG5Q==
- dependencies:
- colorette "1.1.0"
- commander "^4.1.1"
- debug "4.1.1"
- esm "^3.2.25"
- getopts "2.2.5"
- inherits "~2.0.4"
- interpret "^2.0.0"
- liftoff "3.1.0"
- lodash "^4.17.15"
- mkdirp "^0.5.1"
- pg-connection-string "2.1.0"
- tarn "^2.0.0"
- tildify "2.0.0"
- uuid "^7.0.1"
- v8flags "^3.1.3"
-
-latest-version@^5.0.0:
- version "5.1.0"
- resolved "https://registry.yarnpkg.com/latest-version/-/latest-version-5.1.0.tgz#119dfe908fe38d15dfa43ecd13fa12ec8832face"
- integrity sha512-weT+r0kTkRQdCdYCNtkMwWXQTMEswKrFBkm4ckQOMVhhqhIMI1UT2hMj+1iigIhgSZm5gTmrRXBNoGUgaTY1xA==
- dependencies:
- package-json "^6.3.0"
-
-lazystream@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/lazystream/-/lazystream-1.0.0.tgz#f6995fe0f820392f61396be89462407bb77168e4"
- integrity sha1-9plf4PggOS9hOWvolGJAe7dxaOQ=
- dependencies:
- readable-stream "^2.0.5"
-
-levn@^0.3.0, levn@~0.3.0:
- version "0.3.0"
- resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee"
- integrity sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4=
- dependencies:
- prelude-ls "~1.1.2"
- type-check "~0.3.2"
-
-liftoff@3.1.0:
- version "3.1.0"
- resolved "https://registry.yarnpkg.com/liftoff/-/liftoff-3.1.0.tgz#c9ba6081f908670607ee79062d700df062c52ed3"
- integrity sha512-DlIPlJUkCV0Ips2zf2pJP0unEoT1kwYhiiPUGF3s/jtxTCjziNLoiVVh+jqWOWeFi6mmwQ5fNxvAUyPad4Dfog==
- dependencies:
- extend "^3.0.0"
- findup-sync "^3.0.0"
- fined "^1.0.1"
- flagged-respawn "^1.0.0"
- is-plain-object "^2.0.4"
- object.map "^1.0.0"
- rechoir "^0.6.2"
- resolve "^1.1.7"
-
-liquidjs@^9.11.10:
- version "9.15.0"
- resolved "https://registry.yarnpkg.com/liquidjs/-/liquidjs-9.15.0.tgz#03e8c13aeda89801a346c614b0802f320458d0ac"
- integrity sha512-wRPNfMx6X3GGEDqTlBpw7VMo8ylKkzLYTcd7eeaDeYnZyR5BqUgF9tZy3FdPCHV2N/BassGKmlmlpJiRXGFOqg==
-
-load-json-file@^4.0.0:
- version "4.0.0"
- resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-4.0.0.tgz#2f5f45ab91e33216234fd53adab668eb4ec0993b"
- integrity sha1-L19Fq5HjMhYjT9U62rZo607AmTs=
- dependencies:
- graceful-fs "^4.1.2"
- parse-json "^4.0.0"
- pify "^3.0.0"
- strip-bom "^3.0.0"
-
-locate-path@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e"
- integrity sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=
- dependencies:
- p-locate "^2.0.0"
- path-exists "^3.0.0"
-
-locate-path@^5.0.0:
- version "5.0.0"
- resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0"
- integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==
- dependencies:
- p-locate "^4.1.0"
-
-lodash.defaults@^4.2.0:
- version "4.2.0"
- resolved "https://registry.yarnpkg.com/lodash.defaults/-/lodash.defaults-4.2.0.tgz#d09178716ffea4dde9e5fb7b37f6f0802274580c"
- integrity sha1-0JF4cW/+pN3p5ft7N/bwgCJ0WAw=
-
-lodash.difference@^4.5.0:
- version "4.5.0"
- resolved "https://registry.yarnpkg.com/lodash.difference/-/lodash.difference-4.5.0.tgz#9ccb4e505d486b91651345772885a2df27fd017c"
- integrity sha1-nMtOUF1Ia5FlE0V3KIWi3yf9AXw=
-
-lodash.flatten@^4.4.0:
- version "4.4.0"
- resolved "https://registry.yarnpkg.com/lodash.flatten/-/lodash.flatten-4.4.0.tgz#f31c22225a9632d2bbf8e4addbef240aa765a61f"
- integrity sha1-8xwiIlqWMtK7+OSt2+8kCqdlph8=
-
-lodash.includes@^4.3.0:
- version "4.3.0"
- resolved "https://registry.yarnpkg.com/lodash.includes/-/lodash.includes-4.3.0.tgz#60bb98a87cb923c68ca1e51325483314849f553f"
- integrity sha1-YLuYqHy5I8aMoeUTJUgzFISfVT8=
-
-lodash.isboolean@^3.0.3:
- version "3.0.3"
- resolved "https://registry.yarnpkg.com/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz#6c2e171db2a257cd96802fd43b01b20d5f5870f6"
- integrity sha1-bC4XHbKiV82WgC/UOwGyDV9YcPY=
-
-lodash.isinteger@^4.0.4:
- version "4.0.4"
- resolved "https://registry.yarnpkg.com/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz#619c0af3d03f8b04c31f5882840b77b11cd68343"
- integrity sha1-YZwK89A/iwTDH1iChAt3sRzWg0M=
-
-lodash.isnumber@^3.0.3:
- version "3.0.3"
- resolved "https://registry.yarnpkg.com/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz#3ce76810c5928d03352301ac287317f11c0b1ffc"
- integrity sha1-POdoEMWSjQM1IwGsKHMX8RwLH/w=
-
-lodash.isplainobject@^4.0.6:
- version "4.0.6"
- resolved "https://registry.yarnpkg.com/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz#7c526a52d89b45c45cc690b88163be0497f550cb"
- integrity sha1-fFJqUtibRcRcxpC4gWO+BJf1UMs=
-
-lodash.isstring@^4.0.1:
- version "4.0.1"
- resolved "https://registry.yarnpkg.com/lodash.isstring/-/lodash.isstring-4.0.1.tgz#d527dfb5456eca7cc9bb95d5daeaf88ba54a5451"
- integrity sha1-1SfftUVuynzJu5XV2ur4i6VKVFE=
-
-lodash.once@^4.0.0:
- version "4.1.1"
- resolved "https://registry.yarnpkg.com/lodash.once/-/lodash.once-4.1.1.tgz#0dd3971213c7c56df880977d504c88fb471a97ac"
- integrity sha1-DdOXEhPHxW34gJd9UEyI+0cal6w=
-
-lodash.union@^4.6.0:
- version "4.6.0"
- resolved "https://registry.yarnpkg.com/lodash.union/-/lodash.union-4.6.0.tgz#48bb5088409f16f1821666641c44dd1aaae3cd88"
- integrity sha1-SLtQiECfFvGCFmZkHETdGqrjzYg=
-
-lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.19, lodash@^4.17.21:
- version "4.17.21"
- resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
- integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==
-
-lowercase-keys@^1.0.0, lowercase-keys@^1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.1.tgz#6f9e30b47084d971a7c820ff15a6c5167b74c26f"
- integrity sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA==
-
-lowercase-keys@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-2.0.0.tgz#2603e78b7b4b0006cbca2fbcc8a3202558ac9479"
- integrity sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==
-
-make-dir@^3.0.0:
- version "3.1.0"
- resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f"
- integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==
- dependencies:
- semver "^6.0.0"
-
-make-iterator@^1.0.0:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/make-iterator/-/make-iterator-1.0.1.tgz#29b33f312aa8f547c4a5e490f56afcec99133ad6"
- integrity sha512-pxiuXh0iVEq7VM7KMIhs5gxsfxCux2URptUQaXo4iZZJxBAzTPOLE2BumO5dbfVYq/hBJFBR/a1mFDmOx5AGmw==
- dependencies:
- kind-of "^6.0.2"
-
-map-cache@^0.2.0, map-cache@^0.2.2:
- version "0.2.2"
- resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf"
- integrity sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8=
-
-map-visit@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f"
- integrity sha1-7Nyo8TFE5mDxtb1B8S80edmN+48=
- dependencies:
- object-visit "^1.0.0"
-
-media-typer@0.3.0:
- version "0.3.0"
- resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748"
- integrity sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=
-
-merge-descriptors@1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61"
- integrity sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=
-
-methods@~1.1.2:
- version "1.1.2"
- resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee"
- integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=
-
-micromatch@^3.0.4:
- version "3.1.10"
- resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23"
- integrity sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==
- dependencies:
- arr-diff "^4.0.0"
- array-unique "^0.3.2"
- braces "^2.3.1"
- define-property "^2.0.2"
- extend-shallow "^3.0.2"
- extglob "^2.0.4"
- fragment-cache "^0.2.1"
- kind-of "^6.0.2"
- nanomatch "^1.2.9"
- object.pick "^1.3.0"
- regex-not "^1.0.0"
- snapdragon "^0.8.1"
- to-regex "^3.0.2"
-
-mime-db@1.44.0, "mime-db@>= 1.43.0 < 2":
- version "1.44.0"
- resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.44.0.tgz#fa11c5eb0aca1334b4233cb4d52f10c5a6272f92"
- integrity sha512-/NOTfLrsPBVeH7YtFPgsVWveuL+4SjjYxaQ1xtM1KMFj7HdxlBlxeyNLzhyJVx7r4rZGJAZ/6lkKCitSc/Nmpg==
-
-mime-types@~2.1.24:
- version "2.1.27"
- resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.27.tgz#47949f98e279ea53119f5722e0f34e529bec009f"
- integrity sha512-JIhqnCasI9yD+SsmkquHBxTSEuZdQX5BuQnS2Vc7puQQQ+8yiP5AY5uWhpdv4YL4VM5c6iliiYWPgJ/nJQLp7w==
- dependencies:
- mime-db "1.44.0"
-
-mime@1.6.0:
- version "1.6.0"
- resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1"
- integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==
-
-mimic-fn@^2.1.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b"
- integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==
-
-mimic-response@^1.0.0, mimic-response@^1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-1.0.1.tgz#4923538878eef42063cb8a3e3b0798781487ab1b"
- integrity sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==
-
-minimatch@^3.0.4:
- version "3.0.4"
- resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083"
- integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==
- dependencies:
- brace-expansion "^1.1.7"
-
-minimist@^1.2.0, minimist@^1.2.5:
- version "1.2.5"
- resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602"
- integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==
-
-minipass@^2.6.0, minipass@^2.9.0:
- version "2.9.0"
- resolved "https://registry.yarnpkg.com/minipass/-/minipass-2.9.0.tgz#e713762e7d3e32fed803115cf93e04bca9fcc9a6"
- integrity sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg==
- dependencies:
- safe-buffer "^5.1.2"
- yallist "^3.0.0"
-
-minizlib@^1.3.3:
- version "1.3.3"
- resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-1.3.3.tgz#2290de96818a34c29551c8a8d301216bd65a861d"
- integrity sha512-6ZYMOEnmVsdCeTJVE0W9ZD+pVnE8h9Hma/iOwwRDsdQoePpoX56/8B6z3P9VNwppJuBKNRuFDRNRqRWexT9G9Q==
- dependencies:
- minipass "^2.9.0"
-
-mixin-deep@^1.2.0:
- version "1.3.2"
- resolved "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.2.tgz#1120b43dc359a785dce65b55b82e257ccf479566"
- integrity sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA==
- dependencies:
- for-in "^1.0.2"
- is-extendable "^1.0.1"
-
-mkdirp@^0.5.1, mkdirp@^0.5.3, mkdirp@^0.5.5:
- version "0.5.5"
- resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def"
- integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==
- dependencies:
- minimist "^1.2.5"
-
-moment@^2.24.0:
- version "2.27.0"
- resolved "https://registry.yarnpkg.com/moment/-/moment-2.27.0.tgz#8bff4e3e26a236220dfe3e36de756b6ebaa0105d"
- integrity sha512-al0MUK7cpIcglMv3YF13qSgdAIqxHTO7brRtaz3DlSULbqfazqkc5kEjNrLDOM7fsjshoFIihnU8snrP7zUvhQ==
-
-ms@2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8"
- integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=
-
-ms@2.1.1:
- version "2.1.1"
- resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.1.tgz#30a5864eb3ebb0a66f2ebe6d727af06a09d86e0a"
- integrity sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==
-
-ms@^2.1.1:
- version "2.1.2"
- resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"
- integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==
-
-mute-stream@0.0.8:
- version "0.0.8"
- resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.8.tgz#1630c42b2251ff81e2a283de96a5497ea92e5e0d"
- integrity sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==
-
-mysql@^2.18.1:
- version "2.18.1"
- resolved "https://registry.yarnpkg.com/mysql/-/mysql-2.18.1.tgz#2254143855c5a8c73825e4522baf2ea021766717"
- integrity sha512-Bca+gk2YWmqp2Uf6k5NFEurwY/0td0cpebAucFpY/3jhrwrVGuxU2uQFCHjU19SJfje0yQvi+rVWdq78hR5lig==
- dependencies:
- bignumber.js "9.0.0"
- readable-stream "2.3.7"
- safe-buffer "5.1.2"
- sqlstring "2.3.1"
-
-nan@^2.12.1:
- version "2.14.1"
- resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.1.tgz#d7be34dfa3105b91494c3147089315eff8874b01"
- integrity sha512-isWHgVjnFjh2x2yuJ/tj3JbwoHu3UC2dX5G/88Cm24yB6YopVgxvBObDY7n5xW6ExmFhJpSEQqFPvq9zaXc8Jw==
-
-nanomatch@^1.2.9:
- version "1.2.13"
- resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119"
- integrity sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA==
- dependencies:
- arr-diff "^4.0.0"
- array-unique "^0.3.2"
- define-property "^2.0.2"
- extend-shallow "^3.0.2"
- fragment-cache "^0.2.1"
- is-windows "^1.0.2"
- kind-of "^6.0.2"
- object.pick "^1.3.0"
- regex-not "^1.0.0"
- snapdragon "^0.8.1"
- to-regex "^3.0.1"
-
-natural-compare@^1.4.0:
- version "1.4.0"
- resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7"
- integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=
-
-needle@^2.2.1, needle@^2.5.0:
- version "2.5.0"
- resolved "https://registry.yarnpkg.com/needle/-/needle-2.5.0.tgz#e6fc4b3cc6c25caed7554bd613a5cf0bac8c31c0"
- integrity sha512-o/qITSDR0JCyCKEQ1/1bnUXMmznxabbwi/Y4WwJElf+evwJNFNwIDMCCt5IigFVxgeGBJESLohGtIS9gEzo1fA==
- dependencies:
- debug "^3.2.6"
- iconv-lite "^0.4.4"
- sax "^1.2.4"
-
-negotiator@0.6.2:
- version "0.6.2"
- resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.2.tgz#feacf7ccf525a77ae9634436a64883ffeca346fb"
- integrity sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw==
-
-nice-try@^1.0.4:
- version "1.0.5"
- resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366"
- integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==
-
-node-addon-api@^3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-3.0.0.tgz#812446a1001a54f71663bed188314bba07e09247"
- integrity sha512-sSHCgWfJ+Lui/u+0msF3oyCgvdkhxDbkCS6Q8uiJquzOimkJBvX6hl5aSSA7DR1XbMpdM8r7phjcF63sF4rkKg==
-
-node-pre-gyp@0.15.0:
- version "0.15.0"
- resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.15.0.tgz#c2fc383276b74c7ffa842925241553e8b40f1087"
- integrity sha512-7QcZa8/fpaU/BKenjcaeFF9hLz2+7S9AqyXFhlH/rilsQ/hPZKK32RtR5EQHJElgu+q5RfbJ34KriI79UWaorA==
- dependencies:
- detect-libc "^1.0.2"
- mkdirp "^0.5.3"
- needle "^2.5.0"
- nopt "^4.0.1"
- npm-packlist "^1.1.6"
- npmlog "^4.0.2"
- rc "^1.2.7"
- rimraf "^2.6.1"
- semver "^5.3.0"
- tar "^4.4.2"
-
-node-pre-gyp@^0.11.0:
- version "0.11.0"
- resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.11.0.tgz#db1f33215272f692cd38f03238e3e9b47c5dd054"
- integrity sha512-TwWAOZb0j7e9eGaf9esRx3ZcLaE5tQ2lvYy1pb5IAaG1a2e2Kv5Lms1Y4hpj+ciXJRofIxxlt5haeQ/2ANeE0Q==
- dependencies:
- detect-libc "^1.0.2"
- mkdirp "^0.5.1"
- needle "^2.2.1"
- nopt "^4.0.1"
- npm-packlist "^1.1.6"
- npmlog "^4.0.2"
- rc "^1.2.7"
- rimraf "^2.6.1"
- semver "^5.3.0"
- tar "^4"
-
-node-rsa@^1.0.8:
- version "1.1.1"
- resolved "https://registry.yarnpkg.com/node-rsa/-/node-rsa-1.1.1.tgz#efd9ad382097782f506153398496f79e4464434d"
- integrity sha512-Jd4cvbJMryN21r5HgxQOpMEqv+ooke/korixNNK3mGqfGJmy0M77WDDzo/05969+OkMy3XW1UuZsSmW9KQm7Fw==
- dependencies:
- asn1 "^0.2.4"
-
-nodemon@^2.0.2:
- version "2.0.4"
- resolved "https://registry.yarnpkg.com/nodemon/-/nodemon-2.0.4.tgz#55b09319eb488d6394aa9818148c0c2d1c04c416"
- integrity sha512-Ltced+hIfTmaS28Zjv1BM552oQ3dbwPqI4+zI0SLgq+wpJhSyqgYude/aZa/3i31VCQWMfXJVxvu86abcam3uQ==
- dependencies:
- chokidar "^3.2.2"
- debug "^3.2.6"
- ignore-by-default "^1.0.1"
- minimatch "^3.0.4"
- pstree.remy "^1.1.7"
- semver "^5.7.1"
- supports-color "^5.5.0"
- touch "^3.1.0"
- undefsafe "^2.0.2"
- update-notifier "^4.0.0"
-
-nopt@^4.0.1:
- version "4.0.3"
- resolved "https://registry.yarnpkg.com/nopt/-/nopt-4.0.3.tgz#a375cad9d02fd921278d954c2254d5aa57e15e48"
- integrity sha512-CvaGwVMztSMJLOeXPrez7fyfObdZqNUK1cPAEzLHrTybIua9pMdmmPR5YwtfNftIOMv3DPUhFaxsZMNTQO20Kg==
- dependencies:
- abbrev "1"
- osenv "^0.1.4"
-
-nopt@~1.0.10:
- version "1.0.10"
- resolved "https://registry.yarnpkg.com/nopt/-/nopt-1.0.10.tgz#6ddd21bd2a31417b92727dd585f8a6f37608ebee"
- integrity sha1-bd0hvSoxQXuScn3Vhfim83YI6+4=
- dependencies:
- abbrev "1"
-
-normalize-path@^3.0.0, normalize-path@~3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65"
- integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==
-
-normalize-url@^4.1.0:
- version "4.5.1"
- resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-4.5.1.tgz#0dd90cf1288ee1d1313b87081c9a5932ee48518a"
- integrity sha512-9UZCFRHQdNrfTpGg8+1INIg93B6zE0aXMVFkw1WFwvO4SlZywU6aLg5Of0Ap/PgcbSw4LNxvMWXMeugwMCX0AA==
-
-npm-bundled@^1.0.1:
- version "1.1.1"
- resolved "https://registry.yarnpkg.com/npm-bundled/-/npm-bundled-1.1.1.tgz#1edd570865a94cdb1bc8220775e29466c9fb234b"
- integrity sha512-gqkfgGePhTpAEgUsGEgcq1rqPXA+tv/aVBlgEzfXwA1yiUJF7xtEt3CtVwOjNYQOVknDk0F20w58Fnm3EtG0fA==
- dependencies:
- npm-normalize-package-bin "^1.0.1"
-
-npm-normalize-package-bin@^1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/npm-normalize-package-bin/-/npm-normalize-package-bin-1.0.1.tgz#6e79a41f23fd235c0623218228da7d9c23b8f6e2"
- integrity sha512-EPfafl6JL5/rU+ot6P3gRSCpPDW5VmIzX959Ob1+ySFUuuYHWHekXpwdUZcKP5C+DS4GEtdJluwBjnsNDl+fSA==
-
-npm-packlist@^1.1.6:
- version "1.4.8"
- resolved "https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-1.4.8.tgz#56ee6cc135b9f98ad3d51c1c95da22bbb9b2ef3e"
- integrity sha512-5+AZgwru5IevF5ZdnFglB5wNlHG1AOOuw28WhUq8/8emhBmLv6jX5by4WJCh7lW0uSYZYS6DXqIsyZVIXRZU9A==
- dependencies:
- ignore-walk "^3.0.1"
- npm-bundled "^1.0.1"
- npm-normalize-package-bin "^1.0.1"
-
-npmlog@^4.0.2:
- version "4.1.2"
- resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-4.1.2.tgz#08a7f2a8bf734604779a9efa4ad5cc717abb954b"
- integrity sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg==
- dependencies:
- are-we-there-yet "~1.1.2"
- console-control-strings "~1.1.0"
- gauge "~2.7.3"
- set-blocking "~2.0.0"
-
-number-is-nan@^1.0.0:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d"
- integrity sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=
-
-object-assign@^4.1.0:
- version "4.1.1"
- resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863"
- integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=
-
-object-copy@^0.1.0:
- version "0.1.0"
- resolved "https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c"
- integrity sha1-fn2Fi3gb18mRpBupde04EnVOmYw=
- dependencies:
- copy-descriptor "^0.1.0"
- define-property "^0.2.5"
- kind-of "^3.0.3"
-
-object-visit@^1.0.0:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb"
- integrity sha1-95xEk68MU3e1n+OdOV5BBC3QRbs=
- dependencies:
- isobject "^3.0.0"
-
-object.defaults@^1.1.0:
- version "1.1.0"
- resolved "https://registry.yarnpkg.com/object.defaults/-/object.defaults-1.1.0.tgz#3a7f868334b407dea06da16d88d5cd29e435fecf"
- integrity sha1-On+GgzS0B96gbaFtiNXNKeQ1/s8=
- dependencies:
- array-each "^1.0.1"
- array-slice "^1.0.0"
- for-own "^1.0.0"
- isobject "^3.0.0"
-
-object.map@^1.0.0:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/object.map/-/object.map-1.0.1.tgz#cf83e59dc8fcc0ad5f4250e1f78b3b81bd801d37"
- integrity sha1-z4Plncj8wK1fQlDh94s7gb2AHTc=
- dependencies:
- for-own "^1.0.0"
- make-iterator "^1.0.0"
-
-object.pick@^1.2.0, object.pick@^1.3.0:
- version "1.3.0"
- resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747"
- integrity sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c=
- dependencies:
- isobject "^3.0.1"
-
-objection@^2.2.16:
- version "2.2.16"
- resolved "https://registry.yarnpkg.com/objection/-/objection-2.2.16.tgz#552ec6d625a7f80d6e204fc63732cbd3fc56f31c"
- integrity sha512-sq8erZdxW5ruPUK6tVvwDxyO16U49XAn/BmOm2zaNhNA2phOPCe2/7+R70nDEF1SFrgJOrwDu/PtoxybuJxnjQ==
- dependencies:
- ajv "^6.12.6"
- db-errors "^0.2.3"
-
-on-finished@~2.3.0:
- version "2.3.0"
- resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947"
- integrity sha1-IPEzZIGwg811M3mSoWlxqi2QaUc=
- dependencies:
- ee-first "1.1.1"
-
-on-headers@~1.0.2:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f"
- integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==
-
-once@^1.3.0, once@^1.3.1, once@^1.4.0:
- version "1.4.0"
- resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
- integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E=
- dependencies:
- wrappy "1"
-
-onetime@^5.1.0:
- version "5.1.1"
- resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.1.tgz#5c8016847b0d67fcedb7eef254751cfcdc7e9418"
- integrity sha512-ZpZpjcJeugQfWsfyQlshVoowIIQ1qBGSVll4rfDq6JJVO//fesjoX808hXWfBjY+ROZgpKDI5TRSRBSoJiZ8eg==
- dependencies:
- mimic-fn "^2.1.0"
-
-optionator@^0.8.3:
- version "0.8.3"
- resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495"
- integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==
- dependencies:
- deep-is "~0.1.3"
- fast-levenshtein "~2.0.6"
- levn "~0.3.0"
- prelude-ls "~1.1.2"
- type-check "~0.3.2"
- word-wrap "~1.2.3"
-
-os-homedir@^1.0.0:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3"
- integrity sha1-/7xJiDNuDoM94MFox+8VISGqf7M=
-
-os-tmpdir@^1.0.0, os-tmpdir@~1.0.2:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274"
- integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=
-
-osenv@^0.1.4:
- version "0.1.5"
- resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.5.tgz#85cdfafaeb28e8677f416e287592b5f3f49ea410"
- integrity sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g==
- dependencies:
- os-homedir "^1.0.0"
- os-tmpdir "^1.0.0"
-
-p-cancelable@^1.0.0:
- version "1.1.0"
- resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-1.1.0.tgz#d078d15a3af409220c886f1d9a0ca2e441ab26cc"
- integrity sha512-s73XxOZ4zpt1edZYZzvhqFa6uvQc1vwUa0K0BdtIZgQMAJj9IbebH+JkgKZc9h+B05PKHLOTl4ajG1BmNrVZlw==
-
-p-limit@^1.1.0:
- version "1.3.0"
- resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-1.3.0.tgz#b86bd5f0c25690911c7590fcbfc2010d54b3ccb8"
- integrity sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==
- dependencies:
- p-try "^1.0.0"
-
-p-limit@^2.2.0:
- version "2.3.0"
- resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1"
- integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==
- dependencies:
- p-try "^2.0.0"
-
-p-locate@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43"
- integrity sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=
- dependencies:
- p-limit "^1.1.0"
-
-p-locate@^4.1.0:
- version "4.1.0"
- resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07"
- integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==
- dependencies:
- p-limit "^2.2.0"
-
-p-try@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/p-try/-/p-try-1.0.0.tgz#cbc79cdbaf8fd4228e13f621f2b1a237c1b207b3"
- integrity sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=
-
-p-try@^2.0.0:
- version "2.2.0"
- resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6"
- integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==
-
-package-json@^6.3.0:
- version "6.5.0"
- resolved "https://registry.yarnpkg.com/package-json/-/package-json-6.5.0.tgz#6feedaca35e75725876d0b0e64974697fed145b0"
- integrity sha512-k3bdm2n25tkyxcjSKzB5x8kfVxlMdgsbPr0GkZcwHsLpba6cBjqCt1KlcChKEvxHIcTB1FVMuwoijZ26xex5MQ==
- dependencies:
- got "^9.6.0"
- registry-auth-token "^4.0.0"
- registry-url "^5.0.0"
- semver "^6.2.0"
-
-parent-module@^1.0.0:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2"
- integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==
- dependencies:
- callsites "^3.0.0"
-
-parse-filepath@^1.0.1:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/parse-filepath/-/parse-filepath-1.0.2.tgz#a632127f53aaf3d15876f5872f3ffac763d6c891"
- integrity sha1-pjISf1Oq89FYdvWHLz/6x2PWyJE=
- dependencies:
- is-absolute "^1.0.0"
- map-cache "^0.2.0"
- path-root "^0.1.1"
-
-parse-json@^4.0.0:
- version "4.0.0"
- resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-4.0.0.tgz#be35f5425be1f7f6c747184f98a788cb99477ee0"
- integrity sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=
- dependencies:
- error-ex "^1.3.1"
- json-parse-better-errors "^1.0.1"
-
-parse-passwd@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/parse-passwd/-/parse-passwd-1.0.0.tgz#6d5b934a456993b23d37f40a382d6f1666a8e5c6"
- integrity sha1-bVuTSkVpk7I9N/QKOC1vFmao5cY=
-
-parseurl@~1.3.3:
- version "1.3.3"
- resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4"
- integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==
-
-pascalcase@^0.1.1:
- version "0.1.1"
- resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14"
- integrity sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ=
-
-path-exists@^3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515"
- integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=
-
-path-exists@^4.0.0:
- version "4.0.0"
- resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3"
- integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==
-
-path-is-absolute@^1.0.0:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f"
- integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18=
-
-path-key@^2.0.1:
- version "2.0.1"
- resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40"
- integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=
-
-path-parse@^1.0.6:
- version "1.0.7"
- resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735"
- integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==
-
-path-root-regex@^0.1.0:
- version "0.1.2"
- resolved "https://registry.yarnpkg.com/path-root-regex/-/path-root-regex-0.1.2.tgz#bfccdc8df5b12dc52c8b43ec38d18d72c04ba96d"
- integrity sha1-v8zcjfWxLcUsi0PsONGNcsBLqW0=
-
-path-root@^0.1.1:
- version "0.1.1"
- resolved "https://registry.yarnpkg.com/path-root/-/path-root-0.1.1.tgz#9a4a6814cac1c0cd73360a95f32083c8ea4745b7"
- integrity sha1-mkpoFMrBwM1zNgqV8yCDyOpHRbc=
- dependencies:
- path-root-regex "^0.1.0"
-
-path-to-regexp@0.1.7:
- version "0.1.7"
- resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c"
- integrity sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=
-
-path@^0.12.7:
- version "0.12.7"
- resolved "https://registry.yarnpkg.com/path/-/path-0.12.7.tgz#d4dc2a506c4ce2197eb481ebfcd5b36c0140b10f"
- integrity sha1-1NwqUGxM4hl+tIHr/NWzbAFAsQ8=
- dependencies:
- process "^0.11.1"
- util "^0.10.3"
-
-pg-connection-string@2.1.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/pg-connection-string/-/pg-connection-string-2.1.0.tgz#e07258f280476540b24818ebb5dca29e101ca502"
- integrity sha512-bhlV7Eq09JrRIvo1eKngpwuqKtJnNhZdpdOlvrPrA4dxqXPjxSrbNrfnIDmTpwMyRszrcV4kU5ZA4mMsQUrjdg==
-
-picomatch@^2.0.4, picomatch@^2.2.1:
- version "2.2.2"
- resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.2.tgz#21f333e9b6b8eaff02468f5146ea406d345f4dad"
- integrity sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==
-
-pify@^3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176"
- integrity sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=
-
-pkg-conf@^2.1.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/pkg-conf/-/pkg-conf-2.1.0.tgz#2126514ca6f2abfebd168596df18ba57867f0058"
- integrity sha1-ISZRTKbyq/69FoWW3xi6V4Z/AFg=
- dependencies:
- find-up "^2.0.0"
- load-json-file "^4.0.0"
-
-posix-character-classes@^0.1.0:
- version "0.1.1"
- resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab"
- integrity sha1-AerA/jta9xoqbAL+q7jB/vfgDqs=
-
-prelude-ls@~1.1.2:
- version "1.1.2"
- resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54"
- integrity sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=
-
-prepend-http@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-2.0.0.tgz#e92434bfa5ea8c19f41cdfd401d741a3c819d897"
- integrity sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc=
-
-prettier@^2.0.4:
- version "2.0.5"
- resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.0.5.tgz#d6d56282455243f2f92cc1716692c08aa31522d4"
- integrity sha512-7PtVymN48hGcO4fGjybyBSIWDsLU4H4XlvOHfq91pz9kkGlonzwTfYkaIEwiRg/dAJF9YlbsduBAgtYLi+8cFg==
-
-printj@~1.1.0:
- version "1.1.2"
- resolved "https://registry.yarnpkg.com/printj/-/printj-1.1.2.tgz#d90deb2975a8b9f600fb3a1c94e3f4c53c78a222"
- integrity sha512-zA2SmoLaxZyArQTOPj5LXecR+RagfPSU5Kw1qP+jkWeNlrq+eJZyY2oS68SU1Z/7/myXM4lo9716laOFAVStCQ==
-
-process-nextick-args@~2.0.0:
- version "2.0.1"
- resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2"
- integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==
-
-process@^0.11.1:
- version "0.11.10"
- resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182"
- integrity sha1-czIwDoQBYb2j5podHZGn1LwW8YI=
-
-progress@^2.0.0:
- version "2.0.3"
- resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8"
- integrity sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==
-
-proxy-addr@~2.0.5:
- version "2.0.6"
- resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.6.tgz#fdc2336505447d3f2f2c638ed272caf614bbb2bf"
- integrity sha512-dh/frvCBVmSsDYzw6n926jv974gddhkFPfiN8hPOi30Wax25QZyZEGveluCgliBnqmuM+UJmBErbAUFIoDbjOw==
- dependencies:
- forwarded "~0.1.2"
- ipaddr.js "1.9.1"
-
-pstree.remy@^1.1.7:
- version "1.1.8"
- resolved "https://registry.yarnpkg.com/pstree.remy/-/pstree.remy-1.1.8.tgz#c242224f4a67c21f686839bbdb4ac282b8373d3a"
- integrity sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==
-
-pump@^3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64"
- integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==
- dependencies:
- end-of-stream "^1.1.0"
- once "^1.3.1"
-
-punycode@^2.1.0:
- version "2.1.1"
- resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec"
- integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==
-
-pupa@^2.0.1:
- version "2.0.1"
- resolved "https://registry.yarnpkg.com/pupa/-/pupa-2.0.1.tgz#dbdc9ff48ffbea4a26a069b6f9f7abb051008726"
- integrity sha512-hEJH0s8PXLY/cdXh66tNEQGndDrIKNqNC5xmrysZy3i5C3oEoLna7YAOad+7u125+zH1HNXUmGEkrhb3c2VriA==
- dependencies:
- escape-goat "^2.0.0"
-
-qs@6.7.0:
- version "6.7.0"
- resolved "https://registry.yarnpkg.com/qs/-/qs-6.7.0.tgz#41dc1a015e3d581f1621776be31afb2876a9b1bc"
- integrity sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ==
-
-querystring@0.2.0:
- version "0.2.0"
- resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620"
- integrity sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=
-
-range-parser@~1.2.1:
- version "1.2.1"
- resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031"
- integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==
-
-raw-body@2.4.0:
- version "2.4.0"
- resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.4.0.tgz#a1ce6fb9c9bc356ca52e89256ab59059e13d0332"
- integrity sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q==
- dependencies:
- bytes "3.1.0"
- http-errors "1.7.2"
- iconv-lite "0.4.24"
- unpipe "1.0.0"
-
-rc@^1.2.7, rc@^1.2.8:
- version "1.2.8"
- resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed"
- integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==
- dependencies:
- deep-extend "^0.6.0"
- ini "~1.3.0"
- minimist "^1.2.0"
- strip-json-comments "~2.0.1"
-
-readable-stream@2.3.7, readable-stream@^2.0.0, readable-stream@^2.0.5, readable-stream@^2.0.6:
- version "2.3.7"
- resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57"
- integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==
- dependencies:
- core-util-is "~1.0.0"
- inherits "~2.0.3"
- isarray "~1.0.0"
- process-nextick-args "~2.0.0"
- safe-buffer "~5.1.1"
- string_decoder "~1.1.1"
- util-deprecate "~1.0.1"
-
-readable-stream@^3.1.1, readable-stream@^3.4.0, readable-stream@^3.6.0:
- version "3.6.0"
- resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198"
- integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==
- dependencies:
- inherits "^2.0.3"
- string_decoder "^1.1.1"
- util-deprecate "^1.0.1"
-
-readdir-glob@^1.0.0:
- version "1.1.1"
- resolved "https://registry.yarnpkg.com/readdir-glob/-/readdir-glob-1.1.1.tgz#f0e10bb7bf7bfa7e0add8baffdc54c3f7dbee6c4"
- integrity sha512-91/k1EzZwDx6HbERR+zucygRFfiPl2zkIYZtv3Jjr6Mn7SkKcVct8aVO+sSRiGMc6fLf72du3d92/uY63YPdEA==
- dependencies:
- minimatch "^3.0.4"
-
-readdirp@~3.4.0:
- version "3.4.0"
- resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.4.0.tgz#9fdccdf9e9155805449221ac645e8303ab5b9ada"
- integrity sha512-0xe001vZBnJEK+uKcj8qOhyAKPzIT+gStxWr3LCB0DwcXR5NZJ3IaC+yGnHCYzB/S7ov3m3EEbZI2zeNvX+hGQ==
- dependencies:
- picomatch "^2.2.1"
-
-rechoir@^0.6.2:
- version "0.6.2"
- resolved "https://registry.yarnpkg.com/rechoir/-/rechoir-0.6.2.tgz#85204b54dba82d5742e28c96756ef43af50e3384"
- integrity sha1-hSBLVNuoLVdC4oyWdW70OvUOM4Q=
- dependencies:
- resolve "^1.1.6"
-
-regex-not@^1.0.0, regex-not@^1.0.2:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c"
- integrity sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A==
- dependencies:
- extend-shallow "^3.0.2"
- safe-regex "^1.1.0"
-
-regexpp@^2.0.1:
- version "2.0.1"
- resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-2.0.1.tgz#8d19d31cf632482b589049f8281f93dbcba4d07f"
- integrity sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw==
-
-registry-auth-token@^4.0.0:
- version "4.2.0"
- resolved "https://registry.yarnpkg.com/registry-auth-token/-/registry-auth-token-4.2.0.tgz#1d37dffda72bbecd0f581e4715540213a65eb7da"
- integrity sha512-P+lWzPrsgfN+UEpDS3U8AQKg/UjZX6mQSJueZj3EK+vNESoqBSpBUD3gmu4sF9lOsjXWjF11dQKUqemf3veq1w==
- dependencies:
- rc "^1.2.8"
-
-registry-url@^5.0.0:
- version "5.1.0"
- resolved "https://registry.yarnpkg.com/registry-url/-/registry-url-5.1.0.tgz#e98334b50d5434b81136b44ec638d9c2009c5009"
- integrity sha512-8acYXXTI0AkQv6RAOjE3vOaIXZkT9wo4LOFbBKYQEEnnMNBpKqdUrI6S4NT0KPIo/WVvJ5tE/X5LF/TQUf0ekw==
- dependencies:
- rc "^1.2.8"
-
-repeat-element@^1.1.2:
- version "1.1.3"
- resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.3.tgz#782e0d825c0c5a3bb39731f84efee6b742e6b1ce"
- integrity sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g==
-
-repeat-string@^1.6.1:
- version "1.6.1"
- resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637"
- integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc=
-
-require-directory@^2.1.1:
- version "2.1.1"
- resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42"
- integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I=
-
-require-main-filename@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b"
- integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==
-
-resolve-dir@^1.0.0, resolve-dir@^1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/resolve-dir/-/resolve-dir-1.0.1.tgz#79a40644c362be82f26effe739c9bb5382046f43"
- integrity sha1-eaQGRMNivoLybv/nOcm7U4IEb0M=
- dependencies:
- expand-tilde "^2.0.0"
- global-modules "^1.0.0"
-
-resolve-from@^4.0.0:
- version "4.0.0"
- resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6"
- integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==
-
-resolve-url@^0.2.1:
- version "0.2.1"
- resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a"
- integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo=
-
-resolve@^1.1.6, resolve@^1.1.7:
- version "1.17.0"
- resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.17.0.tgz#b25941b54968231cc2d1bb76a79cb7f2c0bf8444"
- integrity sha512-ic+7JYiV8Vi2yzQGFWOkiZD5Z9z7O2Zhm9XMaTxdJExKasieFCr+yXZ/WmXsckHiKl12ar0y6XiXDx3m4RHn1w==
- dependencies:
- path-parse "^1.0.6"
-
-responselike@^1.0.2:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/responselike/-/responselike-1.0.2.tgz#918720ef3b631c5642be068f15ade5a46f4ba1e7"
- integrity sha1-kYcg7ztjHFZCvgaPFa3lpG9Loec=
- dependencies:
- lowercase-keys "^1.0.0"
-
-restore-cursor@^3.1.0:
- version "3.1.0"
- resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-3.1.0.tgz#39f67c54b3a7a58cea5236d95cf0034239631f7e"
- integrity sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==
- dependencies:
- onetime "^5.1.0"
- signal-exit "^3.0.2"
-
-ret@~0.1.10:
- version "0.1.15"
- resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc"
- integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==
-
-rimraf@2.6.3:
- version "2.6.3"
- resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.3.tgz#b2d104fe0d8fb27cf9e0a1cda8262dd3833c6cab"
- integrity sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==
- dependencies:
- glob "^7.1.3"
-
-rimraf@^2.6.1:
- version "2.7.1"
- resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec"
- integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==
- dependencies:
- glob "^7.1.3"
-
-run-async@^2.4.0:
- version "2.4.1"
- resolved "https://registry.yarnpkg.com/run-async/-/run-async-2.4.1.tgz#8440eccf99ea3e70bd409d49aab88e10c189a455"
- integrity sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ==
-
-rxjs@^6.6.0:
- version "6.6.2"
- resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-6.6.2.tgz#8096a7ac03f2cc4fe5860ef6e572810d9e01c0d2"
- integrity sha512-BHdBMVoWC2sL26w//BCu3YzKT4s2jip/WhwsGEDmeKYBhKDZeYezVUnHatYB7L85v5xs0BAQmg6BEYJEKxBabg==
- dependencies:
- tslib "^1.9.0"
-
-safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1:
- version "5.1.2"
- resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d"
- integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==
-
-safe-buffer@^5.0.1, safe-buffer@^5.1.2, safe-buffer@^5.2.1, safe-buffer@~5.2.0:
- version "5.2.1"
- resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6"
- integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==
-
-safe-regex@^1.1.0:
- version "1.1.0"
- resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e"
- integrity sha1-QKNmnzsHfR6UPURinhV91IAjvy4=
- dependencies:
- ret "~0.1.10"
-
-"safer-buffer@>= 2.1.2 < 3", safer-buffer@~2.1.0:
- version "2.1.2"
- resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a"
- integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==
-
-sax@^1.2.4:
- version "1.2.4"
- resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9"
- integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==
-
-semver-diff@^3.1.1:
- version "3.1.1"
- resolved "https://registry.yarnpkg.com/semver-diff/-/semver-diff-3.1.1.tgz#05f77ce59f325e00e2706afd67bb506ddb1ca32b"
- integrity sha512-GX0Ix/CJcHyB8c4ykpHGIAvLyOwOobtM/8d+TQkAd81/bEjgPHrfba41Vpesr7jX/t8Uh+R3EX9eAS5be+jQYg==
- dependencies:
- semver "^6.3.0"
-
-semver@^5.3.0, semver@^5.5.0, semver@^5.6.0, semver@^5.7.1:
- version "5.7.1"
- resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7"
- integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==
-
-semver@^6.0.0, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0:
- version "6.3.0"
- resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d"
- integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==
-
-send@0.17.1:
- version "0.17.1"
- resolved "https://registry.yarnpkg.com/send/-/send-0.17.1.tgz#c1d8b059f7900f7466dd4938bdc44e11ddb376c8"
- integrity sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg==
- dependencies:
- debug "2.6.9"
- depd "~1.1.2"
- destroy "~1.0.4"
- encodeurl "~1.0.2"
- escape-html "~1.0.3"
- etag "~1.8.1"
- fresh "0.5.2"
- http-errors "~1.7.2"
- mime "1.6.0"
- ms "2.1.1"
- on-finished "~2.3.0"
- range-parser "~1.2.1"
- statuses "~1.5.0"
-
-serve-static@1.14.1:
- version "1.14.1"
- resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.14.1.tgz#666e636dc4f010f7ef29970a88a674320898b2f9"
- integrity sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg==
- dependencies:
- encodeurl "~1.0.2"
- escape-html "~1.0.3"
- parseurl "~1.3.3"
- send "0.17.1"
-
-set-blocking@^2.0.0, set-blocking@~2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7"
- integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc=
-
-set-value@^2.0.0, set-value@^2.0.1:
- version "2.0.1"
- resolved "https://registry.yarnpkg.com/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b"
- integrity sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw==
- dependencies:
- extend-shallow "^2.0.1"
- is-extendable "^0.1.1"
- is-plain-object "^2.0.3"
- split-string "^3.0.1"
-
-setprototypeof@1.1.1:
- version "1.1.1"
- resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.1.tgz#7e95acb24aa92f5885e0abef5ba131330d4ae683"
- integrity sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw==
-
-shebang-command@^1.2.0:
- version "1.2.0"
- resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea"
- integrity sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=
- dependencies:
- shebang-regex "^1.0.0"
-
-shebang-regex@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3"
- integrity sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=
-
-signal-exit@^3.0.0, signal-exit@^3.0.2:
- version "3.0.3"
- resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.3.tgz#a1410c2edd8f077b08b4e253c8eacfcaf057461c"
- integrity sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==
-
-signale@^1.4.0:
- version "1.4.0"
- resolved "https://registry.yarnpkg.com/signale/-/signale-1.4.0.tgz#c4be58302fb0262ac00fc3d886a7c113759042f1"
- integrity sha512-iuh+gPf28RkltuJC7W5MRi6XAjTDCAPC/prJUpQoG4vIP3MJZ+GTydVnodXA7pwvTKb2cA0m9OFZW/cdWy/I/w==
- dependencies:
- chalk "^2.3.2"
- figures "^2.0.0"
- pkg-conf "^2.1.0"
-
-slice-ansi@^2.1.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-2.1.0.tgz#cacd7693461a637a5788d92a7dd4fba068e81636"
- integrity sha512-Qu+VC3EwYLldKa1fCxuuvULvSJOKEgk9pi8dZeCVK7TqBfUNTH4sFkk4joj8afVSfAYgJoSOetjx9QWOJ5mYoQ==
- dependencies:
- ansi-styles "^3.2.0"
- astral-regex "^1.0.0"
- is-fullwidth-code-point "^2.0.0"
-
-snapdragon-node@^2.0.1:
- version "2.1.1"
- resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b"
- integrity sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw==
- dependencies:
- define-property "^1.0.0"
- isobject "^3.0.0"
- snapdragon-util "^3.0.1"
-
-snapdragon-util@^3.0.1:
- version "3.0.1"
- resolved "https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2"
- integrity sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ==
- dependencies:
- kind-of "^3.2.0"
-
-snapdragon@^0.8.1:
- version "0.8.2"
- resolved "https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.2.tgz#64922e7c565b0e14204ba1aa7d6964278d25182d"
- integrity sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg==
- dependencies:
- base "^0.11.1"
- debug "^2.2.0"
- define-property "^0.2.5"
- extend-shallow "^2.0.1"
- map-cache "^0.2.2"
- source-map "^0.5.6"
- source-map-resolve "^0.5.0"
- use "^3.1.0"
-
-source-map-resolve@^0.5.0:
- version "0.5.3"
- resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.3.tgz#190866bece7553e1f8f267a2ee82c606b5509a1a"
- integrity sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw==
- dependencies:
- atob "^2.1.2"
- decode-uri-component "^0.2.0"
- resolve-url "^0.2.1"
- source-map-url "^0.4.0"
- urix "^0.1.0"
-
-source-map-url@^0.4.0:
- version "0.4.0"
- resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.0.tgz#3e935d7ddd73631b97659956d55128e87b5084a3"
- integrity sha1-PpNdfd1zYxuXZZlW1VEo6HtQhKM=
-
-source-map@^0.5.6:
- version "0.5.7"
- resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc"
- integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=
-
-split-string@^3.0.1, split-string@^3.0.2:
- version "3.1.0"
- resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2"
- integrity sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw==
- dependencies:
- extend-shallow "^3.0.0"
-
-sprintf-js@~1.0.2:
- version "1.0.3"
- resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c"
- integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=
-
-sqlite3@^4.1.1:
- version "4.2.0"
- resolved "https://registry.yarnpkg.com/sqlite3/-/sqlite3-4.2.0.tgz#49026d665e9fc4f922e56fb9711ba5b4c85c4901"
- integrity sha512-roEOz41hxui2Q7uYnWsjMOTry6TcNUNmp8audCx18gF10P2NknwdpF+E+HKvz/F2NvPKGGBF4NGc+ZPQ+AABwg==
- dependencies:
- nan "^2.12.1"
- node-pre-gyp "^0.11.0"
-
-sqlstring@2.3.1:
- version "2.3.1"
- resolved "https://registry.yarnpkg.com/sqlstring/-/sqlstring-2.3.1.tgz#475393ff9e91479aea62dcaf0ca3d14983a7fb40"
- integrity sha1-R1OT/56RR5rqYtyvDKPRSYOn+0A=
-
-static-extend@^0.1.1:
- version "0.1.2"
- resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6"
- integrity sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY=
- dependencies:
- define-property "^0.2.5"
- object-copy "^0.1.0"
-
-"statuses@>= 1.5.0 < 2", statuses@~1.5.0:
- version "1.5.0"
- resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c"
- integrity sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=
-
-streamsearch@0.1.2:
- version "0.1.2"
- resolved "https://registry.yarnpkg.com/streamsearch/-/streamsearch-0.1.2.tgz#808b9d0e56fc273d809ba57338e929919a1a9f1a"
- integrity sha1-gIudDlb8Jz2Am6VzOOkpkZoanxo=
-
-string-width@^1.0.1:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3"
- integrity sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=
- dependencies:
- code-point-at "^1.0.0"
- is-fullwidth-code-point "^1.0.0"
- strip-ansi "^3.0.0"
-
-"string-width@^1.0.2 || 2":
- version "2.1.1"
- resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e"
- integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==
- dependencies:
- is-fullwidth-code-point "^2.0.0"
- strip-ansi "^4.0.0"
-
-string-width@^3.0.0:
- version "3.1.0"
- resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961"
- integrity sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==
- dependencies:
- emoji-regex "^7.0.1"
- is-fullwidth-code-point "^2.0.0"
- strip-ansi "^5.1.0"
-
-string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0:
- version "4.2.0"
- resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.0.tgz#952182c46cc7b2c313d1596e623992bd163b72b5"
- integrity sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==
- dependencies:
- emoji-regex "^8.0.0"
- is-fullwidth-code-point "^3.0.0"
- strip-ansi "^6.0.0"
-
-string_decoder@^1.1.1:
- version "1.3.0"
- resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e"
- integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==
- dependencies:
- safe-buffer "~5.2.0"
-
-string_decoder@~1.1.1:
- version "1.1.1"
- resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8"
- integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==
- dependencies:
- safe-buffer "~5.1.0"
-
-strip-ansi@^3.0.0, strip-ansi@^3.0.1:
- version "3.0.1"
- resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf"
- integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=
- dependencies:
- ansi-regex "^2.0.0"
-
-strip-ansi@^4.0.0:
- version "4.0.0"
- resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f"
- integrity sha1-qEeQIusaw2iocTibY1JixQXuNo8=
- dependencies:
- ansi-regex "^3.0.0"
-
-strip-ansi@^5.1.0, strip-ansi@^5.2.0:
- version "5.2.0"
- resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae"
- integrity sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==
- dependencies:
- ansi-regex "^4.1.0"
-
-strip-ansi@^6.0.0:
- version "6.0.0"
- resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.0.tgz#0b1571dd7669ccd4f3e06e14ef1eed26225ae532"
- integrity sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==
- dependencies:
- ansi-regex "^5.0.0"
-
-strip-bom@^3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3"
- integrity sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=
-
-strip-json-comments@^3.0.1:
- version "3.1.1"
- resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006"
- integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==
-
-strip-json-comments@~2.0.1:
- version "2.0.1"
- resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a"
- integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo=
-
-supports-color@^5.3.0, supports-color@^5.5.0:
- version "5.5.0"
- resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f"
- integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==
- dependencies:
- has-flag "^3.0.0"
-
-supports-color@^7.1.0:
- version "7.1.0"
- resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.1.0.tgz#68e32591df73e25ad1c4b49108a2ec507962bfd1"
- integrity sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==
- dependencies:
- has-flag "^4.0.0"
-
-table@^5.2.3:
- version "5.4.6"
- resolved "https://registry.yarnpkg.com/table/-/table-5.4.6.tgz#1292d19500ce3f86053b05f0e8e7e4a3bb21079e"
- integrity sha512-wmEc8m4fjnob4gt5riFRtTu/6+4rSe12TpAELNSqHMfF3IqnA+CH37USM6/YR3qRZv7e56kAEAtd6nKZaxe0Ug==
- dependencies:
- ajv "^6.10.2"
- lodash "^4.17.14"
- slice-ansi "^2.1.0"
- string-width "^3.0.0"
-
-tar-stream@^2.2.0:
- version "2.2.0"
- resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-2.2.0.tgz#acad84c284136b060dc3faa64474aa9aebd77287"
- integrity sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==
- dependencies:
- bl "^4.0.3"
- end-of-stream "^1.4.1"
- fs-constants "^1.0.0"
- inherits "^2.0.3"
- readable-stream "^3.1.1"
-
-tar@^4, tar@^4.4.2:
- version "4.4.19"
- resolved "https://registry.yarnpkg.com/tar/-/tar-4.4.19.tgz#2e4d7263df26f2b914dee10c825ab132123742f3"
- integrity sha512-a20gEsvHnWe0ygBY8JbxoM4w3SJdhc7ZAuxkLqh+nvNQN2IOt0B5lLgM490X5Hl8FF0dl0tOf2ewFYAlIFgzVA==
- dependencies:
- chownr "^1.1.4"
- fs-minipass "^1.2.7"
- minipass "^2.9.0"
- minizlib "^1.3.3"
- mkdirp "^0.5.5"
- safe-buffer "^5.2.1"
- yallist "^3.1.1"
-
-tarn@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/tarn/-/tarn-2.0.0.tgz#c68499f69881f99ae955b4317ca7d212d942fdee"
- integrity sha512-7rNMCZd3s9bhQh47ksAQd92ADFcJUjjbyOvyFjNLwTPpGieFHMC84S+LOzw0fx1uh6hnDz/19r8CPMnIjJlMMA==
-
-temp-dir@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/temp-dir/-/temp-dir-1.0.0.tgz#0a7c0ea26d3a39afa7e0ebea9c1fc0bc4daa011d"
- integrity sha1-CnwOom06Oa+n4OvqnB/AvE2qAR0=
-
-temp-write@^4.0.0:
- version "4.0.0"
- resolved "https://registry.yarnpkg.com/temp-write/-/temp-write-4.0.0.tgz#cd2e0825fc826ae72d201dc26eef3bf7e6fc9320"
- integrity sha512-HIeWmj77uOOHb0QX7siN3OtwV3CTntquin6TNVg6SHOqCP3hYKmox90eeFOGaY1MqJ9WYDDjkyZrW6qS5AWpbw==
- dependencies:
- graceful-fs "^4.1.15"
- is-stream "^2.0.0"
- make-dir "^3.0.0"
- temp-dir "^1.0.0"
- uuid "^3.3.2"
-
-term-size@^2.1.0:
- version "2.2.0"
- resolved "https://registry.yarnpkg.com/term-size/-/term-size-2.2.0.tgz#1f16adedfe9bdc18800e1776821734086fcc6753"
- integrity sha512-a6sumDlzyHVJWb8+YofY4TW112G6p2FCPEAFk+59gIYHv3XHRhm9ltVQ9kli4hNWeQBwSpe8cRN25x0ROunMOw==
-
-text-table@^0.2.0:
- version "0.2.0"
- resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4"
- integrity sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=
-
-through@^2.3.6:
- version "2.3.8"
- resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5"
- integrity sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=
-
-tildify@2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/tildify/-/tildify-2.0.0.tgz#f205f3674d677ce698b7067a99e949ce03b4754a"
- integrity sha512-Cc+OraorugtXNfs50hU9KS369rFXCfgGLpfCfvlc+Ud5u6VWmUQsOAa9HbTvheQdYnrdJqqv1e5oIqXppMYnSw==
-
-tmp@^0.0.33:
- version "0.0.33"
- resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9"
- integrity sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==
- dependencies:
- os-tmpdir "~1.0.2"
-
-to-object-path@^0.3.0:
- version "0.3.0"
- resolved "https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af"
- integrity sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68=
- dependencies:
- kind-of "^3.0.2"
-
-to-readable-stream@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/to-readable-stream/-/to-readable-stream-1.0.0.tgz#ce0aa0c2f3df6adf852efb404a783e77c0475771"
- integrity sha512-Iq25XBt6zD5npPhlLVXGFN3/gyR2/qODcKNNyTMd4vbm39HUaOiAM4PMq0eMVC/Tkxz+Zjdsc55g9yyz+Yq00Q==
-
-to-regex-range@^2.1.0:
- version "2.1.1"
- resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38"
- integrity sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=
- dependencies:
- is-number "^3.0.0"
- repeat-string "^1.6.1"
-
-to-regex-range@^5.0.1:
- version "5.0.1"
- resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4"
- integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==
- dependencies:
- is-number "^7.0.0"
-
-to-regex@^3.0.1, to-regex@^3.0.2:
- version "3.0.2"
- resolved "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce"
- integrity sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw==
- dependencies:
- define-property "^2.0.2"
- extend-shallow "^3.0.2"
- regex-not "^1.0.2"
- safe-regex "^1.1.0"
-
-toidentifier@1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.0.tgz#7e1be3470f1e77948bc43d94a3c8f4d7752ba553"
- integrity sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==
-
-touch@^3.1.0:
- version "3.1.0"
- resolved "https://registry.yarnpkg.com/touch/-/touch-3.1.0.tgz#fe365f5f75ec9ed4e56825e0bb76d24ab74af83b"
- integrity sha512-WBx8Uy5TLtOSRtIq+M03/sKDrXCLHxwDcquSP2c43Le03/9serjQBIztjRz6FkJez9D/hleyAXTBGLwwZUw9lA==
- dependencies:
- nopt "~1.0.10"
-
-tslib@^1.9.0:
- version "1.13.0"
- resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.13.0.tgz#c881e13cc7015894ed914862d276436fa9a47043"
- integrity sha512-i/6DQjL8Xf3be4K/E6Wgpekn5Qasl1usyw++dAA35Ue5orEn65VIxOA+YvNNl9HV3qv70T7CNwjODHZrLwvd1Q==
-
-type-check@~0.3.2:
- version "0.3.2"
- resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72"
- integrity sha1-WITKtRLPHTVeP7eE8wgEsrUg23I=
- dependencies:
- prelude-ls "~1.1.2"
-
-type-fest@^0.11.0:
- version "0.11.0"
- resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.11.0.tgz#97abf0872310fed88a5c466b25681576145e33f1"
- integrity sha512-OdjXJxnCN1AvyLSzeKIgXTXxV+99ZuXl3Hpo9XpJAv9MBcHrrJOQ5kV7ypXOuQie+AmWG25hLbiKdwYTifzcfQ==
-
-type-fest@^0.8.1:
- version "0.8.1"
- resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.8.1.tgz#09e249ebde851d3b1e48d27c105444667f17b83d"
- integrity sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==
-
-type-is@~1.6.17, type-is@~1.6.18:
- version "1.6.18"
- resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131"
- integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==
- dependencies:
- media-typer "0.3.0"
- mime-types "~2.1.24"
-
-typedarray-to-buffer@^3.1.5:
- version "3.1.5"
- resolved "https://registry.yarnpkg.com/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080"
- integrity sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==
- dependencies:
- is-typedarray "^1.0.0"
-
-unc-path-regex@^0.1.2:
- version "0.1.2"
- resolved "https://registry.yarnpkg.com/unc-path-regex/-/unc-path-regex-0.1.2.tgz#e73dd3d7b0d7c5ed86fbac6b0ae7d8c6a69d50fa"
- integrity sha1-5z3T17DXxe2G+6xrCufYxqadUPo=
-
-undefsafe@^2.0.2:
- version "2.0.3"
- resolved "https://registry.yarnpkg.com/undefsafe/-/undefsafe-2.0.3.tgz#6b166e7094ad46313b2202da7ecc2cd7cc6e7aae"
- integrity sha512-nrXZwwXrD/T/JXeygJqdCO6NZZ1L66HrxM/Z7mIq2oPanoN0F1nLx3lwJMu6AwJY69hdixaFQOuoYsMjE5/C2A==
- dependencies:
- debug "^2.2.0"
-
-union-value@^1.0.0:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.1.tgz#0b6fe7b835aecda61c6ea4d4f02c14221e109847"
- integrity sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg==
- dependencies:
- arr-union "^3.1.0"
- get-value "^2.0.6"
- is-extendable "^0.1.1"
- set-value "^2.0.1"
-
-unique-string@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/unique-string/-/unique-string-2.0.0.tgz#39c6451f81afb2749de2b233e3f7c5e8843bd89d"
- integrity sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg==
- dependencies:
- crypto-random-string "^2.0.0"
-
-unpipe@1.0.0, unpipe@~1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec"
- integrity sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=
-
-unset-value@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559"
- integrity sha1-g3aHP30jNRef+x5vw6jtDfyKtVk=
- dependencies:
- has-value "^0.3.1"
- isobject "^3.0.0"
-
-update-notifier@^4.0.0:
- version "4.1.0"
- resolved "https://registry.yarnpkg.com/update-notifier/-/update-notifier-4.1.0.tgz#4866b98c3bc5b5473c020b1250583628f9a328f3"
- integrity sha512-w3doE1qtI0/ZmgeoDoARmI5fjDoT93IfKgEGqm26dGUOh8oNpaSTsGNdYRN/SjOuo10jcJGwkEL3mroKzktkew==
- dependencies:
- boxen "^4.2.0"
- chalk "^3.0.0"
- configstore "^5.0.1"
- has-yarn "^2.1.0"
- import-lazy "^2.1.0"
- is-ci "^2.0.0"
- is-installed-globally "^0.3.1"
- is-npm "^4.0.0"
- is-yarn-global "^0.3.0"
- latest-version "^5.0.0"
- pupa "^2.0.1"
- semver-diff "^3.1.1"
- xdg-basedir "^4.0.0"
-
-uri-js@^4.2.2:
- version "4.2.2"
- resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.2.2.tgz#94c540e1ff772956e2299507c010aea6c8838eb0"
- integrity sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ==
- dependencies:
- punycode "^2.1.0"
-
-urix@^0.1.0:
- version "0.1.0"
- resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72"
- integrity sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI=
-
-url-parse-lax@^3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/url-parse-lax/-/url-parse-lax-3.0.0.tgz#16b5cafc07dbe3676c1b1999177823d6503acb0c"
- integrity sha1-FrXK/Afb42dsGxmZF3gj1lA6yww=
- dependencies:
- prepend-http "^2.0.0"
-
-use@^3.1.0:
- version "3.1.1"
- resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f"
- integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ==
-
-util-deprecate@^1.0.1, util-deprecate@~1.0.1:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
- integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=
-
-util@^0.10.3:
- version "0.10.4"
- resolved "https://registry.yarnpkg.com/util/-/util-0.10.4.tgz#3aa0125bfe668a4672de58857d3ace27ecb76901"
- integrity sha512-0Pm9hTQ3se5ll1XihRic3FDIku70C+iHUdT/W926rSgHV5QgXsYbKZN8MSC3tJtSkhuROzvsQjAaFENRXr+19A==
- dependencies:
- inherits "2.0.3"
-
-utils-merge@1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713"
- integrity sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=
-
-uuid@^3.3.2:
- version "3.4.0"
- resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee"
- integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==
-
-uuid@^7.0.1:
- version "7.0.3"
- resolved "https://registry.yarnpkg.com/uuid/-/uuid-7.0.3.tgz#c5c9f2c8cf25dc0a372c4df1441c41f5bd0c680b"
- integrity sha512-DPSke0pXhTZgoF/d+WSt2QaKMCFSfx7QegxEWT+JOuHF5aWrKEn0G+ztjuJg/gG8/ItK+rbPCD/yNv8yyih6Cg==
-
-v8-compile-cache@^2.0.3:
- version "2.1.1"
- resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.1.1.tgz#54bc3cdd43317bca91e35dcaf305b1a7237de745"
- integrity sha512-8OQ9CL+VWyt3JStj7HX7/ciTL2V3Rl1Wf5OL+SNTm0yK1KvtReVulksyeRnCANHHuUxHlQig+JJDlUhBt1NQDQ==
-
-v8flags@^3.1.3:
- version "3.2.0"
- resolved "https://registry.yarnpkg.com/v8flags/-/v8flags-3.2.0.tgz#b243e3b4dfd731fa774e7492128109a0fe66d656"
- integrity sha512-mH8etigqMfiGWdeXpaaqGfs6BndypxusHHcv2qSHyZkGEznCd/qAXCWWRzeowtL54147cktFOC4P5y+kl8d8Jg==
- dependencies:
- homedir-polyfill "^1.0.1"
-
-vary@~1.1.2:
- version "1.1.2"
- resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc"
- integrity sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=
-
-which-module@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a"
- integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=
-
-which@^1.2.14, which@^1.2.9:
- version "1.3.1"
- resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a"
- integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==
- dependencies:
- isexe "^2.0.0"
-
-wide-align@^1.1.0:
- version "1.1.3"
- resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.3.tgz#ae074e6bdc0c14a431e804e624549c633b000457"
- integrity sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==
- dependencies:
- string-width "^1.0.2 || 2"
-
-widest-line@^3.1.0:
- version "3.1.0"
- resolved "https://registry.yarnpkg.com/widest-line/-/widest-line-3.1.0.tgz#8292333bbf66cb45ff0de1603b136b7ae1496eca"
- integrity sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg==
- dependencies:
- string-width "^4.0.0"
-
-word-wrap@~1.2.3:
- version "1.2.3"
- resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c"
- integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==
-
-wrap-ansi@^6.2.0:
- version "6.2.0"
- resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53"
- integrity sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==
- dependencies:
- ansi-styles "^4.0.0"
- string-width "^4.1.0"
- strip-ansi "^6.0.0"
-
-wrappy@1:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
- integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=
-
-write-file-atomic@^3.0.0:
- version "3.0.3"
- resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-3.0.3.tgz#56bd5c5a5c70481cd19c571bd39ab965a5de56e8"
- integrity sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==
- dependencies:
- imurmurhash "^0.1.4"
- is-typedarray "^1.0.0"
- signal-exit "^3.0.2"
- typedarray-to-buffer "^3.1.5"
-
-write@1.0.3:
- version "1.0.3"
- resolved "https://registry.yarnpkg.com/write/-/write-1.0.3.tgz#0800e14523b923a387e415123c865616aae0f5c3"
- integrity sha512-/lg70HAjtkUgWPVZhZcm+T4hkL8Zbtp1nFNOn3lRrxnlv50SRBv7cR7RqR+GMsd3hUXy9hWBo4CHTbFTcOYwig==
- dependencies:
- mkdirp "^0.5.1"
-
-xdg-basedir@^4.0.0:
- version "4.0.0"
- resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-4.0.0.tgz#4bc8d9984403696225ef83a1573cbbcb4e79db13"
- integrity sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q==
-
-y18n@^4.0.0:
- version "4.0.1"
- resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.1.tgz#8db2b83c31c5d75099bb890b23f3094891e247d4"
- integrity sha512-wNcy4NvjMYL8gogWWYAO7ZFWFfHcbdbE57tZO8e4cbpj8tfUcwrwqSl3ad8HxpYWCdXcJUCeKKZS62Av1affwQ==
-
-yallist@^3.0.0, yallist@^3.1.1:
- version "3.1.1"
- resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd"
- integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==
-
-yargs-parser@^18.1.2:
- version "18.1.3"
- resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-18.1.3.tgz#be68c4975c6b2abf469236b0c870362fab09a7b0"
- integrity sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==
- dependencies:
- camelcase "^5.0.0"
- decamelize "^1.2.0"
-
-yargs@^15.4.1:
- version "15.4.1"
- resolved "https://registry.yarnpkg.com/yargs/-/yargs-15.4.1.tgz#0d87a16de01aee9d8bec2bfbf74f67851730f4f8"
- integrity sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==
- dependencies:
- cliui "^6.0.0"
- decamelize "^1.2.0"
- find-up "^4.1.0"
- get-caller-file "^2.0.1"
- require-directory "^2.1.1"
- require-main-filename "^2.0.0"
- set-blocking "^2.0.0"
- string-width "^4.2.0"
- which-module "^2.0.0"
- y18n "^4.0.0"
- yargs-parser "^18.1.2"
-
-zip-stream@^4.1.0:
- version "4.1.0"
- resolved "https://registry.yarnpkg.com/zip-stream/-/zip-stream-4.1.0.tgz#51dd326571544e36aa3f756430b313576dc8fc79"
- integrity sha512-zshzwQW7gG7hjpBlgeQP9RuyPGNxvJdzR8SUM3QhxCnLjWN2E7j3dOvpeDcQoETfHx0urRS7EtmVToql7YpU4A==
- dependencies:
- archiver-utils "^2.1.0"
- compress-commons "^4.1.0"
- readable-stream "^3.6.0"
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 378fffbf..3a0c7b4f 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -1,63 +1,107 @@
# This is a Dockerfile intended to be built using `docker buildx`
# for multi-arch support. Building with `docker build` may have unexpected results.
-# This file assumes that the frontend has been built using ./scripts/frontend-build
+# This file assumes that these scripts have been run first:
+# - ./scripts/ci/build-frontend
-FROM nginxproxymanager/nginx-full:certbot-node
+FROM nginxproxymanager/testca as testca
+FROM letsencrypt/pebble as pebbleca
+FROM jc21/gotools:latest AS gobuild
-ARG TARGETPLATFORM
+SHELL ["/bin/bash", "-o", "pipefail", "-c"]
+
+ARG BUILD_COMMIT
+ARG BUILD_VERSION
+ARG GOPRIVATE
+ARG GOPROXY
+ARG SENTRY_DSN
+
+ENV BUILD_COMMIT="${BUILD_COMMIT:-dev}" \
+ BUILD_VERSION="${BUILD_VERSION:-0.0.0}" \
+ CGO_ENABLED=1 \
+ GO111MODULE=on \
+ GOPRIVATE="${GOPRIVATE:-}" \
+ GOPROXY="${GOPROXY:-}" \
+ SENTRY_DSN="${SENTRY_DSN:-}"
+
+COPY scripts /scripts
+COPY backend /app
+WORKDIR /app
+
+RUN mkdir -p /dist \
+ && /scripts/go-multiarch-wrapper /dist/server
+
+#===============
+# Final image
+#===============
+
+FROM nginxproxymanager/nginx-full:acmesh AS final
+
+COPY --from=gobuild /dist/server /app/bin/server
+# these certs are used for testing in CI
+COPY --from=pebbleca /test/certs/pebble.minica.pem /etc/ssl/certs/pebble.minica.pem
+COPY --from=testca /home/step/certs/root_ca.crt /etc/ssl/certs/NginxProxyManager.crt
+
+# These acmesh vars are defined in the base image
+ENV SUPPRESS_NO_CONFIG_WARNING=1 \
+ S6_FIX_ATTRS_HIDDEN=1 \
+ ACMESH_CONFIG_HOME=/data/.acme.sh/config \
+ ACMESH_HOME=/data/.acme.sh \
+ CERT_HOME=/data/.acme.sh/certs \
+ LE_CONFIG_HOME=/data/.acme.sh/config \
+ LE_WORKING_DIR=/data/.acme.sh
+
+RUN echo "fs.file-max = 65535" > /etc/sysctl.conf
+
+# s6 overlay
+COPY scripts/install-s6 /tmp/install-s6
+RUN /tmp/install-s6 "${TARGETPLATFORM}" && rm -rf /tmp/*
+
+EXPOSE 80/tcp 81/tcp 443/tcp
+
+COPY docker/rootfs /
+
+# Remove frontend service not required for prod, dev nginx config as well
+# and remove any other cruft
+RUN rm -rf /etc/services.d/frontend \
+ /etc/nginx/conf.d/dev.conf \
+ /var/cache/* \
+ /var/log/* \
+ /tmp/* \
+ /var/lib/dpkg/status-old
+
+# Dummy cert
+RUN openssl req \
+ -new \
+ -newkey rsa:2048 \
+ -days 3650 \
+ -nodes \
+ -x509 \
+ -subj '/O=Nginx Proxy Manager/OU=Dummy Certificate/CN=localhost' \
+ -keyout /etc/ssl/certs/dummykey.pem \
+ -out /etc/ssl/certs/dummycert.pem \
+ && chmod +r /etc/ssl/certs/dummykey.pem /etc/ssl/certs/dummycert.pem
+
+VOLUME /data
+
+CMD [ "/init" ]
+
+ARG NOW
ARG BUILD_VERSION
ARG BUILD_COMMIT
ARG BUILD_DATE
-ENV SUPPRESS_NO_CONFIG_WARNING=1 \
- S6_FIX_ATTRS_HIDDEN=1 \
- S6_BEHAVIOUR_IF_STAGE2_FAILS=1 \
- NODE_ENV=production \
- NPM_BUILD_VERSION="${BUILD_VERSION}" \
- NPM_BUILD_COMMIT="${BUILD_COMMIT}" \
- NPM_BUILD_DATE="${BUILD_DATE}"
-
-RUN echo "fs.file-max = 65535" > /etc/sysctl.conf \
- && apt-get update \
- && apt-get install -y --no-install-recommends jq logrotate \
- && apt-get clean \
- && rm -rf /var/lib/apt/lists/*
-
-# s6 overlay
-COPY scripts/install-s6 /tmp/install-s6
-RUN /tmp/install-s6 "${TARGETPLATFORM}" && rm -f /tmp/install-s6
-
-EXPOSE 80 81 443
-
-COPY backend /app
-COPY frontend/dist /app/frontend
-COPY global /app/global
-
-WORKDIR /app
-RUN yarn install
-
-# add late to limit cache-busting by modifications
-COPY docker/rootfs /
-
-# Remove frontend service not required for prod, dev nginx config as well
-RUN rm -rf /etc/services.d/frontend /etc/nginx/conf.d/dev.conf
-
-# Change permission of logrotate config file
-RUN chmod 644 /etc/logrotate.d/nginx-proxy-manager
-
-# fix for pip installs
-# https://github.com/NginxProxyManager/nginx-proxy-manager/issues/1769
-RUN pip uninstall --yes setuptools \
- && pip install "setuptools==58.0.0"
-
-VOLUME [ "/data", "/etc/letsencrypt" ]
-ENTRYPOINT [ "/init" ]
+ENV NPM_BUILD_VERSION="${BUILD_VERSION:-0.0.0}" \
+ NPM_BUILD_COMMIT="${BUILD_COMMIT:-dev}" \
+ NPM_BUILD_DATE="${BUILD_DATE:-}"
LABEL org.label-schema.schema-version="1.0" \
org.label-schema.license="MIT" \
org.label-schema.name="nginx-proxy-manager" \
- org.label-schema.description="Docker container for managing Nginx proxy hosts with a simple, powerful interface " \
- org.label-schema.url="https://github.com/jc21/nginx-proxy-manager" \
- org.label-schema.vcs-url="https://github.com/jc21/nginx-proxy-manager.git" \
- org.label-schema.cmd="docker run --rm -ti jc21/nginx-proxy-manager:latest"
+ org.label-schema.description="Nginx Host Management and Proxy" \
+ org.label-schema.build-date="${NOW:-}" \
+ org.label-schema.version="${BUILD_VERSION:-0.0.0}" \
+ org.label-schema.url="https://nginxproxymanager.com" \
+ org.label-schema.vcs-url="https://github.com/NginxProxyManager/nginx-proxy-manager.git" \
+ org.label-schema.vcs-ref="${BUILD_COMMIT:-dev}" \
+ org.label-schema.cmd="docker run --rm -ti jc21/nginx-proxy-manager:${BUILD_VERSION:-0.0.0}"
diff --git a/docker/dev/Dockerfile b/docker/dev/Dockerfile
index d2e2266a..4b0fe429 100644
--- a/docker/dev/Dockerfile
+++ b/docker/dev/Dockerfile
@@ -1,15 +1,34 @@
-FROM nginxproxymanager/nginx-full:certbot-node
+FROM nginxproxymanager/testca as testca
+FROM letsencrypt/pebble as pebbleca
+FROM nginxproxymanager/nginx-full:acmesh-golang
LABEL maintainer="Jamie Curnow "
-ENV S6_LOGGING=0 \
- SUPPRESS_NO_CONFIG_WARNING=1 \
- S6_FIX_ATTRS_HIDDEN=1
+SHELL ["/bin/bash", "-o", "pipefail", "-c"]
-RUN echo "fs.file-max = 65535" > /etc/sysctl.conf \
+ARG GOPROXY
+ARG GOPRIVATE
+
+ENV GOPROXY=$GOPROXY \
+ GOPRIVATE=$GOPRIVATE \
+ S6_LOGGING=0 \
+ SUPPRESS_NO_CONFIG_WARNING=1 \
+ S6_FIX_ATTRS_HIDDEN=1 \
+ ACMESH_CONFIG_HOME=/data/.acme.sh/config \
+ ACMESH_HOME=/data/.acme.sh \
+ CERT_HOME=/data/.acme.sh/certs \
+ LE_CONFIG_HOME=/data/.acme.sh/config \
+ LE_WORKING_DIR=/data/.acme.sh
+
+RUN echo "fs.file-max = 65535" > /etc/sysctl.conf
+
+# usql and node
+RUN curl -fsSL https://deb.nodesource.com/setup_14.x | bash - \
&& apt-get update \
- && apt-get install -y certbot jq python3-pip logrotate \
+ && apt-get install -y --no-install-recommends nodejs vim dnsutils \
+ && npm install -g yarn \
&& apt-get clean \
- && rm -rf /var/lib/apt/lists/*
+ && rm -rf /var/lib/apt/lists/* \
+ && go install github.com/xo/usql@master
# Task
RUN cd /usr \
@@ -18,12 +37,29 @@ RUN cd /usr \
COPY rootfs /
RUN rm -f /etc/nginx/conf.d/production.conf
-RUN chmod 644 /etc/logrotate.d/nginx-proxy-manager
# s6 overlay
RUN curl -L -o /tmp/s6-overlay-amd64.tar.gz "https://github.com/just-containers/s6-overlay/releases/download/v1.22.1.0/s6-overlay-amd64.tar.gz" \
&& tar -xzf /tmp/s6-overlay-amd64.tar.gz -C /
-EXPOSE 80 81 443
-ENTRYPOINT [ "/init" ]
+# Fix for golang dev:
+RUN chown -R 1000:1000 /opt/go
+COPY --from=pebbleca /test/certs/pebble.minica.pem /etc/ssl/certs/pebble.minica.pem
+COPY --from=testca /home/step/certs/root_ca.crt /etc/ssl/certs/NginxProxyManager.crt
+
+# Dummy cert
+RUN openssl req \
+ -new \
+ -newkey rsa:2048 \
+ -days 3650 \
+ -nodes \
+ -x509 \
+ -subj '/O=Nginx Proxy Manager/OU=Dummy Certificate/CN=localhost' \
+ -keyout /etc/ssl/certs/dummykey.pem \
+ -out /etc/ssl/certs/dummycert.pem \
+ && chmod +r /etc/ssl/certs/dummykey.pem /etc/ssl/certs/dummycert.pem
+
+EXPOSE 80
+CMD [ "/init" ]
+HEALTHCHECK --interval=15s --timeout=3s CMD curl -f http://127.0.0.1:81/api || exit 1
diff --git a/docker/dev/dnsrouter-config.json b/docker/dev/dnsrouter-config.json
new file mode 100644
index 00000000..9560d7b8
--- /dev/null
+++ b/docker/dev/dnsrouter-config.json
@@ -0,0 +1,28 @@
+{
+ "log": {
+ "format": "nice",
+ "level": "debug"
+ },
+ "servers": [
+ {
+ "host": "0.0.0.0",
+ "port": 53,
+ "upstreams": [
+ {
+ "regex": "website[0-9]+.example\\.com",
+ "upstream": "127.0.0.11"
+ },
+ {
+ "regex": ".*\\.example\\.com",
+ "upstream": "1.1.1.1"
+ },
+ {
+ "regex": "local",
+ "nxdomain": true
+ }
+ ],
+ "internal": null,
+ "default_upstream": "127.0.0.11"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/docker/dev/pdns-db.sql b/docker/dev/pdns-db.sql
new file mode 100644
index 00000000..dd7f293a
--- /dev/null
+++ b/docker/dev/pdns-db.sql
@@ -0,0 +1,87 @@
+CREATE TABLE `comments` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `domain_id` int(11) NOT NULL,
+ `name` varchar(255) NOT NULL,
+ `type` varchar(10) NOT NULL,
+ `modified_at` int(11) NOT NULL,
+ `account` varchar(40) CHARACTER SET utf8mb3 DEFAULT NULL,
+ `comment` text CHARACTER SET utf8mb3 NOT NULL,
+ PRIMARY KEY (`id`),
+ KEY `comments_name_type_idx` (`name`,`type`),
+ KEY `comments_order_idx` (`domain_id`,`modified_at`)
+);
+
+CREATE TABLE `cryptokeys` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `domain_id` int(11) NOT NULL,
+ `flags` int(11) NOT NULL,
+ `active` tinyint(1) DEFAULT NULL,
+ `published` tinyint(1) DEFAULT 1,
+ `content` text DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ KEY `domainidindex` (`domain_id`)
+);
+
+CREATE TABLE `domainmetadata` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `domain_id` int(11) NOT NULL,
+ `kind` varchar(32) DEFAULT NULL,
+ `content` text DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ KEY `domainmetadata_idx` (`domain_id`,`kind`)
+);
+
+INSERT INTO `domainmetadata` VALUES (1,1,'SOA-EDIT-API','DEFAULT');
+
+CREATE TABLE `domains` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `name` varchar(255) NOT NULL,
+ `master` varchar(128) DEFAULT NULL,
+ `last_check` int(11) DEFAULT NULL,
+ `type` varchar(6) NOT NULL,
+ `notified_serial` int(10) unsigned DEFAULT NULL,
+ `account` varchar(40) CHARACTER SET utf8mb3 DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `name_index` (`name`)
+);
+
+INSERT INTO `domains` VALUES (1,'example.com','',NULL,'NATIVE',NULL,'');
+
+CREATE TABLE `records` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `domain_id` int(11) DEFAULT NULL,
+ `name` varchar(255) DEFAULT NULL,
+ `type` varchar(10) DEFAULT NULL,
+ `content` TEXT DEFAULT NULL,
+ `ttl` int(11) DEFAULT NULL,
+ `prio` int(11) DEFAULT NULL,
+ `disabled` tinyint(1) DEFAULT 0,
+ `ordername` varchar(255) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
+ `auth` tinyint(1) DEFAULT 1,
+ PRIMARY KEY (`id`),
+ KEY `nametype_index` (`name`,`type`),
+ KEY `domain_id` (`domain_id`),
+ KEY `ordername` (`ordername`)
+);
+
+INSERT INTO `records` VALUES
+(1,1,'example.com','NS','ns1.pdns',1500,0,0,NULL,1),
+(2,1,'example.com','NS','ns2.pdns',1500,0,0,NULL,1),
+(4,1,'test.example.com','A','10.0.0.1',60,0,0,NULL,1),
+(5,1,'example.com','SOA','a.misconfigured.dns.server.invalid hostmaster.example.com 2022020702 10800 3600 604800 3600',1500,0,0,NULL,1);
+
+CREATE TABLE `supermasters` (
+ `ip` varchar(64) NOT NULL,
+ `nameserver` varchar(255) NOT NULL,
+ `account` varchar(40) CHARACTER SET utf8mb3 NOT NULL,
+ PRIMARY KEY (`ip`,`nameserver`)
+);
+
+CREATE TABLE `tsigkeys` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `name` varchar(255) DEFAULT NULL,
+ `algorithm` varchar(50) DEFAULT NULL,
+ `secret` varchar(255) DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `namealgoindex` (`name`,`algorithm`)
+);
diff --git a/docker/dev/pebble-config.json b/docker/dev/pebble-config.json
new file mode 100644
index 00000000..289d2906
--- /dev/null
+++ b/docker/dev/pebble-config.json
@@ -0,0 +1,12 @@
+{
+ "pebble": {
+ "listenAddress": "0.0.0.0:443",
+ "managementListenAddress": "0.0.0.0:15000",
+ "certificate": "test/certs/localhost/cert.pem",
+ "privateKey": "test/certs/localhost/key.pem",
+ "httpPort": 80,
+ "tlsPort": 443,
+ "ocspResponderURL": "",
+ "externalAccountBindingRequired": false
+ }
+}
\ No newline at end of file
diff --git a/docker/docker-compose.ci.yml b/docker/docker-compose.ci.yml
index a8049ec8..90164d2b 100644
--- a/docker/docker-compose.ci.yml
+++ b/docker/docker-compose.ci.yml
@@ -1,80 +1,91 @@
# WARNING: This is a CI docker-compose file used for building and testing of the entire app, it should not be used for production.
-version: "3"
+version: "3.8"
services:
- fullstack-mysql:
- image: ${IMAGE}:ci-${BUILD_NUMBER}
+ fullstack:
+ image: ${IMAGE}:${BRANCH_LOWER}-ci-${BUILD_NUMBER}
environment:
- NODE_ENV: "development"
- FORCE_COLOR: 1
- DB_MYSQL_HOST: "db"
- DB_MYSQL_PORT: 3306
- DB_MYSQL_USER: "npm"
- DB_MYSQL_PASSWORD: "npm"
- DB_MYSQL_NAME: "npm"
+ - NPM_LOG_LEVEL=debug
volumes:
- - npm_data:/data
- expose:
- - 81
- - 80
- - 443
+ - '/etc/localtime:/etc/localtime:ro'
+ - npm_data_ci:/data
+ - ../docs:/temp-docs
+ - ./dev/resolv.conf:/etc/resolv.conf:ro
+ networks:
+ default:
+ aliases:
+ - website1.example.com
+ - website2.example.com
+ - website3.example.com
+
+ stepca:
+ image: nginxproxymanager/testca
+ volumes:
+ - ./dev/resolv.conf:/etc/resolv.conf:ro
+ - '/etc/localtime:/etc/localtime:ro'
+ networks:
+ default:
+ aliases:
+ - ca.internal
+
+ pdns:
+ image: pschiffe/pdns-mysql
+ volumes:
+ - '/etc/localtime:/etc/localtime:ro'
+ environment:
+ PDNS_master: 'yes'
+ PDNS_api: 'yes'
+ PDNS_api_key: 'npm'
+ PDNS_webserver: 'yes'
+ PDNS_webserver_address: '0.0.0.0'
+ PDNS_webserver_password: 'npm'
+ PDNS_webserver-allow-from: '127.0.0.0/8,192.0.0.0/8,10.0.0.0/8,172.0.0.0/8'
+ PDNS_version_string: 'anonymous'
+ PDNS_default_ttl: 1500
+ PDNS_allow_axfr_ips: '127.0.0.0/8,192.0.0.0/8,10.0.0.0/8,172.0.0.0/8'
+ PDNS_gmysql_host: pdns-db
+ PDNS_gmysql_port: 3306
+ PDNS_gmysql_user: pdns
+ PDNS_gmysql_password: pdns
+ PDNS_gmysql_dbname: pdns
depends_on:
- - db
- healthcheck:
- test: ["CMD", "/bin/check-health"]
- interval: 10s
- timeout: 3s
+ - pdns-db
+ networks:
+ default:
+ aliases:
+ - ns1.pdns
+ - ns2.pdns
- fullstack-sqlite:
- image: ${IMAGE}:ci-${BUILD_NUMBER}
+ pdns-db:
+ image: mariadb
environment:
- NODE_ENV: "development"
- FORCE_COLOR: 1
- DB_SQLITE_FILE: "/data/database.sqlite"
+ MYSQL_ROOT_PASSWORD: 'pdns'
+ MYSQL_DATABASE: 'pdns'
+ MYSQL_USER: 'pdns'
+ MYSQL_PASSWORD: 'pdns'
volumes:
- - npm_data:/data
- expose:
- - 81
- - 80
- - 443
- healthcheck:
- test: ["CMD", "/bin/check-health"]
- interval: 10s
- timeout: 3s
+ - pdns_mysql_vol:/var/lib/mysql
+ - '/etc/localtime:/etc/localtime:ro'
+ - ./dev/pdns-db.sql:/docker-entrypoint-initdb.d/01_init.sql:ro
- db:
- image: jc21/mariadb-aria
- environment:
- MYSQL_ROOT_PASSWORD: "npm"
- MYSQL_DATABASE: "npm"
- MYSQL_USER: "npm"
- MYSQL_PASSWORD: "npm"
+ dnsrouter:
+ image: jc21/dnsrouter
volumes:
- - db_data:/var/lib/mysql
+ - ./dev/dnsrouter-config.json.tmp:/dnsrouter-config.json:ro
- cypress-mysql:
+ cypress:
image: ${IMAGE}-cypress:ci-${BUILD_NUMBER}
build:
- context: ../test/
- dockerfile: cypress/Dockerfile
+ context: ../
+ dockerfile: test/cypress/Dockerfile
environment:
- CYPRESS_baseUrl: "http://fullstack-mysql:81"
- volumes:
- - cypress-logs:/results
- command: cypress run --browser chrome --config-file=${CYPRESS_CONFIG:-cypress/config/ci.json}
-
- cypress-sqlite:
- image: ${IMAGE}-cypress:ci-${BUILD_NUMBER}
- build:
- context: ../test/
- dockerfile: cypress/Dockerfile
- environment:
- CYPRESS_baseUrl: "http://fullstack-sqlite:81"
+ CYPRESS_baseUrl: "http://fullstack:81"
volumes:
- cypress-logs:/results
+ - ./dev/resolv.conf:/etc/resolv.conf:ro
command: cypress run --browser chrome --config-file=${CYPRESS_CONFIG:-cypress/config/ci.json}
volumes:
cypress-logs:
- npm_data:
- db_data:
+ npm_data_ci:
+ pdns_mysql_vol:
diff --git a/docker/docker-compose.dev.yml b/docker/docker-compose.dev.yml
index 79fbd799..44fe4232 100644
--- a/docker/docker-compose.dev.yml
+++ b/docker/docker-compose.dev.yml
@@ -1,9 +1,9 @@
-# WARNING: This is a DEVELOPMENT docker-compose file, it should not be used for production.
-version: "3.5"
+# WARNING: This is a DEVELOPMENT docker-compose file used for development of the entire app, it should not be used for production.
+version: "3"
services:
+
npm:
image: nginxproxymanager:dev
- container_name: npm_core
build:
context: ./
dockerfile: ./dev/Dockerfile
@@ -11,52 +11,105 @@ services:
- 3080:80
- 3081:81
- 3443:443
- networks:
- - nginx_proxy_manager
environment:
- NODE_ENV: "development"
- FORCE_COLOR: 1
- DEVELOPMENT: "true"
- DB_MYSQL_HOST: "db"
- DB_MYSQL_PORT: 3306
- DB_MYSQL_USER: "npm"
- DB_MYSQL_PASSWORD: "npm"
- DB_MYSQL_NAME: "npm"
- # DB_SQLITE_FILE: "/data/database.sqlite"
- # DISABLE_IPV6: "true"
+ DEVELOPMENT: 'true'
+ GOPROXY: "${GOPROXY:-}"
+ GOPRIVATE: "${GOPRIVATE:-}"
+ YARN_REGISTRY: "${DAB_YARN_REGISTRY:-}"
+ NPM_LOG_LEVEL: 'debug'
+ PUID: 1000
+ PGID: 1000
volumes:
- - npm_data:/data
- - le_data:/etc/letsencrypt
- - ../backend:/app
- - ../frontend:/app/frontend
- - ../global:/app/global
- depends_on:
- - db
+ - /etc/localtime:/etc/localtime:ro
+ - ../:/app
+ - ./rootfs/var/www/html:/var/www/html
+ - ../data:/data
+ - ./dev/resolv.conf:/etc/resolv.conf:ro
working_dir: /app
-
- db:
- image: jc21/mariadb-aria
- container_name: npm_db
- ports:
- - 33306:3306
networks:
- - nginx_proxy_manager
+ default:
+ aliases:
+ - website1.internal
+ - website2.internal
+ - website3.internal
+
+ pebble:
+ image: letsencrypt/pebble
+ command: pebble -config /test/config/pebble-config.json
environment:
- MYSQL_ROOT_PASSWORD: "npm"
- MYSQL_DATABASE: "npm"
- MYSQL_USER: "npm"
- MYSQL_PASSWORD: "npm"
+ PEBBLE_VA_SLEEPTIME: 2
volumes:
- - db_data:/var/lib/mysql
+ - ./dev/pebble-config.json:/test/config/pebble-config.json
+ - ./dev/resolv.conf:/etc/resolv.conf:ro
+ networks:
+ default:
+ aliases:
+ # required for https cert dns san
+ - pebble
+
+ stepca:
+ image: nginxproxymanager/testca
+ volumes:
+ - ./dev/resolv.conf:/etc/resolv.conf:ro
+ networks:
+ default:
+ aliases:
+ - ca.internal
+
+ pdns:
+ image: pschiffe/pdns-mysql
+ volumes:
+ - '/etc/localtime:/etc/localtime:ro'
+ environment:
+ PDNS_master: 'yes'
+ PDNS_api: 'yes'
+ PDNS_api_key: 'npm'
+ PDNS_webserver: 'yes'
+ PDNS_webserver_address: '0.0.0.0'
+ PDNS_webserver_password: 'npm'
+ PDNS_webserver-allow-from: '127.0.0.0/8,192.0.0.0/8,10.0.0.0/8,172.0.0.0/8'
+ PDNS_version_string: 'anonymous'
+ PDNS_default_ttl: 1500
+ PDNS_allow_axfr_ips: '127.0.0.0/8,192.0.0.0/8,10.0.0.0/8,172.0.0.0/8'
+ PDNS_gmysql_host: pdns-db
+ PDNS_gmysql_port: 3306
+ PDNS_gmysql_user: pdns
+ PDNS_gmysql_password: pdns
+ PDNS_gmysql_dbname: pdns
+ depends_on:
+ - pdns-db
+ networks:
+ default:
+ aliases:
+ - ns1.pdns
+ - ns2.pdns
+
+ pdns-db:
+ image: mariadb:10.7.1
+ environment:
+ MYSQL_ROOT_PASSWORD: 'pdns'
+ MYSQL_DATABASE: 'pdns'
+ MYSQL_USER: 'pdns'
+ MYSQL_PASSWORD: 'pdns'
+ volumes:
+ - pdns_mysql_vol:/var/lib/mysql
+ - /etc/localtime:/etc/localtime:ro
+ - ./dev/pdns-db.sql:/docker-entrypoint-initdb.d/01_init.sql:ro
+
+ dnsrouter:
+ image: jc21/dnsrouter
+ volumes:
+ - ./dev/dnsrouter-config.json.tmp:/dnsrouter-config.json:ro
+
+ swagger:
+ image: swaggerapi/swagger-ui:latest
+ ports:
+ - 3001:80
+ environment:
+ URL: "http://${SWAGGER_PUBLIC_DOMAIN:-127.0.0.1:3081}/api/schema"
+ PORT: '80'
+ depends_on:
+ - npm
volumes:
- npm_data:
- name: npm_core_data
- le_data:
- name: npm_le_data
- db_data:
- name: npm_db_data
-
-networks:
- nginx_proxy_manager:
- name: npm_network
+ pdns_mysql_vol:
diff --git a/docker/rootfs/bin/check-health b/docker/rootfs/bin/check-health
deleted file mode 100755
index bcf5552b..00000000
--- a/docker/rootfs/bin/check-health
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/bash
-
-OK=$(curl --silent http://127.0.0.1:81/api/ | jq --raw-output '.status')
-
-if [ "$OK" == "OK" ]; then
- echo "OK"
- exit 0
-else
- echo "NOT OK"
- exit 1
-fi
diff --git a/docker/rootfs/bin/handle-ipv6-setting b/docker/rootfs/bin/handle-ipv6-setting
deleted file mode 100755
index 2aa0e41a..00000000
--- a/docker/rootfs/bin/handle-ipv6-setting
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/bin/bash
-
-# This command reads the `DISABLE_IPV6` env var and will either enable
-# or disable ipv6 in all nginx configs based on this setting.
-
-# Lowercase
-DISABLE_IPV6=$(echo "${DISABLE_IPV6:-}" | tr '[:upper:]' '[:lower:]')
-
-CYAN='\E[1;36m'
-BLUE='\E[1;34m'
-YELLOW='\E[1;33m'
-RED='\E[1;31m'
-RESET='\E[0m'
-
-FOLDER=$1
-if [ "$FOLDER" == "" ]; then
- echo -e "${RED}❯ $0 requires a absolute folder path as the first argument!${RESET}"
- echo -e "${YELLOW} ie: $0 /data/nginx${RESET}"
- exit 1
-fi
-
-FILES=$(find "$FOLDER" -type f -name "*.conf")
-if [ "$DISABLE_IPV6" == "true" ] || [ "$DISABLE_IPV6" == "on" ] || [ "$DISABLE_IPV6" == "1" ] || [ "$DISABLE_IPV6" == "yes" ]; then
- # IPV6 is disabled
- echo "Disabling IPV6 in hosts"
- echo -e "${BLUE}❯ ${CYAN}Disabling IPV6 in hosts: ${YELLOW}${FOLDER}${RESET}"
-
- # Iterate over configs and run the regex
- for FILE in $FILES
- do
- echo -e " ${BLUE}❯ ${YELLOW}${FILE}${RESET}"
- sed -E -i 's/^([^#]*)listen \[::\]/\1#listen [::]/g' "$FILE"
- done
-
-else
- # IPV6 is enabled
- echo -e "${BLUE}❯ ${CYAN}Enabling IPV6 in hosts: ${YELLOW}${FOLDER}${RESET}"
-
- # Iterate over configs and run the regex
- for FILE in $FILES
- do
- echo -e " ${BLUE}❯ ${YELLOW}${FILE}${RESET}"
- sed -E -i 's/^(\s*)#listen \[::\]/\1listen [::]/g' "$FILE"
- done
-
-fi
diff --git a/docker/rootfs/bin/healthcheck.sh b/docker/rootfs/bin/healthcheck.sh
new file mode 100755
index 00000000..63312086
--- /dev/null
+++ b/docker/rootfs/bin/healthcheck.sh
@@ -0,0 +1,7 @@
+#!/usr/bin/env bash
+set -euf -o pipefail
+
+HEALTHY="$(curl --silent "http://127.0.0.1:3000/api" | jq --raw-output '.result.healthy')"
+
+echo "Healthy: ${HEALTHY}"
+[ "$HEALTHY" = 'true' ] || exit 1
diff --git a/docker/rootfs/etc/cont-init.d/.gitignore b/docker/rootfs/etc/cont-init.d/.gitignore
deleted file mode 100644
index f04f0f6e..00000000
--- a/docker/rootfs/etc/cont-init.d/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-*
-!.gitignore
-!*.sh
diff --git a/docker/rootfs/etc/cont-init.d/01_perms.sh b/docker/rootfs/etc/cont-init.d/01_perms.sh
deleted file mode 100755
index e7875d32..00000000
--- a/docker/rootfs/etc/cont-init.d/01_perms.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/usr/bin/with-contenv bash
-set -e
-
-mkdir -p /data/logs
-echo "Changing ownership of /data/logs to $(id -u):$(id -g)"
-chown -R "$(id -u):$(id -g)" /data/logs
-
diff --git a/docker/rootfs/etc/cont-init.d/01_s6-secret-init.sh b/docker/rootfs/etc/cont-init.d/01_s6-secret-init.sh
deleted file mode 100644
index f145807a..00000000
--- a/docker/rootfs/etc/cont-init.d/01_s6-secret-init.sh
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/with-contenv bash
-# ref: https://github.com/linuxserver/docker-baseimage-alpine/blob/master/root/etc/cont-init.d/01-envfile
-
-# in s6, environmental variables are written as text files for s6 to monitor
-# seach through full-path filenames for files ending in "__FILE"
-for FILENAME in $(find /var/run/s6/container_environment/ | grep "__FILE$"); do
- echo "[secret-init] Evaluating ${FILENAME##*/} ..."
-
- # set SECRETFILE to the contents of the full-path textfile
- SECRETFILE=$(cat ${FILENAME})
- # SECRETFILE=${FILENAME}
- # echo "[secret-init] Set SECRETFILE to ${SECRETFILE}" # DEBUG - rm for prod!
-
- # if SECRETFILE exists / is not null
- if [[ -f ${SECRETFILE} ]]; then
- # strip the appended "__FILE" from environmental variable name ...
- STRIPFILE=$(echo ${FILENAME} | sed "s/__FILE//g")
- # echo "[secret-init] Set STRIPFILE to ${STRIPFILE}" # DEBUG - rm for prod!
-
- # ... and set value to contents of secretfile
- # since s6 uses text files, this is effectively "export ..."
- printf $(cat ${SECRETFILE}) > ${STRIPFILE}
- # echo "[secret-init] Set ${STRIPFILE##*/} to $(cat ${STRIPFILE})" # DEBUG - rm for prod!"
- echo "[secret-init] Success! ${STRIPFILE##*/} set from ${FILENAME##*/}"
-
- else
- echo "[secret-init] cannot find secret in ${FILENAME}"
- fi
-done
diff --git a/docker/rootfs/etc/cont-init.d/10-nginx b/docker/rootfs/etc/cont-init.d/10-nginx
new file mode 100755
index 00000000..b1e852c2
--- /dev/null
+++ b/docker/rootfs/etc/cont-init.d/10-nginx
@@ -0,0 +1,18 @@
+#!/usr/bin/with-contenv bash
+
+# Create required folders
+mkdir -p /tmp/nginx/body \
+ /run/nginx \
+ /var/log/nginx \
+ /var/lib/nginx/cache/public \
+ /var/lib/nginx/cache/private \
+ /var/cache/nginx/proxy_temp \
+ /data/logs
+
+touch /var/log/nginx/error.log && chmod 777 /var/log/nginx/error.log && chmod -R 777 /var/cache/nginx
+
+# Dynamically generate resolvers file
+echo resolver "$(awk 'BEGIN{ORS=" "} $1=="nameserver" {print $2}' /etc/resolv.conf)" ";" > /etc/nginx/conf.d/include/resolvers.conf
+
+# Fire off acme.sh wrapper script to "install" itself if required
+acme.sh -h > /dev/null 2>&1
diff --git a/docker/rootfs/etc/cont-init.d/20-adduser b/docker/rootfs/etc/cont-init.d/20-adduser
new file mode 100755
index 00000000..31340800
--- /dev/null
+++ b/docker/rootfs/etc/cont-init.d/20-adduser
@@ -0,0 +1,33 @@
+#!/usr/bin/with-contenv bash
+
+PUID=${PUID:-911}
+PGID=${PGID:-911}
+
+groupmod -g 1000 users || exit 1
+useradd -u "${PUID}" -U -d /data -s /bin/false npmuser || exit 1
+usermod -G users npmuser || exit 1
+groupmod -o -g "$PGID" npmuser || exit 1
+
+echo "-------------------------------------
+ _ _ ____ __ __
+| \ | | _ \| \/ |
+| \| | |_) | |\/| |
+| |\ | __/| | | |
+|_| \_|_| |_| |_|
+-------------------------------------
+User UID: $(id -u npmuser)
+User GID: $(id -g npmuser)
+-------------------------------------
+"
+
+chown -R npmuser:npmuser /data
+chown -R npmuser:npmuser /run/nginx
+chown -R npmuser:npmuser /etc/nginx
+chown -R npmuser:npmuser /tmp/nginx
+chown -R npmuser:npmuser /var/cache/nginx
+chown -R npmuser:npmuser /var/lib/nginx
+chown -R npmuser:npmuser /var/log/nginx
+
+# Home for npmuser
+mkdir -p /tmp/npmuserhome
+chown -R npmuser:npmuser /tmp/npmuserhome
diff --git a/docker/rootfs/etc/letsencrypt.ini b/docker/rootfs/etc/letsencrypt.ini
deleted file mode 100644
index aae53b90..00000000
--- a/docker/rootfs/etc/letsencrypt.ini
+++ /dev/null
@@ -1,6 +0,0 @@
-text = True
-non-interactive = True
-webroot-path = /data/letsencrypt-acme-challenge
-key-type = ecdsa
-elliptic-curve = secp384r1
-preferred-chain = ISRG Root X1
diff --git a/docker/rootfs/etc/logrotate.d/nginx-proxy-manager b/docker/rootfs/etc/logrotate.d/nginx-proxy-manager
deleted file mode 100644
index 20c23ac6..00000000
--- a/docker/rootfs/etc/logrotate.d/nginx-proxy-manager
+++ /dev/null
@@ -1,25 +0,0 @@
-/data/logs/*_access.log /data/logs/*/access.log {
- create 0644 root root
- weekly
- rotate 4
- missingok
- notifempty
- compress
- sharedscripts
- postrotate
- /bin/kill -USR1 `cat /run/nginx.pid 2>/dev/null` 2>/dev/null || true
- endscript
-}
-
-/data/logs/*_error.log /data/logs/*/error.log {
- create 0644 root root
- weekly
- rotate 10
- missingok
- notifempty
- compress
- sharedscripts
- postrotate
- /bin/kill -USR1 `cat /run/nginx.pid 2>/dev/null` 2>/dev/null || true
- endscript
-}
\ No newline at end of file
diff --git a/docker/rootfs/etc/nginx/conf.d/default.conf b/docker/rootfs/etc/nginx/conf.d/default.conf
index 37d316db..0e360743 100644
--- a/docker/rootfs/etc/nginx/conf.d/default.conf
+++ b/docker/rootfs/etc/nginx/conf.d/default.conf
@@ -1,18 +1,10 @@
# "You are not configured" page, which is the default if another default doesn't exist
server {
- listen 80;
- listen [::]:80;
-
- set $forward_scheme "http";
- set $server "127.0.0.1";
- set $port "80";
-
- server_name localhost-nginx-proxy-manager;
- access_log /data/logs/fallback_access.log standard;
- error_log /data/logs/fallback_error.log warn;
- include conf.d/include/assets.conf;
+ listen 80 default;
+ server_name localhost;
+ include conf.d/include/acme-challenge.conf;
include conf.d/include/block-exploits.conf;
- include conf.d/include/letsencrypt-acme-challenge.conf;
+ access_log /data/logs/default.log proxy;
location / {
index index.html;
@@ -22,18 +14,13 @@ server {
# First 443 Host, which is the default if another default doesn't exist
server {
- listen 443 ssl;
- listen [::]:443 ssl;
-
- set $forward_scheme "https";
- set $server "127.0.0.1";
- set $port "443";
-
+ listen 443 ssl default;
server_name localhost;
- access_log /data/logs/fallback_access.log standard;
- error_log /dev/null crit;
- ssl_certificate /data/nginx/dummycert.pem;
- ssl_certificate_key /data/nginx/dummykey.pem;
+ include conf.d/include/block-exploits.conf;
+ access_log /data/logs/default.log proxy;
+
+ ssl_certificate /etc/ssl/certs/dummycert.pem;
+ ssl_certificate_key /etc/ssl/certs/dummykey.pem;
include conf.d/include/ssl-ciphers.conf;
return 444;
diff --git a/docker/rootfs/etc/nginx/conf.d/dev.conf b/docker/rootfs/etc/nginx/conf.d/dev.conf
index edbdec8a..ce8c1da7 100644
--- a/docker/rootfs/etc/nginx/conf.d/dev.conf
+++ b/docker/rootfs/etc/nginx/conf.d/dev.conf
@@ -1,10 +1,6 @@
server {
listen 81 default;
- listen [::]:81 default;
-
server_name nginxproxymanager-dev;
- root /app/frontend/dist;
- access_log /dev/null;
location /api {
return 302 /api/;
@@ -16,14 +12,22 @@ server {
proxy_set_header X-Forwarded-Scheme $scheme;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-For $remote_addr;
- proxy_pass http://127.0.0.1:3000/;
+ proxy_pass http://127.0.0.1:3000/api/;
+ }
- proxy_read_timeout 15m;
- proxy_send_timeout 15m;
+ location ~ .html {
+ try_files $uri =404;
}
location / {
- index index.html;
- try_files $uri $uri.html $uri/ /index.html;
+ add_header X-Served-By $host;
+ proxy_http_version 1.1;
+ proxy_set_header Host $host;
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection "Upgrade";
+ proxy_set_header X-Forwarded-Scheme $scheme;
+ proxy_set_header X-Forwarded-Proto $scheme;
+ proxy_set_header X-Forwarded-For $remote_addr;
+ proxy_pass http://127.0.0.1:9000;
}
}
diff --git a/docker/rootfs/etc/nginx/conf.d/include/.gitignore b/docker/rootfs/etc/nginx/conf.d/include/.gitignore
deleted file mode 100644
index 5291fe15..00000000
--- a/docker/rootfs/etc/nginx/conf.d/include/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-resolvers.conf
diff --git a/docker/rootfs/etc/nginx/conf.d/include/acme-challenge.conf b/docker/rootfs/etc/nginx/conf.d/include/acme-challenge.conf
new file mode 100644
index 00000000..db408c77
--- /dev/null
+++ b/docker/rootfs/etc/nginx/conf.d/include/acme-challenge.conf
@@ -0,0 +1,17 @@
+# Rule for legitimate ACME Challenge requests (like /.well-known/acme-challenge/xxxxxxxxx)
+# We use ^~ here, so that we don't check other regexes (for speed-up). We actually MUST cancel
+# other regex checks, because our other config files have a regex rule that denies access to files with dotted names.
+location ^~ /.well-known/acme-challenge/ {
+ auth_basic off;
+ auth_request off;
+ allow all;
+ default_type "text/plain";
+ root "/data/.acme.sh/.well-known";
+}
+
+# Hide /acme-challenge subdirectory and return 404 on all requests.
+# It is somewhat more secure than letting Nginx return 403.
+# Ending slash is important!
+location = /.well-known/acme-challenge/ {
+ return 404;
+}
diff --git a/docker/rootfs/etc/nginx/conf.d/include/assets.conf b/docker/rootfs/etc/nginx/conf.d/include/assets.conf
index e95c2e8b..7dd0f5ce 100644
--- a/docker/rootfs/etc/nginx/conf.d/include/assets.conf
+++ b/docker/rootfs/etc/nginx/conf.d/include/assets.conf
@@ -1,31 +1,31 @@
location ~* ^.*\.(css|js|jpe?g|gif|png|woff|eot|ttf|svg|ico|css\.map|js\.map)$ {
- if_modified_since off;
+ if_modified_since off;
- # use the public cache
- proxy_cache public-cache;
- proxy_cache_key $host$request_uri;
+ # use the public cache
+ proxy_cache public-cache;
+ proxy_cache_key $host$request_uri;
- # ignore these headers for media
- proxy_ignore_headers Set-Cookie Cache-Control Expires X-Accel-Expires;
+ # ignore these headers for media
+ proxy_ignore_headers Set-Cookie Cache-Control Expires X-Accel-Expires;
- # cache 200s and also 404s (not ideal but there are a few 404 images for some reason)
- proxy_cache_valid any 30m;
- proxy_cache_valid 404 1m;
+ # cache 200s and also 404s (not ideal but there are a few 404 images for some reason)
+ proxy_cache_valid any 30m;
+ proxy_cache_valid 404 1m;
- # strip this header to avoid If-Modified-Since requests
- proxy_hide_header Last-Modified;
- proxy_hide_header Cache-Control;
- proxy_hide_header Vary;
+ # strip this header to avoid If-Modified-Since requests
+ proxy_hide_header Last-Modified;
+ proxy_hide_header Cache-Control;
+ proxy_hide_header Vary;
- proxy_cache_bypass 0;
- proxy_no_cache 0;
+ proxy_cache_bypass 0;
+ proxy_no_cache 0;
- proxy_cache_use_stale error timeout updating http_500 http_502 http_503 http_504 http_404;
- proxy_connect_timeout 5s;
- proxy_read_timeout 45s;
+ proxy_cache_use_stale error timeout updating http_500 http_502 http_503 http_504 http_404;
+ proxy_connect_timeout 5s;
+ proxy_read_timeout 45s;
- expires @30m;
- access_log off;
+ expires @30m;
+ access_log off;
- include conf.d/include/proxy.conf;
+ include conf.d/include/proxy.conf;
}
diff --git a/docker/rootfs/etc/nginx/conf.d/include/block-exploits.conf b/docker/rootfs/etc/nginx/conf.d/include/block-exploits.conf
index 093bda23..22360fc1 100644
--- a/docker/rootfs/etc/nginx/conf.d/include/block-exploits.conf
+++ b/docker/rootfs/etc/nginx/conf.d/include/block-exploits.conf
@@ -2,92 +2,92 @@
set $block_sql_injections 0;
if ($query_string ~ "union.*select.*\(") {
- set $block_sql_injections 1;
+ set $block_sql_injections 1;
}
if ($query_string ~ "union.*all.*select.*") {
- set $block_sql_injections 1;
+ set $block_sql_injections 1;
}
if ($query_string ~ "concat.*\(") {
- set $block_sql_injections 1;
+ set $block_sql_injections 1;
}
if ($block_sql_injections = 1) {
- return 403;
+ return 403;
}
## Block file injections
set $block_file_injections 0;
if ($query_string ~ "[a-zA-Z0-9_]=http://") {
- set $block_file_injections 1;
+ set $block_file_injections 1;
}
if ($query_string ~ "[a-zA-Z0-9_]=(\.\.//?)+") {
- set $block_file_injections 1;
+ set $block_file_injections 1;
}
if ($query_string ~ "[a-zA-Z0-9_]=/([a-z0-9_.]//?)+") {
- set $block_file_injections 1;
+ set $block_file_injections 1;
}
if ($block_file_injections = 1) {
- return 403;
+ return 403;
}
## Block common exploits
set $block_common_exploits 0;
if ($query_string ~ "(<|%3C).*script.*(>|%3E)") {
- set $block_common_exploits 1;
+ set $block_common_exploits 1;
}
if ($query_string ~ "GLOBALS(=|\[|\%[0-9A-Z]{0,2})") {
- set $block_common_exploits 1;
+ set $block_common_exploits 1;
}
if ($query_string ~ "_REQUEST(=|\[|\%[0-9A-Z]{0,2})") {
- set $block_common_exploits 1;
+ set $block_common_exploits 1;
}
if ($query_string ~ "proc/self/environ") {
- set $block_common_exploits 1;
+ set $block_common_exploits 1;
}
if ($query_string ~ "mosConfig_[a-zA-Z_]{1,21}(=|\%3D)") {
- set $block_common_exploits 1;
+ set $block_common_exploits 1;
}
if ($query_string ~ "base64_(en|de)code\(.*\)") {
- set $block_common_exploits 1;
+ set $block_common_exploits 1;
}
if ($block_common_exploits = 1) {
- return 403;
+ return 403;
}
## Block spam
set $block_spam 0;
if ($query_string ~ "\b(ultram|unicauca|valium|viagra|vicodin|xanax|ypxaieo)\b") {
- set $block_spam 1;
+ set $block_spam 1;
}
if ($query_string ~ "\b(erections|hoodia|huronriveracres|impotence|levitra|libido)\b") {
- set $block_spam 1;
+ set $block_spam 1;
}
if ($query_string ~ "\b(ambien|blue\spill|cialis|cocaine|ejaculation|erectile)\b") {
- set $block_spam 1;
+ set $block_spam 1;
}
if ($query_string ~ "\b(lipitor|phentermin|pro[sz]ac|sandyauer|tramadol|troyhamby)\b") {
- set $block_spam 1;
+ set $block_spam 1;
}
if ($block_spam = 1) {
- return 403;
+ return 403;
}
## Block user agents
@@ -95,42 +95,42 @@ set $block_user_agents 0;
# Disable Akeeba Remote Control 2.5 and earlier
if ($http_user_agent ~ "Indy Library") {
- set $block_user_agents 1;
+ set $block_user_agents 1;
}
# Common bandwidth hoggers and hacking tools.
if ($http_user_agent ~ "libwww-perl") {
- set $block_user_agents 1;
+ set $block_user_agents 1;
}
if ($http_user_agent ~ "GetRight") {
- set $block_user_agents 1;
+ set $block_user_agents 1;
}
if ($http_user_agent ~ "GetWeb!") {
- set $block_user_agents 1;
+ set $block_user_agents 1;
}
if ($http_user_agent ~ "Go!Zilla") {
- set $block_user_agents 1;
+ set $block_user_agents 1;
}
if ($http_user_agent ~ "Download Demon") {
- set $block_user_agents 1;
+ set $block_user_agents 1;
}
if ($http_user_agent ~ "Go-Ahead-Got-It") {
- set $block_user_agents 1;
+ set $block_user_agents 1;
}
if ($http_user_agent ~ "TurnitinBot") {
- set $block_user_agents 1;
+ set $block_user_agents 1;
}
if ($http_user_agent ~ "GrabNet") {
- set $block_user_agents 1;
+ set $block_user_agents 1;
}
if ($block_user_agents = 1) {
- return 403;
+ return 403;
}
diff --git a/docker/rootfs/etc/nginx/conf.d/include/force-ssl.conf b/docker/rootfs/etc/nginx/conf.d/include/force-ssl.conf
index 15f0d285..5fd4810f 100644
--- a/docker/rootfs/etc/nginx/conf.d/include/force-ssl.conf
+++ b/docker/rootfs/etc/nginx/conf.d/include/force-ssl.conf
@@ -1,3 +1,3 @@
if ($scheme = "http") {
- return 301 https://$host$request_uri;
+ return 301 https://$host$request_uri;
}
diff --git a/docker/rootfs/etc/nginx/conf.d/include/ip_ranges.conf b/docker/rootfs/etc/nginx/conf.d/include/ip_ranges.conf
deleted file mode 100644
index 34249325..00000000
--- a/docker/rootfs/etc/nginx/conf.d/include/ip_ranges.conf
+++ /dev/null
@@ -1,2 +0,0 @@
-# This should be left blank is it is populated programatically
-# by the application backend.
diff --git a/docker/rootfs/etc/nginx/conf.d/include/letsencrypt-acme-challenge.conf b/docker/rootfs/etc/nginx/conf.d/include/letsencrypt-acme-challenge.conf
deleted file mode 100644
index ff2a7827..00000000
--- a/docker/rootfs/etc/nginx/conf.d/include/letsencrypt-acme-challenge.conf
+++ /dev/null
@@ -1,30 +0,0 @@
-# Rule for legitimate ACME Challenge requests (like /.well-known/acme-challenge/xxxxxxxxx)
-# We use ^~ here, so that we don't check other regexes (for speed-up). We actually MUST cancel
-# other regex checks, because in our other config files have regex rule that denies access to files with dotted names.
-location ^~ /.well-known/acme-challenge/ {
- # Since this is for letsencrypt authentication of a domain and they do not give IP ranges of their infrastructure
- # we need to open up access by turning off auth and IP ACL for this location.
- auth_basic off;
- auth_request off;
- allow all;
-
- # Set correct content type. According to this:
- # https://community.letsencrypt.org/t/using-the-webroot-domain-verification-method/1445/29
- # Current specification requires "text/plain" or no content header at all.
- # It seems that "text/plain" is a safe option.
- default_type "text/plain";
-
- # This directory must be the same as in /etc/letsencrypt/cli.ini
- # as "webroot-path" parameter. Also don't forget to set "authenticator" parameter
- # there to "webroot".
- # Do NOT use alias, use root! Target directory is located here:
- # /var/www/common/letsencrypt/.well-known/acme-challenge/
- root /data/letsencrypt-acme-challenge;
-}
-
-# Hide /acme-challenge subdirectory and return 404 on all requests.
-# It is somewhat more secure than letting Nginx return 403.
-# Ending slash is important!
-location = /.well-known/acme-challenge/ {
- return 404;
-}
diff --git a/docker/rootfs/etc/nginx/conf.d/include/proxy.conf b/docker/rootfs/etc/nginx/conf.d/include/proxy.conf
index fcaaf003..b84a4513 100644
--- a/docker/rootfs/etc/nginx/conf.d/include/proxy.conf
+++ b/docker/rootfs/etc/nginx/conf.d/include/proxy.conf
@@ -3,6 +3,4 @@ proxy_set_header Host $host;
proxy_set_header X-Forwarded-Scheme $scheme;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-For $remote_addr;
-proxy_set_header X-Real-IP $remote_addr;
-proxy_pass $forward_scheme://$server:$port$request_uri;
-
+proxy_pass $forward_scheme://$server:$port;
diff --git a/docker/rootfs/etc/nginx/conf.d/include/resolvers.conf b/docker/rootfs/etc/nginx/conf.d/include/resolvers.conf
new file mode 100644
index 00000000..ccd9dcef
--- /dev/null
+++ b/docker/rootfs/etc/nginx/conf.d/include/resolvers.conf
@@ -0,0 +1 @@
+# Intentionally blank
diff --git a/docker/rootfs/etc/nginx/conf.d/include/ssl-ciphers.conf b/docker/rootfs/etc/nginx/conf.d/include/ssl-ciphers.conf
index 233abb6e..bd905d31 100644
--- a/docker/rootfs/etc/nginx/conf.d/include/ssl-ciphers.conf
+++ b/docker/rootfs/etc/nginx/conf.d/include/ssl-ciphers.conf
@@ -3,5 +3,7 @@ ssl_session_cache shared:SSL:50m;
# intermediate configuration. tweak to your needs.
ssl_protocols TLSv1.2 TLSv1.3;
-ssl_ciphers 'ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384';
-ssl_prefer_server_ciphers off;
+ssl_ciphers 'EECDH+AESGCM:AES256+EECDH:AES256+EDH:EDH+AESGCM:ECDHE-RSA-AES256-GCM-SHA512:DHE-RSA-AES256-GCM-SHA512:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-DSS-AES128-GCM-SHA256:kEDH+AESGCM:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA:ECDHE-ECDSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-DSS-AES128-SHA256:DHE-RSA-AES256-SHA256:DHE-DSS-AES256-SHA:DHE-RSA-AES256-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:AES';
+# NOTE(review): cipher list must be one physical line — a quoted value split
+# across lines embeds newlines mid-token and breaks OpenSSL cipher parsing.
+ssl_prefer_server_ciphers on;
diff --git a/docker/rootfs/etc/nginx/conf.d/production.conf b/docker/rootfs/etc/nginx/conf.d/production.conf
index 877e51dd..325cb8cc 100644
--- a/docker/rootfs/etc/nginx/conf.d/production.conf
+++ b/docker/rootfs/etc/nginx/conf.d/production.conf
@@ -1,33 +1,14 @@
# Admin Interface
server {
listen 81 default;
- listen [::]:81 default;
-
server_name nginxproxymanager;
- root /app/frontend;
- access_log /dev/null;
- location /api {
- return 302 /api/;
- }
-
- location /api/ {
+ location / {
add_header X-Served-By $host;
proxy_set_header Host $host;
proxy_set_header X-Forwarded-Scheme $scheme;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-For $remote_addr;
- proxy_pass http://127.0.0.1:3000/;
-
- proxy_read_timeout 15m;
- proxy_send_timeout 15m;
- }
-
- location / {
- index index.html;
- if ($request_uri ~ ^/(.*)\.html$) {
- return 302 /$1;
- }
- try_files $uri $uri.html $uri/ /index.html;
+ proxy_pass http://localhost:3000/;
}
}
diff --git a/docker/rootfs/etc/nginx/nginx.conf b/docker/rootfs/etc/nginx/nginx.conf
index 4d5ee901..f815c1b7 100644
--- a/docker/rootfs/etc/nginx/nginx.conf
+++ b/docker/rootfs/etc/nginx/nginx.conf
@@ -1,7 +1,7 @@
# run nginx in foreground
daemon off;
-
-user root;
+user npmuser;
+pid /run/nginx/nginx.pid;
# Set number of worker processes automatically based on number of CPU cores.
worker_processes auto;
@@ -9,7 +9,7 @@ worker_processes auto;
# Enables the use of JIT for regular expressions to speed-up their processing.
pcre_jit on;
-error_log /data/logs/fallback_error.log warn;
+error_log /data/logs/error.log warn;
# Includes files with directives to load dynamic modules.
include /etc/nginx/modules/*.conf;
@@ -26,15 +26,12 @@ http {
tcp_nopush on;
tcp_nodelay on;
client_body_temp_path /tmp/nginx/body 1 2;
- keepalive_timeout 90s;
- proxy_connect_timeout 90s;
- proxy_send_timeout 90s;
- proxy_read_timeout 90s;
+ keepalive_timeout 65;
ssl_prefer_server_ciphers on;
gzip on;
proxy_ignore_client_abort off;
- client_max_body_size 2000m;
- server_names_hash_bucket_size 1024;
+ client_max_body_size 200m;
+ server_names_hash_bucket_size 64;
proxy_http_version 1.1;
proxy_set_header X-Forwarded-Scheme $scheme;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
@@ -43,13 +40,8 @@ http {
proxy_cache_path /var/lib/nginx/cache/public levels=1:2 keys_zone=public-cache:30m max_size=192m;
proxy_cache_path /var/lib/nginx/cache/private levels=1:2 keys_zone=private-cache:5m max_size=1024m;
- log_format proxy '[$time_local] $upstream_cache_status $upstream_status $status - $request_method $scheme $host "$request_uri" [Client $remote_addr] [Length $body_bytes_sent] [Gzip $gzip_ratio] [Sent-to $server] "$http_user_agent" "$http_referer"';
- log_format standard '[$time_local] $status - $request_method $scheme $host "$request_uri" [Client $remote_addr] [Length $body_bytes_sent] [Gzip $gzip_ratio] "$http_user_agent" "$http_referer"';
-
- access_log /data/logs/fallback_access.log proxy;
-
- # Dynamically generated resolvers file
- include /etc/nginx/conf.d/include/resolvers.conf;
+ log_format proxy '[$time_local] $upstream_cache_status $upstream_status $status - $request_method $scheme $host "$request_uri" [Client $remote_addr] [Length $body_bytes_sent] [Gzip $gzip_ratio] "$http_user_agent" "$http_referer"';
+ access_log /data/logs/default.log proxy;
# Default upstream scheme
map $host $forward_scheme {
@@ -57,39 +49,18 @@ http {
}
# Real IP Determination
-
- # Local subnets:
- set_real_ip_from 10.0.0.0/8;
- set_real_ip_from 172.16.0.0/12; # Includes Docker subnet
- set_real_ip_from 192.168.0.0/16;
+ # Docker subnet:
+ set_real_ip_from 172.0.0.0/8;
# NPM generated CDN ip ranges:
- include conf.d/include/ip_ranges.conf;
+ #include conf.d/include/ip_ranges.conf;
# always put the following 2 lines after ip subnets:
- real_ip_header X-Real-IP;
+ real_ip_header X-Forwarded-For;
real_ip_recursive on;
- # Custom
- include /data/nginx/custom/http_top[.]conf;
-
# Files generated by NPM
include /etc/nginx/conf.d/*.conf;
include /data/nginx/default_host/*.conf;
include /data/nginx/proxy_host/*.conf;
include /data/nginx/redirection_host/*.conf;
include /data/nginx/dead_host/*.conf;
- include /data/nginx/temp/*.conf;
-
- # Custom
- include /data/nginx/custom/http[.]conf;
}
-
-stream {
- # Files generated by NPM
- include /data/nginx/stream/*.conf;
-
- # Custom
- include /data/nginx/custom/stream[.]conf;
-}
-
-# Custom
-include /data/nginx/custom/root[.]conf;
diff --git a/docker/rootfs/etc/services.d/backend/finish b/docker/rootfs/etc/services.d/backend/finish
new file mode 100755
index 00000000..2b661f61
--- /dev/null
+++ b/docker/rootfs/etc/services.d/backend/finish
@@ -0,0 +1,5 @@
+#!/usr/bin/execlineb -S1
+if { s6-test ${1} -ne 0 }
+if { s6-test ${1} -ne 256 }
+
+s6-svscanctl -t /var/run/s6/services
diff --git a/docker/rootfs/etc/services.d/backend/run b/docker/rootfs/etc/services.d/backend/run
new file mode 100755
index 00000000..b63ad826
--- /dev/null
+++ b/docker/rootfs/etc/services.d/backend/run
@@ -0,0 +1,23 @@
+#!/usr/bin/with-contenv bash
+
+RESET='\E[0m'
+YELLOW='\E[1;33m'
+
+echo -e "${YELLOW}Starting backend API ...${RESET}"
+
+if [ "$DEVELOPMENT" == "true" ]; then
+ HOME=/tmp/npmuserhome
+ GOPATH="$HOME/go"
+ mkdir -p "$GOPATH"
+ chown -R npmuser:npmuser "$GOPATH"
+ export HOME GOPATH
+ cd /app/backend || exit 1
+ s6-setuidgid npmuser task -w
+else
+ cd /app/bin || exit 1
+ while :
+ do
+ s6-setuidgid npmuser /app/bin/server
+ sleep 1
+ done
+fi
diff --git a/docker/rootfs/etc/services.d/frontend/finish b/docker/rootfs/etc/services.d/frontend/finish
index bca9a35d..2b661f61 100755
--- a/docker/rootfs/etc/services.d/frontend/finish
+++ b/docker/rootfs/etc/services.d/frontend/finish
@@ -3,4 +3,3 @@ if { s6-test ${1} -ne 0 }
if { s6-test ${1} -ne 256 }
s6-svscanctl -t /var/run/s6/services
-
diff --git a/docker/rootfs/etc/services.d/frontend/run b/docker/rootfs/etc/services.d/frontend/run
index a666d53e..87963721 100755
--- a/docker/rootfs/etc/services.d/frontend/run
+++ b/docker/rootfs/etc/services.d/frontend/run
@@ -3,10 +3,13 @@
# This service is DEVELOPMENT only.
if [ "$DEVELOPMENT" == "true" ]; then
+ CI=true
+ HOME=/tmp/npmuserhome
+ export CI
+ export HOME
cd /app/frontend || exit 1
- # If yarn install fails: add --verbose --network-concurrency 1
- yarn install
- yarn watch
+ s6-setuidgid npmuser yarn install
+ s6-setuidgid npmuser yarn start
else
exit 0
fi
diff --git a/docker/rootfs/etc/services.d/manager/finish b/docker/rootfs/etc/services.d/manager/finish
deleted file mode 100755
index 7d442d6a..00000000
--- a/docker/rootfs/etc/services.d/manager/finish
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/with-contenv bash
-
-s6-svscanctl -t /var/run/s6/services
diff --git a/docker/rootfs/etc/services.d/manager/run b/docker/rootfs/etc/services.d/manager/run
deleted file mode 100755
index e365f4fb..00000000
--- a/docker/rootfs/etc/services.d/manager/run
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/with-contenv bash
-
-mkdir -p /data/letsencrypt-acme-challenge
-
-cd /app || echo
-
-if [ "$DEVELOPMENT" == "true" ]; then
- cd /app || exit 1
- # If yarn install fails: add --verbose --network-concurrency 1
- yarn install
- node --max_old_space_size=250 --abort_on_uncaught_exception node_modules/nodemon/bin/nodemon.js
-else
- cd /app || exit 1
- while :
- do
- node --abort_on_uncaught_exception --max_old_space_size=250 index.js
- sleep 1
- done
-fi
diff --git a/docker/rootfs/etc/services.d/nginx/finish b/docker/rootfs/etc/services.d/nginx/finish
deleted file mode 120000
index 63b10de4..00000000
--- a/docker/rootfs/etc/services.d/nginx/finish
+++ /dev/null
@@ -1 +0,0 @@
-/bin/true
\ No newline at end of file
diff --git a/docker/rootfs/etc/services.d/nginx/finish b/docker/rootfs/etc/services.d/nginx/finish
new file mode 100755
index 00000000..bca9a35d
--- /dev/null
+++ b/docker/rootfs/etc/services.d/nginx/finish
@@ -0,0 +1,6 @@
+#!/usr/bin/execlineb -S1
+if { s6-test ${1} -ne 0 }
+if { s6-test ${1} -ne 256 }
+
+s6-svscanctl -t /var/run/s6/services
+
diff --git a/docker/rootfs/etc/services.d/nginx/run b/docker/rootfs/etc/services.d/nginx/run
index 51ca5ea1..e04643e7 100755
--- a/docker/rootfs/etc/services.d/nginx/run
+++ b/docker/rootfs/etc/services.d/nginx/run
@@ -1,49 +1,3 @@
#!/usr/bin/with-contenv bash
-# Create required folders
-mkdir -p /tmp/nginx/body \
- /run/nginx \
- /var/log/nginx \
- /data/nginx \
- /data/custom_ssl \
- /data/logs \
- /data/access \
- /data/nginx/default_host \
- /data/nginx/default_www \
- /data/nginx/proxy_host \
- /data/nginx/redirection_host \
- /data/nginx/stream \
- /data/nginx/dead_host \
- /data/nginx/temp \
- /var/lib/nginx/cache/public \
- /var/lib/nginx/cache/private \
- /var/cache/nginx/proxy_temp
-
-touch /var/log/nginx/error.log && chmod 777 /var/log/nginx/error.log && chmod -R 777 /var/cache/nginx
-chown root /tmp/nginx
-
-# Dynamically generate resolvers file, if resolver is IPv6, enclose in `[]`
-# thanks @tfmm
-echo resolver "$(awk 'BEGIN{ORS=" "} $1=="nameserver" { sub(/%.*$/,"",$2); print ($2 ~ ":")? "["$2"]": $2}' /etc/resolv.conf) valid=10s;" > /etc/nginx/conf.d/include/resolvers.conf
-
-# Generate dummy self-signed certificate.
-if [ ! -f /data/nginx/dummycert.pem ] || [ ! -f /data/nginx/dummykey.pem ]
-then
- echo "Generating dummy SSL certificate..."
- openssl req \
- -new \
- -newkey rsa:2048 \
- -days 3650 \
- -nodes \
- -x509 \
- -subj '/O=localhost/OU=localhost/CN=localhost' \
- -keyout /data/nginx/dummykey.pem \
- -out /data/nginx/dummycert.pem
- echo "Complete"
-fi
-
-# Handle IPV6 settings
-/bin/handle-ipv6-setting /etc/nginx/conf.d
-/bin/handle-ipv6-setting /data/nginx
-
-exec nginx
+exec s6-setuidgid npmuser nginx
diff --git a/docker/rootfs/root/.bashrc b/docker/rootfs/root/.bashrc
index 1deb975c..122da279 100644
--- a/docker/rootfs/root/.bashrc
+++ b/docker/rootfs/root/.bashrc
@@ -16,7 +16,5 @@ alias h='cd ~;clear;'
echo -e -n '\E[1;34m'
figlet -w 120 "NginxProxyManager"
-echo -e "\E[1;36mVersion \E[1;32m${NPM_BUILD_VERSION:-2.0.0-dev} (${NPM_BUILD_COMMIT:-dev}) ${NPM_BUILD_DATE:-0000-00-00}\E[1;36m, OpenResty \E[1;32m${OPENRESTY_VERSION:-unknown}\E[1;36m, ${ID:-debian} \E[1;32m${VERSION:-unknown}\E[1;36m, Certbot \E[1;32m$(certbot --version)\E[0m"
-echo -e -n '\E[1;34m'
-cat /built-for-arch
-echo -e '\E[0m'
+echo -e "\E[1;36mVersion \E[1;32m${NPM_BUILD_VERSION:-3.0.0-dev} (${NPM_BUILD_COMMIT:-dev}) ${NPM_BUILD_DATE:-0000-00-00}\E[1;36m, OpenResty \E[1;32m${OPENRESTY_VERSION:-unknown}\E[1;36m, Debian \E[1;32m${VERSION_ID:-unknown}\E[1;36m, Kernel \E[1;32m$(uname -r)\E[0m"
+echo
diff --git a/docker/rootfs/root/.config/litecli/config b/docker/rootfs/root/.config/litecli/config
new file mode 100644
index 00000000..bfc67aa8
--- /dev/null
+++ b/docker/rootfs/root/.config/litecli/config
@@ -0,0 +1,13 @@
+[main]
+multi_line = True
+log_level = INFO
+table_format = psql
+syntax_style = monokai
+wider_completion_menu = True
+prompt = '\d> '
+prompt_continuation = '-> '
+less_chatty = True
+auto_vertical_output = True
+
+[favorite_queries]
+show_users = select * from user
diff --git a/docker/rootfs/var/www/html/index.html b/docker/rootfs/var/www/html/index.html
index 8478b47f..2a437c36 100644
--- a/docker/rootfs/var/www/html/index.html
+++ b/docker/rootfs/var/www/html/index.html
@@ -1,24 +1,24 @@
-
-
-
-
- Default Site
-
-
-
-
-
-
-
Congratulations!
-
You've successfully started the Nginx Proxy Manager.
-
If you're seeing this site then you're trying to access a host that isn't set up yet.
-
Log in to the Admin panel to get started.
-
-
Powered by Nginx Proxy Manager
-
-
+
+
+
+
+ Default Site
+
+
+
+
+
+
+
Congratulations!
+
You've successfully started the Nginx Proxy Manager.
+
If you're seeing this site then you're trying to access a host that isn't set up yet.
+
Log in to the Admin panel to get started.
+
+
Powered by Nginx Proxy Manager
+
+
diff --git a/docs/.gitignore b/docs/.gitignore
index 38353fb5..eccfbb30 100644
--- a/docs/.gitignore
+++ b/docs/.gitignore
@@ -1,3 +1,5 @@
.vuepress/dist
node_modules
ts
+api.md
+api/
diff --git a/docs/.vuepress/config.js b/docs/.vuepress/config.js
index f3b735b8..562ed723 100644
--- a/docs/.vuepress/config.js
+++ b/docs/.vuepress/config.js
@@ -39,7 +39,7 @@ module.exports = {
// Custom text for edit link. Defaults to "Edit this page"
editLinkText: "Edit this page on GitHub",
// Custom navbar values
- nav: [{ text: "Setup", link: "/setup/" }],
+ nav: [{ text: "Setup", link: "/setup/" }, { text: "API", link: "/api/index.html" }],
// Custom sidebar values
sidebar: [
"/",
diff --git a/docs/README.md b/docs/README.md
index 082bb05c..4356775b 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -3,7 +3,7 @@ home: true
heroImage: /logo.png
actionText: Get Started →
actionLink: /guide/
-footer: MIT Licensed | Copyright © 2016-present jc21.com
+footer: MIT Licensed | Copyright © 2016-2021 jc21.com
---
@@ -37,3 +37,37 @@ footer: MIT Licensed | Copyright © 2016-present jc21.com
Configure other users to either view or manage their own hosts. Full access permissions are available.
+
+### Quick Setup
+
+1. Install Docker and Docker-Compose
+
+- [Docker Install documentation](https://docs.docker.com/install/)
+- [Docker-Compose Install documentation](https://docs.docker.com/compose/install/)
+
+2. Create a docker-compose.yml file similar to this:
+
+```yml
+version: '3'
+services:
+ app:
+ image: 'jc21/nginx-proxy-manager:3'
+ ports:
+ - '80:80'
+ - '81:81'
+ - '443:443'
+ volumes:
+ - ./data:/data
+```
+
+3. Bring up your stack
+
+```bash
+docker-compose up -d
+```
+
+4. Log in to the Admin UI
+
+When your docker container is running, connect to it on port `81` for the admin interface.
+
+[http://127.0.0.1:81](http://127.0.0.1:81)
diff --git a/docs/advanced-config/README.md b/docs/advanced-config/README.md
index c7b51a84..61820795 100644
--- a/docs/advanced-config/README.md
+++ b/docs/advanced-config/README.md
@@ -1,10 +1,10 @@
# Advanced Configuration
-## Best Practice: Use a Docker network
+## Best Practice: Use a docker network
-For those who have a few of their upstream services running in Docker on the same Docker
-host as NPM, here's a trick to secure things a bit better. By creating a custom Docker network,
-you don't need to publish ports for your upstream services to all of the Docker host's interfaces.
+For those who have a few of their upstream services running in docker on the same docker
+host as NPM, here's a trick to secure things a bit better. By creating a custom docker network,
+you don't need to publish ports for your upstream services to all of the docker host's interfaces.
Create a network, ie "scoobydoo":
@@ -13,7 +13,7 @@ docker network create scoobydoo
```
Then add the following to the `docker-compose.yml` file for both NPM and any other
-services running on this Docker host:
+services running on this docker host:
```yml
networks:
@@ -44,22 +44,10 @@ networks:
Now in the NPM UI you can create a proxy host with `portainer` as the hostname,
and port `9000` as the port. Even though this port isn't listed in the docker-compose
-file, it's "exposed" by the Portainer Docker image for you and not available on
-the Docker host outside of this Docker network. The service name is used as the
+file, it's "exposed" by the portainer docker image for you and not available on
+the docker host outside of this docker network. The service name is used as the
hostname, so make sure your service names are unique when using the same network.
-## Docker Healthcheck
-
-The `Dockerfile` that builds this project does not include a `HEALTHCHECK` but you can opt in to this
-feature by adding the following to the service in your `docker-compose.yml` file:
-
-```yml
-healthcheck:
- test: ["CMD", "/bin/check-health"]
- interval: 10s
- timeout: 3s
-```
-
## Docker Secrets
This image supports the use of Docker secrets to import from file and keep sensitive usernames or passwords from being passed or preserved in plaintext.
@@ -128,7 +116,7 @@ services:
## Disabling IPv6
-On some Docker hosts IPv6 may not be enabled. In these cases, the following message may be seen in the log:
+On some docker hosts IPv6 may not be enabled. In these cases, the following message may be seen in the log:
> Address family not supported by protocol
diff --git a/docs/faq/README.md b/docs/faq/README.md
index cf739ead..1703e705 100644
--- a/docs/faq/README.md
+++ b/docs/faq/README.md
@@ -21,6 +21,3 @@ Your best bet is to ask the [Reddit community for support](https://www.reddit.co
Gitter is best left for anyone contributing to the project to ask for help about internals, code reviews etc.
-## When adding username and password access control to a proxy host, I can no longer login into the app.
-
-Having an Access Control List (ACL) with username and password requires the browser to always send this username and password in the `Authorization` header on each request. If your proxied app also requires authentication (like Nginx Proxy Manager itself), most likely the app will also use the `Authorization` header to transmit this information, as this is the standardized header meant for this kind of information. However having multiples of the same headers is not allowed in the [internet standard](https://www.rfc-editor.org/rfc/rfc7230#section-3.2.2) and almost all apps do not support multiple values in the `Authorization` header. Hence one of the two logins will be broken. This can only be fixed by either removing one of the logins or by changing the app to use other non-standard headers for authorization.
\ No newline at end of file
diff --git a/docs/package.json b/docs/package.json
index dc28e5a0..a1cec675 100644
--- a/docs/package.json
+++ b/docs/package.json
@@ -357,7 +357,7 @@
"jsbn": "^1.1.0",
"jsesc": "^3.0.1",
"json-parse-better-errors": "^1.0.2",
- "json-schema": "^0.4.0",
+ "json-schema": "^0.2.5",
"json-schema-traverse": "^0.4.1",
"json-stringify-safe": "^5.0.1",
"json3": "^3.3.3",
@@ -394,7 +394,7 @@
"map-age-cleaner": "^0.1.3",
"map-cache": "^0.2.2",
"map-visit": "^1.0.0",
- "markdown-it": "^12.3.2",
+ "markdown-it": "^11.0.0",
"markdown-it-anchor": "^5.3.0",
"markdown-it-chain": "^1.3.0",
"markdown-it-container": "^3.0.0",
@@ -434,7 +434,7 @@
"neo-async": "^2.6.2",
"nice-try": "^2.0.1",
"no-case": "^3.0.3",
- "node-forge": "^1.0.0",
+ "node-forge": "^0.10.0",
"node-libs-browser": "^2.2.1",
"node-releases": "^1.1.60",
"nopt": "^4.0.3",
@@ -443,7 +443,7 @@
"normalize-url": "^5.1.0",
"npm-run-path": "^4.0.1",
"nprogress": "^0.2.0",
- "nth-check": "^2.0.1",
+ "nth-check": "^1.0.2",
"num2fraction": "^1.2.2",
"number-is-nan": "^2.0.0",
"oauth-sign": "^0.9.0",
@@ -612,7 +612,7 @@
"serve-index": "^1.9.1",
"serve-static": "^1.14.1",
"set-blocking": "^2.0.0",
- "set-value": "^4.0.1",
+ "set-value": "^3.0.2",
"setimmediate": "^1.0.5",
"setprototypeof": "^1.2.0",
"sha.js": "^2.4.11",
diff --git a/docs/setup/README.md b/docs/setup/README.md
index b9c42274..a2ff7392 100644
--- a/docs/setup/README.md
+++ b/docs/setup/README.md
@@ -1,35 +1,31 @@
# Full Setup Instructions
-## Running the App
+### Running the App
-Create a `docker-compose.yml` file:
+Via `docker-compose`:
```yml
version: "3"
services:
app:
- image: 'jc21/nginx-proxy-manager:latest'
- restart: unless-stopped
+ image: 'jc21/nginx-proxy-manager:v3-develop'
+ restart: always
ports:
- # These ports are in format :
- - '80:80' # Public HTTP Port
- - '443:443' # Public HTTPS Port
- - '81:81' # Admin Web Port
- # Add any other Stream port you want to expose
- # - '21:21' # FTP
-
- # Uncomment the next line if you uncomment anything in the section
- # environment:
- # Uncomment this if you want to change the location of
- # the SQLite DB file within the container
- # DB_SQLITE_FILE: "/data/database.sqlite"
-
+ # Public HTTP Port:
+ - '80:80'
+ # Public HTTPS Port:
+ - '443:443'
+ # Admin Web Port:
+ - '81:81'
+ environment:
+ # These run the processes and own the files
+ # for a specific user/group
+ - PUID=1000
+ - PGID=1000
# Uncomment this if IPv6 is not enabled on your host
# DISABLE_IPV6: 'true'
-
volumes:
- ./data:/data
- - ./letsencrypt:/etc/letsencrypt
```
Then:
@@ -38,64 +34,7 @@ Then:
docker-compose up -d
```
-## Using MySQL / MariaDB Database
-
-If you opt for the MySQL configuration you will have to provide the database server yourself. You can also use MariaDB. Here are the minimum supported versions:
-
-- MySQL v5.7.8+
-- MariaDB v10.2.7+
-
-It's easy to use another docker container for your database also and link it as part of the docker stack, so that's what the following examples
-are going to use.
-
-Here is an example of what your `docker-compose.yml` will look like when using a MariaDB container:
-
-```yml
-version: "3"
-services:
- app:
- image: 'jc21/nginx-proxy-manager:latest'
- restart: unless-stopped
- ports:
- # These ports are in format :
- - '80:80' # Public HTTP Port
- - '443:443' # Public HTTPS Port
- - '81:81' # Admin Web Port
- # Add any other Stream port you want to expose
- # - '21:21' # FTP
- environment:
- DB_MYSQL_HOST: "db"
- DB_MYSQL_PORT: 3306
- DB_MYSQL_USER: "npm"
- DB_MYSQL_PASSWORD: "npm"
- DB_MYSQL_NAME: "npm"
- # Uncomment this if IPv6 is not enabled on your host
- # DISABLE_IPV6: 'true'
- volumes:
- - ./data:/data
- - ./letsencrypt:/etc/letsencrypt
- depends_on:
- - db
-
- db:
- image: 'jc21/mariadb-aria:latest'
- restart: unless-stopped
- environment:
- MYSQL_ROOT_PASSWORD: 'npm'
- MYSQL_DATABASE: 'npm'
- MYSQL_USER: 'npm'
- MYSQL_PASSWORD: 'npm'
- volumes:
- - ./data/mysql:/var/lib/mysql
-```
-
-::: warning
-
-Please note, that `DB_MYSQL_*` environment variables will take precedent over `DB_SQLITE_*` variables. So if you keep the MySQL variables, you will not be able to use SQLite.
-
-:::
-
-## Running on Raspberry PI / ARM devices
+### Running on Raspberry PI / ARM devices
The docker images support the following architectures:
- amd64
@@ -107,76 +46,17 @@ you don't have to worry about doing anything special and you can follow the comm
Check out the [dockerhub tags](https://hub.docker.com/r/jc21/nginx-proxy-manager/tags)
for a list of supported architectures and if you want one that doesn't exist,
-[create a feature request](https://github.com/NginxProxyManager/nginx-proxy-manager/issues/new?assignees=&labels=enhancement&template=feature_request.md&title=).
+[create a feature request](https://github.com/jc21/nginx-proxy-manager/issues/new?assignees=&labels=enhancement&template=feature_request.md&title=).
Also, if you don't know how to already, follow [this guide to install docker and docker-compose](https://manre-universe.net/how-to-run-docker-and-docker-compose-on-raspbian/)
on Raspbian.
-Please note that the `jc21/mariadb-aria:latest` image might have some problems on some ARM devices, if you want a separate database container, use the `yobasystems/alpine-mariadb:latest` image.
-## Initial Run
+### Initial Run
After the app is running for the first time, the following will happen:
1. The database will initialize with table structures
2. GPG keys will be generated and saved in the configuration file
-3. A default admin user will be created
This process can take a couple of minutes depending on your machine.
-
-
-## Default Administrator User
-
-```
-Email: admin@example.com
-Password: changeme
-```
-
-Immediately after logging in with this default user you will be asked to modify your details and change your password.
-
-## Configuration File
-
-::: warning
-
-This section is meant for advanced users
-
-:::
-
-If you would like more control over the database settings you can define a custom config JSON file.
-
-
-Here's an example for `sqlite` configuration as it is generated from the environment variables:
-
-```json
-{
- "database": {
- "engine": "knex-native",
- "knex": {
- "client": "sqlite3",
- "connection": {
- "filename": "/data/database.sqlite"
- },
- "useNullAsDefault": true
- }
- }
-}
-```
-
-You can modify the `knex` object with your custom configuration, but note that not all knex clients might be installed in the image.
-
-Once you've created your configuration file you can mount it to `/app/config/production.json` inside you container using:
-
-```
-[...]
-services:
- app:
- image: 'jc21/nginx-proxy-manager:latest'
- [...]
- volumes:
- - ./config.json:/app/config/production.json
- [...]
-[...]
-```
-
-**Note:** After the first run of the application, the config file will be altered to include generated encryption keys unique to your installation.
-These keys affect the login and session management of the application. If these keys change for any reason, all users will be logged out.
diff --git a/docs/yarn.lock b/docs/yarn.lock
index 843a2415..5f981d38 100644
--- a/docs/yarn.lock
+++ b/docs/yarn.lock
@@ -1624,9 +1624,9 @@ ansi-regex@^4.1.0:
integrity sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==
ansi-regex@^5.0.0:
- version "5.0.1"
- resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304"
- integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==
+ version "5.0.0"
+ resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.0.tgz#388539f55179bf39339c81af30a654d69f87cb75"
+ integrity sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==
ansi-styles@^2.2.1:
version "2.2.1"
@@ -1686,11 +1686,6 @@ argparse@^1.0.10, argparse@^1.0.7:
dependencies:
sprintf-js "~1.0.2"
-argparse@^2.0.1:
- version "2.0.1"
- resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38"
- integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==
-
arr-diff@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520"
@@ -2565,7 +2560,7 @@ cli-boxes@^2.2.0:
resolved "https://registry.yarnpkg.com/cli-boxes/-/cli-boxes-2.2.0.tgz#538ecae8f9c6ca508e3c3c95b453fe93cb4c168d"
integrity sha512-gpaBrMAizVEANOpfZp/EEUixTXDyGt7DFzdK5hU+UbWt/J0lB0w20ncZj59Z9a93xHb9u12zF5BS6i9RKbtg4w==
-clipboard@^2.0.6:
+clipboard@^2.0.0, clipboard@^2.0.6:
version "2.0.6"
resolved "https://registry.yarnpkg.com/clipboard/-/clipboard-2.0.6.tgz#52921296eec0fdf77ead1749421b21c968647376"
integrity sha512-g5zbiixBRk/wyKakSwCKd7vQXDjFnAMGHoEyBogG/bw9kTD9GvdAvaoRR1ALcEzt3pVKxZR0pViekPMIS0QyGg==
@@ -2655,9 +2650,9 @@ color-name@^1.0.0, color-name@^1.1.4, color-name@~1.1.4:
integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==
color-string@^1.5.2, color-string@^1.5.3:
- version "1.5.5"
- resolved "https://registry.yarnpkg.com/color-string/-/color-string-1.5.5.tgz#65474a8f0e7439625f3d27a6a19d89fc45223014"
- integrity sha512-jgIoum0OfQfq9Whcfc2z/VhCNcmQjWbey6qBX0vqt7YICflUmBCh9E9CiQD5GSJ+Uehixm3NUwHVhqUAWRivZg==
+ version "1.5.3"
+ resolved "https://registry.yarnpkg.com/color-string/-/color-string-1.5.3.tgz#c9bbc5f01b58b5492f3d6857459cb6590ce204cc"
+ integrity sha512-dC2C5qeWoYkxki5UAXapdjqO672AM4vZuPGRQfO8b5HKuKGBbKWpITyDYN7TOFKvRW7kOgAn3746clDBMDJyQw==
dependencies:
color-name "^1.0.0"
simple-swizzle "^0.2.2"
@@ -3749,10 +3744,10 @@ entities@^1.1.1, entities@~1.1.1:
resolved "https://registry.yarnpkg.com/entities/-/entities-1.1.2.tgz#bdfa735299664dfafd34529ed4f8522a275fea56"
integrity sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w==
-entities@^2.0.0, entities@^2.0.3, entities@~2.1.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/entities/-/entities-2.1.0.tgz#992d3129cf7df6870b96c57858c249a120f8b8b5"
- integrity sha512-hCx1oky9PFrJ611mf0ifBLBRW8lUUVRlFolb5gWRfIELabBlbp9xZvrqZLZAs+NxFnbfQoeGd8wDkygjg7U85w==
+entities@^2.0.0, entities@^2.0.3, entities@~2.0.0:
+ version "2.0.3"
+ resolved "https://registry.yarnpkg.com/entities/-/entities-2.0.3.tgz#5c487e5742ab93c15abb5da22759b8590ec03b7f"
+ integrity sha512-MyoZ0jgnLvB2X3Lg5HqpFmn1kybDiIfEQmKzTb5apr51Rb+T3KdmMiqa70T+bhGnyv7bQ6WMj2QMHpGMmlrUYQ==
envify@^4.0.0, envify@^4.1.0:
version "4.1.0"
@@ -4117,13 +4112,6 @@ fast-json-stable-stringify@^2.0.0, fast-json-stable-stringify@^2.1.0:
resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633"
integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==
-fast-xml-parser@^3.19.0:
- version "3.21.1"
- resolved "https://registry.yarnpkg.com/fast-xml-parser/-/fast-xml-parser-3.21.1.tgz#152a1d51d445380f7046b304672dd55d15c9e736"
- integrity sha512-FTFVjYoBOZTJekiUsawGsSYV9QL0A+zDYCRj7y34IO6Jg+2IMYEtQa+bbictpdpV8dHxXywqU7C0gRDEOFtBFg==
- dependencies:
- strnum "^1.0.4"
-
fastq@^1.6.0:
version "1.8.0"
resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.8.0.tgz#550e1f9f59bbc65fe185cb6a9b4d95357107f481"
@@ -4266,9 +4254,9 @@ flush-write-stream@^2.0.0:
readable-stream "^3.1.1"
follow-redirects@^1.0.0, follow-redirects@^1.12.1:
- version "1.14.8"
- resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.14.8.tgz#016996fb9a11a100566398b1c6839337d7bfa8fc"
- integrity sha512-1x0S9UVJHsQprFcEC/qnNzBLcIxsjAV905f/UkQxbclCsoTWlacCNOpQa/anodLl2uaEKFhfWOvM2Qg77+15zA==
+ version "1.12.1"
+ resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.12.1.tgz#de54a6205311b93d60398ebc01cf7015682312b6"
+ integrity sha512-tmRv0AVuR7ZyouUHLeNSiO6pqulF7dYa3s19c6t+wz9LD69/uSzdMxJ2S91nTI9U3rt/IldxpzMOFejp6f0hjg==
for-in@^1.0.2:
version "1.0.2"
@@ -5542,11 +5530,11 @@ is-svg@^3.0.0:
html-comment-regex "^1.1.0"
is-svg@^4.2.1:
- version "4.3.0"
- resolved "https://registry.yarnpkg.com/is-svg/-/is-svg-4.3.0.tgz#3e46a45dcdb2780e42a3c8538154d7f7bfc07216"
- integrity sha512-Np3TOGLVr0J27VDaS/gVE7bT45ZcSmX4pMmMTsPjqO8JY383fuPIcWmZr3QsHVWhqhZWxSdmW+tkkl3PWOB0Nw==
+ version "4.2.2"
+ resolved "https://registry.yarnpkg.com/is-svg/-/is-svg-4.2.2.tgz#a4ea0f3f78dada7085db88f1e85b6f845626cfae"
+ integrity sha512-JlA7Mc7mfWjdxxTkJ094oUK9amGD7gQaj5xA/NCY0vlVvZ1stmj4VX+bRuwOMN93IHRZ2ctpPH/0FO6DqvQ5Rw==
dependencies:
- fast-xml-parser "^3.19.0"
+ html-comment-regex "^1.1.2"
is-symbol@^1.0.2, is-symbol@^1.0.3:
version "1.0.3"
@@ -5726,10 +5714,10 @@ json-schema@0.2.3:
resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13"
integrity sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=
-json-schema@^0.4.0:
- version "0.4.0"
- resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.4.0.tgz#f7de4cf6efab838ebaeb3236474cbba5a1930ab5"
- integrity sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==
+json-schema@^0.2.5:
+ version "0.2.5"
+ resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.5.tgz#97997f50972dd0500214e208c407efa4b5d7063b"
+ integrity sha512-gWJOWYFrhQ8j7pVm0EM8Slr+EPVq1Phf6lvzvD/WCeqkrx/f2xBI0xOsRRS9xCn3I4vKtP519dvs3TP09r24wQ==
json-stringify-safe@^5.0.1, json-stringify-safe@~5.0.1:
version "5.0.1"
@@ -6147,13 +6135,13 @@ markdown-it-table-of-contents@^0.4.0, markdown-it-table-of-contents@^0.4.4:
resolved "https://registry.yarnpkg.com/markdown-it-table-of-contents/-/markdown-it-table-of-contents-0.4.4.tgz#3dc7ce8b8fc17e5981c77cc398d1782319f37fbc"
integrity sha512-TAIHTHPwa9+ltKvKPWulm/beozQU41Ab+FIefRaQV1NRnpzwcV9QOe6wXQS5WLivm5Q/nlo0rl6laGkMDZE7Gw==
-markdown-it@^12.3.2:
- version "12.3.2"
- resolved "https://registry.yarnpkg.com/markdown-it/-/markdown-it-12.3.2.tgz#bf92ac92283fe983fe4de8ff8abfb5ad72cd0c90"
- integrity sha512-TchMembfxfNVpHkbtriWltGWc+m3xszaRD0CZup7GFFhzIgQqxIfn3eGj1yZpfuflzPvfkt611B2Q/Bsk1YnGg==
+markdown-it@^11.0.0:
+ version "11.0.0"
+ resolved "https://registry.yarnpkg.com/markdown-it/-/markdown-it-11.0.0.tgz#dbfc30363e43d756ebc52c38586b91b90046b876"
+ integrity sha512-+CvOnmbSubmQFSA9dKz1BRiaSMV7rhexl3sngKqFyXSagoA3fBdJQ8oZWtRy2knXdpDXaBw44euz37DeJQ9asg==
dependencies:
- argparse "^2.0.1"
- entities "~2.1.0"
+ argparse "^1.0.7"
+ entities "~2.0.0"
linkify-it "^3.0.1"
mdurl "^1.0.1"
uc.micro "^1.0.5"
@@ -6417,10 +6405,10 @@ minipass@^3.0.0, minipass@^3.1.1:
dependencies:
yallist "^4.0.0"
-minizlib@^2.1.1:
- version "2.1.2"
- resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-2.1.2.tgz#e90d3466ba209b932451508a11ce3d3632145931"
- integrity sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==
+minizlib@^2.1.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-2.1.0.tgz#fd52c645301ef09a63a2c209697c294c6ce02cf3"
+ integrity sha512-EzTZN/fjSvifSX0SlqUERCN39o6T40AMarPbv0MrarSFtIITCBh7bi+dU8nxGFHuqs9jdIAeoYoKuQAAASsPPA==
dependencies:
minipass "^3.0.0"
yallist "^4.0.0"
@@ -6607,10 +6595,10 @@ node-forge@0.9.0:
resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.9.0.tgz#d624050edbb44874adca12bb9a52ec63cb782579"
integrity sha512-7ASaDa3pD+lJ3WvXFsxekJQelBKRpne+GOVbLbtHYdd7pFspyeuJHnWfLplGf3SwKGbfs/aYl5V/JCIaHVUKKQ==
-node-forge@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-1.0.0.tgz#a025e3beeeb90d9cee37dae34d25b968ec3e6f15"
- integrity sha512-ShkiiAlzSsgH1IwGlA0jybk9vQTIOLyJ9nBd0JTuP+nzADJFLY0NoDijM2zvD/JaezooGu3G2p2FNxOAK6459g==
+node-forge@^0.10.0:
+ version "0.10.0"
+ resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.10.0.tgz#32dea2afb3e9926f02ee5ce8794902691a676bf3"
+ integrity sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==
node-libs-browser@^2.2.1:
version "2.2.1"
@@ -6738,13 +6726,6 @@ nth-check@^1.0.2, nth-check@~1.0.1:
dependencies:
boolbase "~1.0.0"
-nth-check@^2.0.1:
- version "2.0.1"
- resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-2.0.1.tgz#2efe162f5c3da06a28959fbd3db75dbeea9f0fc2"
- integrity sha512-it1vE95zF6dTT9lBsYbxvqh0Soy4SPowchj0UBGj/V6cTPnXXtQOPUbhZ6CmGzAD/rW22LQK6E96pcdJXk4A4w==
- dependencies:
- boolbase "^1.0.0"
-
num2fraction@^1.2.2:
version "1.2.2"
resolved "https://registry.yarnpkg.com/num2fraction/-/num2fraction-1.2.2.tgz#6f682b6a027a4e9ddfa4564cd2589d1d4e669ede"
@@ -7192,9 +7173,9 @@ path-key@^3.0.0, path-key@^3.1.0, path-key@^3.1.1:
integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==
path-parse@^1.0.6:
- version "1.0.7"
- resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735"
- integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==
+ version "1.0.6"
+ resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.6.tgz#d62dbb5679405d72c4737ec58600e9ddcf06d24c"
+ integrity sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==
path-to-regexp@0.1.7:
version "0.1.7"
@@ -7718,9 +7699,11 @@ pretty-time@^1.1.0:
integrity sha512-28iF6xPQrP8Oa6uxE6a1biz+lWeTOAPKggvjB8HAs6nVMKZwf5bG++632Dx614hIWgUPkgivRfG+a8uAXGTIbA==
prismjs@^1.13.0, prismjs@^1.20.0:
- version "1.27.0"
- resolved "https://registry.yarnpkg.com/prismjs/-/prismjs-1.27.0.tgz#bb6ee3138a0b438a3653dd4d6ce0cc6510a45057"
- integrity sha512-t13BGPUlFDR7wRB5kQDG4jjl7XeuH6jbJGt11JHPL96qwsEHNX2+68tFXqc1/k+/jALsbSWJKUOT/hcYAZ5LkA==
+ version "1.23.0"
+ resolved "https://registry.yarnpkg.com/prismjs/-/prismjs-1.23.0.tgz#d3b3967f7d72440690497652a9d40ff046067f33"
+ integrity sha512-c29LVsqOaLbBHuIbsTxaKENh1N2EQBOHaWv7gkHN4dgRbxSREqDnDbtFJYdpPauS4YCplMSNCABQ6Eeor69bAA==
+ optionalDependencies:
+ clipboard "^2.0.0"
private@^0.1.8:
version "0.1.8"
@@ -8439,9 +8422,9 @@ set-blocking@^2.0.0:
integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc=
set-getter@^0.1.0:
- version "0.1.1"
- resolved "https://registry.yarnpkg.com/set-getter/-/set-getter-0.1.1.tgz#a3110e1b461d31a9cfc8c5c9ee2e9737ad447102"
- integrity sha512-9sVWOy+gthr+0G9DzqqLaYNA7+5OKkSmcqjL9cBpDEaZrr3ShQlyX2cZ/O/ozE41oxn/Tt0LGEM/w4Rub3A3gw==
+ version "0.1.0"
+ resolved "https://registry.yarnpkg.com/set-getter/-/set-getter-0.1.0.tgz#d769c182c9d5a51f409145f2fba82e5e86e80376"
+ integrity sha1-12nBgsnVpR9AkUXy+6guXoboA3Y=
dependencies:
to-object-path "^0.3.0"
@@ -8455,20 +8438,13 @@ set-value@^2.0.0, set-value@^2.0.1:
is-plain-object "^2.0.3"
split-string "^3.0.1"
-set-value@^3.0.0:
+set-value@^3.0.0, set-value@^3.0.2:
version "3.0.2"
resolved "https://registry.yarnpkg.com/set-value/-/set-value-3.0.2.tgz#74e8ecd023c33d0f77199d415409a40f21e61b90"
integrity sha512-npjkVoz+ank0zjlV9F47Fdbjfj/PfXyVhZvGALWsyIYU/qrMzpi6avjKW3/7KeSU2Df3I46BrN1xOI1+6vW0hA==
dependencies:
is-plain-object "^2.0.4"
-set-value@^4.0.1:
- version "4.0.1"
- resolved "https://registry.yarnpkg.com/set-value/-/set-value-4.0.1.tgz#bc23522ade2d52314ec3b5d6fb140f5cd3a88acf"
- integrity sha512-ayATicCYPVnlNpFmjq2/VmVwhoCQA9+13j8qWp044fmFE3IFphosPtRM+0CJ5xoIx5Uy52fCcwg3XeH2pHbbPQ==
- dependencies:
- is-plain-object "^2.0.4"
-
setimmediate@^1.0.4, setimmediate@^1.0.5:
version "1.0.5"
resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285"
@@ -9092,11 +9068,6 @@ strip-json-comments@~2.0.1:
resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a"
integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo=
-strnum@^1.0.4:
- version "1.0.5"
- resolved "https://registry.yarnpkg.com/strnum/-/strnum-1.0.5.tgz#5c4e829fe15ad4ff0d20c3db5ac97b73c9b072db"
- integrity sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA==
-
stylehacks@^4.0.0, stylehacks@^4.0.3:
version "4.0.3"
resolved "https://registry.yarnpkg.com/stylehacks/-/stylehacks-4.0.3.tgz#6718fcaf4d1e07d8a1318690881e8d96726a71d5"
@@ -9185,14 +9156,14 @@ tapable@^1.0.0, tapable@^1.1.3:
integrity sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA==
tar@^6.0.2:
- version "6.1.11"
- resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.11.tgz#6760a38f003afa1b2ffd0ffe9e9abbd0eab3d621"
- integrity sha512-an/KZQzQUkZCkuoAA64hM92X0Urb6VpRhAFllDzz44U2mcD5scmT3zBc4VgVpkugF580+DQn8eAFSyoQt0tznA==
+ version "6.0.2"
+ resolved "https://registry.yarnpkg.com/tar/-/tar-6.0.2.tgz#5df17813468a6264ff14f766886c622b84ae2f39"
+ integrity sha512-Glo3jkRtPcvpDlAs/0+hozav78yoXKFr+c4wgw62NNMO3oo4AaJdCo21Uu7lcwr55h39W2XD1LMERc64wtbItg==
dependencies:
chownr "^2.0.0"
fs-minipass "^2.0.0"
minipass "^3.0.0"
- minizlib "^2.1.1"
+ minizlib "^2.1.0"
mkdirp "^1.0.3"
yallist "^4.0.0"
@@ -9681,9 +9652,9 @@ url-parse-lax@^3.0.0:
prepend-http "^2.0.0"
url-parse@^1.4.3, url-parse@^1.4.7:
- version "1.5.9"
- resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.9.tgz#05ff26484a0b5e4040ac64dcee4177223d74675e"
- integrity sha512-HpOvhKBvre8wYez+QhHcYiVvVmeF6DVnuSOOPhe3cTum3BnqHhvKaZm8FU5yTiOu/Jut2ZpB2rA/SbBA1JIGlQ==
+ version "1.5.0"
+ resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.0.tgz#90aba6c902aeb2d80eac17b91131c27665d5d828"
+ integrity sha512-9iT6N4s93SMfzunOyDPe4vo4nLcSu1yq0IQK1gURmjm8tQNlM6loiuCRrKG1hHGXfB2EWd6H4cGi7tGdaygMFw==
dependencies:
querystringify "^2.1.1"
requires-port "^1.0.0"
diff --git a/frontend/.babelrc b/frontend/.babelrc
deleted file mode 100644
index 54071ecd..00000000
--- a/frontend/.babelrc
+++ /dev/null
@@ -1,17 +0,0 @@
-{
- "presets": [
- [
- "env",
- {
- "targets": {
- "browsers": [
- "Chrome >= 65"
- ]
- },
- "debug": false,
- "modules": false,
- "useBuiltIns": "usage"
- }
- ]
- ]
-}
\ No newline at end of file
diff --git a/frontend/.env.development b/frontend/.env.development
new file mode 100644
index 00000000..6d0e75ca
--- /dev/null
+++ b/frontend/.env.development
@@ -0,0 +1,4 @@
+PORT=9000
+IMAGE_INLINE_SIZE_LIMIT=20000
+REACT_APP_VERSION=development
+REACT_APP_COMMIT=local
\ No newline at end of file
diff --git a/frontend/.eslintrc b/frontend/.eslintrc
new file mode 100644
index 00000000..383f205c
--- /dev/null
+++ b/frontend/.eslintrc
@@ -0,0 +1,127 @@
+{
+ "parser": "@typescript-eslint/parser",
+ "plugins": [
+ "@typescript-eslint",
+ "prettier",
+ "import",
+ "react-hooks"
+ ],
+ "extends": [
+ "react-app",
+ "eslint-config-prettier",
+ "plugin:@typescript-eslint/recommended",
+ "plugin:prettier/recommended",
+ "prettier"
+ ],
+ "env": {
+ "jest": true,
+ "browser": true,
+ "commonjs": true
+ },
+ "rules": {
+ "prettier/prettier": [
+ "error"
+ ],
+ "@typescript-eslint/ban-ts-comment": [
+ "error",
+ {
+ "ts-ignore": "allow-with-description"
+ }
+ ],
+ "@typescript-eslint/consistent-type-definitions": [
+ "error",
+ "interface"
+ ],
+ "@typescript-eslint/explicit-function-return-type": [
+ "off"
+ ],
+ "@typescript-eslint/explicit-module-boundary-types": [
+ "off"
+ ],
+ "@typescript-eslint/explicit-member-accessibility": [
+ "off"
+ ],
+ "@typescript-eslint/no-empty-function": [
+ "off"
+ ],
+ "@typescript-eslint/no-explicit-any": [
+ "off"
+ ],
+ "@typescript-eslint/no-non-null-assertion": [
+ "off"
+ ],
+ "@typescript-eslint/naming-convention": [
+ "error",
+ {
+ "selector": "default",
+ "format": [
+ "camelCase",
+ "PascalCase",
+ "UPPER_CASE"
+ ],
+ "leadingUnderscore": "allow",
+ "trailingUnderscore": "allow"
+ }
+ ],
+ "react-hooks/rules-of-hooks": [
+ "error"
+ ],
+ "react-hooks/exhaustive-deps": [
+ "warn",
+ {
+ "additionalHooks": "useAction|useReduxAction"
+ }
+ ],
+ "react/jsx-curly-brace-presence": [
+ "warn",
+ {
+ "props": "never",
+ "children": "never",
+ }
+ ],
+ "no-restricted-globals": [
+ "off"
+ ],
+ "import/extensions": 0, // We let webpack handle resolving file extensions
+ "import/order": [
+ "error",
+ {
+ "alphabetize": {
+ "order": "asc",
+ "caseInsensitive": true
+ },
+ "newlines-between": "always",
+ "pathGroups": [
+ {
+ "pattern": "@(react)",
+ "group": "external",
+ "position": "before"
+ },
+ {
+ "pattern": "@/@(fixtures|jest)/**",
+ "group": "internal",
+ "position": "before"
+ },
+ {
+ "pattern": "@/**",
+ "group": "internal"
+ }
+ ],
+ "pathGroupsExcludedImportTypes": [
+ "builtin",
+ "internal"
+ ],
+ "groups": [
+ "builtin",
+ "external",
+ "internal",
+ [
+ "parent",
+ "sibling",
+ "index"
+ ]
+ ]
+ }
+ ]
+ }
+}
\ No newline at end of file
diff --git a/frontend/.gitignore b/frontend/.gitignore
index c8f4b4f9..aa97ba4b 100644
--- a/frontend/.gitignore
+++ b/frontend/.gitignore
@@ -1,4 +1,4 @@
-dist
-node_modules
-webpack_stats.html
-yarn-error.log
+.eslintcache
+coverage
+junit.xml
+eslint.xml
diff --git a/backend/.prettierrc b/frontend/.prettierrc
similarity index 55%
rename from backend/.prettierrc
rename to frontend/.prettierrc
index fefbcfa6..9e6bec4a 100644
--- a/backend/.prettierrc
+++ b/frontend/.prettierrc
@@ -1,11 +1,11 @@
{
- "printWidth": 320,
- "tabWidth": 4,
+ "printWidth": 80,
+ "tabWidth": 2,
"useTabs": true,
"semi": true,
- "singleQuote": true,
+ "singleQuote": false,
"bracketSpacing": true,
- "jsxBracketSameLine": true,
+ "bracketSameLine": true,
"trailingComma": "all",
"proseWrap": "always"
}
diff --git a/frontend/README.md b/frontend/README.md
new file mode 100644
index 00000000..c0b8c7ec
--- /dev/null
+++ b/frontend/README.md
@@ -0,0 +1,20 @@
+# NPM Frontend
+
+The frontend package is a React based UI, bootstrapped with
+[Create React App](https://github.com/facebook/create-react-app) and written in
+Typescript.
+
+## React Modules
+
+- [Chakra UI](https://chakra-ui.com/)
+- [Formik](https://formik.org/)
+- [React Query](https://react-query.tanstack.com/)
+- [React Table](https://react-table.tanstack.com/)
+- [React Router](https://reactrouter.com/)
+- [Format.js](https://formatjs.io/docs/getting-started/installation/)
+- [Rooks](https://react-hooks.org/)
+
+Some light reading that inspired the concepts used in the UI:
+
+- [Using react-query with react-table](https://nafeu.medium.com/using-react-query-with-react-table-884158535424)
+- [Server side pagination using react-table and react-query](https://dev.to/elangobharathi/server-side-pagination-using-react-table-v7-and-react-query-v3-3lck)
diff --git a/frontend/check-locales.js b/frontend/check-locales.js
new file mode 100755
index 00000000..0e71ab40
--- /dev/null
+++ b/frontend/check-locales.js
@@ -0,0 +1,127 @@
+#!/usr/bin/env node
+
+// This file does a few things to ensure that the Locales are present and valid:
+// - Ensures that the name of the locale exists in the language list
+// - Ensures that each locale contains the translations used in the application
+// - Ensures that there are no unused translations in the locale files
+// - Also checks the error messages returned by the backend
+
+const allLocales = [
+ ["en", "en-US"],
+ ["de", "de-DE"],
+ ["fa", "fa-IR"],
+];
+
+const ignoreUnused = [/^capability\..*$/, /^host-type\..*$/, /^acmesh\..*$/];
+
+const { spawnSync } = require("child_process");
+const fs = require("fs");
+
+const tmp = require("tmp");
+
+// Parse backend errors
+const BACKEND_ERRORS_FILE = "../backend/internal/errors/errors.go";
+const BACKEND_ERRORS = [];
+try {
+ const backendErrorsContent = fs.readFileSync(BACKEND_ERRORS_FILE, "utf8");
+ const backendErrorsContentRes = [
+ ...backendErrorsContent.matchAll(/errors\.New\("([^"]+)"\)/g),
+ ];
+ backendErrorsContentRes.map((item) => {
+ BACKEND_ERRORS.push("error." + item[1]);
+ return null;
+ });
+} catch (err) {
+ console.log("\x1b[31m%s\x1b[0m", err);
+ process.exit(1);
+}
+
+// get all translations used in frontend code
+const tmpobj = tmp.fileSync({ postfix: ".json" });
+spawnSync("yarn", ["locale-extract", "--out-file", tmpobj.name]);
+
+const allLocalesInProject = require(tmpobj.name);
+
+// get list of language names and locales
+const langList = require("./src/locale/src/lang-list.json");
+
+// store a list of all validation errors
+const allErrors = [];
+
+const checkLangList = (fullCode) => {
+ const key = "locale-" + fullCode;
+ if (typeof langList[key] === "undefined") {
+ allErrors.push(
+ "ERROR: `" + key + "` language does not exist in lang-list.json",
+ );
+ }
+};
+
+const compareLocale = (locale) => {
+ const projectLocaleKeys = Object.keys(allLocalesInProject);
+ // Check that locale contains the items used in the codebase
+ projectLocaleKeys.map((key) => {
+ if (typeof locale.data[key] === "undefined") {
+ allErrors.push(
+ "ERROR: `" + locale[0] + "` does not contain item: `" + key + "`",
+ );
+ }
+ return null;
+ });
+ // Check that locale contains all error.* items
+ BACKEND_ERRORS.forEach((key) => {
+ if (typeof locale.data[key] === "undefined") {
+ allErrors.push(
+ "ERROR: `" + locale[0] + "` does not contain item: `" + key + "`",
+ );
+ }
+ return null;
+ });
+
+ // Check that locale does not contain items not used in the codebase
+ const localeKeys = Object.keys(locale.data);
+ localeKeys.map((key) => {
+ let ignored = false;
+ ignoreUnused.map((regex) => {
+ if (key.match(regex)) {
+ ignored = true;
+ }
+ return null;
+ });
+
+ if (!ignored && typeof allLocalesInProject[key] === "undefined") {
+ // ensure this key doesn't exist in the backend errors either
+ if (!BACKEND_ERRORS.includes(key)) {
+ allErrors.push(
+ "ERROR: `" + locale[0] + "` contains unused item: `" + key + "`",
+ );
+ }
+ }
+ return null;
+ });
+};
+
+// Load all locale data
+allLocales.map((locale, idx) => {
+ checkLangList(locale[1]);
+ allLocales[idx].data = require("./src/locale/src/" + locale[0] + ".json");
+ return null;
+});
+
+// Verify all locale data
+allLocales.map((locale) => {
+ compareLocale(locale);
+ return null;
+});
+
+if (allErrors.length) {
+ allErrors.map((err) => {
+ console.log("\x1b[31m%s\x1b[0m", err);
+ return null;
+ });
+
+ process.exit(1);
+}
+
+console.log("\x1b[32m%s\x1b[0m", "Locale check passed");
+process.exit(0);
diff --git a/frontend/fonts/feather b/frontend/fonts/feather
deleted file mode 120000
index 440203ba..00000000
--- a/frontend/fonts/feather
+++ /dev/null
@@ -1 +0,0 @@
-../node_modules/tabler-ui/dist/assets/fonts/feather
\ No newline at end of file
diff --git a/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-700.woff b/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-700.woff
deleted file mode 100644
index 96d8768e..00000000
Binary files a/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-700.woff and /dev/null differ
diff --git a/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-700.woff2 b/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-700.woff2
deleted file mode 100644
index e97a2218..00000000
Binary files a/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-700.woff2 and /dev/null differ
diff --git a/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-700italic.woff b/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-700italic.woff
deleted file mode 100644
index 0829caef..00000000
Binary files a/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-700italic.woff and /dev/null differ
diff --git a/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-700italic.woff2 b/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-700italic.woff2
deleted file mode 100644
index 7c901cd8..00000000
Binary files a/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-700italic.woff2 and /dev/null differ
diff --git a/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-italic.woff b/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-italic.woff
deleted file mode 100644
index 99652481..00000000
Binary files a/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-italic.woff and /dev/null differ
diff --git a/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-italic.woff2 b/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-italic.woff2
deleted file mode 100644
index 343e5ba8..00000000
Binary files a/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-italic.woff2 and /dev/null differ
diff --git a/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-regular.woff b/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-regular.woff
deleted file mode 100644
index 92c3260e..00000000
Binary files a/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-regular.woff and /dev/null differ
diff --git a/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-regular.woff2 b/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-regular.woff2
deleted file mode 100644
index d552543b..00000000
Binary files a/frontend/fonts/source-sans-pro/source-sans-pro-v14-latin-ext_latin-regular.woff2 and /dev/null differ
diff --git a/frontend/globalSetup.js b/frontend/globalSetup.js
new file mode 100644
index 00000000..7ba92707
--- /dev/null
+++ b/frontend/globalSetup.js
@@ -0,0 +1,4 @@
+module.exports = async () => {
+ process.env.TZ = "Australia/Queensland";
+ process.env.IMAGE_INLINE_SIZE_LIMIT = "20000";
+};
diff --git a/frontend/html/index.ejs b/frontend/html/index.ejs
deleted file mode 100644
index ae08b012..00000000
--- a/frontend/html/index.ejs
+++ /dev/null
@@ -1,9 +0,0 @@
-<% var title = 'Nginx Proxy Manager' %>
-<%- include partials/header.ejs %>
-
-
-
-
-
-
-<%- include partials/footer.ejs %>
diff --git a/frontend/html/login.ejs b/frontend/html/login.ejs
deleted file mode 100644
index bc4b9a27..00000000
--- a/frontend/html/login.ejs
+++ /dev/null
@@ -1,9 +0,0 @@
-<% var title = 'Login – Nginx Proxy Manager' %>
-<%- include partials/header.ejs %>
-
-
-
-
-
-
-<%- include partials/footer.ejs %>
diff --git a/frontend/html/partials/footer.ejs b/frontend/html/partials/footer.ejs
deleted file mode 100644
index 7fb2bd61..00000000
--- a/frontend/html/partials/footer.ejs
+++ /dev/null
@@ -1,2 +0,0 @@
-