diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 000000000..2ff6c7e6d
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,8 @@
+# Ignore everything
+*
+
+# Only allow the following for docker build:
+!backend/
+!docker/
+!scripts/
+!test/
diff --git a/.gitignore b/.gitignore
index 08462849d..d3fde6d23 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,29 @@
+git.idea
+.env
.DS_Store
-.idea
._*
+*.code-workspace
+vendor
+bin/*
+backend/config.json
+backend/embed/assets
+backend/.task
+backend/coverage.out
+backend/coverage.html
+test/node_modules
+*/node_modules
+docs/.vuepress/dist
+frontend/build
+frontend/yarn-error.log
+frontend/.npmrc
+frontend/src/locale/lang
+test/cypress/fixtures/example.json
.vscode
-certbot-help.txt
+docker-build
+data
+dist
+backend/embed/acme.sh
+docker/dev/resolv.conf
+docker/dev/dnsrouter-config.json.tmp
+thunder-tests
+test/cypress/videos
diff --git a/.version b/.version
index b0e185b74..5efd7ac5b 100644
--- a/.version
+++ b/.version
@@ -1 +1 @@
-2.9.18
+3.0.0a
diff --git a/Jenkinsfile b/Jenkinsfile
index 1b7446924..03f054d60 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -1,21 +1,36 @@
+import groovy.transform.Field
+
+@Field
+def buildxPushTags = ""
+
+def getVersion() {
+ ver = sh(script: 'cat .version', returnStdout: true)
+ return ver.trim()
+}
+
+def getCommit() {
+ ver = sh(script: 'git log -n 1 --format=%h', returnStdout: true)
+ return ver.trim()
+}
+
pipeline {
agent {
label 'docker-multiarch'
}
options {
- buildDiscarder(logRotator(numToKeepStr: '5'))
+ buildDiscarder(logRotator(numToKeepStr: '10'))
disableConcurrentBuilds()
ansiColor('xterm')
}
environment {
- IMAGE = "nginx-proxy-manager"
+ DOCKER_ORG = 'jc21'
+ IMAGE = 'nginx-proxy-manager'
BUILD_VERSION = getVersion()
- MAJOR_VERSION = "2"
- BRANCH_LOWER = "${BRANCH_NAME.toLowerCase().replaceAll('/', '-')}"
- COMPOSE_PROJECT_NAME = "npm_${BRANCH_LOWER}_${BUILD_NUMBER}"
- COMPOSE_FILE = 'docker/docker-compose.ci.yml'
+ BUILD_COMMIT = getCommit()
+ MAJOR_VERSION = '3'
+ BRANCH_LOWER = "${BRANCH_NAME.toLowerCase().replaceAll('\\\\', '-').replaceAll('/', '-').replaceAll('\\.', '-')}"
+ BUILDX_NAME = "npm_${BRANCH_LOWER}_${BUILD_NUMBER}"
COMPOSE_INTERACTIVE_NO_CLI = 1
- BUILDX_NAME = "${COMPOSE_PROJECT_NAME}"
}
stages {
stage('Environment') {
@@ -26,7 +41,9 @@ pipeline {
}
steps {
script {
- env.BUILDX_PUSH_TAGS = "-t docker.io/jc21/${IMAGE}:${BUILD_VERSION} -t docker.io/jc21/${IMAGE}:${MAJOR_VERSION} -t docker.io/jc21/${IMAGE}:latest"
+ buildxPushTags = "-t docker.io/${DOCKER_ORG}/${IMAGE}:${BUILD_VERSION} -t docker.io/${DOCKER_ORG}/${IMAGE}:${MAJOR_VERSION} -t docker.io/${DOCKER_ORG}/${IMAGE}:latest"
+ echo 'Building on Master is disabled!'
+ sh 'exit 1'
}
}
}
@@ -39,121 +56,146 @@ pipeline {
steps {
script {
// Defaults to the Branch name, which is applies to all branches AND pr's
- env.BUILDX_PUSH_TAGS = "-t docker.io/jc21/${IMAGE}:github-${BRANCH_LOWER}"
+ // buildxPushTags = "-t docker.io/jc21/${IMAGE}:github-${BRANCH_LOWER}"
+ buildxPushTags = "-t docker.io/${DOCKER_ORG}/${IMAGE}:v3"
+ }
+ }
+ }
+ }
+ }
+ stage('Build') {
+ parallel {
+ stage('Project') {
+ steps {
+ sh './scripts/ci/build-frontend'
+ sh './scripts/ci/test-backend'
+ // Temporarily disable building backend binaries
+ // sh './scripts/ci/build-backend'
+ // Build the docker image used for testing below
+ sh '''docker build --pull --no-cache \\
+ -t "${IMAGE}:${BRANCH_LOWER}-ci-${BUILD_NUMBER}" \\
+ -f docker/Dockerfile \\
+ --build-arg BUILD_COMMIT="${BUILD_COMMIT}" \\
+ --build-arg BUILD_DATE="$(date '+%Y-%m-%d %T %Z')" \\
+ --build-arg BUILD_VERSION="${BUILD_VERSION}" \\
+ .
+ '''
+ }
+ post {
+ success {
+ junit 'test/results/junit/*'
+ // archiveArtifacts allowEmptyArchive: false, artifacts: 'bin/*'
+ publishHTML([
+ allowMissing: false,
+ alwaysLinkToLastBuild: false,
+ keepAll: false,
+ reportDir: 'test/results/html-reports',
+ reportFiles: 'backend-coverage.html',
+ reportName: 'HTML Reports',
+ useWrapperFileDirectly: true
+ ])
}
}
}
- stage('Versions') {
+ stage('Docs') {
steps {
- sh 'cat frontend/package.json | jq --arg BUILD_VERSION "${BUILD_VERSION}" \'.version = $BUILD_VERSION\' | sponge frontend/package.json'
- sh 'echo -e "\\E[1;36mFrontend Version is:\\E[1;33m $(cat frontend/package.json | jq -r .version)\\E[0m"'
- sh 'cat backend/package.json | jq --arg BUILD_VERSION "${BUILD_VERSION}" \'.version = $BUILD_VERSION\' | sponge backend/package.json'
- sh 'echo -e "\\E[1;36mBackend Version is:\\E[1;33m $(cat backend/package.json | jq -r .version)\\E[0m"'
- sh 'sed -i -E "s/(version-)[0-9]+\\.[0-9]+\\.[0-9]+(-green)/\\1${BUILD_VERSION}\\2/" README.md'
+ dir(path: 'docs') {
+ sh 'yarn install'
+ sh 'yarn build'
+ }
}
}
}
}
- stage('Frontend') {
- steps {
- sh './scripts/frontend-build'
+ stage('Test Sqlite') {
+ environment {
+ COMPOSE_PROJECT_NAME = "npm_${BRANCH_LOWER}_${BUILD_NUMBER}_sqlite"
+ COMPOSE_FILE = 'docker/docker-compose.ci.yml'
}
- }
- stage('Backend') {
- steps {
- echo 'Checking Syntax ...'
- sh 'docker pull nginxproxymanager/nginx-full:certbot-node'
- // See: https://github.com/yarnpkg/yarn/issues/3254
- sh '''docker run --rm \\
- -v "$(pwd)/backend:/app" \\
- -v "$(pwd)/global:/app/global" \\
- -w /app \\
- nginxproxymanager/nginx-full:certbot-node \\
- sh -c "yarn install && yarn eslint . && rm -rf node_modules"
- '''
-
- echo 'Docker Build ...'
- sh '''docker build --pull --no-cache --squash --compress \\
- -t "${IMAGE}:ci-${BUILD_NUMBER}" \\
- -f docker/Dockerfile \\
- --build-arg TARGETPLATFORM=linux/amd64 \\
- --build-arg BUILDPLATFORM=linux/amd64 \\
- --build-arg BUILD_VERSION="${BUILD_VERSION}" \\
- --build-arg BUILD_COMMIT="${BUILD_COMMIT}" \\
- --build-arg BUILD_DATE="$(date '+%Y-%m-%d %T %Z')" \\
- .
- '''
+ when {
+ not {
+ equals expected: 'UNSTABLE', actual: currentBuild.result
+ }
}
- }
- stage('Integration Tests Sqlite') {
steps {
- // Bring up a stack
- sh 'docker-compose up -d fullstack-sqlite'
- sh './scripts/wait-healthy $(docker-compose ps -q fullstack-sqlite) 120'
-
- // Run tests
- sh 'rm -rf test/results'
- sh 'docker-compose up cypress-sqlite'
- // Get results
- sh 'docker cp -L "$(docker-compose ps -q cypress-sqlite):/test/results" test/'
+ sh 'rm -rf ./test/results/junit/*'
+ sh './scripts/ci/fulltest-cypress'
+ // Adding this here as the schema needs to come from a running stack, but this will be used by docs later
+ sh 'docker-compose exec -T fullstack curl -s --output /temp-docs/api-schema.json "http://fullstack:81/api/schema"'
}
post {
always {
// Dumps to analyze later
- sh 'mkdir -p debug'
- sh 'docker-compose logs fullstack-sqlite | gzip > debug/docker_fullstack_sqlite.log.gz'
- sh 'docker-compose logs db | gzip > debug/docker_db.log.gz'
- // Cypress videos and screenshot artifacts
- dir(path: 'test/results') {
- archiveArtifacts allowEmptyArchive: true, artifacts: '**/*', excludes: '**/*.xml'
- }
+ sh 'mkdir -p debug/sqlite'
+ sh 'docker logs $(docker-compose ps --all -q fullstack) > debug/sqlite/docker_fullstack.log 2>&1'
+ sh 'docker logs $(docker-compose ps --all -q stepca) > debug/sqlite/docker_stepca.log 2>&1'
+ sh 'docker logs $(docker-compose ps --all -q pdns) > debug/sqlite/docker_pdns.log 2>&1'
+ sh 'docker logs $(docker-compose ps --all -q pdns-db) > debug/sqlite/docker_pdns-db.log 2>&1'
+ sh 'docker logs $(docker-compose ps --all -q dnsrouter) > debug/sqlite/docker_dnsrouter.log 2>&1'
junit 'test/results/junit/*'
+ sh 'docker-compose down --remove-orphans --volumes -t 30 || true'
}
}
}
- stage('Integration Tests Mysql') {
+ stage('Test Mysql') {
+ environment {
+ COMPOSE_PROJECT_NAME = "npm_${BRANCH_LOWER}_${BUILD_NUMBER}_mysql"
+ COMPOSE_FILE = 'docker/docker-compose.ci.yml:docker/docker-compose.ci.mysql.yml'
+ }
+ when {
+ not {
+ equals expected: 'UNSTABLE', actual: currentBuild.result
+ }
+ }
steps {
- // Bring up a stack
- sh 'docker-compose up -d fullstack-mysql'
- sh './scripts/wait-healthy $(docker-compose ps -q fullstack-mysql) 120'
-
- // Run tests
- sh 'rm -rf test/results'
- sh 'docker-compose up cypress-mysql'
- // Get results
- sh 'docker cp -L "$(docker-compose ps -q cypress-mysql):/test/results" test/'
+ sh 'rm -rf ./test/results/junit/*'
+ sh './scripts/ci/fulltest-cypress'
}
post {
always {
// Dumps to analyze later
- sh 'mkdir -p debug'
- sh 'docker-compose logs fullstack-mysql | gzip > debug/docker_fullstack_mysql.log.gz'
- sh 'docker-compose logs db | gzip > debug/docker_db.log.gz'
- // Cypress videos and screenshot artifacts
- dir(path: 'test/results') {
- archiveArtifacts allowEmptyArchive: true, artifacts: '**/*', excludes: '**/*.xml'
- }
+ sh 'mkdir -p debug/mysql'
+ sh 'docker logs $(docker-compose ps --all -q fullstack) > debug/mysql/docker_fullstack.log 2>&1'
+ sh 'docker logs $(docker-compose ps --all -q stepca) > debug/mysql/docker_stepca.log 2>&1'
+ sh 'docker logs $(docker-compose ps --all -q pdns) > debug/mysql/docker_pdns.log 2>&1'
+ sh 'docker logs $(docker-compose ps --all -q pdns-db) > debug/mysql/docker_pdns-db.log 2>&1'
+ sh 'docker logs $(docker-compose ps --all -q dnsrouter) > debug/mysql/docker_dnsrouter.log 2>&1'
junit 'test/results/junit/*'
+ sh 'docker-compose down --remove-orphans --volumes -t 30 || true'
}
}
}
- stage('Docs') {
+ stage('Test Postgres') {
+ environment {
+ COMPOSE_PROJECT_NAME = "npm_${BRANCH_LOWER}_${BUILD_NUMBER}_postgres"
+ COMPOSE_FILE = 'docker/docker-compose.ci.yml:docker/docker-compose.ci.postgres.yml'
+ }
when {
not {
equals expected: 'UNSTABLE', actual: currentBuild.result
}
}
steps {
- dir(path: 'docs') {
- sh 'yarn install'
- sh 'yarn build'
- }
+ sh 'rm -rf ./test/results/junit/*'
+ sh './scripts/ci/fulltest-cypress'
+ }
+ post {
+ always {
+ // Dumps to analyze later
+ sh 'mkdir -p debug/postgres'
+ sh 'docker logs $(docker-compose ps --all -q fullstack) > debug/postgres/docker_fullstack.log 2>&1'
+ sh 'docker logs $(docker-compose ps --all -q stepca) > debug/postgres/docker_stepca.log 2>&1'
+ sh 'docker logs $(docker-compose ps --all -q pdns) > debug/postgres/docker_pdns.log 2>&1'
+ sh 'docker logs $(docker-compose ps --all -q pdns-db) > debug/postgres/docker_pdns-db.log 2>&1'
+ sh 'docker logs $(docker-compose ps --all -q dnsrouter) > debug/postgres/docker_dnsrouter.log 2>&1'
+ sh 'docker logs $(docker-compose ps --all -q db-postgres) > debug/postgres/docker_db.log 2>&1'
+ sh 'docker logs $(docker-compose ps --all -q authentik) > debug/postgres/docker_authentik.log 2>&1'
+ sh 'docker logs $(docker-compose ps --all -q authentik-redis) > debug/postgres/docker_authentik-redis.log 2>&1'
+ sh 'docker logs $(docker-compose ps --all -q authentik-ldap) > debug/postgres/docker_authentik-ldap.log 2>&1'
- dir(path: 'docs/.vuepress/dist') {
- sh 'tar -czf ../../docs.tgz *'
+ junit 'test/results/junit/*'
+ sh 'docker-compose down --remove-orphans --volumes -t 30 || true'
}
-
- archiveArtifacts(artifacts: 'docs/docs.tgz', allowEmptyArchive: false)
}
}
stage('MultiArch Build') {
@@ -164,90 +206,63 @@ pipeline {
}
steps {
withCredentials([usernamePassword(credentialsId: 'jc21-dockerhub', passwordVariable: 'dpass', usernameVariable: 'duser')]) {
- // Docker Login
- sh "docker login -u '${duser}' -p '${dpass}'"
- // Buildx with push from cache
- sh "./scripts/buildx --push ${BUILDX_PUSH_TAGS}"
+ sh 'docker login -u "${duser}" -p "${dpass}"'
+ sh "./scripts/buildx --push ${buildxPushTags}"
+ // sh './scripts/buildx -o type=local,dest=docker-build'
}
}
}
- stage('Docs Deploy') {
- when {
- allOf {
- branch 'master'
- not {
- equals expected: 'UNSTABLE', actual: currentBuild.result
+ stage('Docs / Comment') {
+ parallel {
+ stage('Docs Job') {
+ when {
+ allOf {
+ branch pattern: "^(develop|master|v3)\$", comparator: "REGEXP"
+ not {
+ equals expected: 'UNSTABLE', actual: currentBuild.result
+ }
+ }
}
- }
- }
- steps {
- withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'npm-s3-docs', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
- sh """docker run --rm \\
- --name \${COMPOSE_PROJECT_NAME}-docs-upload \\
- -e S3_BUCKET=jc21-npm-site \\
- -e AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID \\
- -e AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY \\
- -v \$(pwd):/app \\
- -w /app \\
- jc21/ci-tools \\
- scripts/docs-upload /app/docs/.vuepress/dist/
- """
-
- sh """docker run --rm \\
- --name \${COMPOSE_PROJECT_NAME}-docs-invalidate \\
- -e AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID \\
- -e AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY \\
- jc21/ci-tools \\
- aws cloudfront create-invalidation --distribution-id EN1G6DEWZUTDT --paths '/*'
- """
- }
- }
- }
- stage('PR Comment') {
- when {
- allOf {
- changeRequest()
- not {
- equals expected: 'UNSTABLE', actual: currentBuild.result
+ steps {
+ build wait: false, job: 'nginx-proxy-manager-docs', parameters: [string(name: 'docs_branch', value: "$BRANCH_NAME")]
}
}
- }
- steps {
- script {
- def comment = pullRequest.comment("This is an automated message from CI:\n\nDocker Image for build ${BUILD_NUMBER} is available on [DockerHub](https://cloud.docker.com/repository/docker/jc21/${IMAGE}) as `jc21/${IMAGE}:github-${BRANCH_LOWER}`\n\n**Note:** ensure you backup your NPM instance before testing this PR image! Especially if this PR contains database changes.")
+ stage('PR Comment') {
+ when {
+ allOf {
+ changeRequest()
+ not {
+ equals expected: 'UNSTABLE', actual: currentBuild.result
+ }
+ }
+ }
+ steps {
+ script {
+ npmGithubPrComment("Docker Image for build ${BUILD_NUMBER} is available on [DockerHub](https://cloud.docker.com/repository/docker/jc21/${IMAGE}) as `jc21/${IMAGE}:github-${BRANCH_LOWER}`\n\n**Note:** ensure you backup your NPM instance before testing this PR image! Especially if this PR contains database changes.", true)
+ }
+ }
}
}
}
}
post {
always {
- sh 'docker-compose down --rmi all --remove-orphans --volumes -t 30'
- sh 'echo Reverting ownership'
- sh 'docker run --rm -v $(pwd):/data jc21/ci-tools chown -R $(id -u):$(id -g) /data'
- }
- success {
- juxtapose event: 'success'
- sh 'figlet "SUCCESS"'
+ sh './scripts/ci/build-cleanup'
+ echo 'Reverting ownership'
+ sh 'docker run --rm -v $(pwd):/data jc21/gotools:latest chown -R "$(id -u):$(id -g)" /data'
+ printResult()
}
failure {
- archiveArtifacts(artifacts: 'debug/**.*', allowEmptyArchive: true)
- juxtapose event: 'failure'
- sh 'figlet "FAILURE"'
+ archiveArtifacts(artifacts: 'debug/**/*', allowEmptyArchive: true)
+ dir(path: 'test') {
+ archiveArtifacts allowEmptyArchive: true, artifacts: 'results/**/*', excludes: '**/*.xml'
+ }
}
unstable {
- archiveArtifacts(artifacts: 'debug/**.*', allowEmptyArchive: true)
- juxtapose event: 'unstable'
- sh 'figlet "UNSTABLE"'
+ archiveArtifacts(artifacts: 'debug/**/*', allowEmptyArchive: true)
+ dir(path: 'test') {
+ archiveArtifacts allowEmptyArchive: true, artifacts: 'results/**/*', excludes: '**/*.xml'
+ }
}
}
}
-
-def getVersion() {
- ver = sh(script: 'cat .version', returnStdout: true)
- return ver.trim()
-}
-
-def getCommit() {
- ver = sh(script: 'git log -n 1 --format=%h', returnStdout: true)
- return ver.trim()
-}
diff --git a/README.md b/README.md
index a97d3ba87..a66ba16f7 100644
--- a/README.md
+++ b/README.md
@@ -1,19 +1,13 @@
-
+
-
-
-
-
-
-
This project comes as a pre-built docker image that enables you to easily forward to your websites
@@ -35,7 +29,7 @@ so that the barrier for entry here is low.
## Features
-- Beautiful and Secure Admin Interface based on [Tabler](https://tabler.github.io/)
+- Beautiful and Secure Admin Interface based on [Chakra UI](https://chakra-ui.com/)
- Easily create forwarding domains, redirections, streams and 404 hosts without knowing anything about Nginx
- Free SSL using Let's Encrypt or provide your own custom SSL certificates
- Access Lists and basic HTTP Authentication for your hosts
@@ -52,7 +46,8 @@ I won't go in to too much detail here but here are the basics for someone new to
3. Configure your domain name details to point to your home, either with a static ip or a service like DuckDNS or [Amazon Route53](https://github.com/jc21/route53-ddns)
4. Use the Nginx Proxy Manager as your gateway to forward to your other web based services
-## Quick Setup
+
+## Quickest Setup
1. Install Docker and Docker-Compose
@@ -65,7 +60,7 @@ I won't go in to too much detail here but here are the basics for someone new to
version: '3'
services:
app:
- image: 'jc21/nginx-proxy-manager:latest'
+ image: 'jc21/nginx-proxy-manager:v3'
restart: unless-stopped
ports:
- '80:80'
@@ -73,460 +68,34 @@ services:
- '443:443'
volumes:
- ./data:/data
- - ./letsencrypt:/etc/letsencrypt
```
3. Bring up your stack by running
```bash
docker-compose up -d
+
+# If using docker-compose-plugin
+docker compose up -d
```
4. Log in to the Admin UI
When your docker container is running, connect to it on port `81` for the admin interface.
-Sometimes this can take a little bit because of the entropy of keys.
[http://127.0.0.1:81](http://127.0.0.1:81)
-Default Admin User:
-```
-Email: admin@example.com
-Password: changeme
-```
+## Contributors
-Immediately after logging in with this default user you will be asked to modify your details and change your password.
+Special thanks to [all of our contributors](https://github.com/NginxProxyManager/nginx-proxy-manager/graphs/contributors).
+## Getting Support
-## Contributors
+1. [Found a bug?](https://github.com/NginxProxyManager/nginx-proxy-manager/issues)
+2. [Discussions](https://github.com/NginxProxyManager/nginx-proxy-manager/discussions)
+3. [Development Gitter](https://gitter.im/nginx-proxy-manager/community)
+4. [Reddit](https://reddit.com/r/nginxproxymanager)
-Special thanks to the following contributors:
+## Become a Contributor
-
-
-
-
-
+A guide to setting up your own development environment [is found here](DEV-README.md).
diff --git a/backend/.editorconfig b/backend/.editorconfig
new file mode 100644
index 000000000..8b96428f4
--- /dev/null
+++ b/backend/.editorconfig
@@ -0,0 +1,8 @@
+root = true
+
+[*]
+indent_style = tab
+indent_size = 4
+charset = utf-8
+trim_trailing_whitespace = true
+insert_final_newline = false
diff --git a/backend/.eslintrc.json b/backend/.eslintrc.json
deleted file mode 100644
index 6d6172a48..000000000
--- a/backend/.eslintrc.json
+++ /dev/null
@@ -1,73 +0,0 @@
-{
- "env": {
- "node": true,
- "es6": true
- },
- "extends": [
- "eslint:recommended"
- ],
- "globals": {
- "Atomics": "readonly",
- "SharedArrayBuffer": "readonly"
- },
- "parserOptions": {
- "ecmaVersion": 2018,
- "sourceType": "module"
- },
- "plugins": [
- "align-assignments"
- ],
- "rules": {
- "arrow-parens": [
- "error",
- "always"
- ],
- "indent": [
- "error",
- "tab"
- ],
- "linebreak-style": [
- "error",
- "unix"
- ],
- "quotes": [
- "error",
- "single"
- ],
- "semi": [
- "error",
- "always"
- ],
- "key-spacing": [
- "error",
- {
- "align": "value"
- }
- ],
- "comma-spacing": [
- "error",
- {
- "before": false,
- "after": true
- }
- ],
- "func-call-spacing": [
- "error",
- "never"
- ],
- "keyword-spacing": [
- "error",
- {
- "before": true
- }
- ],
- "no-irregular-whitespace": "error",
- "no-unused-expressions": 0,
- "align-assignments/align-assignments": [
- 2,
- {
- "requiresOnly": false
- }
- ]
- }
-}
\ No newline at end of file
diff --git a/backend/.gitignore b/backend/.gitignore
deleted file mode 100644
index 149080b91..000000000
--- a/backend/.gitignore
+++ /dev/null
@@ -1,8 +0,0 @@
-config/development.json
-data/*
-yarn-error.log
-tmp
-certbot.log
-node_modules
-core.*
-
diff --git a/backend/.golangci.yml b/backend/.golangci.yml
new file mode 100644
index 000000000..7f0d08570
--- /dev/null
+++ b/backend/.golangci.yml
@@ -0,0 +1,166 @@
+---
+linters:
+ enable:
+ # Prevents against memory leaks in production caused by not closing
+ # file handle
+ - bodyclose
+ # Detects cloned code. DRY is good programming practice. Can cause issues
+ # with testing code where simplicity is preferred over duplication.
+ # Disabled for test code.
+ # - dupl
+ # Detects unchecked errors in go programs. These unchecked errors can be
+ # critical bugs in some cases.
+ - errcheck
+ # Simplifies go code.
+ - gosimple
+ # Controls Go package import order and makes it always deterministic.
+ - gci
+ # Reports suspicious constructs, maintained by goteam. e.g. Printf unused
+ # params not caught at compile time.
+ - govet
+ # Detect security issues with gocode. Use of secrets in code or obsolete
+ # security algorithms. It's imaged heuristic methods are used in finding
+ # problems. If issues with rules are found particular rules can be disabled
+ # as required. Could possibility cause issues with testing.
+ # Disabled for test code.
+ - gosec
+ # Detect repeated strings that could be replaced by a constant
+ - goconst
+ # Misc linters missing from other projects. Grouped into 3 categories
+ # diagnostics, style and performance
+ - gocritic
+ # Limits code cyclomatic complexity
+ - gocyclo
+ # Detects if code needs to be gofmt'd
+ - gofmt
+ # Detects unused go package imports
+ - goimports
+ # Detects style mistakes not correctness. Golint is meant to carry out the
+ # stylistic conventions put forth in Effective Go and CodeReviewComments.
+ # golint has false positives and false negatives and can be tweaked.
+ - revive
+ # Detects ineffectual assignments in code
+ - ineffassign
+ # Reports long lines
+ # - lll
+ # Detect commonly misspelled english words in comments
+ - misspell
+ # Detect naked returns on non-trivial functions, and conform with
+ # Go CodeReviewComments
+ - nakedret
+ # Detect slice allocations that can be preallocated
+ - prealloc
+ # Misc collection of static analysis tools
+ - staticcheck
+ # Detects unused struct fields
+ # - structcheck
+ # Parses and typechecks the code like the go compiler
+ - typecheck
+ # Detects unused constants, variables, functions and types
+ - unused
+ # Remove unnecessary type conversions
+ - unconvert
+ # Remove unnecessary(unused) function parameters
+ - unparam
+linters-settings:
+ errcheck:
+ exclude-functions:
+ - fmt.Fprint
+ - fmt.Fprintf
+ gci:
+ sections:
+ - standard # Standard section: captures all standard packages.
+ - localmodule # Local module section: contains all local packages.
+ # - prefix(gogs.jc21.com) # Prefixed gerrit.lan packages (jumgo).
+ - default # Everything else (github.com, golang.org, etc).
+ - blank # Blank section: contains all blank imports.
+ custom-order: true
+ goconst:
+ # minimal length of string constant
+ # default: 3
+ min-len: 2
+ # minimum number of occurrences of string constant
+ # default: 3
+ min-occurences: 2
+ revive:
+ enable-all-rules: true
+ rules:
+ - name: unchecked-type-assertion
+ disabled: true
+ # handled by goconst
+ - name: add-constant
+ disabled: true
+ # cant limit this arbitrarily
+ - name: argument-limit
+ disabled: true
+ # handled by gocyclo
+ - name: cognitive-complexity
+ disabled: true
+ # false positive for Exported vs non-exported functions of the same name
+ - name: confusing-naming
+ disabled: true
+ # false positives for "" - which is the nil value of a string (also 0)
+ - name: confusing-results
+ disabled: true
+ # handled by gocyclo
+ - name: cyclomatic
+ disabled: true
+ # have comments on exported functions but not on vars/types/constants
+ - name: exported
+ arguments:
+ - "disableChecksOnConstants"
+ - "disableChecksOnTypes"
+ - "disableChecksOnVariables"
+ # false positives on bool params
+ - name: flag-parameter
+ disabled: true
+ # extreme verticalization can happen
+ - name: function-length
+ disabled: true
+ # can false positive for non-getters
+ - name: get-return
+ disabled: true
+ # only allows lowercase names
+ - name: import-alias-naming
+ disabled: true
+ # handled by lll
+ - name: line-length-limit
+ disabled: true
+ # don't want to arbitrarily limit this
+ # many places have specific model.go files to contain all structs
+ - name: max-public-structs
+ disabled: true
+ # disable package-comments
+ - name: package-comments
+ disabled: true
+ # this is handled by errcheck
+ - name: unhandled-error
+ disabled: true
+ - name: function-result-limit
+ disabled: true
+issues:
+ # Maximum count of issues with the same text. Set to 0 to disable. Default
+ # is 3. We have chosen an arbitrary value that works based on practical usage.
+ max-same: 20
+ # See cmdline flag documentation for more info about default excludes
+ # --exclude-use-default. Nothing is excluded by default
+ exclude-use-default: false
+ # Excluding configuration per-path, per-linter, per-text and per-source
+ exclude-rules:
+ # Exclude some linters from running on tests files.
+ # TODO: Add examples why this is good
+ - path: _test\.go
+ linters:
+ # Tests should be simple? Add example why this is good?
+ - gocyclo
+ # Error checking adds verbosity and complexity for minimal value
+ - errcheck
+ # Table test encourage duplication in defining the table tests.
+ - dupl
+ # Hard coded example tokens, SQL injection and other bad practices may
+ # want to be tested
+ - gosec
+ # Test data can long
+ # - lll
+run:
+ go: '1.23'
diff --git a/backend/.prettierrc b/backend/.prettierrc
deleted file mode 100644
index fefbcfa6d..000000000
--- a/backend/.prettierrc
+++ /dev/null
@@ -1,11 +0,0 @@
-{
- "printWidth": 320,
- "tabWidth": 4,
- "useTabs": true,
- "semi": true,
- "singleQuote": true,
- "bracketSpacing": true,
- "jsxBracketSameLine": true,
- "trailingComma": "all",
- "proseWrap": "always"
-}
diff --git a/backend/.testcoverage.yml b/backend/.testcoverage.yml
new file mode 100644
index 000000000..b52eaeffe
--- /dev/null
+++ b/backend/.testcoverage.yml
@@ -0,0 +1,21 @@
+---
+
+# (mandatory)
+# Path to coverprofile file (output of `go test -coverprofile` command).
+profile: coverage.out
+
+# (optional; but recommended to set)
+# When specified reported file paths will not contain local prefix in the output
+local-prefix: "npm"
+
+# Holds coverage thresholds percentages, values should be in range [0-100]
+threshold:
+ # (optional; default 0)
+ # The minimum coverage that each file should have
+ # file: 70
+ # (optional; default 0)
+ # The minimum coverage that each package should have
+ # package: 30
+ # (optional; default 0)
+ # The minimum total coverage project should have
+ total: 30
diff --git a/backend/.vscode/settings.json b/backend/.vscode/settings.json
deleted file mode 100644
index 4e540ab30..000000000
--- a/backend/.vscode/settings.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
- "editor.insertSpaces": false,
- "editor.formatOnSave": true,
- "files.trimTrailingWhitespace": true,
- "editor.codeActionsOnSave": {
- "source.fixAll.eslint": true
- }
-}
\ No newline at end of file
diff --git a/backend/README.md b/backend/README.md
new file mode 100644
index 000000000..912f00690
--- /dev/null
+++ b/backend/README.md
@@ -0,0 +1,6 @@
+# Backend
+
+## Guides and materials
+
+- [Nginx Proxy Protocol](https://docs.nginx.com/nginx/admin-guide/load-balancer/using-proxy-protocol/)
+-
diff --git a/backend/Taskfile.yml b/backend/Taskfile.yml
new file mode 100644
index 000000000..512046e38
--- /dev/null
+++ b/backend/Taskfile.yml
@@ -0,0 +1,70 @@
+version: "3"
+
+tasks:
+ default:
+ cmds:
+ - task: run
+
+ run:
+ desc: Build and run
+ sources:
+ - internal/**/*.go
+ - cmd/**/*.go
+ - ../frontend/src/locale/src/*.json
+ cmds:
+ - task: locale
+ - task: build
+ force: true
+ - cmd: echo -e "==> Running..."
+ silent: true
+ - cmd: ../dist/bin/server
+ ignore_error: true
+ silent: true
+ env:
+ LOG_LEVEL: debug
+
+ build:
+ desc: Build the server
+ cmds:
+ - cmd: echo -e "==> Building..."
+ silent: true
+ - cmd: rm -f dist/bin/*
+ silent: true
+ - cmd: go build -tags 'json1' -buildvcs=false -ldflags="-X main.commit={{.GIT_COMMIT}} -X main.version={{.VERSION}}" -o ../dist/bin/server ./cmd/server/main.go
+ silent: true
+ - cmd: go build -buildvcs=false -ldflags="-X main.commit={{.GIT_COMMIT}} -X main.version={{.VERSION}}" -o ../dist/bin/ipranges ./cmd/ipranges/main.go
+ silent: true
+ - cmd: rm -f /etc/nginx/conf.d/include/ipranges.conf && /app/dist/bin/ipranges > /etc/nginx/conf.d/include/ipranges.conf
+ - task: lint
+ vars:
+ GIT_COMMIT:
+ sh: git log -n 1 --format=%h
+ VERSION:
+ sh: cat ../.version
+
+ lint:
+ desc: Linting
+ cmds:
+ - cmd: echo -e "==> Linting..."
+ silent: true
+ - cmd: bash scripts/lint.sh
+ silent: true
+
+ test:
+ desc: Testing
+ cmds:
+ - cmd: echo -e "==> Testing..."
+ silent: true
+ - cmd: bash scripts/test.sh
+ silent: true
+
+ locale:
+ desc: Locale
+ dir: /app/frontend
+ cmds:
+ - cmd: yarn locale-compile
+ silent: true
+ ignore_error: true
+ - cmd: chown -R "$PUID:$PGID" src/locale/lang
+ silent: true
+ ignore_error: true
diff --git a/backend/app.js b/backend/app.js
deleted file mode 100644
index ca6d6fbae..000000000
--- a/backend/app.js
+++ /dev/null
@@ -1,89 +0,0 @@
-const express = require('express');
-const bodyParser = require('body-parser');
-const fileUpload = require('express-fileupload');
-const compression = require('compression');
-const log = require('./logger').express;
-
-/**
- * App
- */
-const app = express();
-app.use(fileUpload());
-app.use(bodyParser.json());
-app.use(bodyParser.urlencoded({extended: true}));
-
-// Gzip
-app.use(compression());
-
-/**
- * General Logging, BEFORE routes
- */
-
-app.disable('x-powered-by');
-app.enable('trust proxy', ['loopback', 'linklocal', 'uniquelocal']);
-app.enable('strict routing');
-
-// pretty print JSON when not live
-if (process.env.NODE_ENV !== 'production') {
- app.set('json spaces', 2);
-}
-
-// CORS for everything
-app.use(require('./lib/express/cors'));
-
-// General security/cache related headers + server header
-app.use(function (req, res, next) {
- let x_frame_options = 'DENY';
-
- if (typeof process.env.X_FRAME_OPTIONS !== 'undefined' && process.env.X_FRAME_OPTIONS) {
- x_frame_options = process.env.X_FRAME_OPTIONS;
- }
-
- res.set({
- 'X-XSS-Protection': '1; mode=block',
- 'X-Content-Type-Options': 'nosniff',
- 'X-Frame-Options': x_frame_options,
- 'Cache-Control': 'no-cache, no-store, max-age=0, must-revalidate',
- Pragma: 'no-cache',
- Expires: 0
- });
- next();
-});
-
-app.use(require('./lib/express/jwt')());
-app.use('/', require('./routes/api/main'));
-
-// production error handler
-// no stacktraces leaked to user
-// eslint-disable-next-line
-app.use(function (err, req, res, next) {
-
- let payload = {
- error: {
- code: err.status,
- message: err.public ? err.message : 'Internal Error'
- }
- };
-
- if (process.env.NODE_ENV === 'development' || (req.baseUrl + req.path).includes('nginx/certificates')) {
- payload.debug = {
- stack: typeof err.stack !== 'undefined' && err.stack ? err.stack.split('\n') : null,
- previous: err.previous
- };
- }
-
- // Not every error is worth logging - but this is good for now until it gets annoying.
- if (typeof err.stack !== 'undefined' && err.stack) {
- if (process.env.NODE_ENV === 'development' || process.env.DEBUG) {
- log.debug(err.stack);
- } else if (typeof err.public == 'undefined' || !err.public) {
- log.warn(err.message);
- }
- }
-
- res
- .status(err.status || 500)
- .send(payload);
-});
-
-module.exports = app;
diff --git a/backend/cmd/ipranges/main.go b/backend/cmd/ipranges/main.go
new file mode 100644
index 000000000..7aaceccfd
--- /dev/null
+++ b/backend/cmd/ipranges/main.go
@@ -0,0 +1,125 @@
+package main
+
+import (
+ "bufio"
+ "encoding/json"
+ "fmt"
+ "io"
+ "net/http"
+ "os"
+
+ "npm/internal/config"
+ "npm/internal/model"
+
+ "github.com/rotisserie/eris"
+)
+
+var commit string
+var version string
+
+var cloudfrontURL = "https://ip-ranges.amazonaws.com/ip-ranges.json"
+var cloudflare4URL = "https://www.cloudflare.com/ips-v4"
+var cloudflare6URL = "https://www.cloudflare.com/ips-v6"
+
+func main() {
+ config.InitArgs(&version, &commit)
+ if err := config.InitIPRanges(&version, &commit); err != nil {
+ fmt.Printf("# Config ERROR: %v\n", err)
+ os.Exit(1)
+ }
+
+ exitCode := 0
+
+ // Cloudfront
+ fmt.Printf("# Cloudfront Ranges from: %s\n", cloudfrontURL)
+ if ranges, err := parseCloudfront(); err == nil {
+ for _, item := range ranges {
+ fmt.Printf("set_real_ip_from %s;\n", item)
+ }
+ } else {
+ fmt.Printf("# ERROR: %v\n", err)
+ }
+
+ // Cloudflare ipv4
+ if !config.Configuration.DisableIPV4 {
+ fmt.Printf("\n# Cloudflare Ranges from: %s\n", cloudflare4URL)
+ if ranges, err := parseCloudflare(cloudflare4URL); err == nil {
+ for _, item := range ranges {
+ fmt.Printf("set_real_ip_from %s;\n", item)
+ }
+ } else {
+ fmt.Printf("# ERROR: %v\n", err)
+ }
+ }
+
+ // Cloudflare ipv6
+ if !config.Configuration.DisableIPV6 {
+ fmt.Printf("\n# Cloudflare Ranges from: %s\n", cloudflare6URL)
+ if ranges, err := parseCloudflare(cloudflare6URL); err == nil {
+ for _, item := range ranges {
+ fmt.Printf("set_real_ip_from %s;\n", item)
+ }
+ } else {
+ fmt.Printf("# ERROR: %v\n", err)
+ }
+ }
+
+ // Done
+ os.Exit(exitCode)
+}
+
+func parseCloudfront() ([]string, error) {
+ // nolint: gosec
+ resp, err := http.Get(cloudfrontURL)
+ if err != nil {
+ return nil, eris.Wrapf(err, "Failed to download Cloudfront IP Ranges from %s", cloudfrontURL)
+ }
+
+ // nolint: errcheck, gosec
+ defer resp.Body.Close()
+ body, err := io.ReadAll(resp.Body)
+ if err != nil {
+ return nil, eris.Wrapf(err, "Failed to read Cloudfront IP Ranges body")
+ }
+
+ var result model.CloudfrontIPRanges
+ if err := json.Unmarshal(body, &result); err != nil {
+ return nil, eris.Wrapf(err, "Failed to unmarshal Cloudfront IP Ranges file")
+ }
+
+ ranges := make([]string, 0)
+ if !config.Configuration.DisableIPV4 {
+ for _, item := range result.IPV4Prefixes {
+ ranges = append(ranges, item.Value)
+ }
+ }
+ if !config.Configuration.DisableIPV6 {
+ for _, item := range result.IPV6Prefixes {
+ ranges = append(ranges, item.Value)
+ }
+ }
+
+ return ranges, nil
+}
+
+func parseCloudflare(url string) ([]string, error) {
+ // nolint: gosec
+ resp, err := http.Get(url)
+ if err != nil {
+ return nil, eris.Wrapf(err, "Failed to download Cloudflare IP Ranges from %s", url)
+ }
+
+ // nolint: errcheck, gosec
+ defer resp.Body.Close()
+
+ scanner := bufio.NewScanner(resp.Body)
+ scanner.Split(bufio.ScanLines)
+
+ ranges := make([]string, 0)
+ for scanner.Scan() {
+ if scanner.Text() != "" {
+ ranges = append(ranges, scanner.Text())
+ }
+ }
+ return ranges, nil
+}
diff --git a/backend/cmd/server/main.go b/backend/cmd/server/main.go
new file mode 100644
index 000000000..7f6e93a02
--- /dev/null
+++ b/backend/cmd/server/main.go
@@ -0,0 +1,87 @@
+package main
+
+import (
+ "os"
+ "os/signal"
+ "syscall"
+
+ "npm/internal/api"
+ "npm/internal/config"
+ "npm/internal/database"
+ "npm/internal/entity/certificate"
+ "npm/internal/entity/host"
+ "npm/internal/entity/user"
+ "npm/internal/errors"
+ "npm/internal/jobqueue"
+ "npm/internal/jwt"
+ "npm/internal/logger"
+
+ // properly respect available cpu cores
+ _ "go.uber.org/automaxprocs"
+)
+
+var commit string
+var version string
+
+func main() {
+ config.InitArgs(&version, &commit)
+ config.Init(&version, &commit)
+ config.CreateDataFolders()
+ logger.Info("Build Version: %s (%s)", version, commit)
+
+ database.Migrate(func() {
+ if err := jwt.LoadKeys(); err != nil {
+ logger.Error("KeysError", err)
+ os.Exit(1)
+ }
+
+ checkSetup()
+
+ // Internal Job Queue
+ jobqueue.Start()
+ certificate.AddPendingJobs()
+ host.AddPendingJobs()
+
+ // Http server
+ api.StartServer()
+ irqchan := make(chan os.Signal, 1)
+ signal.Notify(irqchan, syscall.SIGINT, syscall.SIGTERM)
+
+ for irq := range irqchan {
+ if irq == syscall.SIGINT || irq == syscall.SIGTERM {
+ logger.Info("Got ", irq, " shutting server down ...")
+ // Close db
+ sqlDB, _ := database.GetDB().DB()
+ err := sqlDB.Close()
+ if err != nil {
+ logger.Error("DatabaseCloseError", err)
+ }
+ // nolint
+ jobqueue.Shutdown()
+ break
+ }
+ }
+ })
+}
+
+// checkSetup Quick check by counting the number of users in the database
+func checkSetup() {
+ db := database.GetDB()
+ var count int64
+
+ if db != nil {
+ db.Model(&user.Model{}).
+ Where("is_disabled = ?", false).
+ Where("is_system = ?", false).
+ Count(&count)
+
+ if count == 0 {
+ logger.Warn("No users found, starting in Setup Mode")
+ } else {
+ config.IsSetup = true
+ logger.Info("Application is setup")
+ }
+ } else {
+ logger.Error("DatabaseError", errors.ErrDatabaseUnavailable)
+ }
+}
diff --git a/backend/config/README.md b/backend/config/README.md
deleted file mode 100644
index 26268a116..000000000
--- a/backend/config/README.md
+++ /dev/null
@@ -1,2 +0,0 @@
-These files are use in development and are not deployed as part of the final product.
-
\ No newline at end of file
diff --git a/backend/config/default.json b/backend/config/default.json
deleted file mode 100644
index 64ab577c8..000000000
--- a/backend/config/default.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- "database": {
- "engine": "mysql",
- "host": "db",
- "name": "npm",
- "user": "npm",
- "password": "npm",
- "port": 3306
- }
-}
diff --git a/backend/config/sqlite-test-db.json b/backend/config/sqlite-test-db.json
deleted file mode 100644
index ad5488651..000000000
--- a/backend/config/sqlite-test-db.json
+++ /dev/null
@@ -1,26 +0,0 @@
-{
- "database": {
- "engine": "knex-native",
- "knex": {
- "client": "sqlite3",
- "connection": {
- "filename": "/app/config/mydb.sqlite"
- },
- "pool": {
- "min": 0,
- "max": 1,
- "createTimeoutMillis": 3000,
- "acquireTimeoutMillis": 30000,
- "idleTimeoutMillis": 30000,
- "reapIntervalMillis": 1000,
- "createRetryIntervalMillis": 100,
- "propagateCreateError": false
- },
- "migrations": {
- "tableName": "migrations",
- "stub": "src/backend/lib/migrate_template.js",
- "directory": "src/backend/migrations"
- }
- }
- }
-}
diff --git a/backend/db.js b/backend/db.js
deleted file mode 100644
index ce5338f01..000000000
--- a/backend/db.js
+++ /dev/null
@@ -1,33 +0,0 @@
-const config = require('config');
-
-if (!config.has('database')) {
- throw new Error('Database config does not exist! Please read the instructions: https://github.com/jc21/nginx-proxy-manager/blob/master/doc/INSTALL.md');
-}
-
-function generateDbConfig() {
- if (config.database.engine === 'knex-native') {
- return config.database.knex;
- } else
- return {
- client: config.database.engine,
- connection: {
- host: config.database.host,
- user: config.database.user,
- password: config.database.password,
- database: config.database.name,
- port: config.database.port
- },
- migrations: {
- tableName: 'migrations'
- }
- };
-}
-
-
-let data = generateDbConfig();
-
-if (typeof config.database.version !== 'undefined') {
- data.version = config.database.version;
-}
-
-module.exports = require('knex')(data);
diff --git a/backend/doc/api.swagger.json b/backend/doc/api.swagger.json
deleted file mode 100644
index 06c025648..000000000
--- a/backend/doc/api.swagger.json
+++ /dev/null
@@ -1,1254 +0,0 @@
-{
- "openapi": "3.0.0",
- "info": {
- "title": "Nginx Proxy Manager API",
- "version": "2.x.x"
- },
- "servers": [
- {
- "url": "http://127.0.0.1:81/api"
- }
- ],
- "paths": {
- "/": {
- "get": {
- "operationId": "health",
- "summary": "Returns the API health status",
- "responses": {
- "200": {
- "description": "200 response",
- "content": {
- "application/json": {
- "examples": {
- "default": {
- "value": {
- "status": "OK",
- "version": {
- "major": 2,
- "minor": 1,
- "revision": 0
- }
- }
- }
- },
- "schema": {
- "$ref": "#/components/schemas/HealthObject"
- }
- }
- }
- }
- }
- }
- },
- "/schema": {
- "get": {
- "operationId": "schema",
- "responses": {
- "200": {
- "description": "200 response"
- }
- },
- "summary": "Returns this swagger API schema"
- }
- },
- "/tokens": {
- "get": {
- "operationId": "refreshToken",
- "summary": "Refresh your access token",
- "tags": [
- "Tokens"
- ],
- "security": [
- {
- "BearerAuth": [
- "tokens"
- ]
- }
- ],
- "responses": {
- "200": {
- "description": "200 response",
- "content": {
- "application/json": {
- "examples": {
- "default": {
- "value": {
- "expires": 1566540510,
- "token": "eyJhbGciOiJSUzUxMiIsInR5cCI6IkpXVCJ9.ey...xaHKYr3Kk6MvkUjcC4"
- }
- }
- },
- "schema": {
- "$ref": "#/components/schemas/TokenObject"
- }
- }
- }
- }
- }
- },
- "post": {
- "operationId": "requestToken",
- "parameters": [
- {
- "description": "Credentials Payload",
- "in": "body",
- "name": "credentials",
- "required": true,
- "schema": {
- "additionalProperties": false,
- "properties": {
- "identity": {
- "minLength": 1,
- "type": "string"
- },
- "scope": {
- "minLength": 1,
- "type": "string",
- "enum": [
- "user"
- ]
- },
- "secret": {
- "minLength": 1,
- "type": "string"
- }
- },
- "required": [
- "identity",
- "secret"
- ],
- "type": "object"
- }
- }
- ],
- "responses": {
- "200": {
- "content": {
- "application/json": {
- "examples": {
- "default": {
- "value": {
- "result": {
- "expires": 1566540510,
- "token": "eyJhbGciOiJSUzUxMiIsInR5cCI6IkpXVCJ9.ey...xaHKYr3Kk6MvkUjcC4"
- }
- }
- }
- },
- "schema": {
- "$ref": "#/components/schemas/TokenObject"
- }
- }
- },
- "description": "200 response"
- }
- },
- "summary": "Request a new access token from credentials",
- "tags": [
- "Tokens"
- ]
- }
- },
- "/settings": {
- "get": {
- "operationId": "getSettings",
- "summary": "Get all settings",
- "tags": [
- "Settings"
- ],
- "security": [
- {
- "BearerAuth": [
- "settings"
- ]
- }
- ],
- "responses": {
- "200": {
- "description": "200 response",
- "content": {
- "application/json": {
- "examples": {
- "default": {
- "value": [
- {
- "id": "default-site",
- "name": "Default Site",
- "description": "What to show when Nginx is hit with an unknown Host",
- "value": "congratulations",
- "meta": {}
- }
- ]
- }
- },
- "schema": {
- "$ref": "#/components/schemas/SettingsList"
- }
- }
- }
- }
- }
- }
- },
- "/settings/{settingID}": {
- "get": {
- "operationId": "getSetting",
- "summary": "Get a setting",
- "tags": [
- "Settings"
- ],
- "security": [
- {
- "BearerAuth": [
- "settings"
- ]
- }
- ],
- "parameters": [
- {
- "in": "path",
- "name": "settingID",
- "schema": {
- "type": "string",
- "minLength": 1
- },
- "required": true,
- "description": "Setting ID",
- "example": "default-site"
- }
- ],
- "responses": {
- "200": {
- "description": "200 response",
- "content": {
- "application/json": {
- "examples": {
- "default": {
- "value": {
- "id": "default-site",
- "name": "Default Site",
- "description": "What to show when Nginx is hit with an unknown Host",
- "value": "congratulations",
- "meta": {}
- }
- }
- },
- "schema": {
- "$ref": "#/components/schemas/SettingObject"
- }
- }
- }
- }
- }
- },
- "put": {
- "operationId": "updateSetting",
- "summary": "Update a setting",
- "tags": [
- "Settings"
- ],
- "security": [
- {
- "BearerAuth": [
- "settings"
- ]
- }
- ],
- "parameters": [
- {
- "in": "path",
- "name": "settingID",
- "schema": {
- "type": "string",
- "minLength": 1
- },
- "required": true,
- "description": "Setting ID",
- "example": "default-site"
- },
- {
- "in": "body",
- "name": "setting",
- "description": "Setting Payload",
- "required": true,
- "schema": {
- "$ref": "#/components/schemas/SettingObject"
- }
- }
- ],
- "responses": {
- "200": {
- "description": "200 response",
- "content": {
- "application/json": {
- "examples": {
- "default": {
- "value": {
- "id": "default-site",
- "name": "Default Site",
- "description": "What to show when Nginx is hit with an unknown Host",
- "value": "congratulations",
- "meta": {}
- }
- }
- },
- "schema": {
- "$ref": "#/components/schemas/SettingObject"
- }
- }
- }
- }
- }
- }
- },
- "/users": {
- "get": {
- "operationId": "getUsers",
- "summary": "Get all users",
- "tags": [
- "Users"
- ],
- "security": [
- {
- "BearerAuth": [
- "users"
- ]
- }
- ],
- "parameters": [
- {
- "in": "query",
- "name": "expand",
- "description": "Expansions",
- "schema": {
- "type": "string",
- "enum": [
- "permissions"
- ]
- }
- }
- ],
- "responses": {
- "200": {
- "description": "200 response",
- "content": {
- "application/json": {
- "examples": {
- "default": {
- "value": [
- {
- "id": 1,
- "created_on": "2020-01-30T09:36:08.000Z",
- "modified_on": "2020-01-30T09:41:04.000Z",
- "is_disabled": 0,
- "email": "jc@jc21.com",
- "name": "Jamie Curnow",
- "nickname": "James",
- "avatar": "//www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?default=mm",
- "roles": [
- "admin"
- ]
- }
- ]
- },
- "withPermissions": {
- "value": [
- {
- "id": 1,
- "created_on": "2020-01-30T09:36:08.000Z",
- "modified_on": "2020-01-30T09:41:04.000Z",
- "is_disabled": 0,
- "email": "jc@jc21.com",
- "name": "Jamie Curnow",
- "nickname": "James",
- "avatar": "//www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?default=mm",
- "roles": [
- "admin"
- ],
- "permissions": {
- "visibility": "all",
- "proxy_hosts": "manage",
- "redirection_hosts": "manage",
- "dead_hosts": "manage",
- "streams": "manage",
- "access_lists": "manage",
- "certificates": "manage"
- }
- }
- ]
- }
- },
- "schema": {
- "$ref": "#/components/schemas/UsersList"
- }
- }
- }
- }
- }
- },
- "post": {
- "operationId": "createUser",
- "summary": "Create a User",
- "tags": [
- "Users"
- ],
- "security": [
- {
- "BearerAuth": [
- "users"
- ]
- }
- ],
- "parameters": [
- {
- "in": "body",
- "name": "user",
- "description": "User Payload",
- "required": true,
- "schema": {
- "$ref": "#/components/schemas/UserObject"
- }
- }
- ],
- "responses": {
- "201": {
- "description": "201 response",
- "content": {
- "application/json": {
- "examples": {
- "default": {
- "value": {
- "id": 2,
- "created_on": "2020-01-30T09:36:08.000Z",
- "modified_on": "2020-01-30T09:41:04.000Z",
- "is_disabled": 0,
- "email": "jc@jc21.com",
- "name": "Jamie Curnow",
- "nickname": "James",
- "avatar": "//www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?default=mm",
- "roles": [
- "admin"
- ],
- "permissions": {
- "visibility": "all",
- "proxy_hosts": "manage",
- "redirection_hosts": "manage",
- "dead_hosts": "manage",
- "streams": "manage",
- "access_lists": "manage",
- "certificates": "manage"
- }
- }
- }
- },
- "schema": {
- "$ref": "#/components/schemas/UserObject"
- }
- }
- }
- }
- }
- }
- },
- "/users/{userID}": {
- "get": {
- "operationId": "getUser",
- "summary": "Get a user",
- "tags": [
- "Users"
- ],
- "security": [
- {
- "BearerAuth": [
- "users"
- ]
- }
- ],
- "parameters": [
- {
- "in": "path",
- "name": "userID",
- "schema": {
- "oneOf": [
- {
- "type": "string",
- "pattern": "^me$"
- },
- {
- "type": "integer",
- "minimum": 1
- }
- ]
- },
- "required": true,
- "description": "User ID or 'me' for yourself",
- "example": 1
- }
- ],
- "responses": {
- "200": {
- "description": "200 response",
- "content": {
- "application/json": {
- "examples": {
- "default": {
- "value": {
- "id": 1,
- "created_on": "2020-01-30T09:36:08.000Z",
- "modified_on": "2020-01-30T09:41:04.000Z",
- "is_disabled": 0,
- "email": "jc@jc21.com",
- "name": "Jamie Curnow",
- "nickname": "James",
- "avatar": "//www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?default=mm",
- "roles": [
- "admin"
- ]
- }
- }
- },
- "schema": {
- "$ref": "#/components/schemas/UserObject"
- }
- }
- }
- }
- }
- },
- "put": {
- "operationId": "updateUser",
- "summary": "Update a User",
- "tags": [
- "Users"
- ],
- "security": [
- {
- "BearerAuth": [
- "users"
- ]
- }
- ],
- "parameters": [
- {
- "in": "path",
- "name": "userID",
- "schema": {
- "oneOf": [
- {
- "type": "string",
- "pattern": "^me$"
- },
- {
- "type": "integer",
- "minimum": 1
- }
- ]
- },
- "required": true,
- "description": "User ID or 'me' for yourself",
- "example": 2
- },
- {
- "in": "body",
- "name": "user",
- "description": "User Payload",
- "required": true,
- "schema": {
- "$ref": "#/components/schemas/UserObject"
- }
- }
- ],
- "responses": {
- "200": {
- "description": "200 response",
- "content": {
- "application/json": {
- "examples": {
- "default": {
- "value": {
- "id": 2,
- "created_on": "2020-01-30T09:36:08.000Z",
- "modified_on": "2020-01-30T09:41:04.000Z",
- "is_disabled": 0,
- "email": "jc@jc21.com",
- "name": "Jamie Curnow",
- "nickname": "James",
- "avatar": "//www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?default=mm",
- "roles": [
- "admin"
- ]
- }
- }
- },
- "schema": {
- "$ref": "#/components/schemas/UserObject"
- }
- }
- }
- }
- }
- },
- "delete": {
- "operationId": "deleteUser",
- "summary": "Delete a User",
- "tags": [
- "Users"
- ],
- "security": [
- {
- "BearerAuth": [
- "users"
- ]
- }
- ],
- "parameters": [
- {
- "in": "path",
- "name": "userID",
- "schema": {
- "type": "integer",
- "minimum": 1
- },
- "required": true,
- "description": "User ID",
- "example": 2
- }
- ],
- "responses": {
- "200": {
- "description": "200 response",
- "content": {
- "application/json": {
- "examples": {
- "default": {
- "value": true
- }
- },
- "schema": {
- "type": "boolean"
- }
- }
- }
- }
- }
- }
- },
- "/users/{userID}/auth": {
- "put": {
- "operationId": "updateUserAuth",
- "summary": "Update a User's Authentication",
- "tags": [
- "Users"
- ],
- "security": [
- {
- "BearerAuth": [
- "users"
- ]
- }
- ],
- "parameters": [
- {
- "in": "path",
- "name": "userID",
- "schema": {
- "oneOf": [
- {
- "type": "string",
- "pattern": "^me$"
- },
- {
- "type": "integer",
- "minimum": 1
- }
- ]
- },
- "required": true,
- "description": "User ID or 'me' for yourself",
- "example": 2
- },
- {
- "in": "body",
- "name": "user",
- "description": "User Payload",
- "required": true,
- "schema": {
- "$ref": "#/components/schemas/AuthObject"
- }
- }
- ],
- "responses": {
- "200": {
- "description": "200 response",
- "content": {
- "application/json": {
- "examples": {
- "default": {
- "value": true
- }
- },
- "schema": {
- "type": "boolean"
- }
- }
- }
- }
- }
- }
- },
- "/users/{userID}/permissions": {
- "put": {
- "operationId": "updateUserPermissions",
- "summary": "Update a User's Permissions",
- "tags": [
- "Users"
- ],
- "security": [
- {
- "BearerAuth": [
- "users"
- ]
- }
- ],
- "parameters": [
- {
- "in": "path",
- "name": "userID",
- "schema": {
- "type": "integer",
- "minimum": 1
- },
- "required": true,
- "description": "User ID",
- "example": 2
- },
- {
- "in": "body",
- "name": "user",
- "description": "Permissions Payload",
- "required": true,
- "schema": {
- "$ref": "#/components/schemas/PermissionsObject"
- }
- }
- ],
- "responses": {
- "200": {
- "description": "200 response",
- "content": {
- "application/json": {
- "examples": {
- "default": {
- "value": true
- }
- },
- "schema": {
- "type": "boolean"
- }
- }
- }
- }
- }
- }
- },
- "/users/{userID}/login": {
- "put": {
- "operationId": "loginAsUser",
- "summary": "Login as this user",
- "tags": [
- "Users"
- ],
- "security": [
- {
- "BearerAuth": [
- "users"
- ]
- }
- ],
- "parameters": [
- {
- "in": "path",
- "name": "userID",
- "schema": {
- "type": "integer",
- "minimum": 1
- },
- "required": true,
- "description": "User ID",
- "example": 2
- }
- ],
- "responses": {
- "200": {
- "description": "200 response",
- "content": {
- "application/json": {
- "examples": {
- "default": {
- "value": {
- "token": "eyJhbGciOiJSUzI1NiIsInR...16OjT8B3NLyXg",
- "expires": "2020-01-31T10:56:23.239Z",
- "user": {
- "id": 1,
- "created_on": "2020-01-30T10:43:44.000Z",
- "modified_on": "2020-01-30T10:43:44.000Z",
- "is_disabled": 0,
- "email": "jc@jc21.com",
- "name": "Jamie Curnow",
- "nickname": "James",
- "avatar": "//www.gravatar.com/avatar/3c8d73f45fd8763f827b964c76e6032a?default=mm",
- "roles": [
- "admin"
- ]
- }
- }
- }
- },
- "schema": {
- "type": "object",
- "description": "Login object",
- "required": [
- "expires",
- "token",
- "user"
- ],
- "additionalProperties": false,
- "properties": {
- "expires": {
- "description": "Token Expiry Unix Time",
- "example": 1566540249,
- "minimum": 1,
- "type": "number"
- },
- "token": {
- "description": "JWT Token",
- "example": "eyJhbGciOiJSUzUxMiIsInR5cCI6IkpXVCJ9.ey...xaHKYr3Kk6MvkUjcC4",
- "type": "string"
- },
- "user": {
- "$ref": "#/components/schemas/UserObject"
- }
- }
- }
- }
- }
- }
- }
- }
- },
- "/reports/hosts": {
- "get": {
- "operationId": "reportsHosts",
- "summary": "Report on Host Statistics",
- "tags": [
- "Reports"
- ],
- "security": [
- {
- "BearerAuth": [
- "reports"
- ]
- }
- ],
- "responses": {
- "200": {
- "description": "200 response",
- "content": {
- "application/json": {
- "examples": {
- "default": {
- "value": {
- "proxy": 20,
- "redirection": 1,
- "stream": 0,
- "dead": 1
- }
- }
- },
- "schema": {
- "$ref": "#/components/schemas/HostReportObject"
- }
- }
- }
- }
- }
- }
- },
- "/audit-log": {
- "get": {
- "operationId": "getAuditLog",
- "summary": "Get Audit Log",
- "tags": [
- "Audit Log"
- ],
- "security": [
- {
- "BearerAuth": [
- "audit-log"
- ]
- }
- ],
- "responses": {
- "200": {
- "description": "200 response",
- "content": {
- "application/json": {
- "examples": {
- "default": {
- "value": {
- "proxy": 20,
- "redirection": 1,
- "stream": 0,
- "dead": 1
- }
- }
- },
- "schema": {
- "$ref": "#/components/schemas/HostReportObject"
- }
- }
- }
- }
- }
- }
- }
- },
- "components": {
- "securitySchemes": {
- "BearerAuth": {
- "type": "http",
- "scheme": "bearer"
- }
- },
- "schemas": {
- "HealthObject": {
- "type": "object",
- "description": "Health object",
- "additionalProperties": false,
- "required": [
- "status",
- "version"
- ],
- "properties": {
- "status": {
- "type": "string",
- "description": "Healthy",
- "example": "OK"
- },
- "version": {
- "type": "object",
- "description": "The version object",
- "example": {
- "major": 2,
- "minor": 0,
- "revision": 0
- },
- "additionalProperties": false,
- "required": [
- "major",
- "minor",
- "revision"
- ],
- "properties": {
- "major": {
- "type": "integer",
- "minimum": 0
- },
- "minor": {
- "type": "integer",
- "minimum": 0
- },
- "revision": {
- "type": "integer",
- "minimum": 0
- }
- }
- }
- }
- },
- "TokenObject": {
- "type": "object",
- "description": "Token object",
- "required": [
- "expires",
- "token"
- ],
- "additionalProperties": false,
- "properties": {
- "expires": {
- "description": "Token Expiry Unix Time",
- "example": 1566540249,
- "minimum": 1,
- "type": "number"
- },
- "token": {
- "description": "JWT Token",
- "example": "eyJhbGciOiJSUzUxMiIsInR5cCI6IkpXVCJ9.ey...xaHKYr3Kk6MvkUjcC4",
- "type": "string"
- }
- }
- },
- "SettingObject": {
- "type": "object",
- "description": "Setting object",
- "required": [
- "id",
- "name",
- "description",
- "value",
- "meta"
- ],
- "additionalProperties": false,
- "properties": {
- "id": {
- "type": "string",
- "description": "Setting ID",
- "minLength": 1,
- "example": "default-site"
- },
- "name": {
- "type": "string",
- "description": "Setting Display Name",
- "minLength": 1,
- "example": "Default Site"
- },
- "description": {
- "type": "string",
- "description": "Meaningful description",
- "minLength": 1,
- "example": "What to show when Nginx is hit with an unknown Host"
- },
- "value": {
- "description": "Value in almost any form",
- "example": "congratulations",
- "oneOf": [
- {
- "type": "string",
- "minLength": 1
- },
- {
- "type": "integer"
- },
- {
- "type": "object"
- },
- {
- "type": "number"
- },
- {
- "type": "array"
- }
- ]
- },
- "meta": {
- "description": "Extra metadata",
- "example": {},
- "type": "object"
- }
- }
- },
- "SettingsList": {
- "type": "array",
- "description": "Setting list",
- "items": {
- "$ref": "#/components/schemas/SettingObject"
- }
- },
- "UserObject": {
- "type": "object",
- "description": "User object",
- "required": [
- "id",
- "created_on",
- "modified_on",
- "is_disabled",
- "email",
- "name",
- "nickname",
- "avatar",
- "roles"
- ],
- "additionalProperties": false,
- "properties": {
- "id": {
- "type": "integer",
- "description": "User ID",
- "minimum": 1,
- "example": 1
- },
- "created_on": {
- "type": "string",
- "description": "Created Date",
- "example": "2020-01-30T09:36:08.000Z"
- },
- "modified_on": {
- "type": "string",
- "description": "Modified Date",
- "example": "2020-01-30T09:41:04.000Z"
- },
- "is_disabled": {
- "type": "integer",
- "minimum": 0,
- "maximum": 1,
- "description": "Is user Disabled (0 = false, 1 = true)",
- "example": 0
- },
- "email": {
- "type": "string",
- "description": "Email",
- "minLength": 3,
- "example": "jc@jc21.com"
- },
- "name": {
- "type": "string",
- "description": "Name",
- "minLength": 1,
- "example": "Jamie Curnow"
- },
- "nickname": {
- "type": "string",
- "description": "Nickname",
- "example": "James"
- },
- "avatar": {
- "type": "string",
- "description": "Gravatar URL based on email, without scheme",
- "example": "//www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?default=mm"
- },
- "roles": {
- "description": "Roles applied",
- "example": [
- "admin"
- ],
- "type": "array",
- "items": {
- "type": "string"
- }
- }
- }
- },
- "UsersList": {
- "type": "array",
- "description": "User list",
- "items": {
- "$ref": "#/components/schemas/UserObject"
- }
- },
- "AuthObject": {
- "type": "object",
- "description": "Authentication Object",
- "required": [
- "type",
- "secret"
- ],
- "properties": {
- "type": {
- "type": "string",
- "pattern": "^password$",
- "example": "password"
- },
- "current": {
- "type": "string",
- "minLength": 1,
- "maxLength": 64,
- "example": "changeme"
- },
- "secret": {
- "type": "string",
- "minLength": 8,
- "maxLength": 64,
- "example": "mySuperN3wP@ssword!"
- }
- }
- },
- "PermissionsObject": {
- "type": "object",
- "properties": {
- "visibility": {
- "type": "string",
- "description": "Visibility Type",
- "enum": [
- "all",
- "user"
- ]
- },
- "access_lists": {
- "type": "string",
- "description": "Access Lists Permissions",
- "enum": [
- "hidden",
- "view",
- "manage"
- ]
- },
- "dead_hosts": {
- "type": "string",
- "description": "404 Hosts Permissions",
- "enum": [
- "hidden",
- "view",
- "manage"
- ]
- },
- "proxy_hosts": {
- "type": "string",
- "description": "Proxy Hosts Permissions",
- "enum": [
- "hidden",
- "view",
- "manage"
- ]
- },
- "redirection_hosts": {
- "type": "string",
- "description": "Redirection Permissions",
- "enum": [
- "hidden",
- "view",
- "manage"
- ]
- },
- "streams": {
- "type": "string",
- "description": "Streams Permissions",
- "enum": [
- "hidden",
- "view",
- "manage"
- ]
- },
- "certificates": {
- "type": "string",
- "description": "Certificates Permissions",
- "enum": [
- "hidden",
- "view",
- "manage"
- ]
- }
- }
- },
- "HostReportObject": {
- "type": "object",
- "properties": {
- "proxy": {
- "type": "integer",
- "description": "Proxy Hosts Count"
- },
- "redirection": {
- "type": "integer",
- "description": "Redirection Hosts Count"
- },
- "stream": {
- "type": "integer",
- "description": "Streams Count"
- },
- "dead": {
- "type": "integer",
- "description": "404 Hosts Count"
- }
- }
- }
- }
- }
-}
\ No newline at end of file
diff --git a/backend/embed/api_docs/api.swagger.json b/backend/embed/api_docs/api.swagger.json
new file mode 100644
index 000000000..7946b9cb5
--- /dev/null
+++ b/backend/embed/api_docs/api.swagger.json
@@ -0,0 +1,310 @@
+{
+ "openapi": "3.0.0",
+ "info": {
+ "title": "Nginx Proxy Manager API",
+ "version": "{{VERSION}}"
+ },
+ "paths": {
+ "/": {
+ "get": {
+ "$ref": "file://./paths/get.json"
+ }
+ },
+ "/auth": {
+ "get": {
+ "$ref": "file://./paths/auth/get.json"
+ },
+ "post": {
+ "$ref": "file://./paths/auth/post.json"
+ }
+ },
+ "/auth/refresh": {
+ "post": {
+ "$ref": "file://./paths/auth/refresh/post.json"
+ }
+ },
+ "/auth/sse": {
+ "post": {
+ "$ref": "file://./paths/auth/sse/post.json"
+ }
+ },
+ "/certificates": {
+ "get": {
+ "$ref": "file://./paths/certificates/get.json"
+ },
+ "post": {
+ "$ref": "file://./paths/certificates/post.json"
+ }
+ },
+ "/certificates/{certificateID}": {
+ "get": {
+ "$ref": "file://./paths/certificates/certificateID/get.json"
+ },
+ "put": {
+ "$ref": "file://./paths/certificates/certificateID/put.json"
+ },
+ "delete": {
+ "$ref": "file://./paths/certificates/certificateID/delete.json"
+ }
+ },
+ "/certificates-authorities": {
+ "get": {
+ "$ref": "file://./paths/certificates-authorities/get.json"
+ },
+ "post": {
+ "$ref": "file://./paths/certificates-authorities/post.json"
+ }
+ },
+ "/certificates-authorities/{caID}": {
+ "get": {
+ "$ref": "file://./paths/certificates-authorities/caID/get.json"
+ },
+ "put": {
+ "$ref": "file://./paths/certificates-authorities/caID/put.json"
+ },
+ "delete": {
+ "$ref": "file://./paths/certificates-authorities/caID/delete.json"
+ }
+ },
+ "/config": {
+ "get": {
+ "$ref": "file://./paths/config/get.json"
+ }
+ },
+ "/dns-providers": {
+ "get": {
+ "$ref": "file://./paths/dns-providers/get.json"
+ },
+ "post": {
+ "$ref": "file://./paths/dns-providers/post.json"
+ }
+ },
+ "/dns-providers/{providerID}": {
+ "get": {
+ "$ref": "file://./paths/dns-providers/providerID/get.json"
+ },
+ "put": {
+ "$ref": "file://./paths/dns-providers/providerID/put.json"
+ },
+ "delete": {
+ "$ref": "file://./paths/dns-providers/providerID/delete.json"
+ }
+ },
+ "/hosts": {
+ "get": {
+ "$ref": "file://./paths/hosts/get.json"
+ },
+ "post": {
+ "$ref": "file://./paths/hosts/post.json"
+ }
+ },
+ "/hosts/{hostID}": {
+ "get": {
+ "$ref": "file://./paths/hosts/hostID/get.json"
+ },
+ "put": {
+ "$ref": "file://./paths/hosts/hostID/put.json"
+ },
+ "delete": {
+ "$ref": "file://./paths/hosts/hostID/delete.json"
+ }
+ },
+ "/hosts/{hostID}/nginx-config": {
+ "get": {
+ "$ref": "file://./paths/hosts/hostID/nginx-config/get.json"
+ }
+ },
+ "/nginx-templates": {
+ "get": {
+ "$ref": "file://./paths/nginx-templates/get.json"
+ },
+ "post": {
+ "$ref": "file://./paths/nginx-templates/post.json"
+ }
+ },
+ "/nginx-templates/{templateID}": {
+ "get": {
+ "$ref": "file://./paths/nginx-templates/templateID/get.json"
+ },
+ "put": {
+ "$ref": "file://./paths/nginx-templates/templateID/put.json"
+ },
+ "delete": {
+ "$ref": "file://./paths/nginx-templates/templateID/delete.json"
+ }
+ },
+ "/schema": {
+ "get": {
+ "$ref": "file://./paths/schema/get.json"
+ }
+ },
+ "/settings": {
+ "get": {
+ "$ref": "file://./paths/settings/get.json"
+ },
+ "post": {
+ "$ref": "file://./paths/settings/post.json"
+ }
+ },
+ "/settings/{name}": {
+ "get": {
+ "$ref": "file://./paths/settings/name/get.json"
+ },
+ "put": {
+ "$ref": "file://./paths/settings/name/put.json"
+ }
+ },
+ "/streams": {
+ "get": {
+ "$ref": "file://./paths/streams/get.json"
+ },
+ "post": {
+ "$ref": "file://./paths/streams/post.json"
+ }
+ },
+ "/streams/{streamID}": {
+ "get": {
+ "$ref": "file://./paths/streams/streamID/get.json"
+ },
+ "put": {
+ "$ref": "file://./paths/streams/streamID/put.json"
+ },
+ "delete": {
+ "$ref": "file://./paths/streams/streamID/delete.json"
+ }
+ },
+ "/upstreams": {
+ "get": {
+ "$ref": "file://./paths/upstreams/get.json"
+ },
+ "post": {
+ "$ref": "file://./paths/upstreams/post.json"
+ }
+ },
+ "/upstreams/{upstreamID}": {
+ "get": {
+ "$ref": "file://./paths/upstreams/upstreamID/get.json"
+ },
+ "put": {
+ "$ref": "file://./paths/upstreams/upstreamID/put.json"
+ },
+ "delete": {
+ "$ref": "file://./paths/upstreams/upstreamID/delete.json"
+ }
+ },
+ "/upstreams/{upstreamID}/nginx-config": {
+ "get": {
+ "$ref": "file://./paths/upstreams/upstreamID/nginx-config/get.json"
+ }
+ },
+ "/users": {
+ "get": {
+ "$ref": "file://./paths/users/get.json"
+ },
+ "post": {
+ "$ref": "file://./paths/users/post.json"
+ }
+ },
+ "/users/{userID}": {
+ "get": {
+ "$ref": "file://./paths/users/userID/get.json"
+ },
+ "put": {
+ "$ref": "file://./paths/users/userID/put.json"
+ },
+ "delete": {
+ "$ref": "file://./paths/users/userID/delete.json"
+ }
+ },
+ "/users/{userID}/auth": {
+ "post": {
+ "$ref": "file://./paths/users/userID/auth/post.json"
+ }
+ }
+ },
+ "components": {
+ "schemas": {
+ "AuthConfigObject": {
+ "$ref": "file://./components/AuthConfigObject.json"
+ },
+ "CertificateAuthorityList": {
+ "$ref": "file://./components/CertificateAuthorityList.json"
+ },
+ "CertificateAuthorityObject": {
+ "$ref": "file://./components/CertificateAuthorityObject.json"
+ },
+ "CertificateList": {
+ "$ref": "file://./components/CertificateList.json"
+ },
+ "CertificateObject": {
+ "$ref": "file://./components/CertificateObject.json"
+ },
+ "ConfigObject": {
+ "$ref": "file://./components/ConfigObject.json"
+ },
+ "DeletedItemResponse": {
+ "$ref": "file://./components/DeletedItemResponse.json"
+ },
+ "DNSProviderList": {
+ "$ref": "file://./components/DNSProviderList.json"
+ },
+ "DNSProviderObject": {
+ "$ref": "file://./components/DNSProviderObject.json"
+ },
+ "ErrorObject": {
+ "$ref": "file://./components/ErrorObject.json"
+ },
+ "FilterObject": {
+ "$ref": "file://./components/FilterObject.json"
+ },
+ "HealthObject": {
+ "$ref": "file://./components/HealthObject.json"
+ },
+ "HostList": {
+ "$ref": "file://./components/HostList.json"
+ },
+ "HostObject": {
+ "$ref": "file://./components/HostObject.json"
+ },
+ "NginxTemplateList": {
+ "$ref": "file://./components/NginxTemplateList.json"
+ },
+ "NginxTemplateObject": {
+ "$ref": "file://./components/NginxTemplateObject.json"
+ },
+ "SettingList": {
+ "$ref": "file://./components/SettingList.json"
+ },
+ "SettingObject": {
+ "$ref": "file://./components/SettingObject.json"
+ },
+ "SortObject": {
+ "$ref": "file://./components/SortObject.json"
+ },
+ "StreamList": {
+ "$ref": "file://./components/StreamList.json"
+ },
+ "StreamObject": {
+ "$ref": "file://./components/StreamObject.json"
+ },
+ "TokenObject": {
+ "$ref": "file://./components/TokenObject.json"
+ },
+ "UpstreamList": {
+ "$ref": "file://./components/UpstreamList.json"
+ },
+ "UpstreamObject": {
+ "$ref": "file://./components/UpstreamObject.json"
+ },
+ "UserAuthObject": {
+ "$ref": "file://./components/UserAuthObject.json"
+ },
+ "UserList": {
+ "$ref": "file://./components/UserList.json"
+ },
+ "UserObject": {
+ "$ref": "file://./components/UserObject.json"
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/AuthConfigObject.json b/backend/embed/api_docs/components/AuthConfigObject.json
new file mode 100644
index 000000000..9a6aa3969
--- /dev/null
+++ b/backend/embed/api_docs/components/AuthConfigObject.json
@@ -0,0 +1,13 @@
+{
+ "type": "array",
+ "description": "AuthConfigObject",
+ "minItems": 1,
+ "items": {
+ "type": "string",
+ "enum": [
+ "local",
+ "ldap",
+ "oauth"
+ ]
+ }
+}
diff --git a/backend/embed/api_docs/components/CertificateAuthorityList.json b/backend/embed/api_docs/components/CertificateAuthorityList.json
new file mode 100644
index 000000000..131140ef8
--- /dev/null
+++ b/backend/embed/api_docs/components/CertificateAuthorityList.json
@@ -0,0 +1,40 @@
+{
+ "type": "object",
+ "description": "CertificateAuthorityList",
+ "additionalProperties": false,
+ "required": ["total", "offset", "limit", "sort"],
+ "properties": {
+ "total": {
+ "type": "integer",
+ "description": "Total number of rows"
+ },
+ "offset": {
+ "type": "integer",
+ "description": "Pagination Offset"
+ },
+ "limit": {
+ "type": "integer",
+ "description": "Pagination Limit"
+ },
+ "sort": {
+ "type": "array",
+ "description": "Sorting",
+ "items": {
+ "$ref": "#/components/schemas/SortObject"
+ }
+ },
+ "filter": {
+ "type": "array",
+ "description": "Filters",
+ "items": {
+ "$ref": "#/components/schemas/FilterObject"
+ }
+ },
+ "items": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/CertificateAuthorityObject"
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/CertificateAuthorityObject.json b/backend/embed/api_docs/components/CertificateAuthorityObject.json
new file mode 100644
index 000000000..7cd157f20
--- /dev/null
+++ b/backend/embed/api_docs/components/CertificateAuthorityObject.json
@@ -0,0 +1,57 @@
+{
+ "type": "object",
+ "description": "CertificateAuthorityObject",
+ "additionalProperties": false,
+ "required": [
+ "id",
+ "created_at",
+ "updated_at",
+ "name",
+ "acmesh_server",
+ "ca_bundle",
+ "max_domains",
+ "is_wildcard_supported",
+ "is_readonly"
+ ],
+ "properties": {
+ "id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "created_at": {
+ "type": "integer",
+ "minimum": 1,
+ "description": "Created Unix time with milliseconds"
+ },
+ "updated_at": {
+ "type": "integer",
+ "minimum": 1,
+ "description": "Updated Unix time with milliseconds"
+ },
+ "name": {
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 100
+ },
+ "acmesh_server": {
+ "type": "string",
+ "minLength": 2,
+ "maxLength": 255
+ },
+ "ca_bundle": {
+ "type": "string",
+ "minLength": 0,
+ "maxLength": 255
+ },
+ "max_domains": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "is_wildcard_supported": {
+ "type": "boolean"
+ },
+ "is_readonly": {
+ "type": "boolean"
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/CertificateList.json b/backend/embed/api_docs/components/CertificateList.json
new file mode 100644
index 000000000..8fbf2cccf
--- /dev/null
+++ b/backend/embed/api_docs/components/CertificateList.json
@@ -0,0 +1,40 @@
+{
+ "type": "object",
+ "description": "CertificateList",
+ "additionalProperties": false,
+ "required": ["total", "offset", "limit", "sort"],
+ "properties": {
+ "total": {
+ "type": "integer",
+ "description": "Total number of rows"
+ },
+ "offset": {
+ "type": "integer",
+ "description": "Pagination Offset"
+ },
+ "limit": {
+ "type": "integer",
+ "description": "Pagination Limit"
+ },
+ "sort": {
+ "type": "array",
+ "description": "Sorting",
+ "items": {
+ "$ref": "#/components/schemas/SortObject"
+ }
+ },
+ "filter": {
+ "type": "array",
+ "description": "Filters",
+ "items": {
+ "$ref": "#/components/schemas/FilterObject"
+ }
+ },
+ "items": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/CertificateObject"
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/CertificateObject.json b/backend/embed/api_docs/components/CertificateObject.json
new file mode 100644
index 000000000..18a087f5b
--- /dev/null
+++ b/backend/embed/api_docs/components/CertificateObject.json
@@ -0,0 +1,86 @@
+{
+ "type": "object",
+ "description": "CertificateObject",
+ "additionalProperties": false,
+ "required": [
+ "id",
+ "created_at",
+ "updated_at",
+ "expires_on",
+ "type",
+ "user_id",
+ "certificate_authority_id",
+ "dns_provider_id",
+ "name",
+ "is_ecc",
+ "status",
+ "domain_names"
+ ],
+ "properties": {
+ "id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "created_at": {
+ "type": "integer",
+ "minimum": 1,
+ "description": "Created Unix time with milliseconds"
+ },
+ "updated_at": {
+ "type": "integer",
+ "minimum": 1,
+ "description": "Updated Unix time with milliseconds"
+ },
+ "expires_on": {
+ "type": "integer",
+ "minimum": 0,
+ "nullable": true
+ },
+ "type": {
+ "type": "string",
+ "enum": ["custom", "http", "dns"]
+ },
+ "user_id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "certificate_authority_id": {
+ "type": "integer",
+ "minimum": 0
+ },
+ "certificate_authority": {
+ "$ref": "#/components/schemas/CertificateAuthorityObject"
+ },
+ "dns_provider_id": {
+ "type": "integer",
+ "minimum": 0,
+ "nullable": true
+ },
+ "name": {
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 100
+ },
+ "domain_names": {
+ "type": "array",
+ "minItems": 1,
+ "items": {
+ "type": "string",
+ "minLength": 4
+ }
+ },
+ "status": {
+ "type": "string",
+ "enum": ["ready", "requesting", "failed", "provided"]
+ },
+ "is_ecc": {
+ "type": "boolean"
+ },
+ "error_message": {
+ "type": "string"
+ },
+ "user": {
+ "$ref": "#/components/schemas/UserObject"
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/ConfigObject.json b/backend/embed/api_docs/components/ConfigObject.json
new file mode 100644
index 000000000..96c4176f8
--- /dev/null
+++ b/backend/embed/api_docs/components/ConfigObject.json
@@ -0,0 +1,4 @@
+{
+ "type": "object",
+ "description": "ConfigObject"
+}
diff --git a/backend/embed/api_docs/components/DNSProviderList.json b/backend/embed/api_docs/components/DNSProviderList.json
new file mode 100644
index 000000000..edf8385ce
--- /dev/null
+++ b/backend/embed/api_docs/components/DNSProviderList.json
@@ -0,0 +1,40 @@
+{
+ "type": "object",
+ "description": "DNSProviderList",
+ "additionalProperties": false,
+ "required": ["total", "offset", "limit", "sort"],
+ "properties": {
+ "total": {
+ "type": "integer",
+ "description": "Total number of rows"
+ },
+ "offset": {
+ "type": "integer",
+ "description": "Pagination Offset"
+ },
+ "limit": {
+ "type": "integer",
+ "description": "Pagination Limit"
+ },
+ "sort": {
+ "type": "array",
+ "description": "Sorting",
+ "items": {
+ "$ref": "#/components/schemas/SortObject"
+ }
+ },
+ "filter": {
+ "type": "array",
+ "description": "Filters",
+ "items": {
+ "$ref": "#/components/schemas/FilterObject"
+ }
+ },
+ "items": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/DNSProviderObject"
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/DNSProviderObject.json b/backend/embed/api_docs/components/DNSProviderObject.json
new file mode 100644
index 000000000..2e1f305e2
--- /dev/null
+++ b/backend/embed/api_docs/components/DNSProviderObject.json
@@ -0,0 +1,51 @@
+{
+ "type": "object",
+ "description": "DNSProviderObject",
+ "additionalProperties": false,
+ "required": [
+ "id",
+ "created_at",
+ "updated_at",
+ "user_id",
+ "name",
+ "acmesh_name",
+ "dns_sleep",
+ "meta"
+ ],
+ "properties": {
+ "id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "created_at": {
+ "type": "integer",
+ "minimum": 1,
+ "description": "Created Unix time with milliseconds"
+ },
+ "updated_at": {
+ "type": "integer",
+ "minimum": 1,
+ "description": "Updated Unix time with milliseconds"
+ },
+ "user_id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "name": {
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 100
+ },
+ "acmesh_name": {
+ "type": "string",
+ "minLength": 4,
+ "maxLength": 50
+ },
+ "dns_sleep": {
+ "type": "integer"
+ },
+ "meta": {
+ "type": "object"
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/DeletedItemResponse.json b/backend/embed/api_docs/components/DeletedItemResponse.json
new file mode 100644
index 000000000..0e0a9a0eb
--- /dev/null
+++ b/backend/embed/api_docs/components/DeletedItemResponse.json
@@ -0,0 +1,15 @@
+{
+ "type": "object",
+ "description": "DeletedItemResponse",
+ "additionalProperties": false,
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "type": "boolean",
+ "nullable": true
+ },
+ "error": {
+ "$ref": "#/components/schemas/ErrorObject"
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/ErrorObject.json b/backend/embed/api_docs/components/ErrorObject.json
new file mode 100644
index 000000000..a1d776058
--- /dev/null
+++ b/backend/embed/api_docs/components/ErrorObject.json
@@ -0,0 +1,17 @@
+{
+ "type": "object",
+ "description": "ErrorObject",
+ "additionalProperties": false,
+ "required": ["code", "message"],
+ "properties": {
+ "code": {
+ "type": "integer",
+ "description": "Error code",
+ "minimum": 0
+ },
+ "message": {
+ "type": "string",
+ "description": "Error message"
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/FilterObject.json b/backend/embed/api_docs/components/FilterObject.json
new file mode 100644
index 000000000..4ba75766d
--- /dev/null
+++ b/backend/embed/api_docs/components/FilterObject.json
@@ -0,0 +1,24 @@
+{
+ "type": "object",
+ "description": "FilterObject",
+ "additionalProperties": false,
+ "required": ["field", "modifier", "value"],
+ "properties": {
+ "field": {
+ "type": "string",
+ "description": "Field to filter with"
+ },
+ "modifier": {
+ "type": "string",
+ "description": "Filter modifier",
+ "pattern": "^(equals|not|min|max|greater|lesser|contains|starts|ends|in|notin)$"
+ },
+ "value": {
+ "type": "array",
+ "description": "Values used for filtering",
+ "items": {
+ "type": "string"
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/HealthObject.json b/backend/embed/api_docs/components/HealthObject.json
new file mode 100644
index 000000000..cf2897a72
--- /dev/null
+++ b/backend/embed/api_docs/components/HealthObject.json
@@ -0,0 +1,31 @@
+{
+ "type": "object",
+ "description": "HealthObject",
+ "additionalProperties": false,
+ "required": ["version", "commit", "healthy", "setup"],
+ "properties": {
+ "version": {
+ "type": "string",
+ "description": "Version",
+ "minLength": 1
+ },
+ "commit": {
+ "type": "string",
+ "description": "Commit hash",
+ "minLength": 7
+ },
+ "healthy": {
+ "type": "boolean",
+ "description": "Healthy?"
+ },
+ "setup": {
+ "type": "boolean",
+ "description": "Is the application set up?"
+ },
+ "acme.sh": {
+ "type": "string",
+ "description": "Acme.sh version",
+ "minLength": 1
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/HostList.json b/backend/embed/api_docs/components/HostList.json
new file mode 100644
index 000000000..8d9d413a8
--- /dev/null
+++ b/backend/embed/api_docs/components/HostList.json
@@ -0,0 +1,40 @@
+{
+ "type": "object",
+ "description": "HostList",
+ "additionalProperties": false,
+ "required": ["total", "offset", "limit", "sort"],
+ "properties": {
+ "total": {
+ "type": "integer",
+ "description": "Total number of rows"
+ },
+ "offset": {
+ "type": "integer",
+ "description": "Pagination Offset"
+ },
+ "limit": {
+ "type": "integer",
+ "description": "Pagination Limit"
+ },
+ "sort": {
+ "type": "array",
+ "description": "Sorting",
+ "items": {
+ "$ref": "#/components/schemas/SortObject"
+ }
+ },
+ "filter": {
+ "type": "array",
+ "description": "Filters",
+ "items": {
+ "$ref": "#/components/schemas/FilterObject"
+ }
+ },
+ "items": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/HostObject"
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/HostObject.json b/backend/embed/api_docs/components/HostObject.json
new file mode 100644
index 000000000..8b166ead0
--- /dev/null
+++ b/backend/embed/api_docs/components/HostObject.json
@@ -0,0 +1,55 @@
+{
+ "type": "object",
+ "description": "HostObject",
+ "additionalProperties": false,
+ "required": [
+ "id",
+ "created_at",
+ "updated_at",
+ "user_id",
+ "provider",
+ "name",
+ "domain_names"
+ ],
+ "properties": {
+ "id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "created_at": {
+ "type": "integer",
+ "minimum": 1,
+ "description": "Created Unix time with milliseconds"
+ },
+ "updated_at": {
+ "type": "integer",
+ "minimum": 1,
+ "description": "Updated Unix time with milliseconds"
+ },
+ "user_id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "provider": {
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 100
+ },
+ "name": {
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 100
+ },
+ "domain_names": {
+ "type": "array",
+ "minItems": 1,
+ "items": {
+ "type": "string",
+ "minLength": 4
+ }
+ },
+ "user": {
+ "$ref": "#/components/schemas/UserObject"
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/NginxTemplateList.json b/backend/embed/api_docs/components/NginxTemplateList.json
new file mode 100644
index 000000000..c0c11be3a
--- /dev/null
+++ b/backend/embed/api_docs/components/NginxTemplateList.json
@@ -0,0 +1,40 @@
+{
+ "type": "object",
+ "description": "NginxTemplateList",
+ "additionalProperties": false,
+ "required": ["total", "offset", "limit", "sort"],
+ "properties": {
+ "total": {
+ "type": "integer",
+ "description": "Total number of rows"
+ },
+ "offset": {
+ "type": "integer",
+ "description": "Pagination Offset"
+ },
+ "limit": {
+ "type": "integer",
+ "description": "Pagination Limit"
+ },
+ "sort": {
+ "type": "array",
+ "description": "Sorting",
+ "items": {
+ "$ref": "#/components/schemas/SortObject"
+ }
+ },
+ "filter": {
+ "type": "array",
+ "description": "Filters",
+ "items": {
+ "$ref": "#/components/schemas/FilterObject"
+ }
+ },
+ "items": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/NginxTemplateObject"
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/NginxTemplateObject.json b/backend/embed/api_docs/components/NginxTemplateObject.json
new file mode 100644
index 000000000..9b6b3f86c
--- /dev/null
+++ b/backend/embed/api_docs/components/NginxTemplateObject.json
@@ -0,0 +1,46 @@
+{
+ "type": "object",
+ "description": "NginxTemplateObject",
+ "additionalProperties": false,
+ "required": [
+ "id",
+ "created_at",
+ "updated_at",
+ "user_id",
+ "name",
+ "type",
+ "template"
+ ],
+ "properties": {
+ "id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "created_at": {
+ "type": "integer",
+ "minimum": 1,
+ "description": "Created Unix time with milliseconds"
+ },
+ "updated_at": {
+ "type": "integer",
+ "minimum": 1,
+ "description": "Updated Unix time with milliseconds"
+ },
+ "user_id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "name": {
+ "type": "string",
+ "minLength": 1
+ },
+ "type": {
+ "type": "string",
+ "pattern": "^proxy|redirect|dead|stream|upstream$"
+ },
+ "template": {
+ "type": "string",
+ "minLength": 20
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/SettingList.json b/backend/embed/api_docs/components/SettingList.json
new file mode 100644
index 000000000..77fd564be
--- /dev/null
+++ b/backend/embed/api_docs/components/SettingList.json
@@ -0,0 +1,40 @@
+{
+ "type": "object",
+ "description": "SettingList",
+ "additionalProperties": false,
+ "required": ["total", "offset", "limit", "sort"],
+ "properties": {
+ "total": {
+ "type": "integer",
+ "description": "Total number of rows"
+ },
+ "offset": {
+ "type": "integer",
+ "description": "Pagination Offset"
+ },
+ "limit": {
+ "type": "integer",
+ "description": "Pagination Limit"
+ },
+ "sort": {
+ "type": "array",
+ "description": "Sorting",
+ "items": {
+ "$ref": "#/components/schemas/SortObject"
+ }
+ },
+ "filter": {
+ "type": "array",
+ "description": "Filters",
+ "items": {
+ "$ref": "#/components/schemas/FilterObject"
+ }
+ },
+ "items": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/SettingObject"
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/SettingObject.json b/backend/embed/api_docs/components/SettingObject.json
new file mode 100644
index 000000000..6f151dfe7
--- /dev/null
+++ b/backend/embed/api_docs/components/SettingObject.json
@@ -0,0 +1,51 @@
+{
+ "type": "object",
+ "description": "SettingObject",
+ "additionalProperties": false,
+ "required": ["id", "created_at", "updated_at", "name", "value"],
+ "properties": {
+ "id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "created_at": {
+ "type": "integer",
+ "minimum": 1,
+ "description": "Created Unix time with milliseconds"
+ },
+ "updated_at": {
+ "type": "integer",
+ "minimum": 1,
+ "description": "Updated Unix time with milliseconds"
+ },
+ "name": {
+ "type": "string",
+ "minLength": 2,
+ "maxLength": 100
+ },
+ "description": {
+ "type": "string",
+ "minLength": 0,
+ "maxLength": 100
+ },
+ "value": {
+ "oneOf": [
+ {
+ "type": "array"
+ },
+ {
+ "type": "boolean"
+ },
+ {
+ "type": "object"
+ },
+ {
+ "type": "integer"
+ },
+ {
+ "type": "string"
+ }
+ ]
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/SortObject.json b/backend/embed/api_docs/components/SortObject.json
new file mode 100644
index 000000000..a2810ce61
--- /dev/null
+++ b/backend/embed/api_docs/components/SortObject.json
@@ -0,0 +1,17 @@
+{
+ "type": "object",
+ "description": "SortObject",
+ "additionalProperties": false,
+ "required": ["field", "direction"],
+ "properties": {
+ "field": {
+ "type": "string",
+ "description": "Field for sorting on"
+ },
+ "direction": {
+ "type": "string",
+ "description": "Sort order",
+ "pattern": "^(ASC|DESC)$"
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/StreamList.json b/backend/embed/api_docs/components/StreamList.json
new file mode 100644
index 000000000..c3dae5ab6
--- /dev/null
+++ b/backend/embed/api_docs/components/StreamList.json
@@ -0,0 +1,40 @@
+{
+ "type": "object",
+ "description": "StreamList",
+ "additionalProperties": false,
+ "required": ["total", "offset", "limit", "sort"],
+ "properties": {
+ "total": {
+ "type": "integer",
+ "description": "Total number of rows"
+ },
+ "offset": {
+ "type": "integer",
+ "description": "Pagination Offset"
+ },
+ "limit": {
+ "type": "integer",
+ "description": "Pagination Limit"
+ },
+ "sort": {
+ "type": "array",
+ "description": "Sorting",
+ "items": {
+ "$ref": "#/components/schemas/SortObject"
+ }
+ },
+ "filter": {
+ "type": "array",
+ "description": "Filters",
+ "items": {
+ "$ref": "#/components/schemas/FilterObject"
+ }
+ },
+ "items": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/StreamObject"
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/StreamObject.json b/backend/embed/api_docs/components/StreamObject.json
new file mode 100644
index 000000000..9c8787194
--- /dev/null
+++ b/backend/embed/api_docs/components/StreamObject.json
@@ -0,0 +1,57 @@
+{
+ "type": "object",
+ "description": "StreamObject",
+ "additionalProperties": false,
+ "required": [
+ "id",
+ "created_at",
+ "updated_at",
+ "expires_on",
+ "user_id",
+ "provider",
+ "name",
+ "domain_names"
+ ],
+ "properties": {
+ "id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "created_at": {
+ "type": "integer",
+ "minimum": 1,
+ "description": "Created Unix time with milliseconds"
+ },
+ "updated_at": {
+ "type": "integer",
+ "minimum": 1,
+ "description": "Updated Unix time with milliseconds"
+ },
+ "expires_on": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "user_id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "provider": {
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 100
+ },
+ "name": {
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 100
+ },
+ "domain_names": {
+ "type": "array",
+ "minItems": 1,
+ "items": {
+ "type": "string",
+ "minLength": 4
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/TokenObject.json b/backend/embed/api_docs/components/TokenObject.json
new file mode 100644
index 000000000..88863f4c7
--- /dev/null
+++ b/backend/embed/api_docs/components/TokenObject.json
@@ -0,0 +1,17 @@
+{
+ "type": "object",
+ "description": "TokenObject",
+ "additionalProperties": false,
+ "required": ["expires", "token"],
+ "properties": {
+ "expires": {
+ "type": "number",
+ "description": "Token Expiry Unix Time",
+ "minimum": 1
+ },
+ "token": {
+ "type": "string",
+ "description": "JWT Token"
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/UpstreamList.json b/backend/embed/api_docs/components/UpstreamList.json
new file mode 100644
index 000000000..316725ac5
--- /dev/null
+++ b/backend/embed/api_docs/components/UpstreamList.json
@@ -0,0 +1,40 @@
+{
+ "type": "object",
+ "description": "UpstreamList",
+ "additionalProperties": false,
+ "required": ["total", "offset", "limit", "sort"],
+ "properties": {
+ "total": {
+ "type": "integer",
+ "description": "Total number of rows"
+ },
+ "offset": {
+ "type": "integer",
+ "description": "Pagination Offset"
+ },
+ "limit": {
+ "type": "integer",
+ "description": "Pagination Limit"
+ },
+ "sort": {
+ "type": "array",
+ "description": "Sorting",
+ "items": {
+ "$ref": "#/components/schemas/SortObject"
+ }
+ },
+ "filter": {
+ "type": "array",
+ "description": "Filters",
+ "items": {
+ "$ref": "#/components/schemas/FilterObject"
+ }
+ },
+ "items": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/UpstreamObject"
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/UpstreamObject.json b/backend/embed/api_docs/components/UpstreamObject.json
new file mode 100644
index 000000000..5a64e18a0
--- /dev/null
+++ b/backend/embed/api_docs/components/UpstreamObject.json
@@ -0,0 +1,140 @@
+{
+ "type": "object",
+ "description": "UpstreamObject",
+ "additionalProperties": false,
+ "required": [
+ "id",
+ "created_at",
+ "updated_at",
+ "user_id",
+ "name",
+ "nginx_template_id",
+ "ip_hash",
+ "ntlm",
+ "keepalive",
+ "keepalive_requests",
+ "keepalive_time",
+ "keepalive_timeout",
+ "advanced_config",
+ "status",
+ "error_message",
+ "servers"
+ ],
+ "properties": {
+ "id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "created_at": {
+ "type": "integer",
+ "minimum": 1,
+ "description": "Created Unix time with milliseconds"
+ },
+ "updated_at": {
+ "type": "integer",
+ "minimum": 1,
+ "description": "Updated Unix time with milliseconds"
+ },
+ "user_id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "name": {
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 100
+ },
+ "nginx_template_id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "ip_hash": {
+ "type": "boolean"
+ },
+ "ntlm": {
+ "type": "boolean"
+ },
+ "keepalive": {
+ "type": "integer"
+ },
+ "keepalive_requests": {
+ "type": "integer"
+ },
+ "keepalive_time": {
+ "type": "string"
+ },
+ "keepalive_timeout": {
+ "type": "string"
+ },
+ "advanced_config": {
+ "type": "string"
+ },
+ "status": {
+ "type": "string"
+ },
+ "error_message": {
+ "type": "string"
+ },
+ "user": {
+ "$ref": "#/components/schemas/UserObject"
+ },
+ "servers": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "id",
+ "created_at",
+ "updated_at",
+ "upstream_id",
+ "server",
+ "weight",
+ "max_conns",
+ "max_fails",
+ "fail_timeout",
+ "backup"
+ ],
+ "properties": {
+ "id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "created_at": {
+ "type": "integer",
+ "minimum": 1,
+ "description": "Created Unix time with milliseconds"
+ },
+ "updated_at": {
+ "type": "integer",
+ "minimum": 1,
+ "description": "Updated Unix time with milliseconds"
+ },
+ "upstream_id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "server": {
+ "type": "string",
+ "minLength": 2
+ },
+ "weight": {
+ "type": "integer"
+ },
+ "max_conns": {
+ "type": "integer"
+ },
+ "max_fails": {
+ "type": "integer"
+ },
+ "fail_timeout": {
+ "type": "integer"
+ },
+ "backup": {
+ "type": "boolean"
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/UserAuthObject.json b/backend/embed/api_docs/components/UserAuthObject.json
new file mode 100644
index 000000000..1059b43cd
--- /dev/null
+++ b/backend/embed/api_docs/components/UserAuthObject.json
@@ -0,0 +1,30 @@
+{
+ "type": "object",
+ "description": "UserAuthObject",
+ "additionalProperties": false,
+ "required": ["id", "user_id", "type", "created_at", "updated_at"],
+ "properties": {
+ "id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "created_at": {
+ "type": "integer",
+ "minimum": 1,
+ "description": "Created Unix time with milliseconds"
+ },
+ "updated_at": {
+ "type": "integer",
+ "minimum": 1,
+ "description": "Updated Unix time with milliseconds"
+ },
+ "user_id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "type": {
+ "type": "string",
+ "pattern": "^(local|ldap|oauth)$"
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/UserList.json b/backend/embed/api_docs/components/UserList.json
new file mode 100644
index 000000000..a4d502f35
--- /dev/null
+++ b/backend/embed/api_docs/components/UserList.json
@@ -0,0 +1,40 @@
+{
+ "type": "object",
+ "description": "UserList",
+ "additionalProperties": false,
+ "required": ["total", "offset", "limit", "sort"],
+ "properties": {
+ "total": {
+ "type": "integer",
+ "description": "Total number of rows"
+ },
+ "offset": {
+ "type": "integer",
+ "description": "Pagination Offset"
+ },
+ "limit": {
+ "type": "integer",
+ "description": "Pagination Limit"
+ },
+ "sort": {
+ "type": "array",
+ "description": "Sorting",
+ "items": {
+ "$ref": "#/components/schemas/SortObject"
+ }
+ },
+ "filter": {
+ "type": "array",
+ "description": "Filters",
+ "items": {
+ "$ref": "#/components/schemas/FilterObject"
+ }
+ },
+ "items": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/UserObject"
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/components/UserObject.json b/backend/embed/api_docs/components/UserObject.json
new file mode 100644
index 000000000..94a6dc5d1
--- /dev/null
+++ b/backend/embed/api_docs/components/UserObject.json
@@ -0,0 +1,69 @@
+{
+ "type": "object",
+ "description": "UserObject",
+ "additionalProperties": false,
+ "required": [
+ "id",
+ "created_at",
+ "updated_at",
+ "name",
+ "email",
+ "is_disabled"
+ ],
+ "properties": {
+ "id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "created_at": {
+ "type": "integer",
+ "minimum": 1,
+ "description": "Created Unix time with milliseconds"
+ },
+ "updated_at": {
+ "type": "integer",
+ "minimum": 1,
+ "description": "Updated Unix time with milliseconds"
+ },
+ "name": {
+ "type": "string",
+ "minLength": 2,
+ "maxLength": 50
+ },
+ "email": {
+ "type": "string",
+ "minLength": 5,
+ "maxLength": 150
+ },
+ "gravatar_url": {
+ "type": "string"
+ },
+ "is_disabled": {
+ "type": "boolean"
+ },
+ "is_system": {
+ "type": "boolean"
+ },
+ "auth": {
+ "type": "object",
+ "required": ["type"],
+ "properties": {
+ "id": {
+ "type": "integer"
+ },
+ "type": {
+ "type": "string",
+ "pattern": "^(local|ldap|oauth)$"
+ }
+ }
+ },
+ "capabilities": {
+ "type": "array",
+ "minItems": 1,
+ "items": {
+ "type": "string",
+ "minLength": 1
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/auth/get.json b/backend/embed/api_docs/paths/auth/get.json
new file mode 100644
index 000000000..e34da6c34
--- /dev/null
+++ b/backend/embed/api_docs/paths/auth/get.json
@@ -0,0 +1,28 @@
+{
+ "operationId": "getAuthConfig",
+ "summary": "Returns auth configuration",
+ "tags": ["Auth"],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/AuthConfigObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": "todo"
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/auth/post.json b/backend/embed/api_docs/paths/auth/post.json
new file mode 100644
index 000000000..2cd55d3e6
--- /dev/null
+++ b/backend/embed/api_docs/paths/auth/post.json
@@ -0,0 +1,75 @@
+{
+ "operationId": "requestToken",
+ "summary": "Request a new access token from credentials",
+ "tags": ["Auth"],
+ "requestBody": {
+ "description": "Credentials Payload",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.GetToken}}"
+ }
+ }
+ },
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/TokenObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "expires": 1566540510,
+ "token": "eyJhbGciOiJSUzUxMiIsInR5cCI6IkpXVCJ9.ey...xaHKYr3Kk6MvkUjcC4",
+ "scope": "user"
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "403": {
+ "description": "403 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "additionalProperties": false,
+ "required": ["error"],
+ "properties": {
+ "result": {
+ "type": "object",
+ "nullable": true
+ },
+ "error": {
+ "$ref": "#/components/schemas/ErrorObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": null,
+ "error": {
+ "code": 403,
+ "message": "Not available during setup phase"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/auth/refresh/post.json b/backend/embed/api_docs/paths/auth/refresh/post.json
new file mode 100644
index 000000000..b44bf95c9
--- /dev/null
+++ b/backend/embed/api_docs/paths/auth/refresh/post.json
@@ -0,0 +1,34 @@
+{
+ "operationId": "refreshToken",
+ "summary": "Refresh your access token",
+ "tags": ["Auth"],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/TokenObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "expires": 1566540510,
+ "token": "eyJhbGciOiJSUzUxMiIsInR5cCI6IkpXVCJ9.ey...xaHKYr3Kk6MvkUjcC4",
+ "scope": "user"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/auth/sse/post.json b/backend/embed/api_docs/paths/auth/sse/post.json
new file mode 100644
index 000000000..0e53181aa
--- /dev/null
+++ b/backend/embed/api_docs/paths/auth/sse/post.json
@@ -0,0 +1,34 @@
+{
+ "operationId": "requestSSEToken",
+ "summary": "Request a new SSE token",
+ "tags": ["Auth"],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/TokenObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "expires": 1566540510,
+ "token": "eyJhbGciOiJSUzUxMiIsInR5cCI6IkpXVCJ9.ey...xaHKYr3Kk6MvkUjcC4",
+ "scope": "user"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/certificates-authorities/caID/delete.json b/backend/embed/api_docs/paths/certificates-authorities/caID/delete.json
new file mode 100644
index 000000000..3ae3bea87
--- /dev/null
+++ b/backend/embed/api_docs/paths/certificates-authorities/caID/delete.json
@@ -0,0 +1,39 @@
+{
+ "operationId": "deleteCertificateAuthority",
+ "summary": "Delete a Certificate Authority",
+ "tags": [
+ "Certificate Authorities"
+ ],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "caID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "Numeric ID of the Certificate Authority",
+ "example": 1
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DeletedItemResponse"
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": true
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/backend/embed/api_docs/paths/certificates-authorities/caID/get.json b/backend/embed/api_docs/paths/certificates-authorities/caID/get.json
new file mode 100644
index 000000000..2034faaab
--- /dev/null
+++ b/backend/embed/api_docs/paths/certificates-authorities/caID/get.json
@@ -0,0 +1,53 @@
+{
+ "operationId": "getCertificateAuthority",
+ "summary": "Get a Certificate Authority object by ID",
+ "tags": ["Certificate Authorities"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "caID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "ID of the Certificate Authority",
+ "example": 1
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/CertificateAuthorityObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 1,
+ "created_at": 1627531400000,
+ "updated_at": 1627531400000,
+ "name": "ZeroSSL",
+ "acmesh_server": "zerossl",
+ "ca_bundle": "",
+ "max_domains": 10,
+ "is_wildcard_supported": true,
+ "is_readonly": false
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/certificates-authorities/caID/put.json b/backend/embed/api_docs/paths/certificates-authorities/caID/put.json
new file mode 100644
index 000000000..0e18611e0
--- /dev/null
+++ b/backend/embed/api_docs/paths/certificates-authorities/caID/put.json
@@ -0,0 +1,62 @@
+{
+ "operationId": "updateCertificateAuthority",
+ "summary": "Update an existing Certificate Authority",
+ "tags": ["Certificate Authorities"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "caID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "ID of the Certificate Authority",
+ "example": 1
+ }
+ ],
+ "requestBody": {
+ "description": "Certificate Authority details to update",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.UpdateCertificateAuthority}}"
+ }
+ }
+ },
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/CertificateAuthorityObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 1,
+ "created_at": 1627531400000,
+ "updated_at": 1627531400000,
+ "name": "ZeroSSL",
+ "acmesh_server": "zerossl",
+ "ca_bundle": "",
+ "max_domains": 10,
+ "is_wildcard_supported": true,
+ "is_readonly": false
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/certificates-authorities/get.json b/backend/embed/api_docs/paths/certificates-authorities/get.json
new file mode 100644
index 000000000..f2e1d4c7c
--- /dev/null
+++ b/backend/embed/api_docs/paths/certificates-authorities/get.json
@@ -0,0 +1,93 @@
+{
+ "operationId": "getCertificateAuthorities",
+ "summary": "Get a list of Certificate Authorities",
+ "tags": ["Certificate Authorities"],
+ "parameters": [
+ {
+ "in": "query",
+ "name": "offset",
+ "schema": {
+ "type": "number"
+ },
+ "description": "The pagination row offset, default 0",
+ "example": 0
+ },
+ {
+ "in": "query",
+ "name": "limit",
+ "schema": {
+ "type": "number"
+ },
+ "description": "The pagination row limit, default 10",
+ "example": 10
+ },
+ {
+ "in": "query",
+ "name": "sort",
+ "schema": {
+ "type": "string"
+ },
+ "description": "The sorting of the list",
+ "example": "id,name.asc,value.desc"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/CertificateAuthorityList"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "total": 2,
+ "offset": 0,
+ "limit": 10,
+ "sort": [
+ {
+ "field": "name",
+ "direction": "ASC"
+ }
+ ],
+ "items": [
+ {
+ "id": 1,
+ "created_at": 1627531400000,
+ "updated_at": 1627531400000,
+ "name": "ZeroSSL",
+ "acmesh_server": "zerossl",
+ "ca_bundle": "",
+ "max_domains": 10,
+ "is_wildcard_supported": true,
+ "is_setup": true
+ },
+ {
+ "id": 2,
+ "created_at": 1627531400000,
+ "updated_at": 1627531400000,
+ "name": "Let's Encrypt",
+ "acmesh_server": "https://acme-v02.api.letsencrypt.org/directory",
+ "ca_bundle": "",
+ "max_domains": 10,
+ "is_wildcard_supported": true,
+ "is_setup": true
+ }
+ ]
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/certificates-authorities/post.json b/backend/embed/api_docs/paths/certificates-authorities/post.json
new file mode 100644
index 000000000..bac0df52e
--- /dev/null
+++ b/backend/embed/api_docs/paths/certificates-authorities/post.json
@@ -0,0 +1,49 @@
+{
+ "operationId": "createCertificateAuthority",
+ "summary": "Create a new Certificate Authority",
+ "tags": ["Certificate Authorities"],
+ "requestBody": {
+ "description": "Certificate Authority to Create",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.CreateCertificateAuthority}}"
+ }
+ }
+ },
+ "responses": {
+ "201": {
+ "description": "201 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/CertificateAuthorityObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 1,
+ "created_at": 1627531400000,
+ "updated_at": 1627531400000,
+ "name": "ZeroSSL",
+ "acmesh_server": "zerossl",
+ "ca_bundle": "",
+ "max_domains": 10,
+ "is_wildcard_supported": true,
+ "is_readonly": false
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/certificates/certificateID/delete.json b/backend/embed/api_docs/paths/certificates/certificateID/delete.json
new file mode 100644
index 000000000..98acfaf77
--- /dev/null
+++ b/backend/embed/api_docs/paths/certificates/certificateID/delete.json
@@ -0,0 +1,60 @@
+{
+ "operationId": "deleteCertificate",
+ "summary": "Delete a Certificate",
+ "tags": [
+ "Certificates"
+ ],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "certificateID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "Numeric ID of the certificate",
+ "example": 1
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DeletedItemResponse"
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": true
+ }
+ }
+ }
+ }
+ }
+ },
+ "400": {
+ "description": "400 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DeletedItemResponse"
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": null,
+ "error": {
+ "code": 400,
+ "message": "You cannot delete a certificate that is in use!"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/backend/embed/api_docs/paths/certificates/certificateID/get.json b/backend/embed/api_docs/paths/certificates/certificateID/get.json
new file mode 100644
index 000000000..637e1e191
--- /dev/null
+++ b/backend/embed/api_docs/paths/certificates/certificateID/get.json
@@ -0,0 +1,56 @@
+{
+ "operationId": "getCertificate",
+ "summary": "Get a certificate object by ID",
+ "tags": ["Certificates"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "certificateID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "ID of the certificate",
+ "example": 1
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/CertificateObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 1,
+ "created_at": 1604536109000,
+ "updated_at": 1604536109000,
+ "expires_on": null,
+ "type": "dns",
+ "user_id": 1,
+ "certificate_authority_id": 2,
+ "dns_provider_id": 1,
+ "name": "test1.jc21.com.au",
+ "domain_names": ["test1.jc21.com.au"],
+ "is_ecc": 0,
+ "status": "ready"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/certificates/certificateID/put.json b/backend/embed/api_docs/paths/certificates/certificateID/put.json
new file mode 100644
index 000000000..011961532
--- /dev/null
+++ b/backend/embed/api_docs/paths/certificates/certificateID/put.json
@@ -0,0 +1,65 @@
+{
+ "operationId": "updateCertificate",
+ "summary": "Update an existing Certificate",
+ "tags": ["Certificates"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "certificateID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "ID of the certificate",
+ "example": 1
+ }
+ ],
+ "requestBody": {
+ "description": "Certificate details to update",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.UpdateCertificate}}"
+ }
+ }
+ },
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/CertificateObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 1,
+ "created_at": 1604536109000,
+ "updated_at": 1604536109000,
+ "expires_on": null,
+ "type": "dns",
+ "user_id": 1,
+ "certificate_authority_id": 2,
+ "dns_provider_id": 1,
+ "name": "test1.jc21.com.au",
+ "domain_names": ["test1.jc21.com.au"],
+ "is_ecc": 0,
+ "status": "ready"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/certificates/get.json b/backend/embed/api_docs/paths/certificates/get.json
new file mode 100644
index 000000000..b3113e67a
--- /dev/null
+++ b/backend/embed/api_docs/paths/certificates/get.json
@@ -0,0 +1,87 @@
+{
+ "operationId": "getCertificates",
+ "summary": "Get a list of certificates",
+ "tags": ["Certificates"],
+ "parameters": [
+ {
+ "in": "query",
+ "name": "offset",
+ "schema": {
+ "type": "number"
+ },
+ "description": "The pagination row offset, default 0",
+ "example": 0
+ },
+ {
+ "in": "query",
+ "name": "limit",
+ "schema": {
+ "type": "number"
+ },
+ "description": "The pagination row limit, default 10",
+ "example": 10
+ },
+ {
+ "in": "query",
+ "name": "sort",
+ "schema": {
+ "type": "string"
+ },
+ "description": "The sorting of the list",
+ "example": "id,name.asc,value.desc"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/CertificateList"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "total": 1,
+ "offset": 0,
+ "limit": 10,
+ "sort": [
+ {
+ "field": "name",
+ "direction": "ASC"
+ }
+ ],
+ "items": [
+ {
+ "id": 1,
+ "created_at": 1604536109000,
+ "updated_at": 1604536109000,
+ "expires_on": null,
+ "type": "dns",
+ "user_id": 1,
+ "certificate_authority_id": 2,
+ "dns_provider_id": 1,
+ "name": "test1.jc21.com.au",
+ "domain_names": [
+ "test1.jc21.com.au"
+ ],
+ "is_ecc": 0,
+ "status": "ready"
+ }
+ ]
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/certificates/post.json b/backend/embed/api_docs/paths/certificates/post.json
new file mode 100644
index 000000000..7b93d9aff
--- /dev/null
+++ b/backend/embed/api_docs/paths/certificates/post.json
@@ -0,0 +1,52 @@
+{
+ "operationId": "createCertificate",
+ "summary": "Create a new Certificate",
+ "tags": ["Certificates"],
+ "requestBody": {
+ "description": "Certificate to create",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.CreateCertificate}}"
+ }
+ }
+ },
+ "responses": {
+ "201": {
+ "description": "201 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/CertificateObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 1,
+ "created_at": 1604536109000,
+ "updated_at": 1604536109000,
+ "expires_on": null,
+ "type": "dns",
+ "user_id": 1,
+ "certificate_authority_id": 2,
+ "dns_provider_id": 1,
+ "name": "test1.jc21.com.au",
+ "domain_names": ["test1.jc21.com.au"],
+ "is_ecc": 0,
+ "status": "ready"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/config/get.json b/backend/embed/api_docs/paths/config/get.json
new file mode 100644
index 000000000..05a083b2e
--- /dev/null
+++ b/backend/embed/api_docs/paths/config/get.json
@@ -0,0 +1,35 @@
+{
+ "operationId": "config",
+ "summary": "Returns the API Service configuration",
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/ConfigObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "data": "/data",
+ "log": {
+ "level": "debug",
+ "format": "nice"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/dns-providers/get.json b/backend/embed/api_docs/paths/dns-providers/get.json
new file mode 100644
index 000000000..c3e281d72
--- /dev/null
+++ b/backend/embed/api_docs/paths/dns-providers/get.json
@@ -0,0 +1,83 @@
+{
+ "operationId": "getDNSProviders",
+ "summary": "Get a list of DNS Providers",
+ "tags": ["DNS Providers"],
+ "parameters": [
+ {
+ "in": "query",
+ "name": "offset",
+ "schema": {
+ "type": "number"
+ },
+ "description": "The pagination row offset, default 0",
+ "example": 0
+ },
+ {
+ "in": "query",
+ "name": "limit",
+ "schema": {
+ "type": "number"
+ },
+ "description": "The pagination row limit, default 10",
+ "example": 10
+ },
+ {
+ "in": "query",
+ "name": "sort",
+ "schema": {
+ "type": "string"
+ },
+ "description": "The sorting of the list",
+ "example": "id,name.asc,value.desc"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/DNSProviderList"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "total": 1,
+ "offset": 0,
+ "limit": 10,
+ "sort": [
+ {
+ "field": "name",
+ "direction": "ASC"
+ }
+ ],
+ "items": [
+ {
+ "id": 1,
+ "created_at": 1602593653000,
+ "updated_at": 1602593653000,
+ "user_id": 1,
+ "name": "Route53",
+ "acmesh_name": "dns_aws",
+ "meta": {
+ "AWS_ACCESS_KEY_ID": "abc123",
+ "AWS_SECRET_ACCESS_KEY": "def098"
+ }
+ }
+ ]
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/dns-providers/post.json b/backend/embed/api_docs/paths/dns-providers/post.json
new file mode 100644
index 000000000..f7d142a8d
--- /dev/null
+++ b/backend/embed/api_docs/paths/dns-providers/post.json
@@ -0,0 +1,50 @@
+{
+ "operationId": "createDNSProvider",
+ "summary": "Create a new DNS Provider",
+ "tags": ["DNS Providers"],
+ "requestBody": {
+ "description": "DNS Provider to Create",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.CreateDNSProvider}}"
+ }
+ }
+ },
+ "responses": {
+ "201": {
+ "description": "201 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/DNSProviderObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 1,
+ "created_at": 1602593653000,
+ "updated_at": 1602593653000,
+ "user_id": 1,
+ "name": "Route53",
+ "acmesh_name": "dns_aws",
+ "meta": {
+ "AWS_ACCESS_KEY_ID": "abc123",
+ "AWS_SECRET_ACCESS_KEY": "def098"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/dns-providers/providerID/delete.json b/backend/embed/api_docs/paths/dns-providers/providerID/delete.json
new file mode 100644
index 000000000..32b77b0d1
--- /dev/null
+++ b/backend/embed/api_docs/paths/dns-providers/providerID/delete.json
@@ -0,0 +1,60 @@
+{
+ "operationId": "deleteDNSProvider",
+ "summary": "Delete a DNS Provider",
+ "tags": [
+ "DNS Providers"
+ ],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "providerID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "Numeric ID of the DNS Provider",
+ "example": 1
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DeletedItemResponse"
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": true
+ }
+ }
+ }
+ }
+ }
+ },
+ "400": {
+ "description": "400 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DeletedItemResponse"
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": null,
+ "error": {
+ "code": 400,
+ "message": "You cannot delete a DNS Provider that is in use!"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/backend/embed/api_docs/paths/dns-providers/providerID/get.json b/backend/embed/api_docs/paths/dns-providers/providerID/get.json
new file mode 100644
index 000000000..743366021
--- /dev/null
+++ b/backend/embed/api_docs/paths/dns-providers/providerID/get.json
@@ -0,0 +1,54 @@
+{
+ "operationId": "getDNSProvider",
+ "summary": "Get a DNS Provider object by ID",
+ "tags": ["DNS Providers"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "providerID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "ID of the DNS Provider",
+ "example": 1
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/DNSProviderObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 1,
+ "created_at": 1602593653000,
+ "updated_at": 1602593653000,
+ "user_id": 1,
+ "name": "Route53",
+ "acmesh_name": "dns_aws",
+ "meta": {
+ "AWS_ACCESS_KEY_ID": "abc123",
+ "AWS_SECRET_ACCESS_KEY": "def098"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/dns-providers/providerID/put.json b/backend/embed/api_docs/paths/dns-providers/providerID/put.json
new file mode 100644
index 000000000..b5709994d
--- /dev/null
+++ b/backend/embed/api_docs/paths/dns-providers/providerID/put.json
@@ -0,0 +1,65 @@
+{
+ "operationId": "updateDNSProvider",
+ "summary": "Update an existing DNS Provider",
+ "tags": ["DNS Providers"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "providerID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "ID of the DNS Provider",
+ "example": 1
+ }
+ ],
+ "requestBody": {
+ "description": "DNS Provider details to update",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.UpdateDNSProvider}}"
+ }
+ }
+ },
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/DNSProviderObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "result": {
+ "id": 1,
+ "created_at": 1602593653000,
+ "updated_at": 1602593653000,
+ "user_id": 1,
+ "name": "Route53",
+ "acmesh_name": "dns_aws",
+ "meta": {
+ "AWS_ACCESS_KEY_ID": "abc123",
+ "AWS_SECRET_ACCESS_KEY": "def098"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/get.json b/backend/embed/api_docs/paths/get.json
new file mode 100644
index 000000000..567b2e78a
--- /dev/null
+++ b/backend/embed/api_docs/paths/get.json
@@ -0,0 +1,44 @@
+{
+ "operationId": "health",
+ "summary": "Returns the API health status",
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/HealthObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "version": "3.0.0",
+ "commit": "9f119b6",
+ "healthy": true,
+ "setup": true
+ }
+ }
+ },
+ "unhealthy": {
+ "value": {
+ "result": {
+ "version": "3.0.0",
+ "commit": "9f119b6",
+ "healthy": false,
+ "setup": true
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/hosts/get.json b/backend/embed/api_docs/paths/hosts/get.json
new file mode 100644
index 000000000..ae82a57de
--- /dev/null
+++ b/backend/embed/api_docs/paths/hosts/get.json
@@ -0,0 +1,94 @@
+{
+ "operationId": "getHosts",
+ "summary": "Get a list of Hosts",
+ "tags": ["Hosts"],
+ "parameters": [
+ {
+ "in": "query",
+ "name": "offset",
+ "schema": {
+ "type": "number"
+ },
+ "description": "The pagination row offset, default 0",
+ "example": 0
+ },
+ {
+ "in": "query",
+ "name": "limit",
+ "schema": {
+ "type": "number"
+ },
+ "description": "The pagination row limit, default 10",
+ "example": 10
+ },
+ {
+ "in": "query",
+ "name": "sort",
+ "schema": {
+ "type": "string"
+ },
+ "description": "The sorting of the list",
+ "example": "id,name.asc,value.desc"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/HostList"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "total": 1,
+ "offset": 0,
+ "limit": 10,
+ "sort": [
+ {
+ "field": "domain_names",
+ "direction": "ASC"
+ }
+ ],
+ "items": [
+ {
+ "id": 1,
+ "created_at": 1646279455000,
+ "updated_at": 1646279455000,
+ "user_id": 2,
+ "type": "proxy",
+ "nginx_template_id": 1,
+ "listen_interface": "",
+ "domain_names": ["jc21.com"],
+ "upstream_id": 0,
+ "certificate_id": 0,
+ "access_list_id": 0,
+ "ssl_forced": false,
+ "caching_enabled": false,
+ "block_exploits": false,
+ "allow_websocket_upgrade": false,
+ "http2_support": false,
+ "hsts_enabled": false,
+ "hsts_subdomains": false,
+ "paths": "",
+ "advanced_config": "",
+ "is_disabled": false
+ }
+ ]
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/hosts/hostID/delete.json b/backend/embed/api_docs/paths/hosts/hostID/delete.json
new file mode 100644
index 000000000..4df119ad0
--- /dev/null
+++ b/backend/embed/api_docs/paths/hosts/hostID/delete.json
@@ -0,0 +1,60 @@
+{
+ "operationId": "deleteHost",
+ "summary": "Delete a Host",
+ "tags": [
+ "Hosts"
+ ],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "hostID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "Numeric ID of the Host",
+ "example": 1
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DeletedItemResponse"
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": true
+ }
+ }
+ }
+ }
+ }
+ },
+ "400": {
+ "description": "400 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DeletedItemResponse"
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": null,
+ "error": {
+ "code": 400,
+ "message": "You cannot delete a host that is in use!"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/backend/embed/api_docs/paths/hosts/hostID/get.json b/backend/embed/api_docs/paths/hosts/hostID/get.json
new file mode 100644
index 000000000..0f2ca20d4
--- /dev/null
+++ b/backend/embed/api_docs/paths/hosts/hostID/get.json
@@ -0,0 +1,65 @@
+{
+ "operationId": "getHost",
+ "summary": "Get a Host object by ID",
+ "tags": ["Hosts"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "hostID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "ID of the Host",
+ "example": 1
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/HostObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 1,
+ "created_at": 1646279455000,
+ "updated_at": 1646279455000,
+ "user_id": 2,
+ "type": "proxy",
+ "nginx_template_id": 1,
+ "listen_interface": "",
+ "domain_names": ["jc21.com"],
+ "upstream_id": 0,
+ "certificate_id": 0,
+ "access_list_id": 0,
+ "ssl_forced": false,
+ "caching_enabled": false,
+ "block_exploits": false,
+ "allow_websocket_upgrade": false,
+ "http2_support": false,
+ "hsts_enabled": false,
+ "hsts_subdomains": false,
+ "paths": "",
+ "advanced_config": "",
+ "is_disabled": false
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/hosts/hostID/nginx-config/get.json b/backend/embed/api_docs/paths/hosts/hostID/nginx-config/get.json
new file mode 100644
index 000000000..ea7be7ab7
--- /dev/null
+++ b/backend/embed/api_docs/paths/hosts/hostID/nginx-config/get.json
@@ -0,0 +1,43 @@
+{
+ "operationId": "getHostNginxConfig",
+ "summary": "Get a Host Nginx Config object by ID",
+ "tags": ["Hosts"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "hostID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "ID of the Host",
+ "example": 1
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "type": "string"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": "# ------------------------------------------------------------\n# a.example.com\n# ------------------------------------------------------------\nserver {\n listen 80;\n server_name a.example.com ;\n access_log /data/logs/host-1_access.log proxy;\n error_log /data/logs/host-1_error.log warn;\n # locations ?\n # default location:\n location / {\n # Access Rules ? todo\n # Access checks must...? todo\n # Proxy!\n add_header X-Served-By $host;\n proxy_set_header Host $host;\n proxy_set_header X-Forwarded-Scheme $scheme;\n proxy_set_header X-Forwarded-Proto $scheme;\n proxy_set_header X-Forwarded-For $remote_addr;\n proxy_http_version 1.1;\n # proxy a single host\n proxy_pass http://192.168.0.10:80;\n }\n # Legacy Custom Configuration\n include /data/nginx/custom/server_proxy[.]conf;\n}\n"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/hosts/hostID/put.json b/backend/embed/api_docs/paths/hosts/hostID/put.json
new file mode 100644
index 000000000..cff143bed
--- /dev/null
+++ b/backend/embed/api_docs/paths/hosts/hostID/put.json
@@ -0,0 +1,74 @@
+{
+ "operationId": "updateHost",
+ "summary": "Update an existing Host",
+ "tags": ["Hosts"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "hostID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "ID of the Host",
+ "example": 1
+ }
+ ],
+ "requestBody": {
+ "description": "Host details to update",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.UpdateHost}}"
+ }
+ }
+ },
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/HostObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 1,
+ "created_at": 1646279455000,
+ "updated_at": 1646279455000,
+ "user_id": 2,
+ "type": "proxy",
+ "nginx_template_id": 1,
+ "listen_interface": "",
+ "domain_names": ["jc21.com"],
+ "upstream_id": 0,
+ "certificate_id": 0,
+ "access_list_id": 0,
+ "ssl_forced": false,
+ "caching_enabled": false,
+ "block_exploits": false,
+ "allow_websocket_upgrade": false,
+ "http2_support": false,
+ "hsts_enabled": false,
+ "hsts_subdomains": false,
+ "paths": "",
+ "advanced_config": "",
+ "is_disabled": false
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/hosts/post.json b/backend/embed/api_docs/paths/hosts/post.json
new file mode 100644
index 000000000..6e9d605e2
--- /dev/null
+++ b/backend/embed/api_docs/paths/hosts/post.json
@@ -0,0 +1,61 @@
+{
+ "operationId": "createHost",
+ "summary": "Create a new Host",
+ "tags": ["Hosts"],
+ "requestBody": {
+ "description": "Host to Create",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.CreateHost}}"
+ }
+ }
+ },
+ "responses": {
+ "201": {
+ "description": "201 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/HostObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 1,
+ "created_at": 1645700556000,
+ "updated_at": 1645700556000,
+ "user_id": 2,
+ "type": "proxy",
+ "nginx_template_id": 1,
+ "listen_interface": "",
+ "domain_names": ["jc21.com"],
+ "upstream_id": 0,
+ "certificate_id": 0,
+ "access_list_id": 0,
+ "ssl_forced": false,
+ "caching_enabled": false,
+ "block_exploits": false,
+ "allow_websocket_upgrade": false,
+ "http2_support": false,
+ "hsts_enabled": false,
+ "hsts_subdomains": false,
+ "paths": "",
+ "advanced_config": "",
+ "is_disabled": false
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/nginx-templates/get.json b/backend/embed/api_docs/paths/nginx-templates/get.json
new file mode 100644
index 000000000..89f389e55
--- /dev/null
+++ b/backend/embed/api_docs/paths/nginx-templates/get.json
@@ -0,0 +1,80 @@
+{
+ "operationId": "getNginxTemplates",
+ "summary": "Get a list of Nginx Templates",
+ "tags": ["Nginx Templates"],
+ "parameters": [
+ {
+ "in": "query",
+ "name": "offset",
+ "schema": {
+ "type": "number"
+ },
+ "description": "The pagination row offset, default 0",
+ "example": 0
+ },
+ {
+ "in": "query",
+ "name": "limit",
+ "schema": {
+ "type": "number"
+ },
+ "description": "The pagination row limit, default 10",
+ "example": 10
+ },
+ {
+ "in": "query",
+ "name": "sort",
+ "schema": {
+ "type": "string"
+ },
+ "description": "The sorting of the list",
+ "example": "id,name.asc,value.desc"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/NginxTemplateList"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "total": 1,
+ "offset": 0,
+ "limit": 10,
+ "sort": [
+ {
+ "field": "created_at",
+ "direction": "ASC"
+ }
+ ],
+ "items": [
+ {
+ "id": 1,
+ "created_at": 1646218093000,
+ "updated_at": 1646218093000,
+ "user_id": 1,
+ "name": "Default Proxy Template",
+ "type": "proxy",
+ "template": "# this is a proxy template"
+ }
+ ]
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/nginx-templates/post.json b/backend/embed/api_docs/paths/nginx-templates/post.json
new file mode 100644
index 000000000..3963d5d58
--- /dev/null
+++ b/backend/embed/api_docs/paths/nginx-templates/post.json
@@ -0,0 +1,47 @@
+{
+ "operationId": "createNginxTemplate",
+ "summary": "Create a new Nginx Template",
+ "tags": ["Nginx Templates"],
+ "requestBody": {
+ "description": "Template to Create",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.CreateNginxTemplate}}"
+ }
+ }
+ },
+ "responses": {
+ "201": {
+ "description": "201 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/NginxTemplateObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 10,
+ "created_at": 1646218093000,
+ "updated_at": 1646218093000,
+ "user_id": 1,
+ "name": "My proxy template",
+ "type": "proxy",
+ "template": "# this is a proxy template"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/nginx-templates/templateID/delete.json b/backend/embed/api_docs/paths/nginx-templates/templateID/delete.json
new file mode 100644
index 000000000..81e7ff524
--- /dev/null
+++ b/backend/embed/api_docs/paths/nginx-templates/templateID/delete.json
@@ -0,0 +1,58 @@
+{
+ "operationId": "deleteNginxTemplate",
+ "summary": "Delete a Nginx Template",
+ "tags": ["Nginx Templates"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "templateID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "Numeric ID of the Template",
+ "example": 1
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DeletedItemResponse"
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": true
+ }
+ }
+ }
+ }
+ }
+ },
+ "400": {
+ "description": "400 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DeletedItemResponse"
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": null,
+ "error": {
+ "code": 400,
+ "message": "You cannot delete a template that is in use!"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/nginx-templates/templateID/get.json b/backend/embed/api_docs/paths/nginx-templates/templateID/get.json
new file mode 100644
index 000000000..9cb2097f7
--- /dev/null
+++ b/backend/embed/api_docs/paths/nginx-templates/templateID/get.json
@@ -0,0 +1,51 @@
+{
+ "operationId": "getNginxTemplate",
+ "summary": "Get a Nginx Template object by ID",
+ "tags": ["Nginx Templates"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "templateID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "ID of the Host Template",
+ "example": 1
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/NginxTemplateObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 1,
+ "created_at": 1646218093000,
+ "updated_at": 1646218093000,
+ "user_id": 1,
+ "name": "Default Proxy Template",
+ "type": "proxy",
+ "template": "# this is a proxy template"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/nginx-templates/templateID/put.json b/backend/embed/api_docs/paths/nginx-templates/templateID/put.json
new file mode 100644
index 000000000..84602175b
--- /dev/null
+++ b/backend/embed/api_docs/paths/nginx-templates/templateID/put.json
@@ -0,0 +1,60 @@
+{
+ "operationId": "updateNginxTemplate",
+ "summary": "Update an existing Nginx Template",
+ "tags": ["Nginx Templates"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "templateID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "ID of the Template",
+ "example": 1
+ }
+ ],
+ "requestBody": {
+ "description": "Template details to update",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.UpdateNginxTemplate}}"
+ }
+ }
+ },
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/NginxTemplateObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 1,
+ "created_at": 1646218093000,
+ "updated_at": 1646218093000,
+ "user_id": 1,
+ "name": "My renamed proxy template",
+ "type": "proxy",
+ "template": "# this is a proxy template"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/schema/get.json b/backend/embed/api_docs/paths/schema/get.json
new file mode 100644
index 000000000..e21ae8058
--- /dev/null
+++ b/backend/embed/api_docs/paths/schema/get.json
@@ -0,0 +1,9 @@
+{
+ "operationId": "schema",
+ "summary": "Returns this swagger API schema",
+ "responses": {
+ "200": {
+ "description": "200 response"
+ }
+ }
+}
\ No newline at end of file
diff --git a/backend/embed/api_docs/paths/settings/get.json b/backend/embed/api_docs/paths/settings/get.json
new file mode 100644
index 000000000..09b98b8fe
--- /dev/null
+++ b/backend/embed/api_docs/paths/settings/get.json
@@ -0,0 +1,81 @@
+{
+ "operationId": "getSettings",
+ "summary": "Get a list of settings",
+ "tags": ["Settings"],
+ "parameters": [
+ {
+ "in": "query",
+ "name": "offset",
+ "schema": {
+ "type": "number"
+ },
+ "description": "The pagination row offset, default 0",
+ "example": 0
+ },
+ {
+ "in": "query",
+ "name": "limit",
+ "schema": {
+ "type": "number"
+ },
+ "description": "The pagination row limit, default 10",
+ "example": 10
+ },
+ {
+ "in": "query",
+ "name": "sort",
+ "schema": {
+ "type": "string"
+ },
+ "description": "The sorting of the list",
+ "example": "id,name.asc,value.desc"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/SettingList"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "total": 1,
+ "offset": 0,
+ "limit": 10,
+ "sort": [
+ {
+ "field": "name",
+ "direction": "ASC"
+ }
+ ],
+ "items": [
+ {
+ "id": 1,
+ "created_at": 1578010090000,
+ "updated_at": 1578010095000,
+ "name": "default-site",
+ "value": {
+ "html": "not found
",
+ "type": "custom"
+ }
+ }
+ ]
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/settings/name/get.json b/backend/embed/api_docs/paths/settings/name/get.json
new file mode 100644
index 000000000..38afc7bac
--- /dev/null
+++ b/backend/embed/api_docs/paths/settings/name/get.json
@@ -0,0 +1,52 @@
+{
+ "operationId": "getSetting",
+ "summary": "Get a setting object by name",
+ "tags": ["Settings"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "name",
+ "schema": {
+ "type": "string",
+ "minLength": 2
+ },
+ "required": true,
+ "description": "Name of the setting",
+ "example": "default-site"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/SettingObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 2,
+ "created_at": 1578010090000,
+ "updated_at": 1578010095000,
+ "name": "default-site",
+ "value": {
+ "html": "not found
",
+ "type": "custom"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/settings/name/put.json b/backend/embed/api_docs/paths/settings/name/put.json
new file mode 100644
index 000000000..f7d5dc780
--- /dev/null
+++ b/backend/embed/api_docs/paths/settings/name/put.json
@@ -0,0 +1,61 @@
+{
+ "operationId": "updateSetting",
+ "summary": "Update an existing Setting",
+ "tags": ["Settings"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "name",
+ "schema": {
+ "type": "string",
+ "minLength": 2
+ },
+ "required": true,
+ "description": "Name of the setting",
+ "example": "default-site"
+ }
+ ],
+ "requestBody": {
+ "description": "Setting details to update",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.UpdateSetting}}"
+ }
+ }
+ },
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/SettingObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 2,
+ "created_at": 1578010090000,
+ "updated_at": 1578010090000,
+ "name": "default-site",
+ "value": {
+ "html": "not found
",
+ "type": "custom"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/settings/post.json b/backend/embed/api_docs/paths/settings/post.json
new file mode 100644
index 000000000..97fa62372
--- /dev/null
+++ b/backend/embed/api_docs/paths/settings/post.json
@@ -0,0 +1,48 @@
+{
+ "operationId": "createSetting",
+ "summary": "Create a new Setting",
+ "tags": ["Settings"],
+ "requestBody": {
+ "description": "Setting to Create",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.CreateSetting}}"
+ }
+ }
+ },
+ "responses": {
+ "201": {
+ "description": "201 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/SettingObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 2,
+ "created_at": 1578010090000,
+ "updated_at": 1578010090000,
+ "name": "default-site",
+ "value": {
+ "html": "not found
",
+ "type": "custom"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/streams/get.json b/backend/embed/api_docs/paths/streams/get.json
new file mode 100644
index 000000000..9964e47e9
--- /dev/null
+++ b/backend/embed/api_docs/paths/streams/get.json
@@ -0,0 +1,70 @@
+{
+ "operationId": "getStreams",
+ "summary": "Get a list of Streams",
+ "tags": ["Streams"],
+ "parameters": [
+ {
+ "in": "query",
+ "name": "offset",
+ "schema": {
+ "type": "number"
+ },
+ "description": "The pagination row offset, default 0",
+ "example": 0
+ },
+ {
+ "in": "query",
+ "name": "limit",
+ "schema": {
+ "type": "number"
+ },
+ "description": "The pagination row limit, default 10",
+ "example": 10
+ },
+ {
+ "in": "query",
+ "name": "sort",
+ "schema": {
+ "type": "string"
+ },
+ "description": "The sorting of the list",
+ "example": "id,name.asc,value.desc"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/StreamList"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "total": 1,
+ "offset": 0,
+ "limit": 10,
+ "sort": [
+ {
+ "field": "name",
+ "direction": "ASC"
+ }
+ ],
+ "items": ["TODO"]
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/streams/post.json b/backend/embed/api_docs/paths/streams/post.json
new file mode 100644
index 000000000..28f56da6a
--- /dev/null
+++ b/backend/embed/api_docs/paths/streams/post.json
@@ -0,0 +1,39 @@
+{
+ "operationId": "createStream",
+ "summary": "Create a new Stream",
+ "tags": ["Streams"],
+ "requestBody": {
+ "description": "Stream to Create",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.CreateStream}}"
+ }
+ }
+ },
+ "responses": {
+ "201": {
+ "description": "201 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/StreamObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": "TODO"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/streams/streamID/delete.json b/backend/embed/api_docs/paths/streams/streamID/delete.json
new file mode 100644
index 000000000..d0f352692
--- /dev/null
+++ b/backend/embed/api_docs/paths/streams/streamID/delete.json
@@ -0,0 +1,60 @@
+{
+ "operationId": "deleteStream",
+ "summary": "Delete a Stream",
+ "tags": [
+ "Streams"
+ ],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "streamID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "Numeric ID of the Stream",
+ "example": 1
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DeletedItemResponse"
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": true
+ }
+ }
+ }
+ }
+ }
+ },
+ "400": {
+ "description": "400 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DeletedItemResponse"
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": null,
+ "error": {
+ "code": 400,
+ "message": "You cannot delete a Stream that is in use!"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/backend/embed/api_docs/paths/streams/streamID/get.json b/backend/embed/api_docs/paths/streams/streamID/get.json
new file mode 100644
index 000000000..97a4a76c9
--- /dev/null
+++ b/backend/embed/api_docs/paths/streams/streamID/get.json
@@ -0,0 +1,43 @@
+{
+ "operationId": "getStream",
+ "summary": "Get a Stream object by ID",
+ "tags": ["Streams"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "streamID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "ID of the Stream",
+ "example": 1
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/StreamObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": "TODO"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/streams/streamID/put.json b/backend/embed/api_docs/paths/streams/streamID/put.json
new file mode 100644
index 000000000..787905c5c
--- /dev/null
+++ b/backend/embed/api_docs/paths/streams/streamID/put.json
@@ -0,0 +1,52 @@
+{
+ "operationId": "updateStream",
+ "summary": "Update an existing Stream",
+ "tags": ["Streams"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "streamID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "ID of the Stream",
+ "example": 1
+ }
+ ],
+ "requestBody": {
+ "description": "Stream details to update",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.UpdateStream}}"
+ }
+ }
+ },
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/StreamObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": "TODO"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/upstreams/get.json b/backend/embed/api_docs/paths/upstreams/get.json
new file mode 100644
index 000000000..6f96c7092
--- /dev/null
+++ b/backend/embed/api_docs/paths/upstreams/get.json
@@ -0,0 +1,286 @@
+{
+ "operationId": "getUpstreams",
+ "summary": "Get a list of Upstreams",
+ "tags": ["Upstreams"],
+ "parameters": [
+ {
+ "in": "query",
+ "name": "offset",
+ "schema": {
+ "type": "number"
+ },
+ "description": "The pagination row offset, default 0",
+ "example": 0
+ },
+ {
+ "in": "query",
+ "name": "limit",
+ "schema": {
+ "type": "number"
+ },
+ "description": "The pagination row limit, default 10",
+ "example": 10
+ },
+ {
+ "in": "query",
+ "name": "sort",
+ "schema": {
+ "type": "string"
+ },
+ "description": "The sorting of the list",
+ "example": "id,name.asc,value.desc"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/UpstreamList"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "total": 5,
+ "offset": 0,
+ "limit": 10,
+ "sort": [
+ {
+ "field": "name",
+ "direction": "ASC"
+ }
+ ],
+ "items": [
+ {
+ "id": 1,
+ "created_at": 1672804124000,
+ "updated_at": 1672804124000,
+ "user_id": 2,
+ "name": "API servers",
+ "nginx_template_id": 5,
+ "ip_hash": true,
+ "ntlm": false,
+ "keepalive": 10,
+ "keepalive_requests": 10,
+ "keepalive_time": "60s",
+ "keepalive_timeout": "3s",
+ "advanced_config": "",
+ "status": "ok",
+ "error_message": "",
+ "servers": [
+ {
+ "id": 1,
+ "created_at": 1672804124000,
+ "updated_at": 1672804124000,
+ "upstream_group_id": 1,
+ "server": "192.168.0.10:80",
+ "weight": 100,
+ "max_conns": 0,
+ "max_fails": 0,
+ "fail_timeout": 0,
+ "backup": false
+ },
+ {
+ "id": 2,
+ "created_at": 1672804124000,
+ "updated_at": 1672804124000,
+ "upstream_group_id": 1,
+ "server": "192.168.0.11:80",
+ "weight": 50,
+ "max_conns": 0,
+ "max_fails": 0,
+ "fail_timeout": 0,
+ "backup": false
+ }
+ ]
+ },
+ {
+ "id": 2,
+ "created_at": 1672804197000,
+ "updated_at": 1672804197000,
+ "user_id": 2,
+ "name": "API servers 2",
+ "nginx_template_id": 5,
+ "ip_hash": false,
+ "ntlm": false,
+ "keepalive": 0,
+ "keepalive_requests": 0,
+ "keepalive_time": "",
+ "keepalive_timeout": "",
+ "advanced_config": "",
+ "status": "ok",
+ "error_message": "",
+ "servers": [
+ {
+ "id": 3,
+ "created_at": 1672804197000,
+ "updated_at": 1672804197000,
+ "upstream_group_id": 2,
+ "server": "192.168.0.10:80",
+ "weight": 100,
+ "max_conns": 0,
+ "max_fails": 0,
+ "fail_timeout": 0,
+ "backup": false
+ },
+ {
+ "id": 4,
+ "created_at": 1672804197000,
+ "updated_at": 1672804197000,
+ "upstream_group_id": 2,
+ "server": "192.168.0.11:80",
+ "weight": 50,
+ "max_conns": 0,
+ "max_fails": 0,
+ "fail_timeout": 0,
+ "backup": false
+ }
+ ]
+ },
+ {
+ "id": 3,
+ "created_at": 1672804200000,
+ "updated_at": 1672804200000,
+ "user_id": 2,
+ "name": "API servers 2",
+ "nginx_template_id": 5,
+ "ip_hash": false,
+ "ntlm": false,
+ "keepalive": 0,
+ "keepalive_requests": 0,
+ "keepalive_time": "",
+ "keepalive_timeout": "",
+ "advanced_config": "",
+ "status": "ok",
+ "error_message": "",
+ "servers": [
+ {
+ "id": 5,
+ "created_at": 1672804200000,
+ "updated_at": 1672804200000,
+ "upstream_group_id": 3,
+ "server": "192.168.0.10:80",
+ "weight": 100,
+ "max_conns": 0,
+ "max_fails": 0,
+ "fail_timeout": 0,
+ "backup": false
+ },
+ {
+ "id": 6,
+ "created_at": 1672804200000,
+ "updated_at": 1672804200000,
+ "upstream_group_id": 3,
+ "server": "192.168.0.11:80",
+ "weight": 50,
+ "max_conns": 0,
+ "max_fails": 0,
+ "fail_timeout": 0,
+ "backup": false
+ }
+ ]
+ },
+ {
+ "id": 4,
+ "created_at": 1672804201000,
+ "updated_at": 1672804201000,
+ "user_id": 2,
+ "name": "API servers 2",
+ "nginx_template_id": 5,
+ "ip_hash": false,
+ "ntlm": false,
+ "keepalive": 0,
+ "keepalive_requests": 0,
+ "keepalive_time": "",
+ "keepalive_timeout": "",
+ "advanced_config": "",
+ "status": "ok",
+ "error_message": "",
+ "servers": [
+ {
+ "id": 7,
+ "created_at": 1672804201000,
+ "updated_at": 1672804201000,
+ "upstream_group_id": 4,
+ "server": "192.168.0.10:80",
+ "weight": 100,
+ "max_conns": 0,
+ "max_fails": 0,
+ "fail_timeout": 0,
+ "backup": false
+ },
+ {
+ "id": 8,
+ "created_at": 1672804201000,
+ "updated_at": 1672804201000,
+ "upstream_group_id": 4,
+ "server": "192.168.0.11:80",
+ "weight": 50,
+ "max_conns": 0,
+ "max_fails": 0,
+ "fail_timeout": 0,
+ "backup": false
+ }
+ ]
+ },
+ {
+ "id": 5,
+ "created_at": 1672804201000,
+ "updated_at": 1672804201000,
+ "user_id": 2,
+ "name": "API servers 2",
+ "nginx_template_id": 5,
+ "ip_hash": false,
+ "ntlm": false,
+ "keepalive": 0,
+ "keepalive_requests": 0,
+ "keepalive_time": "",
+ "keepalive_timeout": "",
+ "advanced_config": "",
+ "status": "ok",
+ "error_message": "",
+ "servers": [
+ {
+ "id": 9,
+ "created_at": 1672804201000,
+ "updated_at": 1672804201000,
+ "upstream_group_id": 5,
+ "server": "192.168.0.10:80",
+ "weight": 100,
+ "max_conns": 0,
+ "max_fails": 0,
+ "fail_timeout": 0,
+ "backup": false
+ },
+ {
+ "id": 10,
+ "created_at": 1672804201000,
+ "updated_at": 1672804201000,
+ "upstream_group_id": 5,
+ "server": "192.168.0.11:80",
+ "weight": 50,
+ "max_conns": 0,
+ "max_fails": 0,
+ "fail_timeout": 0,
+ "backup": false
+ }
+ ]
+ }
+ ]
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/upstreams/post.json b/backend/embed/api_docs/paths/upstreams/post.json
new file mode 100644
index 000000000..13cdeaac8
--- /dev/null
+++ b/backend/embed/api_docs/paths/upstreams/post.json
@@ -0,0 +1,81 @@
+{
+ "operationId": "createUpstream",
+ "summary": "Create a new Upstream",
+ "tags": ["Upstreams"],
+ "requestBody": {
+ "description": "Upstream to Create",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.CreateUpstream}}"
+ }
+ }
+ },
+ "responses": {
+ "201": {
+ "description": "201 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/UpstreamObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 6,
+ "created_at": 1672806857000,
+ "updated_at": 1672806857000,
+ "user_id": 2,
+ "name": "API servers 2",
+ "nginx_template_id": 5,
+ "ip_hash": false,
+ "ntlm": false,
+ "keepalive": 0,
+ "keepalive_requests": 0,
+ "keepalive_time": "",
+ "keepalive_timeout": "",
+ "advanced_config": "",
+ "status": "ready",
+ "error_message": "",
+ "servers": [
+ {
+ "id": 11,
+ "created_at": 1672806857000,
+ "updated_at": 1672806857000,
+ "upstream_id": 6,
+ "server": "192.168.0.10:80",
+ "weight": 100,
+ "max_conns": 0,
+ "max_fails": 0,
+ "fail_timeout": 0,
+ "backup": false
+ },
+ {
+ "id": 12,
+ "created_at": 1672806857000,
+ "updated_at": 1672806857000,
+ "upstream_id": 6,
+ "server": "192.168.0.11:80",
+ "weight": 50,
+ "max_conns": 0,
+ "max_fails": 0,
+ "fail_timeout": 0,
+ "backup": false
+ }
+ ]
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/upstreams/upstreamID/delete.json b/backend/embed/api_docs/paths/upstreams/upstreamID/delete.json
new file mode 100644
index 000000000..0c54708ff
--- /dev/null
+++ b/backend/embed/api_docs/paths/upstreams/upstreamID/delete.json
@@ -0,0 +1,58 @@
+{
+ "operationId": "deleteUpstream",
+ "summary": "Delete a Upstream",
+ "tags": ["Upstreams"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "upstreamID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "Numeric ID of the Upstream",
+ "example": 1
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DeletedItemResponse"
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": true
+ }
+ }
+ }
+ }
+ }
+ },
+ "400": {
+ "description": "400 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DeletedItemResponse"
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": null,
+ "error": {
+ "code": 400,
+ "message": "You cannot delete a Upstream that is in use!"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/upstreams/upstreamID/get.json b/backend/embed/api_docs/paths/upstreams/upstreamID/get.json
new file mode 100644
index 000000000..3ad8305ce
--- /dev/null
+++ b/backend/embed/api_docs/paths/upstreams/upstreamID/get.json
@@ -0,0 +1,82 @@
+{
+ "operationId": "getUpstream",
+ "summary": "Get a Upstream object by ID",
+ "tags": ["Upstreams"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "upstreamID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "ID of the Upstream",
+ "example": 1
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/UpstreamObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 1,
+ "created_at": 1672786008000,
+ "updated_at": 1672786008000,
+ "user_id": 2,
+ "name": "API servers 3",
+ "ip_hash": true,
+ "ntlm": false,
+ "keepalive": 10,
+ "keepalive_requests": 10,
+ "keepalive_time": "60s",
+ "keepalive_timeout": "3s",
+ "advanced_config": "",
+ "servers": [
+ {
+ "id": 1,
+ "created_at": 1672786009000,
+ "updated_at": 1672786009000,
+ "upstream_id": 1,
+ "server": "api1.localhost:1234",
+ "weight": 100,
+ "max_conns": 0,
+ "max_fails": 0,
+ "fail_timeout": 0,
+ "backup": false
+ },
+ {
+ "id": 2,
+ "created_at": 1672786009000,
+ "updated_at": 1672786009000,
+ "upstream_id": 1,
+ "server": "api2.localhost:1234",
+ "weight": 50,
+ "max_conns": 0,
+ "max_fails": 0,
+ "fail_timeout": 0,
+ "backup": true
+ }
+ ]
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/upstreams/upstreamID/nginx-config/get.json b/backend/embed/api_docs/paths/upstreams/upstreamID/nginx-config/get.json
new file mode 100644
index 000000000..f17e4e6db
--- /dev/null
+++ b/backend/embed/api_docs/paths/upstreams/upstreamID/nginx-config/get.json
@@ -0,0 +1,43 @@
+{
+ "operationId": "getUpstreamNginxConfig",
+ "summary": "Get a Upstream Nginx Config object by ID",
+ "tags": ["Upstreams"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "upstreamID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "ID of the Upstream",
+ "example": 1
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "type": "string"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": "# ------------------------------------------------------------\n# Upstream 1: API servers\n# ------------------------------------------------------------\nupstream npm_upstream_1 {\nserver 192.168.0.10:80 weight=100 ;\n server 192.168.0.11:80 weight=50 ;\n}\n"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/upstreams/upstreamID/put.json b/backend/embed/api_docs/paths/upstreams/upstreamID/put.json
new file mode 100644
index 000000000..4df64537f
--- /dev/null
+++ b/backend/embed/api_docs/paths/upstreams/upstreamID/put.json
@@ -0,0 +1,94 @@
+{
+ "operationId": "updateUpstream",
+ "summary": "Update an existing Upstream",
+ "tags": ["Upstreams"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "upstreamID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "ID of the Upstream",
+ "example": 1
+ }
+ ],
+ "requestBody": {
+ "description": "Upstream details to update",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.UpdateUpstream}}"
+ }
+ }
+ },
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/UpstreamObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 1,
+ "created_at": 1673234177000,
+ "updated_at": 1673244559000,
+ "user_id": 2,
+ "name": "API servers 2",
+ "nginx_template_id": 5,
+ "ip_hash": false,
+ "ntlm": false,
+ "keepalive": 0,
+ "keepalive_requests": 0,
+ "keepalive_time": "",
+ "keepalive_timeout": "",
+ "advanced_config": "",
+ "status": "ready",
+ "error_message": "",
+ "servers": [
+ {
+ "id": 1,
+ "created_at": 1673234177000,
+ "updated_at": 1673244559000,
+ "upstream_id": 1,
+ "server": "192.168.0.10:80",
+ "weight": 100,
+ "max_conns": 0,
+ "max_fails": 0,
+ "fail_timeout": 0,
+ "backup": false
+ },
+ {
+ "id": 2,
+ "created_at": 1673234177000,
+ "updated_at": 1673244559000,
+ "upstream_id": 1,
+ "server": "192.168.0.11:80",
+ "weight": 50,
+ "max_conns": 0,
+ "max_fails": 0,
+ "fail_timeout": 0,
+ "backup": false
+ }
+ ]
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/users/get.json b/backend/embed/api_docs/paths/users/get.json
new file mode 100644
index 000000000..4adf79ff8
--- /dev/null
+++ b/backend/embed/api_docs/paths/users/get.json
@@ -0,0 +1,111 @@
+{
+ "operationId": "getUsers",
+ "summary": "Get a list of users",
+ "tags": ["Users"],
+ "parameters": [
+ {
+ "in": "query",
+ "name": "offset",
+ "schema": {
+ "type": "number"
+ },
+ "description": "The pagination row offset, default 0",
+ "example": 0
+ },
+ {
+ "in": "query",
+ "name": "limit",
+ "schema": {
+ "type": "number"
+ },
+ "description": "The pagination row limit, default 10",
+ "example": 10
+ },
+ {
+ "in": "query",
+ "name": "sort",
+ "schema": {
+ "type": "string"
+ },
+ "description": "The sorting of the list",
+ "example": "name,email.asc"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/UserList"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "total": 3,
+ "offset": 0,
+ "limit": 100,
+ "sort": [
+ {
+ "field": "name",
+ "direction": "ASC"
+ },
+ {
+ "field": "email",
+ "direction": "ASC"
+ }
+ ],
+ "items": [
+ {
+ "id": 1,
+ "name": "Jamie Curnow",
+ "email": "jc@jc21.com",
+ "created_at": 1578010090000,
+ "updated_at": 1578010095000,
+ "gravatar_url": "https://www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?d=mm&r=pg&s=128",
+ "is_disabled": false,
+ "capabilities": ["full-admin"]
+ },
+ {
+ "id": 2,
+ "name": "John Doe",
+ "email": "johdoe@example.com",
+ "created_at": 1578010100000,
+ "updated_at": 1578010105000,
+ "gravatar_url": "https://www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?d=mm&r=pg&s=128",
+ "is_disabled": false,
+ "capabilities": [
+ "hosts.view",
+ "hosts.manage"
+ ]
+ },
+ {
+ "id": 3,
+ "name": "Jane Doe",
+ "email": "janedoe@example.com",
+ "created_at": 1578010110000,
+ "updated_at": 1578010115000,
+ "gravatar_url": "https://www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?d=mm&r=pg&s=128",
+ "is_disabled": false,
+ "capabilities": [
+ "hosts.view",
+ "hosts.manage"
+ ]
+ }
+ ]
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/users/post.json b/backend/embed/api_docs/paths/users/post.json
new file mode 100644
index 000000000..2c4045982
--- /dev/null
+++ b/backend/embed/api_docs/paths/users/post.json
@@ -0,0 +1,81 @@
+{
+ "operationId": "createUser",
+ "summary": "Create a new User",
+ "tags": ["Users"],
+ "requestBody": {
+ "description": "User to Create",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.CreateUser}}"
+ }
+ }
+ },
+ "responses": {
+ "201": {
+ "description": "201 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/UserObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 1,
+ "name": "Jamie Curnow",
+ "email": "jc@jc21.com",
+ "created_at": 1578010100000,
+ "updated_at": 1578010100000,
+ "gravatar_url": "https://www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?d=mm&r=pg&s=128",
+ "is_disabled": false,
+ "auth": {
+ "$ref": "#/components/schemas/UserAuthObject"
+ },
+ "capabilities": ["full-admin"]
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "400": {
+ "description": "400 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["error"],
+ "properties": {
+ "result": {
+ "type": "object",
+ "nullable": true
+ },
+ "error": {
+ "$ref": "#/components/schemas/ErrorObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "error": {
+ "code": 400,
+ "message": "An user already exists with this email address"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/users/userID/auth/post.json b/backend/embed/api_docs/paths/users/userID/auth/post.json
new file mode 100644
index 000000000..ce80e960e
--- /dev/null
+++ b/backend/embed/api_docs/paths/users/userID/auth/post.json
@@ -0,0 +1,66 @@
+{
+ "operationId": "setPassword",
+ "summary": "Set a User's password",
+ "tags": ["Users"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "userID",
+ "schema": {
+ "oneOf": [
+ {
+ "type": "integer",
+ "minimum": 1
+ },
+ {
+ "type": "string",
+ "pattern": "^me$"
+ }
+ ]
+ },
+ "required": true,
+ "description": "Numeric ID of the user or 'me' to set yourself",
+ "example": 1
+ }
+ ],
+ "requestBody": {
+ "description": "Credentials to set",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.SetAuth}}"
+ }
+ }
+ },
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/UserAuthObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 2,
+ "created_at": 1648422222000,
+ "updated_at": 1648423979000,
+ "user_id": 3,
+ "type": "password"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/users/userID/delete.json b/backend/embed/api_docs/paths/users/userID/delete.json
new file mode 100644
index 000000000..0ffa70242
--- /dev/null
+++ b/backend/embed/api_docs/paths/users/userID/delete.json
@@ -0,0 +1,60 @@
+{
+ "operationId": "deleteUser",
+ "summary": "Delete a User",
+ "tags": [
+ "Users"
+ ],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "userID",
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "required": true,
+ "description": "Numeric ID of the user",
+ "example": 1
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DeletedItemResponse"
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": true
+ }
+ }
+ }
+ }
+ }
+ },
+ "400": {
+ "description": "400 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DeletedItemResponse"
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": null,
+ "error": {
+ "code": 400,
+ "message": "You cannot delete yourself!"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/backend/embed/api_docs/paths/users/userID/get.json b/backend/embed/api_docs/paths/users/userID/get.json
new file mode 100644
index 000000000..dab258243
--- /dev/null
+++ b/backend/embed/api_docs/paths/users/userID/get.json
@@ -0,0 +1,60 @@
+{
+ "operationId": "getUser",
+ "summary": "Get a user object by ID or 'me'",
+ "tags": ["Users"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "userID",
+ "schema": {
+ "anyOf": [
+ {
+ "type": "integer",
+ "minimum": 1
+ },
+ {
+ "type": "string",
+ "pattern": "^me$"
+ }
+ ]
+ },
+ "required": true,
+ "description": "Numeric ID of the user or 'me' to get yourself",
+ "example": 1
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/UserObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 1,
+ "name": "Jamie Curnow",
+ "email": "jc@jc21.com",
+ "created_at": 1578010100000,
+ "updated_at": 1578010105000,
+ "gravatar_url": "https://www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?d=mm&r=pg&s=128",
+ "is_disabled": false,
+ "capabilities": ["full-admin"]
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/api_docs/paths/users/userID/put.json b/backend/embed/api_docs/paths/users/userID/put.json
new file mode 100644
index 000000000..1a2b0358c
--- /dev/null
+++ b/backend/embed/api_docs/paths/users/userID/put.json
@@ -0,0 +1,109 @@
+{
+ "operationId": "updateUser",
+ "summary": "Update an existing User",
+ "tags": ["Users"],
+ "parameters": [
+ {
+ "in": "path",
+ "name": "userID",
+ "schema": {
+ "anyOf": [
+ {
+ "type": "integer",
+ "minimum": 1
+ },
+ {
+ "type": "string",
+ "pattern": "^me$"
+ }
+ ]
+ },
+ "required": true,
+ "description": "Numeric ID of the user or 'me' to update yourself",
+ "example": 1
+ }
+ ],
+ "requestBody": {
+ "description": "User details to update",
+ "required": true,
+ "content": {
+ "application/json": {
+ "schema": "{{schema.UpdateUser}}"
+ }
+ }
+ },
+ "responses": {
+ "200": {
+ "description": "200 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["result"],
+ "properties": {
+ "result": {
+ "$ref": "#/components/schemas/UserObject"
+ }
+ }
+ },
+ "examples": {
+ "default": {
+ "value": {
+ "result": {
+ "id": 1,
+ "name": "Jamie Curnow",
+ "email": "jc@jc21.com",
+ "created_at": 1578010100000,
+ "updated_at": 1578010110000,
+ "gravatar_url": "https://www.gravatar.com/avatar/6193176330f8d38747f038c170ddb193?d=mm&r=pg&s=128",
+ "is_disabled": false,
+ "capabilities": ["full-admin"]
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "400": {
+ "description": "400 response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "required": ["error"],
+ "properties": {
+ "result": {
+ "type": "object",
+ "nullable": true
+ },
+ "error": {
+ "$ref": "#/components/schemas/ErrorObject"
+ }
+ }
+ },
+ "examples": {
+ "duplicateemail": {
+ "value": {
+ "result": null,
+ "error": {
+ "code": 400,
+ "message": "A user already exists with this email address"
+ }
+ }
+ },
+ "nodisable": {
+ "value": {
+ "result": null,
+ "error": {
+ "code": 400,
+ "message": "You cannot disable yourself!"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/backend/embed/main.go b/backend/embed/main.go
new file mode 100644
index 000000000..4323d2815
--- /dev/null
+++ b/backend/embed/main.go
@@ -0,0 +1,23 @@
+package embed
+
+import "embed"
+
+// APIDocFiles contain all the files used for swagger schema generation
+//
+//go:embed api_docs
+var APIDocFiles embed.FS
+
+// Assets are frontend assets served from within this app
+//
+//go:embed assets
+var Assets embed.FS
+
+// MigrationFiles are database migrations
+//
+//go:embed migrations
+var MigrationFiles embed.FS
+
+// NginxFiles hold nginx config templates
+//
+//go:embed nginx
+var NginxFiles embed.FS
diff --git a/backend/embed/migrations/mysql/20201013035318_initial_schema.sql b/backend/embed/migrations/mysql/20201013035318_initial_schema.sql
new file mode 100644
index 000000000..4f2726bcf
--- /dev/null
+++ b/backend/embed/migrations/mysql/20201013035318_initial_schema.sql
@@ -0,0 +1,246 @@
+-- migrate:up
+
+CREATE TABLE IF NOT EXISTS `jwt_keys`
+(
+ `id` INT AUTO_INCREMENT PRIMARY KEY,
+ `created_at` BIGINT NOT NULL DEFAULT 0,
+ `updated_at` BIGINT NOT NULL DEFAULT 0,
+ `is_deleted` INT NOT NULL DEFAULT 0, -- int on purpose, gormism
+ `public_key` TEXT NOT NULL,
+ `private_key` TEXT NOT NULL
+);
+
+CREATE TABLE IF NOT EXISTS `user`
+(
+ `id` INT AUTO_INCREMENT PRIMARY KEY,
+ `created_at` BIGINT NOT NULL DEFAULT 0,
+ `updated_at` BIGINT NOT NULL DEFAULT 0,
+ `is_deleted` INT NOT NULL DEFAULT 0, -- int on purpose, gormism
+ `name` VARCHAR(50) NOT NULL,
+ `email` VARCHAR(255) NOT NULL,
+ `is_system` BOOLEAN NOT NULL DEFAULT FALSE,
+ `is_disabled` BOOLEAN NOT NULL DEFAULT FALSE
+);
+
+CREATE TABLE IF NOT EXISTS `capability`
+(
+ `name` VARCHAR(50) PRIMARY KEY,
+ UNIQUE (`name`)
+);
+
+CREATE TABLE IF NOT EXISTS `user_has_capability`
+(
+ `user_id` INT NOT NULL,
+ `capability_name` VARCHAR(50) NOT NULL,
+ UNIQUE (`user_id`, `capability_name`),
+ FOREIGN KEY (`capability_name`) REFERENCES `capability`(`name`) ON DELETE CASCADE
+);
+
+CREATE TABLE IF NOT EXISTS `auth`
+(
+ `id` INT AUTO_INCREMENT PRIMARY KEY,
+ `created_at` BIGINT NOT NULL DEFAULT 0,
+ `updated_at` BIGINT NOT NULL DEFAULT 0,
+ `is_deleted` INT NOT NULL DEFAULT 0, -- int on purpose, gormism
+ `user_id` INT NOT NULL,
+ `type` VARCHAR(50) NOT NULL,
+ `identity` VARCHAR(255) NOT NULL,
+ `secret` VARCHAR(255) NOT NULL,
+ FOREIGN KEY (`user_id`) REFERENCES `user`(`id`) ON DELETE CASCADE,
+ UNIQUE (`user_id`, `type`)
+);
+
+CREATE TABLE IF NOT EXISTS `setting`
+(
+ `id` INT AUTO_INCREMENT PRIMARY KEY,
+ `created_at` BIGINT NOT NULL DEFAULT 0,
+ `updated_at` BIGINT NOT NULL DEFAULT 0,
+ `is_deleted` INT NOT NULL DEFAULT 0, -- int on purpose, gormism
+ `name` VARCHAR(50) NOT NULL,
+ `description` VARCHAR(255) NOT NULL DEFAULT '',
+ `value` TEXT NOT NULL,
+ UNIQUE (`name`)
+);
+
+CREATE TABLE IF NOT EXISTS `audit_log`
+(
+ `id` INT AUTO_INCREMENT PRIMARY KEY,
+ `created_at` BIGINT NOT NULL DEFAULT 0,
+ `updated_at` BIGINT NOT NULL DEFAULT 0,
+ `is_deleted` INT NOT NULL DEFAULT 0, -- int on purpose, gormism
+ `user_id` INT NOT NULL,
+ `object_type` VARCHAR(50) NOT NULL,
+ `object_id` INT NOT NULL,
+ `action` VARCHAR(50) NOT NULL,
+ `meta` TEXT NOT NULL,
+ FOREIGN KEY (`user_id`) REFERENCES `user`(`id`) ON DELETE CASCADE
+);
+
+CREATE TABLE IF NOT EXISTS `certificate_authority`
+(
+ `id` INT AUTO_INCREMENT PRIMARY KEY,
+ `created_at` BIGINT NOT NULL DEFAULT 0,
+ `updated_at` BIGINT NOT NULL DEFAULT 0,
+ `is_deleted` INT NOT NULL DEFAULT 0, -- int on purpose, gormism
+ `name` VARCHAR(50) NOT NULL,
+ `acmesh_server` VARCHAR(255) NOT NULL DEFAULT '',
+ `ca_bundle` VARCHAR(255) NOT NULL DEFAULT '',
+ `is_wildcard_supported` BOOLEAN NOT NULL DEFAULT FALSE, -- specific to each CA, acme v1 doesn't usually have wildcards
+ `max_domains` INT NOT NULL DEFAULT 5, -- per request
+ `is_readonly` BOOLEAN NOT NULL DEFAULT FALSE
+);
+
+CREATE TABLE IF NOT EXISTS `dns_provider`
+(
+ `id` INT AUTO_INCREMENT PRIMARY KEY,
+ `created_at` BIGINT NOT NULL DEFAULT 0,
+ `updated_at` BIGINT NOT NULL DEFAULT 0,
+ `is_deleted` INT NOT NULL DEFAULT 0, -- int on purpose, gormism
+ `user_id` INT NOT NULL,
+ `name` VARCHAR(50) NOT NULL,
+ `acmesh_name` VARCHAR(50) NOT NULL,
+ `dns_sleep` INT NOT NULL DEFAULT 0,
+ `meta` TEXT NOT NULL,
+ FOREIGN KEY (`user_id`) REFERENCES `user`(`id`) ON DELETE CASCADE
+);
+
+CREATE TABLE IF NOT EXISTS `certificate`
+(
+ `id` INT AUTO_INCREMENT PRIMARY KEY,
+ `created_at` BIGINT NOT NULL DEFAULT 0,
+ `updated_at` BIGINT NOT NULL DEFAULT 0,
+ `is_deleted` INT NOT NULL DEFAULT 0, -- int on purpose, gormism
+ `user_id` INT NOT NULL,
+ `type` VARCHAR(50) NOT NULL, -- custom,dns,http
+ `certificate_authority_id` INT, -- null for a custom cert
+ `dns_provider_id` INT, -- 0, for a http or custom cert
+ `name` VARCHAR(50) NOT NULL,
+ `domain_names` TEXT NOT NULL,
+ `expires_on` BIGINT NOT NULL DEFAULT 0,
+ `status` VARCHAR(50) NOT NULL, -- ready,requesting,failed,provided
+ `error_message` TEXT NOT NULL,
+ `meta` TEXT NOT NULL,
+ `is_ecc` BOOLEAN NOT NULL DEFAULT FALSE,
+ FOREIGN KEY (`user_id`) REFERENCES `user`(`id`) ON DELETE CASCADE,
+ FOREIGN KEY (`certificate_authority_id`) REFERENCES `certificate_authority`(`id`) ON DELETE CASCADE,
+ FOREIGN KEY (`dns_provider_id`) REFERENCES `dns_provider`(`id`) ON DELETE CASCADE
+);
+
+CREATE TABLE IF NOT EXISTS `stream`
+(
+ `id` INT AUTO_INCREMENT PRIMARY KEY,
+ `created_at` BIGINT NOT NULL DEFAULT 0,
+ `updated_at` BIGINT NOT NULL DEFAULT 0,
+ `is_deleted` INT NOT NULL DEFAULT 0, -- int on purpose, gormism
+ `user_id` INT NOT NULL,
+ `listen_interface` VARCHAR(50) NOT NULL,
+ `incoming_port` INT NOT NULL,
+ `tcp_forwarding` INT NOT NULL DEFAULT 0,
+ `udp_forwarding` INT NOT NULL DEFAULT 0,
+ `advanced_config` TEXT NOT NULL,
+ `is_disabled` BOOLEAN NOT NULL DEFAULT FALSE,
+ FOREIGN KEY (`user_id`) REFERENCES `user`(`id`) ON DELETE CASCADE
+);
+
+CREATE TABLE IF NOT EXISTS `nginx_template`
+(
+ `id` INT AUTO_INCREMENT PRIMARY KEY,
+ `created_at` BIGINT NOT NULL DEFAULT 0,
+ `updated_at` BIGINT NOT NULL DEFAULT 0,
+ `is_deleted` INT NOT NULL DEFAULT 0, -- int on purpose, gormism
+ `user_id` INT NOT NULL,
+ `name` VARCHAR(50) NOT NULL,
+ `type` VARCHAR(50) NOT NULL,
+ `template` TEXT NOT NULL,
+ FOREIGN KEY (`user_id`) REFERENCES `user`(`id`)
+);
+
+CREATE TABLE IF NOT EXISTS `upstream`
+(
+ `id` INT AUTO_INCREMENT PRIMARY KEY,
+ `created_at` BIGINT NOT NULL DEFAULT 0,
+ `updated_at` BIGINT NOT NULL DEFAULT 0,
+ `is_deleted` INT NOT NULL DEFAULT 0, -- int on purpose, gormism
+ `user_id` INT NOT NULL,
+ `name` VARCHAR(50) NOT NULL,
+ `nginx_template_id` INT NOT NULL,
+ `ip_hash` BOOLEAN NOT NULL DEFAULT FALSE,
+ `ntlm` BOOLEAN NOT NULL DEFAULT FALSE,
+ `keepalive` INT NOT NULL DEFAULT 0,
+ `keepalive_requests` INT NOT NULL DEFAULT 0,
+ `keepalive_time` VARCHAR(50) NOT NULL DEFAULT '',
+ `keepalive_timeout` VARCHAR(50) NOT NULL DEFAULT '',
+ `advanced_config` TEXT NOT NULL,
+ `status` VARCHAR(50) NOT NULL DEFAULT '',
+ `error_message` TEXT NOT NULL,
+ FOREIGN KEY (`user_id`) REFERENCES `user`(`id`) ON DELETE CASCADE,
+ FOREIGN KEY (`nginx_template_id`) REFERENCES `nginx_template`(`id`) ON DELETE CASCADE
+);
+
+CREATE TABLE IF NOT EXISTS `upstream_server`
+(
+ `id` INT AUTO_INCREMENT PRIMARY KEY,
+ `created_at` BIGINT NOT NULL DEFAULT 0,
+ `updated_at` BIGINT NOT NULL DEFAULT 0,
+ `is_deleted` INT NOT NULL DEFAULT 0, -- int on purpose, gormism
+ `upstream_id` INT NOT NULL,
+ `server` VARCHAR(50) NOT NULL,
+ `weight` INT NOT NULL DEFAULT 0,
+ `max_conns` INT NOT NULL DEFAULT 0,
+ `max_fails` INT NOT NULL DEFAULT 0,
+ `fail_timeout` INT NOT NULL DEFAULT 0,
+ `is_backup` BOOLEAN NOT NULL DEFAULT FALSE,
+ FOREIGN KEY (`upstream_id`) REFERENCES `upstream`(`id`) ON DELETE CASCADE
+);
+
+CREATE TABLE IF NOT EXISTS `access_list`
+(
+ `id` INT AUTO_INCREMENT PRIMARY KEY,
+ `created_at` BIGINT NOT NULL DEFAULT 0,
+ `updated_at` BIGINT NOT NULL DEFAULT 0,
+ `is_deleted` INT NOT NULL DEFAULT 0, -- int on purpose, gormism
+ `user_id` INT NOT NULL,
+ `name` VARCHAR(50) NOT NULL,
+ `meta` TEXT NOT NULL,
+ FOREIGN KEY (`user_id`) REFERENCES `user`(`id`) ON DELETE CASCADE
+);
+
+CREATE TABLE IF NOT EXISTS `host`
+(
+ `id` INT AUTO_INCREMENT PRIMARY KEY,
+ `created_at` BIGINT NOT NULL DEFAULT 0,
+ `updated_at` BIGINT NOT NULL DEFAULT 0,
+ `is_deleted` INT NOT NULL DEFAULT 0, -- int on purpose, gormism
+ `user_id` INT NOT NULL,
+ `type` VARCHAR(50) NOT NULL,
+ `nginx_template_id` INT NOT NULL,
+ `listen_interface` VARCHAR(50) NOT NULL DEFAULT '',
+ `domain_names` TEXT NOT NULL,
+ `upstream_id` INT,
+ `proxy_scheme` VARCHAR(50) NOT NULL DEFAULT '',
+ `proxy_host` VARCHAR(50) NOT NULL DEFAULT '',
+ `proxy_port` INT NOT NULL DEFAULT 0,
+ `certificate_id` INT,
+ `access_list_id` INT,
+ `ssl_forced` BOOLEAN NOT NULL DEFAULT FALSE,
+ `caching_enabled` BOOLEAN NOT NULL DEFAULT FALSE,
+ `block_exploits` BOOLEAN NOT NULL DEFAULT FALSE,
+ `allow_websocket_upgrade` BOOLEAN NOT NULL DEFAULT FALSE,
+ `http2_support` BOOLEAN NOT NULL DEFAULT FALSE,
+ `hsts_enabled` BOOLEAN NOT NULL DEFAULT FALSE,
+ `hsts_subdomains` BOOLEAN NOT NULL DEFAULT FALSE,
+ `paths` TEXT NOT NULL,
+ `advanced_config` TEXT NOT NULL,
+ `status` VARCHAR(50) NOT NULL DEFAULT '',
+ `error_message` TEXT NOT NULL,
+ `is_disabled` BOOLEAN NOT NULL DEFAULT FALSE,
+ FOREIGN KEY (`user_id`) REFERENCES `user`(`id`) ON DELETE CASCADE,
+ FOREIGN KEY (`nginx_template_id`) REFERENCES `nginx_template`(`id`) ON DELETE CASCADE,
+ FOREIGN KEY (`upstream_id`) REFERENCES `upstream`(`id`) ON DELETE CASCADE,
+ FOREIGN KEY (`certificate_id`) REFERENCES `certificate`(`id`) ON DELETE CASCADE,
+ FOREIGN KEY (`access_list_id`) REFERENCES `access_list`(`id`) ON DELETE CASCADE
+);
+
+-- migrate:down
+
+-- Not allowed to go down from initial
diff --git a/backend/embed/migrations/mysql/20201013035839_initial_data.sql b/backend/embed/migrations/mysql/20201013035839_initial_data.sql
new file mode 100644
index 000000000..9f24a6a0b
--- /dev/null
+++ b/backend/embed/migrations/mysql/20201013035839_initial_data.sql
@@ -0,0 +1,337 @@
+-- migrate:up
+
+-- User permissions
+INSERT INTO `capability` (
+ `name`
+) VALUES
+ ("full-admin"),
+ ("access-lists.view"),
+ ("access-lists.manage"),
+ ("audit-log.view"),
+ ("certificates.view"),
+ ("certificates.manage"),
+ ("certificate-authorities.view"),
+ ("certificate-authorities.manage"),
+ ("dns-providers.view"),
+ ("dns-providers.manage"),
+ ("hosts.view"),
+ ("hosts.manage"),
+ ("nginx-templates.view"),
+ ("nginx-templates.manage"),
+ ("settings.manage"),
+ ("streams.view"),
+ ("streams.manage"),
+ ("users.manage");
+
+INSERT INTO `setting` (
+ `created_at`,
+ `updated_at`,
+ `name`,
+ `description`,
+ `value`
+) VALUES
+-- Default site
+(
+ ROUND(UNIX_TIMESTAMP(CURTIME(4)) * 1000),
+ ROUND(UNIX_TIMESTAMP(CURTIME(4)) * 1000),
+ "default-site",
+ "What to show users who hit your Nginx server by default",
+ '"welcome"' -- remember this is json
+),
+(
+ ROUND(UNIX_TIMESTAMP(CURTIME(4)) * 1000),
+ ROUND(UNIX_TIMESTAMP(CURTIME(4)) * 1000),
+ "auth-methods",
+ "Which methods are enabled for authentication",
+ '["local"]' -- remember this is json
+),
+(
+ ROUND(UNIX_TIMESTAMP(CURTIME(4)) * 1000),
+ ROUND(UNIX_TIMESTAMP(CURTIME(4)) * 1000),
+ "oauth-auth",
+ "Configuration for OAuth authentication",
+ '{}' -- remember this is json
+),
+(
+ ROUND(UNIX_TIMESTAMP(CURTIME(4)) * 1000),
+ ROUND(UNIX_TIMESTAMP(CURTIME(4)) * 1000),
+ "ldap-auth",
+ "Configuration for LDAP authentication",
+ '{"host": "", "dn": "", "sync_by": "uid"}' -- remember this is json
+);
+
+-- Default Certificate Authorities
+
+INSERT INTO `certificate_authority` (
+ `created_at`,
+ `updated_at`,
+ `name`,
+ `acmesh_server`,
+ `is_wildcard_supported`,
+ `max_domains`,
+ `is_readonly`
+) VALUES (
+ ROUND(UNIX_TIMESTAMP(CURTIME(4)) * 1000),
+ ROUND(UNIX_TIMESTAMP(CURTIME(4)) * 1000),
+ "ZeroSSL",
+ "zerossl",
+ TRUE,
+ 10,
+ TRUE
+), (
+ ROUND(UNIX_TIMESTAMP(CURTIME(4)) * 1000),
+ ROUND(UNIX_TIMESTAMP(CURTIME(4)) * 1000),
+ "Let's Encrypt",
+ "https://acme-v02.api.letsencrypt.org/directory",
+ TRUE,
+ 10,
+ TRUE
+), (
+ ROUND(UNIX_TIMESTAMP(CURTIME(4)) * 1000),
+ ROUND(UNIX_TIMESTAMP(CURTIME(4)) * 1000),
+ "Buypass Go SSL",
+ "https://api.buypass.com/acme/directory",
+ FALSE,
+ 5,
+ TRUE
+), (
+ ROUND(UNIX_TIMESTAMP(CURTIME(4)) * 1000),
+ ROUND(UNIX_TIMESTAMP(CURTIME(4)) * 1000),
+ "SSL.com",
+ "ssl.com",
+ FALSE,
+ 10,
+ TRUE
+), (
+ ROUND(UNIX_TIMESTAMP(CURTIME(4)) * 1000),
+ ROUND(UNIX_TIMESTAMP(CURTIME(4)) * 1000),
+ "Let's Encrypt (Testing)",
+ "https://acme-staging-v02.api.letsencrypt.org/directory",
+ TRUE,
+ 10,
+ TRUE
+), (
+ ROUND(UNIX_TIMESTAMP(CURTIME(4)) * 1000),
+ ROUND(UNIX_TIMESTAMP(CURTIME(4)) * 1000),
+ "Buypass Go SSL (Testing)",
+ "https://api.test4.buypass.no/acme/directory",
+ FALSE,
+ 5,
+ TRUE
+);
+
+-- System User
+INSERT INTO `user` (
+ `created_at`,
+ `updated_at`,
+ `name`,
+ `email`,
+ `is_system`
+) VALUES (
+ ROUND(UNIX_TIMESTAMP(CURTIME(4)) * 1000),
+ ROUND(UNIX_TIMESTAMP(CURTIME(4)) * 1000),
+ "System",
+ "system@localhost",
+ TRUE
+);
+
+-- Host Templates
+INSERT INTO `nginx_template` (
+ `created_at`,
+ `updated_at`,
+ `user_id`,
+ `name`,
+ `type`,
+ `template`
+) VALUES (
+ ROUND(UNIX_TIMESTAMP(CURTIME(4)) * 1000),
+ ROUND(UNIX_TIMESTAMP(CURTIME(4)) * 1000),
+ (SELECT `id` FROM `user` WHERE `is_system` IS TRUE LIMIT 1),
+ "Default Proxy Template",
+ "proxy",
+ "# ------------------------------------------------------------
+{{#each Host.DomainNames}}
+# {{this}}
+{{/each}}
+# ------------------------------------------------------------
+
+server {
+ {{#if Config.Ipv4}}
+ listen 80;
+ {{/if}}
+ {{#if Config.Ipv6}}
+ listen [::]:80;
+ {{/if}}
+
+ {{#if Certificate.ID}}
+ {{#if Config.Ipv4}}
+ listen 443 ssl {{#if Host.HTTP2Support}}http2{{/if}};
+ {{/if}}
+ {{#if Config.Ipv6}}
+ listen [::]:443 ssl {{#if Host.HTTP2Support}}http2{{/if}};
+ {{/if}}
+ {{/if}}
+
+ server_name {{#each Host.DomainNames}}{{this}} {{/each}};
+
+ {{#if Certificate.ID}}
+ include conf.d/include/ssl-ciphers.conf;
+ {{#if Certificate.IsAcme}}
+ ssl_certificate {{Certificate.Folder}}/fullchain.pem;
+ ssl_certificate_key {{Certificate.Folder}}/privkey.pem;
+ {{else}}
+ # Custom SSL
+ ssl_certificate /data/custom_ssl/npm-{{Certicicate.ID}}/fullchain.pem;
+ ssl_certificate_key /data/custom_ssl/npm-{{Certificate.ID}}/privkey.pem;
+ {{/if}}
+ {{/if}}
+
+ {{#if Host.CachingEnabled}}
+ include conf.d/include/assets.conf;
+ {{/if}}
+
+ {{#if Host.BlockExploits}}
+ include conf.d/include/block-exploits.conf;
+ {{/if}}
+
+ {{#if Certificate.ID}}
+ {{#if Host.SSLForced}}
+ {{#if Host.HSTSEnabled}}
+ # HSTS (ngx_http_headers_module is required) (63072000 seconds = 2 years)
+ add_header Strict-Transport-Security ""max-age=63072000;{{#if Host.HSTSSubdomains}} includeSubDomains;{{/if}} preload"" always;
+ {{/if}}
+ # Force SSL
+ include conf.d/include/force-ssl.conf;
+ {{/if}}
+ {{/if}}
+
+ {{#if Host.AllowWebsocketUpgrade}}
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection $http_connection;
+ proxy_http_version 1.1;
+ {{/if}}
+
+ access_log /data/logs/host-{{Host.ID}}_access.log proxy;
+ error_log /data/logs/host-{{Host.ID}}_error.log warn;
+
+ {{Host.AdvancedConfig}}
+
+ # locations ?
+
+ # default location:
+ location / {
+ {{#if Host.AccessListID}}
+ # Authorization
+ auth_basic ""Authorization required"";
+ auth_basic_user_file /data/access/{{Host.AccessListID}};
+ # access_list.passauth ? todo
+ {{/if}}
+
+ # Access Rules ? todo
+
+ # Access checks must...? todo
+
+ {{#if Certificate.ID}}
+ {{#if Host.SSLForced}}
+ {{#if Host.HSTSEnabled}}
+ # HSTS (ngx_http_headers_module is required) (63072000 seconds = 2 years)
+ add_header Strict-Transport-Security ""max-age=63072000;{{#if Host.HSTSSubdomains}} includeSubDomains;{{/if}} preload"" always;
+ {{/if}}
+ {{/if}}
+ {{/if}}
+
+ {{#if Host.AllowWebsocketUpgrade}}
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection $http_connection;
+ {{/if}}
+
+ # Proxy!
+ add_header X-Served-By $host;
+ proxy_set_header Host $host;
+ proxy_set_header X-Forwarded-Scheme $scheme;
+ proxy_set_header X-Forwarded-Proto $scheme;
+ proxy_set_header X-Forwarded-For $remote_addr;
+ proxy_http_version 1.1;
+
+ {{#if Upstream.ID}}
+ # upstream
+ proxy_pass {{Host.ProxyScheme}}://npm_upstream_{{Upstream.ID}};
+ {{else}}
+ # proxy a single host
+ proxy_pass {{Host.ProxyScheme}}://{{Host.ProxyHost}}:{{Host.ProxyPort}};
+ {{/if}}
+ }
+
+ # Legacy Custom Configuration
+ include /data/nginx/custom/server_proxy[.]conf;
+}
+"
+), (
+ ROUND(UNIX_TIMESTAMP(CURTIME(4)) * 1000),
+ ROUND(UNIX_TIMESTAMP(CURTIME(4)) * 1000),
+ (SELECT `id` FROM `user` WHERE `is_system` IS TRUE LIMIT 1),
+ "Default Redirect Template",
+ "redirect",
+ "# this is a redirect template"
+), (
+ ROUND(UNIX_TIMESTAMP(CURTIME(4)) * 1000),
+ ROUND(UNIX_TIMESTAMP(CURTIME(4)) * 1000),
+ (SELECT `id` FROM `user` WHERE `is_system` IS TRUE LIMIT 1),
+ "Default Dead Template",
+ "dead",
+ "# this is a dead template"
+), (
+ ROUND(UNIX_TIMESTAMP(CURTIME(4)) * 1000),
+ ROUND(UNIX_TIMESTAMP(CURTIME(4)) * 1000),
+ (SELECT `id` FROM `user` WHERE `is_system` IS TRUE LIMIT 1),
+ "Default Stream Template",
+ "stream",
+ "# this is a stream template"
+), (
+ ROUND(UNIX_TIMESTAMP(CURTIME(4)) * 1000),
+ ROUND(UNIX_TIMESTAMP(CURTIME(4)) * 1000),
+ (SELECT `id` FROM `user` WHERE `is_system` IS TRUE LIMIT 1),
+ "Default Upstream Template",
+ "upstream",
+ "# ------------------------------------------------------------
+# Upstream {{Upstream.ID}}: {{Upstream.Name}}
+# ------------------------------------------------------------
+
+upstream npm_upstream_{{Upstream.ID}} {
+
+ {{#if Upstream.IPHash~}}
+ ip_hash;
+ {{~/if}}
+
+ {{#if Upstream.NTLM~}}
+ ntlm;
+ {{~/if}}
+
+ {{#if Upstream.Keepalive~}}
+ keepalive {{Upstream.Keepalive}};
+ {{~/if}}
+
+ {{#if Upstream.KeepaliveRequests~}}
+ keepalive_requests {{Upstream.KeepaliveRequests}};
+ {{~/if}}
+
+ {{#if Upstream.KeepaliveTime~}}
+ keepalive_time {{Upstream.KeepaliveTime}};
+ {{~/if}}
+
+ {{#if Upstream.KeepaliveTimeout~}}
+ keepalive_timeout {{Upstream.KeepaliveTimeout}};
+ {{~/if}}
+
+ {{Upstream.AdvancedConfig}}
+
+ {{#each Upstream.Servers~}}
+ {{#unless IsDeleted~}}
+ server {{Server}} {{#if Weight}}weight={{Weight}} {{/if}}{{#if MaxConns}}max_conns={{MaxConns}} {{/if}}{{#if MaxFails}}max_fails={{MaxFails}} {{/if}}{{#if FailTimeout}}fail_timeout={{FailTimeout}} {{/if}}{{#if Backup}}backup{{/if}};
+ {{/unless}}
+ {{/each}}
+}
+"
+);
+
+-- migrate:down
diff --git a/backend/embed/migrations/postgres/20201013035318_initial_schema.sql b/backend/embed/migrations/postgres/20201013035318_initial_schema.sql
new file mode 100644
index 000000000..fa75dd7de
--- /dev/null
+++ b/backend/embed/migrations/postgres/20201013035318_initial_schema.sql
@@ -0,0 +1,212 @@
+-- migrate:up
+
+CREATE TABLE "jwt_keys" (
+ "id" SERIAL PRIMARY KEY,
+ "created_at" BIGINT NOT NULL DEFAULT 0,
+ "updated_at" BIGINT NOT NULL DEFAULT 0,
+ "is_deleted" INTEGER NOT NULL DEFAULT 0, -- int on purpose, gormism
+ "public_key" TEXT NOT NULL,
+ "private_key" TEXT NOT NULL
+);
+
+CREATE TABLE "user" (
+ "id" SERIAL PRIMARY KEY,
+ "created_at" BIGINT NOT NULL DEFAULT 0,
+ "updated_at" BIGINT NOT NULL DEFAULT 0,
+ "is_deleted" INTEGER NOT NULL DEFAULT 0, -- int on purpose, gormism
+ "name" VARCHAR(50) NOT NULL,
+ "email" VARCHAR(255) NOT NULL,
+ "is_system" BOOLEAN NOT NULL DEFAULT FALSE,
+ "is_disabled" BOOLEAN NOT NULL DEFAULT FALSE
+);
+
+CREATE TABLE "capability" (
+ "name" TEXT NOT NULL PRIMARY KEY,
+ UNIQUE ("name")
+);
+
+CREATE TABLE "user_has_capability" (
+ "user_id" INTEGER NOT NULL,
+ "capability_name" TEXT NOT NULL REFERENCES "capability"("name") ON DELETE CASCADE,
+ UNIQUE ("user_id", "capability_name")
+);
+
+CREATE TABLE "auth" (
+ "id" SERIAL PRIMARY KEY,
+ "created_at" BIGINT NOT NULL DEFAULT 0,
+ "updated_at" BIGINT NOT NULL DEFAULT 0,
+ "is_deleted" INTEGER NOT NULL DEFAULT 0, -- int on purpose, gormism
+ "user_id" INTEGER NOT NULL REFERENCES "user"("id") ON DELETE CASCADE,
+ "type" VARCHAR(50) NOT NULL,
+ "identity" VARCHAR(255) NOT NULL,
+ "secret" VARCHAR(255) NOT NULL,
+ UNIQUE ("user_id", "type")
+);
+
+CREATE TABLE "setting" (
+ "id" SERIAL PRIMARY KEY,
+ "created_at" BIGINT NOT NULL DEFAULT 0,
+ "updated_at" BIGINT NOT NULL DEFAULT 0,
+ "is_deleted" INTEGER NOT NULL DEFAULT 0, -- int on purpose, gormism
+ "name" VARCHAR(50) NOT NULL,
+ "description" VARCHAR(255) NOT NULL DEFAULT '',
+ "value" TEXT NOT NULL,
+ UNIQUE ("name")
+);
+
+CREATE TABLE "audit_log" (
+ "id" SERIAL PRIMARY KEY,
+ "created_at" BIGINT NOT NULL DEFAULT 0,
+ "updated_at" BIGINT NOT NULL DEFAULT 0,
+ "is_deleted" INTEGER NOT NULL DEFAULT 0, -- int on purpose, gormism
+ "user_id" INTEGER NOT NULL REFERENCES "user"("id") ON DELETE CASCADE,
+ "object_type" VARCHAR(50) NOT NULL,
+ "object_id" INTEGER NOT NULL,
+ "action" VARCHAR(50) NOT NULL,
+ "meta" TEXT NOT NULL
+);
+
+CREATE TABLE "certificate_authority" (
+ "id" SERIAL PRIMARY KEY,
+ "created_at" BIGINT NOT NULL DEFAULT 0,
+ "updated_at" BIGINT NOT NULL DEFAULT 0,
+ "is_deleted" INTEGER NOT NULL DEFAULT 0, -- int on purpose, gormism
+ "name" VARCHAR(50) NOT NULL,
+ "acmesh_server" VARCHAR(255) NOT NULL DEFAULT '',
+ "ca_bundle" VARCHAR(255) NOT NULL DEFAULT '',
+ "is_wildcard_supported" BOOLEAN NOT NULL DEFAULT FALSE, -- specific to each CA, acme v1 doesn't usually have wildcards
+ "max_domains" INTEGER NOT NULL DEFAULT 5, -- per request
+ "is_readonly" BOOLEAN NOT NULL DEFAULT FALSE
+);
+
+CREATE TABLE "dns_provider" (
+ "id" SERIAL PRIMARY KEY,
+ "created_at" BIGINT NOT NULL DEFAULT 0,
+ "updated_at" BIGINT NOT NULL DEFAULT 0,
+ "is_deleted" INTEGER NOT NULL DEFAULT 0, -- int on purpose, gormism
+ "user_id" INTEGER NOT NULL REFERENCES "user"("id") ON DELETE CASCADE,
+ "name" VARCHAR(50) NOT NULL,
+ "acmesh_name" VARCHAR(50) NOT NULL,
+ "dns_sleep" INTEGER NOT NULL DEFAULT 0,
+ "meta" TEXT NOT NULL
+);
+
+CREATE TABLE "certificate" (
+ "id" SERIAL PRIMARY KEY,
+ "created_at" BIGINT NOT NULL DEFAULT 0,
+ "updated_at" BIGINT NOT NULL DEFAULT 0,
+ "is_deleted" INTEGER NOT NULL DEFAULT 0, -- int on purpose, gormism
+ "user_id" INTEGER NOT NULL REFERENCES "user"("id") ON DELETE CASCADE,
+ "type" VARCHAR(50) NOT NULL, -- custom,dns,http
+ "certificate_authority_id" INTEGER REFERENCES "certificate_authority"("id") ON DELETE CASCADE, -- 0 for a custom cert
+ "dns_provider_id" INTEGER REFERENCES "dns_provider"("id") ON DELETE CASCADE, -- 0, for a http or custom cert
+ "name" VARCHAR(50) NOT NULL,
+ "domain_names" TEXT NOT NULL,
+ "expires_on" BIGINT NOT NULL DEFAULT 0,
+ "status" VARCHAR(50) NOT NULL, -- ready,requesting,failed,provided
+ "error_message" TEXT NOT NULL DEFAULT '',
+ "meta" TEXT NOT NULL,
+ "is_ecc" BOOLEAN NOT NULL DEFAULT FALSE
+);
+
+CREATE TABLE "stream" (
+ "id" SERIAL PRIMARY KEY,
+ "created_at" BIGINT NOT NULL DEFAULT 0,
+ "updated_at" BIGINT NOT NULL DEFAULT 0,
+ "is_deleted" INTEGER NOT NULL DEFAULT 0, -- int on purpose, gormism
+ "user_id" INTEGER NOT NULL REFERENCES "user"("id") ON DELETE CASCADE,
+ "listen_interface" VARCHAR(50) NOT NULL,
+ "incoming_port" INTEGER NOT NULL,
+ "tcp_forwarding" INTEGER NOT NULL DEFAULT 0,
+ "udp_forwarding" INTEGER NOT NULL DEFAULT 0,
+ "advanced_config" TEXT NOT NULL,
+ "is_disabled" BOOLEAN NOT NULL DEFAULT FALSE
+);
+
+CREATE TABLE "nginx_template" (
+ "id" SERIAL PRIMARY KEY,
+ "created_at" BIGINT NOT NULL DEFAULT 0,
+ "updated_at" BIGINT NOT NULL DEFAULT 0,
+ "is_deleted" INTEGER NOT NULL DEFAULT 0, -- int on purpose, gormism
+ "user_id" INTEGER NOT NULL REFERENCES "user"("id") ON DELETE CASCADE,
+ "name" VARCHAR(50) NOT NULL,
+ "type" VARCHAR(50) NOT NULL,
+ "template" TEXT NOT NULL
+);
+
+CREATE TABLE "upstream" (
+ "id" SERIAL PRIMARY KEY,
+ "created_at" BIGINT NOT NULL DEFAULT 0,
+ "updated_at" BIGINT NOT NULL DEFAULT 0,
+ "is_deleted" INTEGER NOT NULL DEFAULT 0, -- int on purpose, gormism
+ "user_id" INTEGER NOT NULL REFERENCES "user"("id") ON DELETE CASCADE,
+ "name" VARCHAR(50) NOT NULL,
+ "nginx_template_id" INTEGER NOT NULL REFERENCES "nginx_template"("id") ON DELETE CASCADE,
+ "ip_hash" BOOLEAN NOT NULL DEFAULT FALSE,
+ "ntlm" BOOLEAN NOT NULL DEFAULT FALSE,
+ "keepalive" INTEGER NOT NULL DEFAULT 0,
+ "keepalive_requests" INTEGER NOT NULL DEFAULT 0,
+ "keepalive_time" VARCHAR(50) NOT NULL DEFAULT '',
+ "keepalive_timeout" VARCHAR(50) NOT NULL DEFAULT '',
+ "advanced_config" TEXT NOT NULL,
+ "status" VARCHAR(50) NOT NULL DEFAULT '',
+ "error_message" TEXT NOT NULL DEFAULT ''
+);
+
+CREATE TABLE "upstream_server" (
+ "id" SERIAL PRIMARY KEY,
+ "created_at" BIGINT NOT NULL DEFAULT 0,
+ "updated_at" BIGINT NOT NULL DEFAULT 0,
+ "is_deleted" INTEGER NOT NULL DEFAULT 0, -- int on purpose, gormism
+ "upstream_id" INTEGER NOT NULL REFERENCES "upstream"("id") ON DELETE CASCADE,
+ "server" VARCHAR(50) NOT NULL,
+ "weight" INTEGER NOT NULL DEFAULT 0,
+ "max_conns" INTEGER NOT NULL DEFAULT 0,
+ "max_fails" INTEGER NOT NULL DEFAULT 0,
+ "fail_timeout" INTEGER NOT NULL DEFAULT 0,
+ "is_backup" BOOLEAN NOT NULL DEFAULT FALSE
+);
+
+CREATE TABLE "access_list" (
+ "id" SERIAL PRIMARY KEY,
+ "created_at" BIGINT NOT NULL DEFAULT 0,
+ "updated_at" BIGINT NOT NULL DEFAULT 0,
+ "is_deleted" INTEGER NOT NULL DEFAULT 0, -- int on purpose, gormism
+ "user_id" INTEGER NOT NULL REFERENCES "user"("id") ON DELETE CASCADE,
+ "name" VARCHAR(50) NOT NULL,
+ "meta" TEXT NOT NULL
+);
+
+CREATE TABLE "host" (
+ "id" SERIAL PRIMARY KEY,
+ "created_at" BIGINT NOT NULL DEFAULT 0,
+ "updated_at" BIGINT NOT NULL DEFAULT 0,
+ "is_deleted" INTEGER NOT NULL DEFAULT 0, -- int on purpose, gormism
+ "user_id" INTEGER NOT NULL REFERENCES "user"("id") ON DELETE CASCADE,
+ "type" TEXT NOT NULL,
+ "nginx_template_id" INTEGER NOT NULL REFERENCES "nginx_template"("id") ON DELETE CASCADE,
+ "listen_interface" TEXT NOT NULL DEFAULT '',
+ "domain_names" TEXT NOT NULL,
+ "upstream_id" INTEGER REFERENCES "upstream"("id") ON DELETE CASCADE,
+ "proxy_scheme" TEXT NOT NULL DEFAULT '',
+ "proxy_host" TEXT NOT NULL DEFAULT '',
+ "proxy_port" INTEGER NOT NULL DEFAULT 0,
+ "certificate_id" INTEGER REFERENCES "certificate"("id") ON DELETE CASCADE,
+ "access_list_id" INTEGER REFERENCES "access_list"("id") ON DELETE CASCADE,
+ "ssl_forced" BOOLEAN NOT NULL DEFAULT FALSE,
+ "caching_enabled" BOOLEAN NOT NULL DEFAULT FALSE,
+ "block_exploits" BOOLEAN NOT NULL DEFAULT FALSE,
+ "allow_websocket_upgrade" BOOLEAN NOT NULL DEFAULT FALSE,
+ "http2_support" BOOLEAN NOT NULL DEFAULT FALSE,
+ "hsts_enabled" BOOLEAN NOT NULL DEFAULT FALSE,
+ "hsts_subdomains" BOOLEAN NOT NULL DEFAULT FALSE,
+ "paths" TEXT NOT NULL DEFAULT '',
+ "advanced_config" TEXT NOT NULL DEFAULT '',
+ "status" TEXT NOT NULL DEFAULT '',
+ "error_message" TEXT NOT NULL DEFAULT '',
+ "is_disabled" BOOLEAN NOT NULL DEFAULT FALSE
+);
+
+-- migrate:down
+
+-- Not allowed to go down from initial
diff --git a/backend/embed/migrations/postgres/20201013035839_initial_data.sql b/backend/embed/migrations/postgres/20201013035839_initial_data.sql
new file mode 100644
index 000000000..0937851af
--- /dev/null
+++ b/backend/embed/migrations/postgres/20201013035839_initial_data.sql
@@ -0,0 +1,337 @@
+-- migrate:up
+
+-- User permissions
+INSERT INTO "capability" (
+ "name"
+) VALUES
+ ('full-admin'),
+ ('access-lists.view'),
+ ('access-lists.manage'),
+ ('audit-log.view'),
+ ('certificates.view'),
+ ('certificates.manage'),
+ ('certificate-authorities.view'),
+ ('certificate-authorities.manage'),
+ ('dns-providers.view'),
+ ('dns-providers.manage'),
+ ('hosts.view'),
+ ('hosts.manage'),
+ ('nginx-templates.view'),
+ ('nginx-templates.manage'),
+ ('settings.manage'),
+ ('streams.view'),
+ ('streams.manage'),
+ ('users.manage');
+
+INSERT INTO "setting" (
+ "created_at",
+ "updated_at",
+ "name",
+ "description",
+ "value"
+) VALUES
+-- Default site
+(
+ EXTRACT(EPOCH FROM TIMESTAMP '2011-05-17 10:40:28.876944') * 1000,
+ EXTRACT(EPOCH FROM TIMESTAMP '2011-05-17 10:40:28.876944') * 1000,
+ 'default-site',
+ 'What to show users who hit your Nginx server by default',
+ '"welcome"' -- remember this is json
+),
+(
+ EXTRACT(EPOCH FROM TIMESTAMP '2011-05-17 10:40:28.876944') * 1000,
+ EXTRACT(EPOCH FROM TIMESTAMP '2011-05-17 10:40:28.876944') * 1000,
+ 'auth-methods',
+ 'Which methods are enabled for authentication',
+ '["local"]' -- remember this is json
+),
+(
+ EXTRACT(EPOCH FROM TIMESTAMP '2011-05-17 10:40:28.876944') * 1000,
+ EXTRACT(EPOCH FROM TIMESTAMP '2011-05-17 10:40:28.876944') * 1000,
+ 'oauth-auth',
+ 'Configuration for OAuth authentication',
+ '{}' -- remember this is json
+),
+(
+ EXTRACT(EPOCH FROM TIMESTAMP '2011-05-17 10:40:28.876944') * 1000,
+ EXTRACT(EPOCH FROM TIMESTAMP '2011-05-17 10:40:28.876944') * 1000,
+ 'ldap-auth',
+ 'Configuration for LDAP authentication',
+ '{"host": "", "dn": "", "sync_by": "uid"}' -- remember this is json
+);
+
+-- Default Certificate Authorities
+
+INSERT INTO "certificate_authority" (
+ "created_at",
+ "updated_at",
+ "name",
+ "acmesh_server",
+ "is_wildcard_supported",
+ "max_domains",
+ "is_readonly"
+) VALUES (
+ EXTRACT(EPOCH FROM TIMESTAMP '2011-05-17 10:40:28.876944') * 1000,
+ EXTRACT(EPOCH FROM TIMESTAMP '2011-05-17 10:40:28.876944') * 1000,
+ 'ZeroSSL',
+ 'zerossl',
+ TRUE,
+ 10,
+ TRUE
+), (
+ EXTRACT(EPOCH FROM TIMESTAMP '2011-05-17 10:40:28.876944') * 1000,
+ EXTRACT(EPOCH FROM TIMESTAMP '2011-05-17 10:40:28.876944') * 1000,
+ 'Let''s Encrypt',
+ 'https://acme-v02.api.letsencrypt.org/directory',
+ TRUE,
+ 10,
+ TRUE
+), (
+ EXTRACT(EPOCH FROM TIMESTAMP '2011-05-17 10:40:28.876944') * 1000,
+ EXTRACT(EPOCH FROM TIMESTAMP '2011-05-17 10:40:28.876944') * 1000,
+ 'Buypass Go SSL',
+ 'https://api.buypass.com/acme/directory',
+ FALSE,
+ 5,
+ TRUE
+), (
+ EXTRACT(EPOCH FROM TIMESTAMP '2011-05-17 10:40:28.876944') * 1000,
+ EXTRACT(EPOCH FROM TIMESTAMP '2011-05-17 10:40:28.876944') * 1000,
+ 'SSL.com',
+ 'ssl.com',
+ FALSE,
+ 10,
+ TRUE
+), (
+ EXTRACT(EPOCH FROM TIMESTAMP '2011-05-17 10:40:28.876944') * 1000,
+ EXTRACT(EPOCH FROM TIMESTAMP '2011-05-17 10:40:28.876944') * 1000,
+ 'Let''s Encrypt (Testing)',
+ 'https://acme-staging-v02.api.letsencrypt.org/directory',
+ TRUE,
+ 10,
+ TRUE
+), (
+ EXTRACT(EPOCH FROM TIMESTAMP '2011-05-17 10:40:28.876944') * 1000,
+ EXTRACT(EPOCH FROM TIMESTAMP '2011-05-17 10:40:28.876944') * 1000,
+ 'Buypass Go SSL (Testing)',
+ 'https://api.test4.buypass.no/acme/directory',
+ FALSE,
+ 5,
+ TRUE
+);
+
+-- System User
+INSERT INTO "user" (
+ "created_at",
+ "updated_at",
+ "name",
+ "email",
+ "is_system"
+) VALUES (
+ EXTRACT(EPOCH FROM TIMESTAMP '2011-05-17 10:40:28.876944') * 1000,
+ EXTRACT(EPOCH FROM TIMESTAMP '2011-05-17 10:40:28.876944') * 1000,
+ 'System',
+ 'system@localhost',
+ TRUE
+);
+
+-- Host Templates
+INSERT INTO "nginx_template" (
+ "created_at",
+ "updated_at",
+ "user_id",
+ "name",
+ "type",
+ "template"
+) VALUES (
+ EXTRACT(EPOCH FROM TIMESTAMP '2011-05-17 10:40:28.876944') * 1000,
+ EXTRACT(EPOCH FROM TIMESTAMP '2011-05-17 10:40:28.876944') * 1000,
+ (SELECT "id" FROM "user" WHERE "is_system" IS TRUE LIMIT 1),
+ 'Default Proxy Template',
+ 'proxy',
+ '# ------------------------------------------------------------
+{{#each Host.DomainNames}}
+# {{this}}
+{{/each}}
+# ------------------------------------------------------------
+
+server {
+ {{#if Config.Ipv4}}
+ listen 80;
+ {{/if}}
+ {{#if Config.Ipv6}}
+ listen [::]:80;
+ {{/if}}
+
+ {{#if Certificate.ID}}
+ {{#if Config.Ipv4}}
+ listen 443 ssl {{#if Host.HTTP2Support}}http2{{/if}};
+ {{/if}}
+ {{#if Config.Ipv6}}
+ listen [::]:443 ssl {{#if Host.HTTP2Support}}http2{{/if}};
+ {{/if}}
+ {{/if}}
+
+ server_name {{#each Host.DomainNames}}{{this}} {{/each}};
+
+ {{#if Certificate.ID}}
+ include conf.d/include/ssl-ciphers.conf;
+ {{#if Certificate.IsAcme}}
+ ssl_certificate {{Certificate.Folder}}/fullchain.pem;
+ ssl_certificate_key {{Certificate.Folder}}/privkey.pem;
+ {{else}}
+ # Custom SSL
+ ssl_certificate /data/custom_ssl/npm-{{Certicicate.ID}}/fullchain.pem;
+ ssl_certificate_key /data/custom_ssl/npm-{{Certificate.ID}}/privkey.pem;
+ {{/if}}
+ {{/if}}
+
+ {{#if Host.CachingEnabled}}
+ include conf.d/include/assets.conf;
+ {{/if}}
+
+ {{#if Host.BlockExploits}}
+ include conf.d/include/block-exploits.conf;
+ {{/if}}
+
+ {{#if Certificate.ID}}
+ {{#if Host.SSLForced}}
+ {{#if Host.HSTSEnabled}}
+ # HSTS (ngx_http_headers_module is required) (63072000 seconds = 2 years)
+ add_header Strict-Transport-Security "max-age=63072000;{{#if Host.HSTSSubdomains}} includeSubDomains;{{/if}} preload" always;
+ {{/if}}
+ # Force SSL
+ include conf.d/include/force-ssl.conf;
+ {{/if}}
+ {{/if}}
+
+ {{#if Host.AllowWebsocketUpgrade}}
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection $http_connection;
+ proxy_http_version 1.1;
+ {{/if}}
+
+ access_log /data/logs/host-{{Host.ID}}_access.log proxy;
+ error_log /data/logs/host-{{Host.ID}}_error.log warn;
+
+ {{Host.AdvancedConfig}}
+
+ # locations ?
+
+ # default location:
+ location / {
+ {{#if Host.AccessListID}}
+ # Authorization
+ auth_basic "Authorization required";
+ auth_basic_user_file /data/access/{{Host.AccessListID}};
+ # access_list.passauth ? todo
+ {{/if}}
+
+ # Access Rules ? todo
+
+ # Access checks must...? todo
+
+ {{#if Certificate.ID}}
+ {{#if Host.SSLForced}}
+ {{#if Host.HSTSEnabled}}
+ # HSTS (ngx_http_headers_module is required) (63072000 seconds = 2 years)
+ add_header Strict-Transport-Security "max-age=63072000;{{#if Host.HSTSSubdomains}} includeSubDomains;{{/if}} preload" always;
+ {{/if}}
+ {{/if}}
+ {{/if}}
+
+ {{#if Host.AllowWebsocketUpgrade}}
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection $http_connection;
+ {{/if}}
+
+ # Proxy!
+ add_header X-Served-By $host;
+ proxy_set_header Host $host;
+ proxy_set_header X-Forwarded-Scheme $scheme;
+ proxy_set_header X-Forwarded-Proto $scheme;
+ proxy_set_header X-Forwarded-For $remote_addr;
+ proxy_http_version 1.1;
+
+ {{#if Upstream.ID}}
+ # upstream
+ proxy_pass {{Host.ProxyScheme}}://npm_upstream_{{Upstream.ID}};
+ {{else}}
+ # proxy a single host
+ proxy_pass {{Host.ProxyScheme}}://{{Host.ProxyHost}}:{{Host.ProxyPort}};
+ {{/if}}
+ }
+
+ # Legacy Custom Configuration
+ include /data/nginx/custom/server_proxy[.]conf;
+}
+'
+), (
+ EXTRACT(EPOCH FROM TIMESTAMP '2011-05-17 10:40:28.876944') * 1000,
+ EXTRACT(EPOCH FROM TIMESTAMP '2011-05-17 10:40:28.876944') * 1000,
+ (SELECT "id" FROM "user" WHERE "is_system" IS TRUE LIMIT 1),
+ 'Default Redirect Template',
+ 'redirect',
+ '# this is a redirect template'
+), (
+ EXTRACT(EPOCH FROM TIMESTAMP '2011-05-17 10:40:28.876944') * 1000,
+ EXTRACT(EPOCH FROM TIMESTAMP '2011-05-17 10:40:28.876944') * 1000,
+ (SELECT "id" FROM "user" WHERE "is_system" IS TRUE LIMIT 1),
+ 'Default Dead Template',
+ 'dead',
+ '# this is a dead template'
+), (
+ EXTRACT(EPOCH FROM TIMESTAMP '2011-05-17 10:40:28.876944') * 1000,
+ EXTRACT(EPOCH FROM TIMESTAMP '2011-05-17 10:40:28.876944') * 1000,
+ (SELECT "id" FROM "user" WHERE "is_system" IS TRUE LIMIT 1),
+ 'Default Stream Template',
+ 'stream',
+ '# this is a stream template'
+), (
+ EXTRACT(EPOCH FROM TIMESTAMP '2011-05-17 10:40:28.876944') * 1000,
+ EXTRACT(EPOCH FROM TIMESTAMP '2011-05-17 10:40:28.876944') * 1000,
+ (SELECT "id" FROM "user" WHERE "is_system" IS TRUE LIMIT 1),
+ 'Default Upstream Template',
+ 'upstream',
+ '# ------------------------------------------------------------
+# Upstream {{Upstream.ID}}: {{Upstream.Name}}
+# ------------------------------------------------------------
+
+upstream npm_upstream_{{Upstream.ID}} {
+
+ {{#if Upstream.IPHash~}}
+ ip_hash;
+ {{~/if}}
+
+ {{#if Upstream.NTLM~}}
+ ntlm;
+ {{~/if}}
+
+ {{#if Upstream.Keepalive~}}
+ keepalive {{Upstream.Keepalive}};
+ {{~/if}}
+
+ {{#if Upstream.KeepaliveRequests~}}
+ keepalive_requests {{Upstream.KeepaliveRequests}};
+ {{~/if}}
+
+ {{#if Upstream.KeepaliveTime~}}
+ keepalive_time {{Upstream.KeepaliveTime}};
+ {{~/if}}
+
+ {{#if Upstream.KeepaliveTimeout~}}
+ keepalive_timeout {{Upstream.KeepaliveTimeout}};
+ {{~/if}}
+
+ {{Upstream.AdvancedConfig}}
+
+ {{#each Upstream.Servers~}}
+ {{#unless IsDeleted~}}
+ server {{Server}} {{#if Weight}}weight={{Weight}} {{/if}}{{#if MaxConns}}max_conns={{MaxConns}} {{/if}}{{#if MaxFails}}max_fails={{MaxFails}} {{/if}}{{#if FailTimeout}}fail_timeout={{FailTimeout}} {{/if}}{{#if Backup}}backup{{/if}};
+ {{/unless}}
+ {{/each}}
+}
+'
+);
+
+-- migrate:down
diff --git a/backend/embed/migrations/sqlite/20201013035318_initial_schema.sql b/backend/embed/migrations/sqlite/20201013035318_initial_schema.sql
new file mode 100644
index 000000000..602be0323
--- /dev/null
+++ b/backend/embed/migrations/sqlite/20201013035318_initial_schema.sql
@@ -0,0 +1,246 @@
+-- migrate:up
+
+CREATE TABLE IF NOT EXISTS `jwt_keys`
+(
+ `id` INTEGER PRIMARY KEY AUTOINCREMENT,
+ `created_at` INTEGER NOT NULL DEFAULT 0,
+ `updated_at` INTEGER NOT NULL DEFAULT 0,
+ `is_deleted` INTEGER NOT NULL DEFAULT 0,
+ `public_key` TEXT NOT NULL,
+ `private_key` TEXT NOT NULL
+);
+
+CREATE TABLE IF NOT EXISTS `user`
+(
+ `id` INTEGER PRIMARY KEY AUTOINCREMENT,
+ `created_at` INTEGER NOT NULL DEFAULT 0,
+ `updated_at` INTEGER NOT NULL DEFAULT 0,
+ `is_deleted` INTEGER NOT NULL DEFAULT 0,
+ `name` TEXT NOT NULL,
+ `email` TEXT NOT NULL,
+ `is_system` INTEGER NOT NULL DEFAULT 0,
+ `is_disabled` INTEGER NOT NULL DEFAULT 0
+);
+
+CREATE TABLE IF NOT EXISTS `capability`
+(
+ `name` TEXT PRIMARY KEY,
+ UNIQUE (`name`)
+);
+
+CREATE TABLE IF NOT EXISTS `user_has_capability`
+(
+ `user_id` INTEGER NOT NULL,
+ `capability_name` TEXT NOT NULL,
+ UNIQUE (`user_id`, `capability_name`),
+ FOREIGN KEY (`capability_name`) REFERENCES `capability` (`name`) ON DELETE CASCADE
+);
+
+CREATE TABLE IF NOT EXISTS `auth`
+(
+ `id` INTEGER PRIMARY KEY AUTOINCREMENT,
+ `created_at` INTEGER NOT NULL DEFAULT 0,
+ `updated_at` INTEGER NOT NULL DEFAULT 0,
+ `is_deleted` INTEGER NOT NULL DEFAULT 0,
+ `user_id` INTEGER NOT NULL,
+ `type` TEXT NOT NULL,
+ `identity` TEXT NOT NULL,
+ `secret` TEXT NOT NULL,
+ FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE,
+ UNIQUE (`user_id`, `type`)
+);
+
+CREATE TABLE IF NOT EXISTS `setting`
+(
+ `id` INTEGER PRIMARY KEY AUTOINCREMENT,
+ `created_at` INTEGER NOT NULL DEFAULT 0,
+ `updated_at` INTEGER NOT NULL DEFAULT 0,
+ `is_deleted` INTEGER NOT NULL DEFAULT 0,
+ `name` TEXT NOT NULL,
+ `description` TEXT NOT NULL DEFAULT "",
+ `value` TEXT NOT NULL,
+ UNIQUE (`name`)
+);
+
+CREATE TABLE IF NOT EXISTS `audit_log`
+(
+ `id` INTEGER PRIMARY KEY AUTOINCREMENT,
+ `created_at` INTEGER NOT NULL DEFAULT 0,
+ `updated_at` INTEGER NOT NULL DEFAULT 0,
+ `is_deleted` INTEGER NOT NULL DEFAULT 0,
+ `user_id` INTEGER NOT NULL,
+ `object_type` TEXT NOT NULL,
+ `object_id` INTEGER NOT NULL,
+ `action` TEXT NOT NULL,
+ `meta` TEXT NOT NULL,
+ FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE
+);
+
+CREATE TABLE IF NOT EXISTS `certificate_authority`
+(
+ `id` INTEGER PRIMARY KEY AUTOINCREMENT,
+ `created_at` INTEGER NOT NULL DEFAULT 0,
+ `updated_at` INTEGER NOT NULL DEFAULT 0,
+ `is_deleted` INTEGER NOT NULL DEFAULT 0,
+ `name` TEXT NOT NULL,
+ `acmesh_server` TEXT NOT NULL DEFAULT "",
+ `ca_bundle` TEXT NOT NULL DEFAULT "",
+ `is_wildcard_supported` INTEGER NOT NULL DEFAULT 0, -- specific to each CA, acme v1 doesn't usually have wildcards
+ `max_domains` INTEGER NOT NULL DEFAULT 5, -- per request
+ `is_readonly` INTEGER NOT NULL DEFAULT 0
+);
+
+CREATE TABLE IF NOT EXISTS `dns_provider`
+(
+ `id` INTEGER PRIMARY KEY AUTOINCREMENT,
+ `created_at` INTEGER NOT NULL DEFAULT 0,
+ `updated_at` INTEGER NOT NULL DEFAULT 0,
+ `is_deleted` INTEGER NOT NULL DEFAULT 0,
+ `user_id` INTEGER NOT NULL,
+ `name` TEXT NOT NULL,
+ `acmesh_name` TEXT NOT NULL,
+ `dns_sleep` INTEGER NOT NULL DEFAULT 0,
+ `meta` TEXT NOT NULL,
+ FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE
+);
+
+CREATE TABLE IF NOT EXISTS `certificate`
+(
+ `id` INTEGER PRIMARY KEY AUTOINCREMENT,
+ `created_at` INTEGER NOT NULL DEFAULT 0,
+ `updated_at` INTEGER NOT NULL DEFAULT 0,
+ `is_deleted` INTEGER NOT NULL DEFAULT 0,
+ `user_id` INTEGER NOT NULL,
+ `type` TEXT NOT NULL, -- custom,dns,http
+ `certificate_authority_id` INTEGER, -- 0 for a custom cert
+ `dns_provider_id` INTEGER, -- 0, for a http or custom cert
+ `name` TEXT NOT NULL,
+ `domain_names` TEXT NOT NULL,
+ `expires_on` INTEGER NOT NULL DEFAULT 0,
+ `status` TEXT NOT NULL, -- ready,requesting,failed,provided
+ `error_message` TEXT NOT NULL DEFAULT "",
+ `meta` TEXT NOT NULL,
+ `is_ecc` INTEGER NOT NULL DEFAULT 0,
+ FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE,
+ FOREIGN KEY (`certificate_authority_id`) REFERENCES `certificate_authority` (`id`) ON DELETE CASCADE,
+ FOREIGN KEY (`dns_provider_id`) REFERENCES `dns_provider` (`id`) ON DELETE CASCADE
+);
+
+CREATE TABLE IF NOT EXISTS `stream`
+(
+ `id` INTEGER PRIMARY KEY AUTOINCREMENT,
+ `created_at` INTEGER NOT NULL DEFAULT 0,
+ `updated_at` INTEGER NOT NULL DEFAULT 0,
+ `is_deleted` INTEGER NOT NULL DEFAULT 0,
+ `user_id` INTEGER NOT NULL,
+ `listen_interface` TEXT NOT NULL,
+ `incoming_port` INTEGER NOT NULL,
+ `tcp_forwarding` INTEGER NOT NULL DEFAULT 0,
+ `udp_forwarding` INTEGER NOT NULL DEFAULT 0,
+ `advanced_config` TEXT NOT NULL,
+ `is_disabled` INTEGER NOT NULL DEFAULT 0,
+ FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE
+);
+
+CREATE TABLE IF NOT EXISTS `nginx_template`
+(
+ `id` INTEGER PRIMARY KEY AUTOINCREMENT,
+ `created_at` INTEGER NOT NULL DEFAULT 0,
+ `updated_at` INTEGER NOT NULL DEFAULT 0,
+ `is_deleted` INTEGER NOT NULL DEFAULT 0,
+ `user_id` INTEGER NOT NULL,
+ `name` TEXT NOT NULL,
+ `type` TEXT NOT NULL,
+ `template` TEXT NOT NULL,
+ FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE
+);
+
+CREATE TABLE IF NOT EXISTS `upstream`
+(
+ `id` INTEGER PRIMARY KEY AUTOINCREMENT,
+ `created_at` INTEGER NOT NULL DEFAULT 0,
+ `updated_at` INTEGER NOT NULL DEFAULT 0,
+ `is_deleted` INTEGER NOT NULL DEFAULT 0,
+ `user_id` INTEGER NOT NULL,
+ `name` TEXT NOT NULL,
+ `nginx_template_id` INTEGER NOT NULL,
+ `ip_hash` INTEGER NOT NULL DEFAULT 0,
+ `ntlm` INTEGER NOT NULL DEFAULT 0,
+ `keepalive` INTEGER NOT NULL DEFAULT 0,
+ `keepalive_requests` INTEGER NOT NULL DEFAULT 0,
+ `keepalive_time` TEXT NOT NULL DEFAULT "",
+ `keepalive_timeout` TEXT NOT NULL DEFAULT "",
+ `advanced_config` TEXT NOT NULL,
+ `status` TEXT NOT NULL DEFAULT "",
+ `error_message` TEXT NOT NULL DEFAULT "",
+ FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE,
+ FOREIGN KEY (`nginx_template_id`) REFERENCES `nginx_template` (`id`) ON DELETE CASCADE
+);
+
+CREATE TABLE IF NOT EXISTS `upstream_server`
+(
+ `id` INTEGER PRIMARY KEY AUTOINCREMENT,
+ `created_at` INTEGER NOT NULL DEFAULT 0,
+ `updated_at` INTEGER NOT NULL DEFAULT 0,
+ `is_deleted` INTEGER NOT NULL DEFAULT 0,
+ `upstream_id` INTEGER NOT NULL,
+ `server` TEXT NOT NULL,
+ `weight` INTEGER NOT NULL DEFAULT 0,
+ `max_conns` INTEGER NOT NULL DEFAULT 0,
+ `max_fails` INTEGER NOT NULL DEFAULT 0,
+ `fail_timeout` INTEGER NOT NULL DEFAULT 0,
+ `is_backup` INTEGER NOT NULL DEFAULT 0,
+ FOREIGN KEY (`upstream_id`) REFERENCES `upstream` (`id`) ON DELETE CASCADE
+);
+
+CREATE TABLE IF NOT EXISTS `access_list`
+(
+ `id` INTEGER PRIMARY KEY AUTOINCREMENT,
+ `created_at` INTEGER NOT NULL DEFAULT 0,
+ `updated_at` INTEGER NOT NULL DEFAULT 0,
+ `is_deleted` INTEGER NOT NULL DEFAULT 0,
+ `user_id` INTEGER NOT NULL,
+ `name` TEXT NOT NULL,
+ `meta` TEXT NOT NULL,
+ FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE
+);
+
+CREATE TABLE IF NOT EXISTS `host`
+(
+ `id` INTEGER PRIMARY KEY AUTOINCREMENT,
+ `created_at` INTEGER NOT NULL DEFAULT 0,
+ `updated_at` INTEGER NOT NULL DEFAULT 0,
+ `is_deleted` INTEGER NOT NULL DEFAULT 0,
+ `user_id` INTEGER NOT NULL,
+ `type` TEXT NOT NULL,
+ `nginx_template_id` INTEGER NOT NULL,
+ `listen_interface` TEXT NOT NULL DEFAULT "",
+ `domain_names` TEXT NOT NULL,
+ `upstream_id` INTEGER,
+ `proxy_scheme` TEXT NOT NULL DEFAULT "",
+ `proxy_host` TEXT NOT NULL DEFAULT "",
+ `proxy_port` INTEGER NOT NULL DEFAULT 0,
+ `certificate_id` INTEGER,
+ `access_list_id` INTEGER,
+ `ssl_forced` INTEGER NOT NULL DEFAULT 0,
+ `caching_enabled` INTEGER NOT NULL DEFAULT 0,
+ `block_exploits` INTEGER NOT NULL DEFAULT 0,
+ `allow_websocket_upgrade` INTEGER NOT NULL DEFAULT 0,
+ `http2_support` INTEGER NOT NULL DEFAULT 0,
+ `hsts_enabled` INTEGER NOT NULL DEFAULT 0,
+ `hsts_subdomains` INTEGER NOT NULL DEFAULT 0,
+ `paths` TEXT NOT NULL DEFAULT "",
+ `advanced_config` TEXT NOT NULL DEFAULT "",
+ `status` TEXT NOT NULL DEFAULT "",
+ `error_message` TEXT NOT NULL DEFAULT "",
+ `is_disabled` INTEGER NOT NULL DEFAULT 0,
+ FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE CASCADE,
+ FOREIGN KEY (`nginx_template_id`) REFERENCES `nginx_template` (`id`) ON DELETE CASCADE,
+ FOREIGN KEY (`upstream_id`) REFERENCES `upstream` (`id`) ON DELETE CASCADE,
+ FOREIGN KEY (`certificate_id`) REFERENCES `certificate` (`id`) ON DELETE CASCADE,
+ FOREIGN KEY (`access_list_id`) REFERENCES `access_list` (`id`) ON DELETE CASCADE
+);
+
+-- migrate:down
+
+-- Not allowed to go down from initial
diff --git a/backend/embed/migrations/sqlite/20201013035839_initial_data.sql b/backend/embed/migrations/sqlite/20201013035839_initial_data.sql
new file mode 100644
index 000000000..5bfe40161
--- /dev/null
+++ b/backend/embed/migrations/sqlite/20201013035839_initial_data.sql
@@ -0,0 +1,336 @@
+-- migrate:up
+
+-- User permissions
+INSERT INTO `capability` (
+ name
+) VALUES
+ ("full-admin"),
+ ("access-lists.view"),
+ ("access-lists.manage"),
+ ("audit-log.view"),
+ ("certificates.view"),
+ ("certificates.manage"),
+ ("certificate-authorities.view"),
+ ("certificate-authorities.manage"),
+ ("dns-providers.view"),
+ ("dns-providers.manage"),
+ ("hosts.view"),
+ ("hosts.manage"),
+ ("nginx-templates.view"),
+ ("nginx-templates.manage"),
+ ("settings.manage"),
+ ("streams.view"),
+ ("streams.manage"),
+ ("users.manage");
+
+-- Default site
+INSERT INTO `setting` (
+ created_at,
+ updated_at,
+ name,
+ description,
+ value
+) VALUES (
+ unixepoch() * 1000,
+ unixepoch() * 1000,
+ "default-site",
+ "What to show users who hit your Nginx server by default",
+ '"welcome"' -- remember this is json
+),
+(
+ unixepoch() * 1000,
+ unixepoch() * 1000,
+ "auth-methods",
+ "Which methods are enabled for authentication",
+ '["local"]' -- remember this is json
+),
+(
+ unixepoch() * 1000,
+ unixepoch() * 1000,
+ "oauth-auth",
+ "Configuration for OAuth authentication",
+ '{}' -- remember this is json
+),
+(
+ unixepoch() * 1000,
+ unixepoch() * 1000,
+ "ldap-auth",
+ "Configuration for LDAP authentication",
+ '{"host": "", "dn": "", "sync_by": "uid"}' -- remember this is json
+);
+
+-- Default Certificate Authorities
+
+INSERT INTO `certificate_authority` (
+ created_at,
+ updated_at,
+ name,
+ acmesh_server,
+ is_wildcard_supported,
+ max_domains,
+ is_readonly
+) VALUES (
+ unixepoch() * 1000,
+ unixepoch() * 1000,
+ "ZeroSSL",
+ "zerossl",
+ 1,
+ 10,
+ 1
+), (
+ unixepoch() * 1000,
+ unixepoch() * 1000,
+ "Let's Encrypt",
+ "https://acme-v02.api.letsencrypt.org/directory",
+ 1,
+ 10,
+ 1
+), (
+ unixepoch() * 1000,
+ unixepoch() * 1000,
+ "Buypass Go SSL",
+ "https://api.buypass.com/acme/directory",
+ 0,
+ 5,
+ 1
+), (
+ unixepoch() * 1000,
+ unixepoch() * 1000,
+ "SSL.com",
+ "ssl.com",
+ 0,
+ 10,
+ 1
+), (
+ unixepoch() * 1000,
+ unixepoch() * 1000,
+ "Let's Encrypt (Testing)",
+ "https://acme-staging-v02.api.letsencrypt.org/directory",
+ 1,
+ 10,
+ 1
+), (
+ unixepoch() * 1000,
+ unixepoch() * 1000,
+ "Buypass Go SSL (Testing)",
+ "https://api.test4.buypass.no/acme/directory",
+ 0,
+ 5,
+ 1
+);
+
+-- System User
+INSERT INTO `user` (
+ created_at,
+ updated_at,
+ name,
+ email,
+ is_system
+) VALUES (
+ unixepoch() * 1000,
+ unixepoch() * 1000,
+ "System",
+ "system@localhost",
+ 1
+);
+
+-- Host Templates
+INSERT INTO `nginx_template` (
+ created_at,
+ updated_at,
+ user_id,
+ name,
+ type,
+ template
+) VALUES (
+ unixepoch() * 1000,
+ unixepoch() * 1000,
+ (SELECT id FROM user WHERE is_system = 1 LIMIT 1),
+ "Default Proxy Template",
+ "proxy",
+ "# ------------------------------------------------------------
+{{#each Host.DomainNames}}
+# {{this}}
+{{/each}}
+# ------------------------------------------------------------
+
+server {
+ {{#if Config.Ipv4}}
+ listen 80;
+ {{/if}}
+ {{#if Config.Ipv6}}
+ listen [::]:80;
+ {{/if}}
+
+ {{#if Certificate.ID}}
+ {{#if Config.Ipv4}}
+ listen 443 ssl {{#if Host.HTTP2Support}}http2{{/if}};
+ {{/if}}
+ {{#if Config.Ipv6}}
+ listen [::]:443 ssl {{#if Host.HTTP2Support}}http2{{/if}};
+ {{/if}}
+ {{/if}}
+
+ server_name {{#each Host.DomainNames}}{{this}} {{/each}};
+
+ {{#if Certificate.ID}}
+ include conf.d/include/ssl-ciphers.conf;
+ {{#if Certificate.IsAcme}}
+ ssl_certificate {{Certificate.Folder}}/fullchain.pem;
+ ssl_certificate_key {{Certificate.Folder}}/privkey.pem;
+ {{else}}
+ # Custom SSL
+ ssl_certificate /data/custom_ssl/npm-{{Certicicate.ID}}/fullchain.pem;
+ ssl_certificate_key /data/custom_ssl/npm-{{Certificate.ID}}/privkey.pem;
+ {{/if}}
+ {{/if}}
+
+ {{#if Host.CachingEnabled}}
+ include conf.d/include/assets.conf;
+ {{/if}}
+
+ {{#if Host.BlockExploits}}
+ include conf.d/include/block-exploits.conf;
+ {{/if}}
+
+ {{#if Certificate.ID}}
+ {{#if Host.SSLForced}}
+ {{#if Host.HSTSEnabled}}
+ # HSTS (ngx_http_headers_module is required) (63072000 seconds = 2 years)
+ add_header Strict-Transport-Security ""max-age=63072000;{{#if Host.HSTSSubdomains}} includeSubDomains;{{/if}} preload"" always;
+ {{/if}}
+ # Force SSL
+ include conf.d/include/force-ssl.conf;
+ {{/if}}
+ {{/if}}
+
+ {{#if Host.AllowWebsocketUpgrade}}
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection $http_connection;
+ proxy_http_version 1.1;
+ {{/if}}
+
+ access_log /data/logs/host-{{Host.ID}}_access.log proxy;
+ error_log /data/logs/host-{{Host.ID}}_error.log warn;
+
+ {{Host.AdvancedConfig}}
+
+ # locations ?
+
+ # default location:
+ location / {
+ {{#if Host.AccessListID}}
+ # Authorization
+ auth_basic ""Authorization required"";
+ auth_basic_user_file /data/access/{{Host.AccessListID}};
+ # access_list.passauth ? todo
+ {{/if}}
+
+ # Access Rules ? todo
+
+ # Access checks must...? todo
+
+ {{#if Certificate.ID}}
+ {{#if Host.SSLForced}}
+ {{#if Host.HSTSEnabled}}
+ # HSTS (ngx_http_headers_module is required) (63072000 seconds = 2 years)
+ add_header Strict-Transport-Security ""max-age=63072000;{{#if Host.HSTSSubdomains}} includeSubDomains;{{/if}} preload"" always;
+ {{/if}}
+ {{/if}}
+ {{/if}}
+
+ {{#if Host.AllowWebsocketUpgrade}}
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection $http_connection;
+ {{/if}}
+
+ # Proxy!
+ add_header X-Served-By $host;
+ proxy_set_header Host $host;
+ proxy_set_header X-Forwarded-Scheme $scheme;
+ proxy_set_header X-Forwarded-Proto $scheme;
+ proxy_set_header X-Forwarded-For $remote_addr;
+ proxy_http_version 1.1;
+
+ {{#if Upstream.ID}}
+ # upstream
+ proxy_pass {{Host.ProxyScheme}}://npm_upstream_{{Upstream.ID}};
+ {{else}}
+ # proxy a single host
+ proxy_pass {{Host.ProxyScheme}}://{{Host.ProxyHost}}:{{Host.ProxyPort}};
+ {{/if}}
+ }
+
+ # Legacy Custom Configuration
+ include /data/nginx/custom/server_proxy[.]conf;
+}
+"
+), (
+ unixepoch() * 1000,
+ unixepoch() * 1000,
+ (SELECT id FROM user WHERE is_system = 1 LIMIT 1),
+ "Default Redirect Template",
+ "redirect",
+ "# this is a redirect template"
+), (
+ unixepoch() * 1000,
+ unixepoch() * 1000,
+ (SELECT id FROM user WHERE is_system = 1 LIMIT 1),
+ "Default Dead Template",
+ "dead",
+ "# this is a dead template"
+), (
+ unixepoch() * 1000,
+ unixepoch() * 1000,
+ (SELECT id FROM user WHERE is_system = 1 LIMIT 1),
+ "Default Stream Template",
+ "stream",
+ "# this is a stream template"
+), (
+ unixepoch() * 1000,
+ unixepoch() * 1000,
+ (SELECT id FROM user WHERE is_system = 1 LIMIT 1),
+ "Default Upstream Template",
+ "upstream",
+ "# ------------------------------------------------------------
+# Upstream {{Upstream.ID}}: {{Upstream.Name}}
+# ------------------------------------------------------------
+
+upstream npm_upstream_{{Upstream.ID}} {
+
+ {{#if Upstream.IPHash~}}
+ ip_hash;
+ {{~/if}}
+
+ {{#if Upstream.NTLM~}}
+ ntlm;
+ {{~/if}}
+
+ {{#if Upstream.Keepalive~}}
+ keepalive {{Upstream.Keepalive}};
+ {{~/if}}
+
+ {{#if Upstream.KeepaliveRequests~}}
+ keepalive_requests {{Upstream.KeepaliveRequests}};
+ {{~/if}}
+
+ {{#if Upstream.KeepaliveTime~}}
+ keepalive_time {{Upstream.KeepaliveTime}};
+ {{~/if}}
+
+ {{#if Upstream.KeepaliveTimeout~}}
+ keepalive_timeout {{Upstream.KeepaliveTimeout}};
+ {{~/if}}
+
+ {{Upstream.AdvancedConfig}}
+
+ {{#each Upstream.Servers~}}
+ {{#unless IsDeleted~}}
+ server {{Server}} {{#if Weight}}weight={{Weight}} {{/if}}{{#if MaxConns}}max_conns={{MaxConns}} {{/if}}{{#if MaxFails}}max_fails={{MaxFails}} {{/if}}{{#if FailTimeout}}fail_timeout={{FailTimeout}} {{/if}}{{#if Backup}}backup{{/if}};
+ {{/unless}}
+ {{/each}}
+}
+"
+);
+
+-- migrate:down
diff --git a/backend/embed/nginx/_assets.conf.hbs b/backend/embed/nginx/_assets.conf.hbs
new file mode 100644
index 000000000..736397673
--- /dev/null
+++ b/backend/embed/nginx/_assets.conf.hbs
@@ -0,0 +1,4 @@
+{{#if caching_enabled}}
+ # Asset Caching
+ include conf.d/include/assets.conf;
+{{/if}}
diff --git a/backend/embed/nginx/_certificates.conf.hbs b/backend/embed/nginx/_certificates.conf.hbs
new file mode 100644
index 000000000..d114f9820
--- /dev/null
+++ b/backend/embed/nginx/_certificates.conf.hbs
@@ -0,0 +1,13 @@
+{{#if certificate}}
+ {{#if (equal certificate.certificate_authority_id "0")}}
+ # Custom SSL
+ ssl_certificate {{npm_data_dir}}/custom_ssl/npm-{{certificate.id}}/fullchain.pem;
+ ssl_certificate_key {{npm_data_dir}}/custom_ssl/npm-{{certificate.id}}/privkey.pem;
+ {{else}}
+ # Acme SSL
+ include {{nginx_conf_dir}}/npm/conf.d/acme-challenge.conf;
+ include {{nginx_conf_dir}}/npm/conf.d/include/ssl-ciphers.conf;
+ ssl_certificate {{acme_certs_dir}}/npm-{{certificate.id}}/fullchain.pem;
+ ssl_certificate_key {{acme_certs_dir}}/npm-{{certificate.id}}/privkey.pem;
+ {{/if}}
+{{/if}}
diff --git a/backend/embed/nginx/_forced_ssl.conf.hbs b/backend/embed/nginx/_forced_ssl.conf.hbs
new file mode 100644
index 000000000..970296e03
--- /dev/null
+++ b/backend/embed/nginx/_forced_ssl.conf.hbs
@@ -0,0 +1,6 @@
+{{#if certificate}}
+ {{#if ssl_forced}}
+ # Force SSL
+ include {{nginx_conf_dir}}/npm/conf.d/include/force-ssl.conf;
+ {{/if}}
+{{/if}}
diff --git a/backend/embed/nginx/_hsts.conf.hbs b/backend/embed/nginx/_hsts.conf.hbs
new file mode 100644
index 000000000..c27da5aa3
--- /dev/null
+++ b/backend/embed/nginx/_hsts.conf.hbs
@@ -0,0 +1,8 @@
+{{#if certificate}}
+ {{#if ssl_forced}}
+ {{#if hsts_enabled}}
+ # HSTS (ngx_http_headers_module is required) (63072000 seconds = 2 years)
+ add_header Strict-Transport-Security "max-age=63072000;{{#if hsts_subdomains}} includeSubDomains;{{/if}} preload" always;
+ {{/if}}
+ {{/if}}
+{{/if}}
diff --git a/backend/embed/nginx/_listen.conf.hbs b/backend/embed/nginx/_listen.conf.hbs
new file mode 100644
index 000000000..217da00f9
--- /dev/null
+++ b/backend/embed/nginx/_listen.conf.hbs
@@ -0,0 +1,18 @@
+listen 80;
+
+{{#if ipv6}}
+ listen [::]:80;
+{{else}}
+ #listen [::]:80;
+{{/if}}
+
+{{#if certificate}}
+ listen 443 ssl{% if http2_support %} http2{% endif %};
+ {{#if ipv6}}
+ listen [::]:443;
+ {{else}}
+ #listen [::]:443;
+ {{/if}}
+{{/if}}
+
+server_name{{#each domain_names}} {{this}}{{/each}};
diff --git a/backend/embed/nginx/_location.conf.hbs b/backend/embed/nginx/_location.conf.hbs
new file mode 100644
index 000000000..167d46eb8
--- /dev/null
+++ b/backend/embed/nginx/_location.conf.hbs
@@ -0,0 +1,40 @@
+location {{path}} {
+ proxy_set_header Host $host;
+ proxy_set_header X-Forwarded-Scheme $scheme;
+ proxy_set_header X-Forwarded-Proto $scheme;
+ proxy_set_header X-Forwarded-For $remote_addr;
+ proxy_set_header X-Real-IP $remote_addr;
+ proxy_pass {{forward_scheme}}://{{forward_host}}:{{forward_port}}{{forward_path}};
+
+ {{#if access_list}}
+ {{#if access_list.items}}
+ # Authorization
+ auth_basic "Authorization required";
+ auth_basic_user_file {{npm_data_dir}}/access/{{access_list.id}};
+ {{access_list.passauth}}
+ {{/if}}
+
+ # Access Rules
+ {{#each access_list.clients as |client clientIdx|}}
+ {{client.rule}};
+ {{/each}}deny all;
+
+ # Access checks must...
+ {{#if access_list.satisfy}}
+ {{access_list.satisfy}};
+ {{/if}}
+ {{/if}}
+
+ {{> inc_assets}}
+ {{> inc_forced_ssl}}
+ {{> inc_hsts}}
+
+ {{#if allow_websocket_upgrade}}
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection $http_connection;
+ proxy_http_version 1.1;
+ {{/if}}
+
+ {{advanced_config}}
+ }
+
diff --git a/backend/embed/nginx/acme-request.conf.hbs b/backend/embed/nginx/acme-request.conf.hbs
new file mode 100644
index 000000000..aac148959
--- /dev/null
+++ b/backend/embed/nginx/acme-request.conf.hbs
@@ -0,0 +1,15 @@
+server {
+ listen 80;
+ {{#if ipv6}}
+ listen [::]:80;
+ {{/if}}
+
+ server_name{{#each domain_names}} {{this}}{{/each}};
+ access_log {{npm_data_dir}}/logs/acme-requests_access.log standard;
+ error_log {{npm_data_dir}}/logs/acme-requests_error.log warn;
+ {{nginx_conf_dir}}/npm/conf.d/include/letsencrypt-acme-challenge.conf;
+
+ location / {
+ return 404;
+ }
+}
diff --git a/backend/embed/nginx/dead_host.conf.hbs b/backend/embed/nginx/dead_host.conf.hbs
new file mode 100644
index 000000000..289940c89
--- /dev/null
+++ b/backend/embed/nginx/dead_host.conf.hbs
@@ -0,0 +1,20 @@
+{{#if enabled}}
+ server {
+ {{> inc_listen}}
+ {{> inc_certificates}}
+ {{> inc_hsts}}
+ {{> inc_forced_ssl}}
+
+ access_log {{npm_data_dir}}/logs/dead-host-{{id}}_access.log standard;
+ error_log {{npm_data_dir}}/logs/dead-host-{{id}}_error.log warn;
+
+ {{advanced_config}}
+
+ {{#if use_default_location}}
+ location / {
+ {{> inc_hsts}}
+ return 404;
+ }
+ {{/if}}
+ }
+{{/if}}
diff --git a/backend/embed/nginx/default.conf.hbs b/backend/embed/nginx/default.conf.hbs
new file mode 100644
index 000000000..190ec02bb
--- /dev/null
+++ b/backend/embed/nginx/default.conf.hbs
@@ -0,0 +1,35 @@
+{{#if (equal value "congratulations")}}
+ # Skipping output, congratulations page configration is baked in.
+{{else}}
+ server {
+ listen 80 default;
+ {{#if ipv6}}
+ listen [::]:80;
+ {{else}}
+ #listen [::]:80;
+ {{/if}}
+
+ server_name default-host.localhost;
+ access_log {{npm_data_dir}}/logs/default-host_access.log combined;
+ error_log {{npm_data_dir}}/logs/default-host_error.log warn;
+
+ {{#if (equal value "404")}}
+ location / {
+ return 404;
+ }
+ {{/if}}
+
+ {{#if (equal value "redirect")}}
+ location / {
+ return 301 {{meta.redirect}};
+ }
+ {{/if}}
+
+ {{#if (equal value "html")}}
+ root {{npm_data_dir}}/nginx/default_www;
+ location / {
+ try_files $uri /index.html;
+ }
+ {{/if}}
+ }
+{{/if}}
diff --git a/backend/embed/nginx/ip_ranges.conf.hbs b/backend/embed/nginx/ip_ranges.conf.hbs
new file mode 100644
index 000000000..7b7c3d07e
--- /dev/null
+++ b/backend/embed/nginx/ip_ranges.conf.hbs
@@ -0,0 +1,3 @@
+{{#each ip_ranges as |range rangeIdx|}}
+ set_real_ip_from {{range}};
+{{/each}}
diff --git a/backend/embed/nginx/proxy_host.conf.hbs b/backend/embed/nginx/proxy_host.conf.hbs
new file mode 100644
index 000000000..e7681f4ba
--- /dev/null
+++ b/backend/embed/nginx/proxy_host.conf.hbs
@@ -0,0 +1,62 @@
+{{#if enabled}}
+ server {
+ set $forward_scheme {{forward_scheme}};
+ set $server "{{forward_host}}";
+ set $port {{forward_port}};
+
+ {{> inc_listen}}
+ {{> inc_certificates}}
+ {{> inc_assets}}
+ {{> inc_hsts}}
+ {{> inc_forced_ssl}}
+
+ {{#if allow_websocket_upgrade}}
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection $http_connection;
+ proxy_http_version 1.1;
+ {{/if}}
+
+ access_log {{npm_data_dir}}/logs/proxy-host-{{id}}_access.log proxy;
+ error_log {{npm_data_dir}}/logs/proxy-host-{{id}}_error.log warn;
+
+ {{advanced_config}}
+ {{locations}}
+
+ {{#if use_default_location}}
+ location / {
+ {{#if access_list}}
+ {{#if access_list.items}}
+ # Authorization
+ auth_basic "Authorization required";
+ auth_basic_user_file {{npm_data_dir}}/access/{{access_list.id}};
+ {{access_list.passauth}}
+ {{/if}}
+
+ # Access Rules
+ {{#each access_list.clients as |client clientIdx|}}
+ {{client.rule}};
+ {{/each}}deny all;
+
+ # Access checks must...
+ {{#if access_list.satisfy}}
+ {{access_list.satisfy}};
+ {{/if}}
+ {{/if}}
+
+ {{> inc_hsts}}
+
+ {{#if allow_websocket_upgrade}}
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection $http_connection;
+ proxy_http_version 1.1;
+ {{/if}}
+
+ # Proxy!
+ include {{nginx_conf_dir}}/npm/conf.d/include/proxy.conf;
+ }
+ {{/if}}
+
+ # Custom
+ include {{npm_data_dir}}/nginx/custom/server_proxy[.]conf;
+ }
+{{/if}}
diff --git a/backend/embed/nginx/redirection_host.conf.hbs b/backend/embed/nginx/redirection_host.conf.hbs
new file mode 100644
index 000000000..18208c6c3
--- /dev/null
+++ b/backend/embed/nginx/redirection_host.conf.hbs
@@ -0,0 +1,28 @@
+{{#if enabled}}
+ server {
+ {{> inc_listen}}
+ {{> inc_certificates}}
+ {{> inc_assets}}
+ {{> inc_hsts}}
+ {{> inc_forced_ssl}}
+
+ access_log {{npm_data_dir}}/logs/redirection-host-{{ id }}_access.log standard;
+ error_log {{npm_data_dir}}/logs/redirection-host-{{ id }}_error.log warn;
+
+ {{advanced_config}}
+
+ {{#if use_default_location}}
+ location / {
+ {{> inc_hsts}}
+ {{#if preserve_path}}
+ return {{forward_http_code}} {{forward_scheme}}://{{forward_domain_name}}$request_uri;
+ {{else}}
+ return {{forward_http_code}} {{forward_scheme}}://{{forward_domain_name}};
+ {{/if}}
+ }
+ {{/if}}
+
+ # Custom
+ include {{npm_data_dir}}/nginx/custom/server_redirect[.]conf;
+ }
+{{/if}}
diff --git a/backend/embed/nginx/stream.conf.hbs b/backend/embed/nginx/stream.conf.hbs
new file mode 100644
index 000000000..bc85bbfa4
--- /dev/null
+++ b/backend/embed/nginx/stream.conf.hbs
@@ -0,0 +1,34 @@
+{{#if enabled}}
+ {{#if tcp_forwarding}}
+ server {
+ listen {{incoming_port}};
+ {{#if ipv6}}
+ listen [::]:{{incoming_port}};
+ {{else}}
+ #listen [::]:{{incoming_port}};
+ {{/if}}
+
+ proxy_pass {{forward_ip}}:{{forwarding_port}};
+
+ # Custom
+ include {{npm_data_dir}}/nginx/custom/server_stream[.]conf;
+ include {{npm_data_dir}}/nginx/custom/server_stream_tcp[.]conf;
+ }
+ {{/if}}
+
+ {{#if udp_forwarding}}
+ server {
+ listen {{incoming_port}} udp;
+ {{#if ipv6}}
+ listen [::]:{{ incoming_port }} udp;
+ {{else}}
+ #listen [::]:{{incoming_port}} udp;
+ {{/if}}
+ proxy_pass {{forward_ip}}:{{forwarding_port}};
+
+ # Custom
+ include {{npm_data_dir}}/nginx/custom/server_stream[.]conf;
+ include {{npm_data_dir}}/nginx/custom/server_stream_udp[.]conf;
+ }
+ {{/if}}
+{{/if}}
diff --git a/backend/go.mod b/backend/go.mod
new file mode 100644
index 000000000..43b54830a
--- /dev/null
+++ b/backend/go.mod
@@ -0,0 +1,98 @@
+module npm
+
+go 1.24
+
+toolchain go1.24.3
+
+require (
+ github.com/DATA-DOG/go-sqlmock v1.5.2
+ github.com/alexflint/go-arg v1.5.1
+ github.com/amacneil/dbmate/v2 v2.20.0
+ github.com/aymerick/raymond v2.0.3-0.20180322193309-b565731e1464+incompatible
+ github.com/dgrijalva/jwt-go v3.2.0+incompatible
+ github.com/drexedam/gravatar v0.0.0-20210327211422-e94eea8c338e
+ github.com/fatih/color v1.18.0
+ github.com/glebarez/sqlite v1.11.0
+ github.com/go-chi/chi/v5 v5.2.2
+ github.com/go-chi/cors v1.2.2
+ github.com/go-chi/jwtauth/v5 v5.3.3
+ github.com/go-ldap/ldap/v3 v3.4.11
+ github.com/jc21/go-sse v1.7.0
+ github.com/jc21/jsref v0.0.0-20250501111625-0ce4620b7d96
+ github.com/patrickmn/go-cache v2.1.0+incompatible
+ github.com/qri-io/jsonschema v0.2.1
+ github.com/rotisserie/eris v0.5.4
+ github.com/stretchr/testify v1.10.0
+ github.com/vrischmann/envconfig v1.4.1
+ go.uber.org/automaxprocs v1.6.0
+ go.uber.org/goleak v1.3.0
+ golang.org/x/crypto v0.39.0
+ golang.org/x/oauth2 v0.30.0
+ gorm.io/datatypes v1.2.6
+ gorm.io/driver/mysql v1.6.0
+ gorm.io/driver/postgres v1.6.0
+ gorm.io/gorm v1.30.0
+ gorm.io/plugin/soft_delete v1.2.1
+)
+
+require (
+ filippo.io/edwards25519 v1.1.0 // indirect
+ github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 // indirect
+ github.com/alexflint/go-scalar v1.2.0 // indirect
+ github.com/davecgh/go-spew v1.1.1 // indirect
+ github.com/decred/dcrd/dcrec/secp256k1/v4 v4.3.0 // indirect
+ github.com/dustin/go-humanize v1.0.1 // indirect
+ github.com/glebarez/go-sqlite v1.22.0 // indirect
+ github.com/go-asn1-ber/asn1-ber v1.5.8-0.20250403174932-29230038a667 // indirect
+ github.com/go-sql-driver/mysql v1.8.1 // indirect
+ github.com/goccy/go-json v0.10.3 // indirect
+ github.com/google/uuid v1.6.0 // indirect
+ github.com/jackc/pgpassfile v1.0.0 // indirect
+ github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
+ github.com/jackc/pgx/v5 v5.7.1 // indirect
+ github.com/jackc/puddle/v2 v2.2.2 // indirect
+ github.com/jinzhu/inflection v1.0.0 // indirect
+ github.com/jinzhu/now v1.1.5 // indirect
+ github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect
+ github.com/lestrrat-go/blackmagic v1.0.2 // indirect
+ github.com/lestrrat-go/httpcc v1.0.1 // indirect
+ github.com/lestrrat-go/httprc v1.0.6 // indirect
+ github.com/lestrrat-go/iter v1.0.2 // indirect
+ github.com/lestrrat-go/jspointer v0.0.0-20181205001929-82fadba7561c // indirect
+ github.com/lestrrat-go/jwx/v2 v2.1.3 // indirect
+ github.com/lestrrat-go/option v1.0.1 // indirect
+ github.com/lestrrat-go/pdebug/v3 v3.0.1 // indirect
+ github.com/lestrrat-go/structinfo v0.0.0-20210312050401-7f8bd69d6acb // indirect
+ github.com/lib/pq v1.10.9 // indirect
+ github.com/mattn/go-colorable v0.1.13 // indirect
+ github.com/mattn/go-isatty v0.0.20 // indirect
+ github.com/ncruces/go-strftime v0.1.9 // indirect
+ github.com/pkg/errors v0.9.1 // indirect
+ github.com/pmezard/go-difflib v1.0.0 // indirect
+ github.com/qri-io/jsonpointer v0.1.1 // indirect
+ github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
+ github.com/rogpeppe/go-internal v1.12.0 // indirect
+ github.com/segmentio/asm v1.2.0 // indirect
+ github.com/stretchr/objx v0.5.2 // indirect
+ golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c // indirect
+ golang.org/x/mod v0.25.0 // indirect
+ golang.org/x/sync v0.15.0 // indirect
+ golang.org/x/sys v0.33.0 // indirect
+ golang.org/x/text v0.26.0 // indirect
+ golang.org/x/tools v0.33.0 // indirect
+ gopkg.in/yaml.v3 v3.0.1 // indirect
+ lukechampine.com/uint128 v1.3.0 // indirect
+ modernc.org/cc/v3 v3.41.0 // indirect
+ modernc.org/ccgo/v3 v3.17.0 // indirect
+ modernc.org/libc v1.61.0 // indirect
+ modernc.org/mathutil v1.6.0 // indirect
+ modernc.org/memory v1.8.0 // indirect
+ modernc.org/opt v0.1.3 // indirect
+ modernc.org/sqlite v1.28.0 // indirect
+ modernc.org/strutil v1.2.0 // indirect
+ modernc.org/token v1.1.0 // indirect
+)
+
+replace github.com/amacneil/dbmate/v2 => github.com/jc21/dbmate/v2 v2.0.0-20230527023241-0aaa124cc0f1
+
+replace modernc.org/sqlite => gitlab.com/jc21com/sqlite v1.22.2-0.20230527022643-b56cedb3bc85
diff --git a/backend/go.sum b/backend/go.sum
new file mode 100644
index 000000000..bb7796ce9
--- /dev/null
+++ b/backend/go.sum
@@ -0,0 +1,261 @@
+filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA=
+filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
+github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 h1:mFRzDkZVAjdal+s7s0MwaRv9igoPqLRdzOLzw/8Xvq8=
+github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358/go.mod h1:chxPXzSsl7ZWRAuOIE23GDNzjWuZquvFlgA8xmpunjU=
+github.com/DATA-DOG/go-sqlmock v1.5.2 h1:OcvFkGmslmlZibjAjaHm3L//6LiuBgolP7OputlJIzU=
+github.com/DATA-DOG/go-sqlmock v1.5.2/go.mod h1:88MAG/4G7SMwSE3CeA0ZKzrT5CiOU3OJ+JlNzwDqpNU=
+github.com/alexbrainman/sspi v0.0.0-20231016080023-1a75b4708caa h1:LHTHcTQiSGT7VVbI0o4wBRNQIgn917usHWOd6VAffYI=
+github.com/alexbrainman/sspi v0.0.0-20231016080023-1a75b4708caa/go.mod h1:cEWa1LVoE5KvSD9ONXsZrj0z6KqySlCCNKHlLzbqAt4=
+github.com/alexflint/go-arg v1.5.1 h1:nBuWUCpuRy0snAG+uIJ6N0UvYxpxA0/ghA/AaHxlT8Y=
+github.com/alexflint/go-arg v1.5.1/go.mod h1:A7vTJzvjoaSTypg4biM5uYNTkJ27SkNTArtYXnlqVO8=
+github.com/alexflint/go-scalar v1.2.0 h1:WR7JPKkeNpnYIOfHRa7ivM21aWAdHD0gEWHCx+WQBRw=
+github.com/alexflint/go-scalar v1.2.0/go.mod h1:LoFvNMqS1CPrMVltza4LvnGKhaSpc3oyLEBUZVhhS2o=
+github.com/aymerick/raymond v2.0.3-0.20180322193309-b565731e1464+incompatible h1:Ppm0npCCsmuR9oQaBtRuZcmILVE74aXE+AmrJj8L2ns=
+github.com/aymerick/raymond v2.0.3-0.20180322193309-b565731e1464+incompatible/go.mod h1:osfaiScAUVup+UC9Nfq76eWqDhXlp+4UYaA8uhTBO6g=
+github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/decred/dcrd/dcrec/secp256k1/v4 v4.3.0 h1:rpfIENRNNilwHwZeG5+P150SMrnNEcHYvcCuK6dPZSg=
+github.com/decred/dcrd/dcrec/secp256k1/v4 v4.3.0/go.mod h1:v57UDF4pDQJcEfFUCRop3lJL149eHGSe9Jvczhzjo/0=
+github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM=
+github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
+github.com/drexedam/gravatar v0.0.0-20210327211422-e94eea8c338e h1:2R8DvYLNr5DL25eWwpOdPno1eIbTNjJC0d7v8ti5cus=
+github.com/drexedam/gravatar v0.0.0-20210327211422-e94eea8c338e/go.mod h1:YjikoytuRI4q+GRd3xrOrKJN+Ayi2dwRomHLDDeMHfs=
+github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
+github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
+github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM=
+github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU=
+github.com/glebarez/go-sqlite v1.22.0 h1:uAcMJhaA6r3LHMTFgP0SifzgXg46yJkgxqyuyec+ruQ=
+github.com/glebarez/go-sqlite v1.22.0/go.mod h1:PlBIdHe0+aUEFn+r2/uthrWq4FxbzugL0L8Li6yQJbc=
+github.com/glebarez/sqlite v1.11.0 h1:wSG0irqzP6VurnMEpFGer5Li19RpIRi2qvQz++w0GMw=
+github.com/glebarez/sqlite v1.11.0/go.mod h1:h8/o8j5wiAsqSPoWELDUdJXhjAhsVliSn7bWZjOhrgQ=
+github.com/go-asn1-ber/asn1-ber v1.5.8-0.20250403174932-29230038a667 h1:BP4M0CvQ4S3TGls2FvczZtj5Re/2ZzkV9VwqPHH/3Bo=
+github.com/go-asn1-ber/asn1-ber v1.5.8-0.20250403174932-29230038a667/go.mod h1:hEBeB/ic+5LoWskz+yKT7vGhhPYkProFKoKdwZRWMe0=
+github.com/go-chi/chi/v5 v5.2.2 h1:CMwsvRVTbXVytCk1Wd72Zy1LAsAh9GxMmSNWLHCG618=
+github.com/go-chi/chi/v5 v5.2.2/go.mod h1:L2yAIGWB3H+phAw1NxKwWM+7eUH/lU8pOMm5hHcoops=
+github.com/go-chi/cors v1.2.2 h1:Jmey33TE+b+rB7fT8MUy1u0I4L+NARQlK6LhzKPSyQE=
+github.com/go-chi/cors v1.2.2/go.mod h1:sSbTewc+6wYHBBCW7ytsFSn836hqM7JxpglAy2Vzc58=
+github.com/go-chi/jwtauth/v5 v5.3.3 h1:50Uzmacu35/ZP9ER2Ht6SazwPsnLQ9LRJy6zTZJpHEo=
+github.com/go-chi/jwtauth/v5 v5.3.3/go.mod h1:O4QvPRuZLZghl9WvfVaON+ARfGzpD2PBX/QY5vUz7aQ=
+github.com/go-ldap/ldap/v3 v3.4.11 h1:4k0Yxweg+a3OyBLjdYn5OKglv18JNvfDykSoI8bW0gU=
+github.com/go-ldap/ldap/v3 v3.4.11/go.mod h1:bY7t0FLK8OAVpp/vV6sSlpz3EQDGcQwc8pF0ujLgKvM=
+github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y=
+github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg=
+github.com/goccy/go-json v0.10.3 h1:KZ5WoDbxAIgm2HNbYckL0se1fHD6rz5j4ywS6ebzDqA=
+github.com/goccy/go-json v0.10.3/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
+github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 h1:au07oEsX2xN0ktxqI+Sida1w446QrXBRJ0nee3SNZlA=
+github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0=
+github.com/golang-sql/sqlexp v0.1.0 h1:ZCD6MBpcuOVfGVqsEmY5/4FtYiKz6tSyUv9LPEDei6A=
+github.com/golang-sql/sqlexp v0.1.0/go.mod h1:J4ad9Vo8ZCWQ2GMrC4UCQy1JpCbwU9m3EOqtpKwwwHI=
+github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
+github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
+github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26 h1:Xim43kblpZXfIBQsbuBVKCudVG457BR2GZFIz3uw3hQ=
+github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26/go.mod h1:dDKJzRmX4S37WGHujM7tX//fmj1uioxKzKxz3lo4HJo=
+github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
+github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8=
+github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
+github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
+github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
+github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo=
+github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
+github.com/jackc/pgx/v5 v5.7.1 h1:x7SYsPBYDkHDksogeSmZZ5xzThcTgRz++I5E+ePFUcs=
+github.com/jackc/pgx/v5 v5.7.1/go.mod h1:e7O26IywZZ+naJtWWos6i6fvWK+29etgITqrqHLfoZA=
+github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo=
+github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
+github.com/jc21/dbmate/v2 v2.0.0-20230527023241-0aaa124cc0f1 h1:WEZwsDG5eXdgh0NfA9SpuShOP6rJCah22ihvZsaoimM=
+github.com/jc21/dbmate/v2 v2.0.0-20230527023241-0aaa124cc0f1/go.mod h1:XAPcHsokw7nyvFbuN9FdcYb8JSEUTaJDFYOirnNxEvc=
+github.com/jc21/go-sse v1.7.0 h1:Lavb7FGssS2UdJMK1P3r4rQJgg2JZx7BINi8pd/QzKg=
+github.com/jc21/go-sse v1.7.0/go.mod h1:dbA0LtDgvSEhlAXz+meN1KAcdvcCLiF0aVdhjhZjbGI=
+github.com/jc21/jsref v0.0.0-20250501111625-0ce4620b7d96 h1:LzBXyNdALQHC7DUy8PE7IFTihmnSRop4Ps2cBIr/WP8=
+github.com/jc21/jsref v0.0.0-20250501111625-0ce4620b7d96/go.mod h1:Vno1sw0yqqImXnYy3q6ueIG+h+Vwwfbkfw9DTHCrHfY=
+github.com/jcmturner/aescts/v2 v2.0.0 h1:9YKLH6ey7H4eDBXW8khjYslgyqG2xZikXP0EQFKrle8=
+github.com/jcmturner/aescts/v2 v2.0.0/go.mod h1:AiaICIRyfYg35RUkr8yESTqvSy7csK90qZ5xfvvsoNs=
+github.com/jcmturner/dnsutils/v2 v2.0.0 h1:lltnkeZGL0wILNvrNiVCR6Ro5PGU/SeBvVO/8c/iPbo=
+github.com/jcmturner/dnsutils/v2 v2.0.0/go.mod h1:b0TnjGOvI/n42bZa+hmXL+kFJZsFT7G4t3HTlQ184QM=
+github.com/jcmturner/gofork v1.7.6 h1:QH0l3hzAU1tfT3rZCnW5zXl+orbkNMMRGJfdJjHVETg=
+github.com/jcmturner/gofork v1.7.6/go.mod h1:1622LH6i/EZqLloHfE7IeZ0uEJwMSUyQ/nDd82IeqRo=
+github.com/jcmturner/goidentity/v6 v6.0.1 h1:VKnZd2oEIMorCTsFBnJWbExfNN7yZr3EhJAxwOkZg6o=
+github.com/jcmturner/goidentity/v6 v6.0.1/go.mod h1:X1YW3bgtvwAXju7V3LCIMpY0Gbxyjn/mY9zx4tFonSg=
+github.com/jcmturner/gokrb5/v8 v8.4.4 h1:x1Sv4HaTpepFkXbt2IkL29DXRf8sOfZXo8eRKh687T8=
+github.com/jcmturner/gokrb5/v8 v8.4.4/go.mod h1:1btQEpgT6k+unzCwX1KdWMEwPPkkgBtP+F6aCACiMrs=
+github.com/jcmturner/rpc/v2 v2.0.3 h1:7FXXj8Ti1IaVFpSAziCZWNzbNuZmnvw/i6CqLNdWfZY=
+github.com/jcmturner/rpc/v2 v2.0.3/go.mod h1:VUJYCIDm3PVOEHw8sgt091/20OJjskO/YJki3ELg/Hc=
+github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
+github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
+github.com/jinzhu/now v1.1.1/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
+github.com/jinzhu/now v1.1.4/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
+github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ=
+github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
+github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs=
+github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8=
+github.com/kisielk/sqlstruct v0.0.0-20201105191214-5f3e10d3ab46/go.mod h1:yyMNCyc/Ib3bDTKd379tNMpB/7/H5TjM2Y9QJ5THLbE=
+github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0=
+github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
+github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
+github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
+github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
+github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
+github.com/lestrrat-go/blackmagic v1.0.2 h1:Cg2gVSc9h7sz9NOByczrbUvLopQmXrfFx//N+AkAr5k=
+github.com/lestrrat-go/blackmagic v1.0.2/go.mod h1:UrEqBzIR2U6CnzVyUtfM6oZNMt/7O7Vohk2J0OGSAtU=
+github.com/lestrrat-go/httpcc v1.0.1 h1:ydWCStUeJLkpYyjLDHihupbn2tYmZ7m22BGkcvZZrIE=
+github.com/lestrrat-go/httpcc v1.0.1/go.mod h1:qiltp3Mt56+55GPVCbTdM9MlqhvzyuL6W/NMDA8vA5E=
+github.com/lestrrat-go/httprc v1.0.6 h1:qgmgIRhpvBqexMJjA/PmwSvhNk679oqD1RbovdCGW8k=
+github.com/lestrrat-go/httprc v1.0.6/go.mod h1:mwwz3JMTPBjHUkkDv/IGJ39aALInZLrhBp0X7KGUZlo=
+github.com/lestrrat-go/iter v1.0.2 h1:gMXo1q4c2pHmC3dn8LzRhJfP1ceCbgSiT9lUydIzltI=
+github.com/lestrrat-go/iter v1.0.2/go.mod h1:Momfcq3AnRlRjI5b5O8/G5/BvpzrhoFTZcn06fEOPt4=
+github.com/lestrrat-go/jspointer v0.0.0-20181205001929-82fadba7561c h1:pGh5EFIfczeDHwgMHgfwjhZzL+8/E3uZF6T7vER/W8c=
+github.com/lestrrat-go/jspointer v0.0.0-20181205001929-82fadba7561c/go.mod h1:xw2Gm4Mg+ST9s8fHR1VkUIyOJMJnSloRZlPQB+wyVpY=
+github.com/lestrrat-go/jwx/v2 v2.1.3 h1:Ud4lb2QuxRClYAmRleF50KrbKIoM1TddXgBrneT5/Jo=
+github.com/lestrrat-go/jwx/v2 v2.1.3/go.mod h1:q6uFgbgZfEmQrfJfrCo90QcQOcXFMfbI/fO0NqRtvZo=
+github.com/lestrrat-go/option v0.0.0-20210103042652-6f1ecfceda35/go.mod h1:5ZHFbivi4xwXxhxY9XHDe2FHo6/Z7WWmtT7T5nBBp3I=
+github.com/lestrrat-go/option v1.0.1 h1:oAzP2fvZGQKWkvHa1/SAcFolBEca1oN+mQ7eooNBEYU=
+github.com/lestrrat-go/option v1.0.1/go.mod h1:5ZHFbivi4xwXxhxY9XHDe2FHo6/Z7WWmtT7T5nBBp3I=
+github.com/lestrrat-go/pdebug/v3 v3.0.1 h1:3G5sX/aw/TbMTtVc9U7IHBWRZtMvwvBziF1e4HoQtv8=
+github.com/lestrrat-go/pdebug/v3 v3.0.1/go.mod h1:za+m+Ve24yCxTEhR59N7UlnJomWwCiIqbJRmKeiADU4=
+github.com/lestrrat-go/structinfo v0.0.0-20210312050401-7f8bd69d6acb h1:DDg5u5lk2v8O8qxs8ecQkMUBj3tLW6wkSLzxxOyi1Ig=
+github.com/lestrrat-go/structinfo v0.0.0-20210312050401-7f8bd69d6acb/go.mod h1:i+E8Uf04vf2QjOWyJdGY75vmG+4rxiZW2kIj1lTB5mo=
+github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
+github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
+github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
+github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
+github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
+github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
+github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
+github.com/mattn/go-sqlite3 v1.14.3/go.mod h1:WVKg1VTActs4Qso6iwGbiFih2UIHo0ENGwNd0Lj+XmI=
+github.com/mattn/go-sqlite3 v1.14.16 h1:yOQRA0RpS5PFz/oikGwBEqvAWhWg5ufRz4ETLjwpU1Y=
+github.com/mattn/go-sqlite3 v1.14.16/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg=
+github.com/microsoft/go-mssqldb v1.7.2 h1:CHkFJiObW7ItKTJfHo1QX7QBBD1iV+mn1eOyRP3b/PA=
+github.com/microsoft/go-mssqldb v1.7.2/go.mod h1:kOvZKUdrhhFQmxLZqbwUV0rHkNkZpthMITIb2Ko1IoA=
+github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4=
+github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
+github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
+github.com/patrickmn/go-cache v2.1.0+incompatible h1:HRMgzkcYKYpi3C8ajMPV8OFXaaRUnok+kx1WdO15EQc=
+github.com/patrickmn/go-cache v2.1.0+incompatible/go.mod h1:3Qf8kWWT7OJRJbdiICTKqZju1ZixQ/KpMGzzAfe6+WQ=
+github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
+github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/prashantv/gostub v1.1.0 h1:BTyx3RfQjRHnUWaGF9oQos79AlQ5k8WNktv7VGvVH4g=
+github.com/prashantv/gostub v1.1.0/go.mod h1:A5zLQHz7ieHGG7is6LLXLz7I8+3LZzsrV0P1IAHhP5U=
+github.com/qri-io/jsonpointer v0.1.1 h1:prVZBZLL6TW5vsSB9fFHFAMBLI4b0ri5vribQlTJiBA=
+github.com/qri-io/jsonpointer v0.1.1/go.mod h1:DnJPaYgiKu56EuDp8TU5wFLdZIcAnb/uH9v37ZaMV64=
+github.com/qri-io/jsonschema v0.2.1 h1:NNFoKms+kut6ABPf6xiKNM5214jzxAhDBrPHCJ97Wg0=
+github.com/qri-io/jsonschema v0.2.1/go.mod h1:g7DPkiOsK1xv6T/Ao5scXRkd+yTFygcANPBaaqW+VrI=
+github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
+github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
+github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8=
+github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4=
+github.com/rotisserie/eris v0.5.4 h1:Il6IvLdAapsMhvuOahHWiBnl1G++Q0/L5UIkI5mARSk=
+github.com/rotisserie/eris v0.5.4/go.mod h1:Z/kgYTJiJtocxCbFfvRmO+QejApzG6zpyky9G1A4g9s=
+github.com/segmentio/asm v1.2.0 h1:9BQrFxC+YOHJlTlHGkTrFWf59nbL3XnCoFLTwDCI7ys=
+github.com/segmentio/asm v1.2.0/go.mod h1:BqMnlJP91P8d+4ibuonYZw9mfnzI9HfxselHZr5aAcs=
+github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
+github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
+github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
+github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
+github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
+github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
+github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
+github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
+github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
+github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
+github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
+github.com/vrischmann/envconfig v1.4.1 h1:fucz2HsoAkJCLgIngWdWqLNxNjdWD14zfrLF6EQPdY4=
+github.com/vrischmann/envconfig v1.4.1/go.mod h1:cX3p+/PEssil6fWwzIS7kf8iFpli3giuxXGHxckucYc=
+github.com/zenizh/go-capturer v0.0.0-20211219060012-52ea6c8fed04 h1:qXafrlZL1WsJW5OokjraLLRURHiw0OzKHD/RNdspp4w=
+github.com/zenizh/go-capturer v0.0.0-20211219060012-52ea6c8fed04/go.mod h1:FiwNQxz6hGoNFBC4nIx+CxZhI3nne5RmIOlT/MXcSD4=
+gitlab.com/jc21com/sqlite v1.22.2-0.20230527022643-b56cedb3bc85 h1:NPHauobrOymc80Euu+e0tsMyXcdtLCX5bQPKX5zsI38=
+gitlab.com/jc21com/sqlite v1.22.2-0.20230527022643-b56cedb3bc85/go.mod h1:OrDj17Mggn6MhE+iPbBNf7RGKODDE9NFT0f3EwDzJqk=
+go.uber.org/automaxprocs v1.6.0 h1:O3y2/QNTOdbF+e/dpXNNW7Rx2hZ4sTIPyybbxyNqTUs=
+go.uber.org/automaxprocs v1.6.0/go.mod h1:ifeIMSnPZuznNm6jmdzmU3/bfk01Fe2fotchwEFJ8r8=
+go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
+go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
+golang.org/x/crypto v0.39.0 h1:SHs+kF4LP+f+p14esP5jAoDpHU8Gu/v9lFRK6IT5imM=
+golang.org/x/crypto v0.39.0/go.mod h1:L+Xg3Wf6HoL4Bn4238Z6ft6KfEpN0tJGo53AAPC632U=
+golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c h1:7dEasQXItcW1xKJ2+gg5VOiBnqWrJc+rq0DPKyvvdbY=
+golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c/go.mod h1:NQtJDoLvd6faHhE7m4T/1IY708gDefGGjR/iUW8yQQ8=
+golang.org/x/mod v0.25.0 h1:n7a+ZbQKQA/Ysbyb0/6IbB1H/X41mKgbhfv7AfG/44w=
+golang.org/x/mod v0.25.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww=
+golang.org/x/net v0.40.0 h1:79Xs7wF06Gbdcg4kdCCIQArK11Z1hr5POQ6+fIYHNuY=
+golang.org/x/net v0.40.0/go.mod h1:y0hY0exeL2Pku80/zKK7tpntoX23cqL3Oa6njdgRtds=
+golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI=
+golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU=
+golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8=
+golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
+golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw=
+golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
+golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M=
+golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA=
+golang.org/x/tools v0.33.0 h1:4qz2S3zmRxbGIhDIAgjxvFutSvH5EfnsYrRBj0UI0bc=
+golang.org/x/tools v0.33.0/go.mod h1:CIJMaWEY88juyUfo7UbgPqbC8rU2OqfAV1h2Qp0oMYI=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
+gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
+gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
+gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
+gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gorm.io/datatypes v1.2.6 h1:KafLdXvFUhzNeL2ncm03Gl3eTLONQfNKZ+wJ+9Y4Nck=
+gorm.io/datatypes v1.2.6/go.mod h1:M2iO+6S3hhi4nAyYe444Pcb0dcIiOMJ7QHaUXxyiNZY=
+gorm.io/driver/mysql v1.6.0 h1:eNbLmNTpPpTOVZi8MMxCi2aaIm0ZpInbORNXDwyLGvg=
+gorm.io/driver/mysql v1.6.0/go.mod h1:D/oCC2GWK3M/dqoLxnOlaNKmXz8WNTfcS9y5ovaSqKo=
+gorm.io/driver/postgres v1.6.0 h1:2dxzU8xJ+ivvqTRph34QX+WrRaJlmfyPqXmoGVjMBa4=
+gorm.io/driver/postgres v1.6.0/go.mod h1:vUw0mrGgrTK+uPHEhAdV4sfFELrByKVGnaVRkXDhtWo=
+gorm.io/driver/sqlite v1.1.3/go.mod h1:AKDgRWk8lcSQSw+9kxCJnX/yySj8G3rdwYlU57cB45c=
+gorm.io/driver/sqlite v1.4.3 h1:HBBcZSDnWi5BW3B3rwvVTc510KGkBkexlOg0QrmLUuU=
+gorm.io/driver/sqlite v1.4.3/go.mod h1:0Aq3iPO+v9ZKbcdiz8gLWRw5VOPcBOPUQJFLq5e2ecI=
+gorm.io/driver/sqlserver v1.6.0 h1:VZOBQVsVhkHU/NzNhRJKoANt5pZGQAS1Bwc6m6dgfnc=
+gorm.io/driver/sqlserver v1.6.0/go.mod h1:WQzt4IJo/WHKnckU9jXBLMJIVNMVeTu25dnOzehntWw=
+gorm.io/gorm v1.20.1/go.mod h1:0HFTzE/SqkGTzK6TlDPPQbAYCluiVvhzoA1+aVyzenw=
+gorm.io/gorm v1.23.0/go.mod h1:l2lP/RyAtc1ynaTjFksBde/O8v9oOGIApu2/xRitmZk=
+gorm.io/gorm v1.30.0 h1:qbT5aPv1UH8gI99OsRlvDToLxW5zR7FzS9acZDOZcgs=
+gorm.io/gorm v1.30.0/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE=
+gorm.io/plugin/soft_delete v1.2.1 h1:qx9D/c4Xu6w5KT8LviX8DgLcB9hkKl6JC9f44Tj7cGU=
+gorm.io/plugin/soft_delete v1.2.1/go.mod h1:Zv7vQctOJTGOsJ/bWgrN1n3od0GBAZgnLjEx+cApLGk=
+lukechampine.com/uint128 v1.3.0 h1:cDdUVfRwDUDovz610ABgFD17nXD4/uDgVHl2sC3+sbo=
+lukechampine.com/uint128 v1.3.0/go.mod h1:c4eWIwlEGaxC/+H1VguhU4PHXNWDCDMUlWdIWl2j1gk=
+modernc.org/cc/v3 v3.41.0 h1:QoR1Sn3YWlmA1T4vLaKZfawdVtSiGx8H+cEojbC7v1Q=
+modernc.org/cc/v3 v3.41.0/go.mod h1:Ni4zjJYJ04CDOhG7dn640WGfwBzfE0ecX8TyMB0Fv0Y=
+modernc.org/cc/v4 v4.21.4 h1:3Be/Rdo1fpr8GrQ7IVw9OHtplU4gWbb+wNgeoBMmGLQ=
+modernc.org/cc/v4 v4.21.4/go.mod h1:HM7VJTZbUCR3rV8EYBi9wxnJ0ZBRiGE5OeGXNA0IsLQ=
+modernc.org/ccgo/v3 v3.17.0 h1:o3OmOqx4/OFnl4Vm3G8Bgmqxnvxnh0nbxeT5p/dWChA=
+modernc.org/ccgo/v3 v3.17.0/go.mod h1:Sg3fwVpmLvCUTaqEUjiBDAvshIaKDB0RXaf+zgqFu8I=
+modernc.org/ccgo/v4 v4.21.0 h1:kKPI3dF7RIag8YcToh5ZwDcVMIv6VGa0ED5cvh0LMW4=
+modernc.org/ccgo/v4 v4.21.0/go.mod h1:h6kt6H/A2+ew/3MW/p6KEoQmrq/i3pr0J/SiwiaF/g0=
+modernc.org/ccorpus v1.11.6 h1:J16RXiiqiCgua6+ZvQot4yUuUy8zxgqbqEEUuGPlISk=
+modernc.org/ccorpus v1.11.6/go.mod h1:2gEUTrWqdpH2pXsmTM1ZkjeSrUWDpjMu2T6m29L/ErQ=
+modernc.org/fileutil v1.3.0 h1:gQ5SIzK3H9kdfai/5x41oQiKValumqNTDXMvKo62HvE=
+modernc.org/fileutil v1.3.0/go.mod h1:XatxS8fZi3pS8/hKG2GH/ArUogfxjpEKs3Ku3aK4JyQ=
+modernc.org/gc/v2 v2.5.0 h1:bJ9ChznK1L1mUtAQtxi0wi5AtAs5jQuw4PrPHO5pb6M=
+modernc.org/gc/v2 v2.5.0/go.mod h1:wzN5dK1AzVGoH6XOzc3YZ+ey/jPgYHLuVckd62P0GYU=
+modernc.org/httpfs v1.0.6 h1:AAgIpFZRXuYnkjftxTAZwMIiwEqAfk8aVB2/oA6nAeM=
+modernc.org/httpfs v1.0.6/go.mod h1:7dosgurJGp0sPaRanU53W4xZYKh14wfzX420oZADeHM=
+modernc.org/libc v1.61.0 h1:eGFcvWpqlnoGwzZeZe3PWJkkKbM/3SUGyk1DVZQ0TpE=
+modernc.org/libc v1.61.0/go.mod h1:DvxVX89wtGTu+r72MLGhygpfi3aUGgZRdAYGCAVVud0=
+modernc.org/mathutil v1.6.0 h1:fRe9+AmYlaej+64JsEEhoWuAYBkOtQiMEU7n/XgfYi4=
+modernc.org/mathutil v1.6.0/go.mod h1:Ui5Q9q1TR2gFm0AQRqQUaBWFLAhQpCwNcuhBOSedWPo=
+modernc.org/memory v1.8.0 h1:IqGTL6eFMaDZZhEWwcREgeMXYwmW83LYW8cROZYkg+E=
+modernc.org/memory v1.8.0/go.mod h1:XPZ936zp5OMKGWPqbD3JShgd/ZoQ7899TUuQqxY+peU=
+modernc.org/opt v0.1.3 h1:3XOZf2yznlhC+ibLltsDGzABUGVx8J6pnFMS3E4dcq4=
+modernc.org/opt v0.1.3/go.mod h1:WdSiB5evDcignE70guQKxYUl14mgWtbClRi5wmkkTX0=
+modernc.org/sortutil v1.2.0 h1:jQiD3PfS2REGJNzNCMMaLSp/wdMNieTbKX920Cqdgqc=
+modernc.org/sortutil v1.2.0/go.mod h1:TKU2s7kJMf1AE84OoiGppNHJwvB753OYfNl2WRb++Ss=
+modernc.org/strutil v1.2.0 h1:agBi9dp1I+eOnxXeiZawM8F4LawKv4NzGWSaLfyeNZA=
+modernc.org/strutil v1.2.0/go.mod h1:/mdcBmfOibveCTBxUl5B5l6W+TTH1FXPLHZE6bTosX0=
+modernc.org/tcl v1.15.2 h1:C4ybAYCGJw968e+Me18oW55kD/FexcHbqH2xak1ROSY=
+modernc.org/tcl v1.15.2/go.mod h1:3+k/ZaEbKrC8ePv8zJWPtBSW0V7Gg9g8rkmhI1Kfs3c=
+modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y=
+modernc.org/token v1.1.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM=
+modernc.org/z v1.7.3 h1:zDJf6iHjrnB+WRD88stbXokugjyc0/pB91ri1gO6LZY=
+modernc.org/z v1.7.3/go.mod h1:Ipv4tsdxZRbQyLq9Q1M6gdbkxYzdlrciF2Hi/lS7nWE=
diff --git a/backend/index.js b/backend/index.js
deleted file mode 100644
index 8d42d0969..000000000
--- a/backend/index.js
+++ /dev/null
@@ -1,135 +0,0 @@
-#!/usr/bin/env node
-
-const logger = require('./logger').global;
-
-async function appStart () {
- // Create config file db settings if environment variables have been set
- await createDbConfigFromEnvironment();
-
- const migrate = require('./migrate');
- const setup = require('./setup');
- const app = require('./app');
- const apiValidator = require('./lib/validator/api');
- const internalCertificate = require('./internal/certificate');
- const internalIpRanges = require('./internal/ip_ranges');
-
- return migrate.latest()
- .then(setup)
- .then(() => {
- return apiValidator.loadSchemas;
- })
- .then(internalIpRanges.fetch)
- .then(() => {
-
- internalCertificate.initTimer();
- internalIpRanges.initTimer();
-
- const server = app.listen(3000, () => {
- logger.info('Backend PID ' + process.pid + ' listening on port 3000 ...');
-
- process.on('SIGTERM', () => {
- logger.info('PID ' + process.pid + ' received SIGTERM');
- server.close(() => {
- logger.info('Stopping.');
- process.exit(0);
- });
- });
- });
- })
- .catch((err) => {
- logger.error(err.message);
- setTimeout(appStart, 1000);
- });
-}
-
-async function createDbConfigFromEnvironment() {
- return new Promise((resolve, reject) => {
- const envMysqlHost = process.env.DB_MYSQL_HOST || null;
- const envMysqlPort = process.env.DB_MYSQL_PORT || null;
- const envMysqlUser = process.env.DB_MYSQL_USER || null;
- const envMysqlName = process.env.DB_MYSQL_NAME || null;
- let envSqliteFile = process.env.DB_SQLITE_FILE || null;
-
- const fs = require('fs');
- const filename = (process.env.NODE_CONFIG_DIR || './config') + '/' + (process.env.NODE_ENV || 'default') + '.json';
- let configData = {};
-
- try {
- configData = require(filename);
- } catch (err) {
- // do nothing
- }
-
- if (configData.database && configData.database.engine && !configData.database.fromEnv) {
- logger.info('Manual db configuration already exists, skipping config creation from environment variables');
- resolve();
- return;
- }
-
- if ((!envMysqlHost || !envMysqlPort || !envMysqlUser || !envMysqlName) && !envSqliteFile){
- envSqliteFile = '/data/database.sqlite';
- logger.info(`No valid environment variables for database provided, using default SQLite file '${envSqliteFile}'`);
- }
-
- if (envMysqlHost && envMysqlPort && envMysqlUser && envMysqlName) {
- const newConfig = {
- fromEnv: true,
- engine: 'mysql',
- host: envMysqlHost,
- port: envMysqlPort,
- user: envMysqlUser,
- password: process.env.DB_MYSQL_PASSWORD,
- name: envMysqlName,
- };
-
- if (JSON.stringify(configData.database) === JSON.stringify(newConfig)) {
- // Config is unchanged, skip overwrite
- resolve();
- return;
- }
-
- logger.info('Generating MySQL knex configuration from environment variables');
- configData.database = newConfig;
-
- } else {
- const newConfig = {
- fromEnv: true,
- engine: 'knex-native',
- knex: {
- client: 'sqlite3',
- connection: {
- filename: envSqliteFile
- },
- useNullAsDefault: true
- }
- };
- if (JSON.stringify(configData.database) === JSON.stringify(newConfig)) {
- // Config is unchanged, skip overwrite
- resolve();
- return;
- }
-
- logger.info('Generating SQLite knex configuration');
- configData.database = newConfig;
- }
-
- // Write config
- fs.writeFile(filename, JSON.stringify(configData, null, 2), (err) => {
- if (err) {
- logger.error('Could not write db config to config file: ' + filename);
- reject(err);
- } else {
- logger.debug('Wrote db configuration to config file: ' + filename);
- resolve();
- }
- });
- });
-}
-
-try {
- appStart();
-} catch (err) {
- logger.error(err.message, err);
- process.exit(1);
-}
-
diff --git a/backend/internal/access-list.js b/backend/internal/access-list.js
deleted file mode 100644
index 083bfa62e..000000000
--- a/backend/internal/access-list.js
+++ /dev/null
@@ -1,534 +0,0 @@
-const _ = require('lodash');
-const fs = require('fs');
-const batchflow = require('batchflow');
-const logger = require('../logger').access;
-const error = require('../lib/error');
-const accessListModel = require('../models/access_list');
-const accessListAuthModel = require('../models/access_list_auth');
-const accessListClientModel = require('../models/access_list_client');
-const proxyHostModel = require('../models/proxy_host');
-const internalAuditLog = require('./audit-log');
-const internalNginx = require('./nginx');
-const utils = require('../lib/utils');
-
-function omissions () {
- return ['is_deleted'];
-}
-
-const internalAccessList = {
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @returns {Promise}
- */
- create: (access, data) => {
- return access.can('access_lists:create', data)
- .then((/*access_data*/) => {
- return accessListModel
- .query()
- .omit(omissions())
- .insertAndFetch({
- name: data.name,
- satisfy_any: data.satisfy_any,
- pass_auth: data.pass_auth,
- owner_user_id: access.token.getUserId(1)
- });
- })
- .then((row) => {
- data.id = row.id;
-
- let promises = [];
-
- // Now add the items
- data.items.map((item) => {
- promises.push(accessListAuthModel
- .query()
- .insert({
- access_list_id: row.id,
- username: item.username,
- password: item.password
- })
- );
- });
-
- // Now add the clients
- if (typeof data.clients !== 'undefined' && data.clients) {
- data.clients.map((client) => {
- promises.push(accessListClientModel
- .query()
- .insert({
- access_list_id: row.id,
- address: client.address,
- directive: client.directive
- })
- );
- });
- }
-
- return Promise.all(promises);
- })
- .then(() => {
- // re-fetch with expansions
- return internalAccessList.get(access, {
- id: data.id,
- expand: ['owner', 'items', 'clients', 'proxy_hosts.access_list.[clients,items]']
- }, true /* <- skip masking */);
- })
- .then((row) => {
- // Audit log
- data.meta = _.assign({}, data.meta || {}, row.meta);
-
- return internalAccessList.build(row)
- .then(() => {
- if (row.proxy_host_count) {
- return internalNginx.bulkGenerateConfigs('proxy_host', row.proxy_hosts);
- }
- })
- .then(() => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'created',
- object_type: 'access-list',
- object_id: row.id,
- meta: internalAccessList.maskItems(data)
- });
- })
- .then(() => {
- return internalAccessList.maskItems(row);
- });
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Integer} data.id
- * @param {String} [data.name]
- * @param {String} [data.items]
- * @return {Promise}
- */
- update: (access, data) => {
- return access.can('access_lists:update', data.id)
- .then((/*access_data*/) => {
- return internalAccessList.get(access, {id: data.id});
- })
- .then((row) => {
- if (row.id !== data.id) {
- // Sanity check that something crazy hasn't happened
- throw new error.InternalValidationError('Access List could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
- }
- })
- .then(() => {
- // patch name if specified
- if (typeof data.name !== 'undefined' && data.name) {
- return accessListModel
- .query()
- .where({id: data.id})
- .patch({
- name: data.name,
- satisfy_any: data.satisfy_any,
- pass_auth: data.pass_auth,
- });
- }
- })
- .then(() => {
- // Check for items and add/update/remove them
- if (typeof data.items !== 'undefined' && data.items) {
- let promises = [];
- let items_to_keep = [];
-
- data.items.map(function (item) {
- if (item.password) {
- promises.push(accessListAuthModel
- .query()
- .insert({
- access_list_id: data.id,
- username: item.username,
- password: item.password
- })
- );
- } else {
- // This was supplied with an empty password, which means keep it but don't change the password
- items_to_keep.push(item.username);
- }
- });
-
- let query = accessListAuthModel
- .query()
- .delete()
- .where('access_list_id', data.id);
-
- if (items_to_keep.length) {
- query.andWhere('username', 'NOT IN', items_to_keep);
- }
-
- return query
- .then(() => {
- // Add new items
- if (promises.length) {
- return Promise.all(promises);
- }
- });
- }
- })
- .then(() => {
- // Check for clients and add/update/remove them
- if (typeof data.clients !== 'undefined' && data.clients) {
- let promises = [];
-
- data.clients.map(function (client) {
- if (client.address) {
- promises.push(accessListClientModel
- .query()
- .insert({
- access_list_id: data.id,
- address: client.address,
- directive: client.directive
- })
- );
- }
- });
-
- let query = accessListClientModel
- .query()
- .delete()
- .where('access_list_id', data.id);
-
- return query
- .then(() => {
- // Add new items
- if (promises.length) {
- return Promise.all(promises);
- }
- });
- }
- })
- .then(internalNginx.reload)
- .then(() => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'updated',
- object_type: 'access-list',
- object_id: data.id,
- meta: internalAccessList.maskItems(data)
- });
- })
- .then(() => {
- // re-fetch with expansions
- return internalAccessList.get(access, {
- id: data.id,
- expand: ['owner', 'items', 'clients', 'proxy_hosts.access_list.[clients,items]']
- }, true /* <- skip masking */);
- })
- .then((row) => {
- return internalAccessList.build(row)
- .then(() => {
- if (row.proxy_host_count) {
- return internalNginx.bulkGenerateConfigs('proxy_host', row.proxy_hosts);
- }
- })
- .then(() => {
- return internalAccessList.maskItems(row);
- });
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Integer} data.id
- * @param {Array} [data.expand]
- * @param {Array} [data.omit]
- * @param {Boolean} [skip_masking]
- * @return {Promise}
- */
- get: (access, data, skip_masking) => {
- if (typeof data === 'undefined') {
- data = {};
- }
-
- return access.can('access_lists:get', data.id)
- .then((access_data) => {
- let query = accessListModel
- .query()
- .select('access_list.*', accessListModel.raw('COUNT(proxy_host.id) as proxy_host_count'))
- .joinRaw('LEFT JOIN `proxy_host` ON `proxy_host`.`access_list_id` = `access_list`.`id` AND `proxy_host`.`is_deleted` = 0')
- .where('access_list.is_deleted', 0)
- .andWhere('access_list.id', data.id)
- .allowEager('[owner,items,clients,proxy_hosts.[*, access_list.[clients,items]]]')
- .omit(['access_list.is_deleted'])
- .first();
-
- if (access_data.permission_visibility !== 'all') {
- query.andWhere('access_list.owner_user_id', access.token.getUserId(1));
- }
-
- // Custom omissions
- if (typeof data.omit !== 'undefined' && data.omit !== null) {
- query.omit(data.omit);
- }
-
- if (typeof data.expand !== 'undefined' && data.expand !== null) {
- query.eager('[' + data.expand.join(', ') + ']');
- }
-
- return query;
- })
- .then((row) => {
- if (row) {
- if (!skip_masking && typeof row.items !== 'undefined' && row.items) {
- row = internalAccessList.maskItems(row);
- }
-
- return _.omit(row, omissions());
- } else {
- throw new error.ItemNotFoundError(data.id);
- }
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Integer} data.id
- * @param {String} [data.reason]
- * @returns {Promise}
- */
- delete: (access, data) => {
- return access.can('access_lists:delete', data.id)
- .then(() => {
- return internalAccessList.get(access, {id: data.id, expand: ['proxy_hosts', 'items', 'clients']});
- })
- .then((row) => {
- if (!row) {
- throw new error.ItemNotFoundError(data.id);
- }
-
- // 1. update row to be deleted
- // 2. update any proxy hosts that were using it (ignoring permissions)
- // 3. reconfigure those hosts
- // 4. audit log
-
- // 1. update row to be deleted
- return accessListModel
- .query()
- .where('id', row.id)
- .patch({
- is_deleted: 1
- })
- .then(() => {
- // 2. update any proxy hosts that were using it (ignoring permissions)
- if (row.proxy_hosts) {
- return proxyHostModel
- .query()
- .where('access_list_id', '=', row.id)
- .patch({access_list_id: 0})
- .then(() => {
- // 3. reconfigure those hosts, then reload nginx
-
- // set the access_list_id to zero for these items
- row.proxy_hosts.map(function (val, idx) {
- row.proxy_hosts[idx].access_list_id = 0;
- });
-
- return internalNginx.bulkGenerateConfigs('proxy_host', row.proxy_hosts);
- })
- .then(() => {
- return internalNginx.reload();
- });
- }
- })
- .then(() => {
- // delete the htpasswd file
- let htpasswd_file = internalAccessList.getFilename(row);
-
- try {
- fs.unlinkSync(htpasswd_file);
- } catch (err) {
- // do nothing
- }
- })
- .then(() => {
- // 4. audit log
- return internalAuditLog.add(access, {
- action: 'deleted',
- object_type: 'access-list',
- object_id: row.id,
- meta: _.omit(internalAccessList.maskItems(row), ['is_deleted', 'proxy_hosts'])
- });
- });
- })
- .then(() => {
- return true;
- });
- },
-
- /**
- * All Lists
- *
- * @param {Access} access
- * @param {Array} [expand]
- * @param {String} [search_query]
- * @returns {Promise}
- */
- getAll: (access, expand, search_query) => {
- return access.can('access_lists:list')
- .then((access_data) => {
- let query = accessListModel
- .query()
- .select('access_list.*', accessListModel.raw('COUNT(proxy_host.id) as proxy_host_count'))
- .joinRaw('LEFT JOIN `proxy_host` ON `proxy_host`.`access_list_id` = `access_list`.`id` AND `proxy_host`.`is_deleted` = 0')
- .where('access_list.is_deleted', 0)
- .groupBy('access_list.id')
- .omit(['access_list.is_deleted'])
- .allowEager('[owner,items,clients]')
- .orderBy('access_list.name', 'ASC');
-
- if (access_data.permission_visibility !== 'all') {
- query.andWhere('access_list.owner_user_id', access.token.getUserId(1));
- }
-
- // Query is used for searching
- if (typeof search_query === 'string') {
- query.where(function () {
- this.where('name', 'like', '%' + search_query + '%');
- });
- }
-
- if (typeof expand !== 'undefined' && expand !== null) {
- query.eager('[' + expand.join(', ') + ']');
- }
-
- return query;
- })
- .then((rows) => {
- if (rows) {
- rows.map(function (row, idx) {
- if (typeof row.items !== 'undefined' && row.items) {
- rows[idx] = internalAccessList.maskItems(row);
- }
- });
- }
-
- return rows;
- });
- },
-
- /**
- * Report use
- *
- * @param {Integer} user_id
- * @param {String} visibility
- * @returns {Promise}
- */
- getCount: (user_id, visibility) => {
- let query = accessListModel
- .query()
- .count('id as count')
- .where('is_deleted', 0);
-
- if (visibility !== 'all') {
- query.andWhere('owner_user_id', user_id);
- }
-
- return query.first()
- .then((row) => {
- return parseInt(row.count, 10);
- });
- },
-
- /**
- * @param {Object} list
- * @returns {Object}
- */
- maskItems: (list) => {
- if (list && typeof list.items !== 'undefined') {
- list.items.map(function (val, idx) {
- let repeat_for = 8;
- let first_char = '*';
-
- if (typeof val.password !== 'undefined' && val.password) {
- repeat_for = val.password.length - 1;
- first_char = val.password.charAt(0);
- }
-
- list.items[idx].hint = first_char + ('*').repeat(repeat_for);
- list.items[idx].password = '';
- });
- }
-
- return list;
- },
-
- /**
- * @param {Object} list
- * @param {Integer} list.id
- * @returns {String}
- */
- getFilename: (list) => {
- return '/data/access/' + list.id;
- },
-
- /**
- * @param {Object} list
- * @param {Integer} list.id
- * @param {String} list.name
- * @param {Array} list.items
- * @returns {Promise}
- */
- build: (list) => {
- logger.info('Building Access file #' + list.id + ' for: ' + list.name);
-
- return new Promise((resolve, reject) => {
- let htpasswd_file = internalAccessList.getFilename(list);
-
- // 1. remove any existing access file
- try {
- fs.unlinkSync(htpasswd_file);
- } catch (err) {
- // do nothing
- }
-
- // 2. create empty access file
- try {
- fs.writeFileSync(htpasswd_file, '', {encoding: 'utf8'});
- resolve(htpasswd_file);
- } catch (err) {
- reject(err);
- }
- })
- .then((htpasswd_file) => {
- // 3. generate password for each user
- if (list.items.length) {
- return new Promise((resolve, reject) => {
- batchflow(list.items).sequential()
- .each((i, item, next) => {
- if (typeof item.password !== 'undefined' && item.password.length) {
- logger.info('Adding: ' + item.username);
-
- utils.exec('/usr/bin/htpasswd -b "' + htpasswd_file + '" "' + item.username + '" "' + item.password + '"')
- .then((/*result*/) => {
- next();
- })
- .catch((err) => {
- logger.error(err);
- next(err);
- });
- }
- })
- .error((err) => {
- logger.error(err);
- reject(err);
- })
- .end((results) => {
- logger.success('Built Access file #' + list.id + ' for: ' + list.name);
- resolve(results);
- });
- });
- }
- });
- }
-};
-
-module.exports = internalAccessList;
diff --git a/backend/internal/acme/acmesh.go b/backend/internal/acme/acmesh.go
new file mode 100644
index 000000000..a7e57d0b1
--- /dev/null
+++ b/backend/internal/acme/acmesh.go
@@ -0,0 +1,210 @@
+package acme
+
+// Some light reading:
+// https://github.com/acmesh-official/acme.sh/wiki/How-to-issue-a-cert
+
+import (
+ "fmt"
+ "os"
+ "os/exec"
+ "strings"
+
+ "npm/internal/config"
+ "npm/internal/entity/certificateauthority"
+ "npm/internal/entity/dnsprovider"
+ "npm/internal/logger"
+
+ "github.com/rotisserie/eris"
+)
+
+func getAcmeShFilePath() (string, error) {
+ path, err := exec.LookPath("acme.sh")
+ if err != nil {
+ return path, eris.Wrapf(err, "Cannot find acme.sh execuatable script in PATH")
+ }
+ return path, nil
+}
+
+func getCommonEnvVars() []string {
+ return []string{
+ fmt.Sprintf("ACMESH_CONFIG_HOME=%s", os.Getenv("ACMESH_CONFIG_HOME")),
+ fmt.Sprintf("ACMESH_HOME=%s", os.Getenv("ACMESH_HOME")),
+ fmt.Sprintf("CERT_HOME=%s", os.Getenv("CERT_HOME")),
+ fmt.Sprintf("LE_CONFIG_HOME=%s", os.Getenv("LE_CONFIG_HOME")),
+ fmt.Sprintf("LE_WORKING_DIR=%s", os.Getenv("LE_WORKING_DIR")),
+ }
+}
+
+// GetAcmeShVersion will return the acme.sh script version
+func GetAcmeShVersion() string {
+ if r, err := shExec([]string{"--version"}, nil); err == nil {
+ // modify the output
+ r = strings.Trim(r, "\n")
+ v := strings.Split(r, "\n")
+ return v[len(v)-1]
+ }
+ return ""
+}
+
+// CreateAccountKey is required for each server initially
+func CreateAccountKey(ca *certificateauthority.Model) error {
+ args := []string{"--create-account-key", "--accountkeylength", "2048"}
+ if ca != nil {
+ logger.Info("Acme.sh CreateAccountKey for %s", ca.AcmeshServer)
+ args = append(args, "--server", ca.AcmeshServer)
+ if ca.CABundle != "" {
+ args = append(args, "--ca-bundle", ca.CABundle)
+ }
+ } else {
+ logger.Info("Acme.sh CreateAccountKey")
+ }
+
+ args = append(args, getCommonArgs()...)
+ ret, err := shExec(args, nil)
+ if err != nil {
+ return err
+ }
+
+ logger.Debug("CreateAccountKey returned:\n%+v", ret)
+
+ return nil
+}
+
+// RequestCert does all the heavy lifting
+func RequestCert(domains []string, method, outputFullchainFile, outputKeyFile string, dnsProvider *dnsprovider.Model, ca *certificateauthority.Model, force bool) (string, error) {
+ args, err := buildCertRequestArgs(domains, method, outputFullchainFile, outputKeyFile, dnsProvider, ca, force)
+ if err != nil {
+ return err.Error(), err
+ }
+
+ envs := make([]string, 0)
+ if dnsProvider != nil {
+ envs, err = dnsProvider.GetAcmeShEnvVars()
+ if err != nil {
+ return err.Error(), err
+ }
+ }
+
+ ret, err := shExec(args, envs)
+ if err != nil {
+ return ret, err
+ }
+
+ return "", nil
+}
+
+// shExec executes the acme.sh with arguments
+func shExec(args []string, envs []string) (string, error) {
+ acmeSh, err := getAcmeShFilePath()
+ if err != nil {
+ logger.Error("AcmeShError", err)
+ return "", err
+ }
+
+ logger.Debug("CMD: %s %v", acmeSh, args)
+ // nolint: gosec
+ c := exec.Command(acmeSh, args...)
+ c.Env = append(getCommonEnvVars(), envs...)
+
+ b, e := c.CombinedOutput()
+
+ if e != nil {
+ // logger.Error("AcmeShError", eris.Wrapf(e, "Command error: %s -- %v\n%+v", acmeSh, args, e))
+ logger.Warn(string(b))
+ }
+
+ return string(b), e
+}
+
+func getCommonArgs() []string {
+ args := make([]string, 0)
+
+ if config.Configuration.Acmesh.Home != "" {
+ args = append(args, "--home", config.Configuration.Acmesh.Home)
+ }
+ if config.Configuration.Acmesh.ConfigHome != "" {
+ args = append(args, "--config-home", config.Configuration.Acmesh.ConfigHome)
+ }
+ if config.Configuration.Acmesh.CertHome != "" {
+ args = append(args, "--cert-home", config.Configuration.Acmesh.CertHome)
+ }
+
+ args = append(args, "--log", "/data/logs/acme.sh.log")
+ args = append(args, "--debug", "2")
+
+ return args
+}
+
+// This is split out into it's own function so it's testable
+func buildCertRequestArgs(
+ domains []string,
+ method,
+ outputFullchainFile,
+ outputKeyFile string,
+ dnsProvider *dnsprovider.Model,
+ ca *certificateauthority.Model,
+ force bool,
+) ([]string, error) {
+ // The argument order matters.
+ // see https://github.com/acmesh-official/acme.sh/wiki/How-to-issue-a-cert#3-multiple-domains-san-mode--hybrid-mode
+ // for multiple domains and note that the method of validation is required just after the domain arg, each time.
+
+ // TODO log file location configurable
+ args := []string{"--issue"}
+
+ if ca != nil {
+ args = append(args, "--server", ca.AcmeshServer)
+ if ca.CABundle != "" {
+ args = append(args, "--ca-bundle", ca.CABundle)
+ }
+ }
+
+ if outputFullchainFile != "" {
+ args = append(args, "--fullchain-file", outputFullchainFile)
+ }
+
+ if outputKeyFile != "" {
+ args = append(args, "--key-file", outputKeyFile)
+ }
+
+ methodArgs := make([]string, 0)
+ switch method {
+ case "dns":
+ if dnsProvider == nil {
+ return nil, ErrDNSNeedsDNSProvider
+ }
+ methodArgs = append(methodArgs, "--dns", dnsProvider.AcmeshName)
+ if dnsProvider.DNSSleep > 0 {
+ // See: https://github.com/acmesh-official/acme.sh/wiki/dnscheck
+ methodArgs = append(methodArgs, "--dnssleep", fmt.Sprintf("%d", dnsProvider.DNSSleep))
+ }
+
+ case "http":
+ if dnsProvider != nil {
+ return nil, ErrHTTPHasDNSProvider
+ }
+ methodArgs = append(methodArgs, "-w", config.Configuration.Acmesh.GetWellknown())
+ default:
+ return nil, ErrMethodNotSupported
+ }
+
+ hasMethod := false
+
+ // Add domains to args
+ for _, domain := range domains {
+ args = append(args, "-d", domain)
+ // Method has to appear after each domain
+ if !hasMethod {
+ args = append(args, methodArgs...)
+ hasMethod = true
+ }
+ }
+
+ if force {
+ args = append(args, "--force")
+ }
+
+ args = append(args, getCommonArgs()...)
+
+ return args, nil
+}
diff --git a/backend/internal/acme/acmesh_test.go b/backend/internal/acme/acmesh_test.go
new file mode 100644
index 000000000..267327055
--- /dev/null
+++ b/backend/internal/acme/acmesh_test.go
@@ -0,0 +1,252 @@
+package acme
+
+import (
+ "fmt"
+ "testing"
+
+ "npm/internal/config"
+ "npm/internal/entity/certificateauthority"
+ "npm/internal/entity/dnsprovider"
+
+ "github.com/stretchr/testify/assert"
+ "go.uber.org/goleak"
+)
+
+// TODO configurable
+const acmeLogFile = "/data/logs/acme.sh.log"
+
+func TestBuildCertRequestArgs(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ type want struct {
+ args []string
+ err error
+ }
+
+ wellknown := config.Configuration.Acmesh.GetWellknown()
+ exampleKey := fmt.Sprintf("%s/example.com.key", config.Configuration.Acmesh.CertHome)
+ exampleChain := fmt.Sprintf("%s/a.crt", config.Configuration.Acmesh.CertHome)
+
+ tests := []struct {
+ name string
+ domains []string
+ method string
+ outputFullchainFile string
+ outputKeyFile string
+ dnsProvider *dnsprovider.Model
+ ca *certificateauthority.Model
+ want want
+ }{
+ {
+ name: "http single domain",
+ domains: []string{"example.com"},
+ method: "http",
+ outputFullchainFile: exampleChain,
+ outputKeyFile: exampleKey,
+ dnsProvider: nil,
+ ca: nil,
+ want: want{
+ args: []string{
+ "--issue",
+ "--fullchain-file",
+ exampleChain,
+ "--key-file",
+ exampleKey,
+ "-d",
+ "example.com",
+ "-w",
+ wellknown,
+ "--log",
+ acmeLogFile,
+ "--debug",
+ "2",
+ },
+ err: nil,
+ },
+ },
+ {
+ name: "http multiple domains",
+ domains: []string{"example.com", "example-two.com", "example-three.com"},
+ method: "http",
+ outputFullchainFile: exampleChain,
+ outputKeyFile: exampleKey,
+ dnsProvider: nil,
+ ca: nil,
+ want: want{
+ args: []string{
+ "--issue",
+ "--fullchain-file",
+ exampleChain,
+ "--key-file",
+ exampleKey,
+ "-d",
+ "example.com",
+ "-w",
+ wellknown,
+ "-d",
+ "example-two.com",
+ "-d",
+ "example-three.com",
+ "--log",
+ acmeLogFile,
+ "--debug",
+ "2",
+ },
+ err: nil,
+ },
+ },
+ {
+ name: "http single domain with dns provider",
+ domains: []string{"example.com"},
+ method: "http",
+ outputFullchainFile: exampleChain,
+ outputKeyFile: exampleKey,
+ dnsProvider: &dnsprovider.Model{
+ AcmeshName: "dns_cf",
+ },
+ ca: nil,
+ want: want{
+ args: nil,
+ err: ErrHTTPHasDNSProvider,
+ },
+ },
+ {
+ name: "dns single domain",
+ domains: []string{"example.com"},
+ method: "dns",
+ outputFullchainFile: exampleChain,
+ outputKeyFile: exampleKey,
+ dnsProvider: &dnsprovider.Model{
+ AcmeshName: "dns_cf",
+ },
+ ca: nil,
+ want: want{
+ args: []string{
+ "--issue",
+ "--fullchain-file",
+ exampleChain,
+ "--key-file",
+ exampleKey,
+ "-d",
+ "example.com",
+ "--dns",
+ "dns_cf",
+ "--log",
+ acmeLogFile,
+ "--debug",
+ "2",
+ },
+ err: nil,
+ },
+ },
+ {
+ name: "dns multiple domains",
+ domains: []string{"example.com", "example-two.com", "example-three.com"},
+ method: "dns",
+ outputFullchainFile: exampleChain,
+ outputKeyFile: exampleKey,
+ dnsProvider: &dnsprovider.Model{
+ AcmeshName: "dns_cf",
+ },
+ ca: nil,
+ want: want{
+ args: []string{
+ "--issue",
+ "--fullchain-file",
+ exampleChain,
+ "--key-file",
+ exampleKey,
+ "-d",
+ "example.com",
+ "--dns",
+ "dns_cf",
+ "-d",
+ "example-two.com",
+ "-d",
+ "example-three.com",
+ "--log",
+ acmeLogFile,
+ "--debug",
+ "2",
+ },
+ err: nil,
+ },
+ },
+ {
+ name: "dns single domain no provider",
+ domains: []string{"example.com"},
+ method: "dns",
+ outputFullchainFile: exampleChain,
+ outputKeyFile: exampleKey,
+ dnsProvider: nil,
+ ca: nil,
+ want: want{
+ args: nil,
+ err: ErrDNSNeedsDNSProvider,
+ },
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ args, err := buildCertRequestArgs(tt.domains, tt.method, tt.outputFullchainFile, tt.outputKeyFile, tt.dnsProvider, tt.ca, false)
+
+ assert.Equal(t, tt.want.args, args)
+ assert.Equal(t, tt.want.err, err)
+ })
+ }
+}
+
+func TestGetAcmeShFilePath(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ t.Run("basic test", func(t *testing.T) {
+ path, err := getAcmeShFilePath()
+ if err != nil {
+ assert.Equal(t, "Cannot find acme.sh execuatable script in PATH: exec: \"acme.sh\": executable file not found in $PATH", err.Error())
+ assert.Equal(t, "", path)
+ } else {
+ assert.Equal(t, "/bin/acme.sh", path)
+ }
+ })
+}
+
+func TestGetCommonEnvVars(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ t.Run("basic test", func(t *testing.T) {
+ t.Setenv("ACMESH_CONFIG_HOME", "/data/.acme.sh/config")
+ t.Setenv("ACMESH_HOME", "/data/.acme.sh")
+ t.Setenv("CERT_HOME", "/data/.acme.sh/certs")
+ t.Setenv("LE_CONFIG_HOME", "/data/.acme.sh/config")
+ t.Setenv("LE_WORKING_DIR", "/data/.acme.sh")
+
+ expected := []string{
+ "ACMESH_CONFIG_HOME=/data/.acme.sh/config",
+ "ACMESH_HOME=/data/.acme.sh",
+ "CERT_HOME=/data/.acme.sh/certs",
+ "LE_CONFIG_HOME=/data/.acme.sh/config",
+ "LE_WORKING_DIR=/data/.acme.sh",
+ }
+ vals := getCommonEnvVars()
+ assert.Equal(t, expected, vals)
+ })
+}
+
+func TestGetAcmeShVersion(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ t.Run("basic test", func(t *testing.T) {
+ resp := GetAcmeShVersion()
+ // Seems like a pointless test, however when this is run in CI
+ // it doesn't have access to the acme.sh command so it will
+ // always be empty. But when running in Docker, it will.
+ if resp != "" {
+ assert.Equal(t, "v", resp[:1])
+ }
+ })
+}
diff --git a/backend/internal/acme/errors.go b/backend/internal/acme/errors.go
new file mode 100644
index 000000000..d68850ff2
--- /dev/null
+++ b/backend/internal/acme/errors.go
@@ -0,0 +1,12 @@
+package acme
+
+import (
+ "github.com/rotisserie/eris"
+)
+
+// All errors relating to Acme.sh use
+var (
+ ErrDNSNeedsDNSProvider = eris.New("RequestCert dns method requires a dns provider")
+ ErrHTTPHasDNSProvider = eris.New("RequestCert http method does not need a dns provider")
+ ErrMethodNotSupported = eris.New("RequestCert method not supported")
+)
diff --git a/backend/internal/api/context/context.go b/backend/internal/api/context/context.go
new file mode 100644
index 000000000..dc9f32b93
--- /dev/null
+++ b/backend/internal/api/context/context.go
@@ -0,0 +1,27 @@
+package context
+
+var (
+ // BodyCtxKey is the name of the Body value on the context
+ BodyCtxKey = &contextKey{"Body"}
+ // UserIDCtxKey is the name of the UserID value on the context
+ UserIDCtxKey = &contextKey{"UserID"}
+ // FiltersCtxKey is the name of the Filters value on the context
+ FiltersCtxKey = &contextKey{"Filters"}
+ // SortCtxKey is the name of the Sort value on the context
+ SortCtxKey = &contextKey{"Sort"}
+ // PrettyPrintCtxKey is the name of the pretty print context
+ PrettyPrintCtxKey = &contextKey{"Pretty"}
+ // ExpansionCtxKey is the name of the expansion context
+ ExpansionCtxKey = &contextKey{"Expansion"}
+)
+
+// contextKey is a value for use with context.WithValue. It's used as
+// a pointer so it fits in an interface{} without allocation. This technique
+// for defining context keys was copied from Go 1.7's new use of context in net/http.
+type contextKey struct {
+ name string
+}
+
+func (k *contextKey) String() string {
+ return "context value: " + k.name
+}
diff --git a/backend/internal/api/context/context_test.go b/backend/internal/api/context/context_test.go
new file mode 100644
index 000000000..9eee4c669
--- /dev/null
+++ b/backend/internal/api/context/context_test.go
@@ -0,0 +1,17 @@
+package context
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "go.uber.org/goleak"
+)
+
+func TestGetString(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ t.Run("basic test", func(t *testing.T) {
+ assert.Equal(t, "context value: Body", BodyCtxKey.String())
+ })
+}
diff --git a/backend/internal/api/handler/access_lists.go b/backend/internal/api/handler/access_lists.go
new file mode 100644
index 000000000..6a8335d13
--- /dev/null
+++ b/backend/internal/api/handler/access_lists.go
@@ -0,0 +1,129 @@
+package handler
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+
+ c "npm/internal/api/context"
+ h "npm/internal/api/http"
+ "npm/internal/api/middleware"
+ "npm/internal/entity/accesslist"
+)
+
+// GetAccessLists will return a list of Access Lists
+// Route: GET /access-lists
+func GetAccessLists() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ pageInfo, err := getPageInfoFromRequest(r)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ items, err := accesslist.List(pageInfo, middleware.GetFiltersFromContext(r))
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ h.ResultResponseJSON(w, r, http.StatusOK, items)
+ }
+ }
+}
+
+// GetAccessList will return a single access list
+// Route: GET /access-lists/{accessListID}
+func GetAccessList() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var accessListID uint
+ if accessListID, err = getURLParamInt(r, "accessListID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ item, err := accesslist.GetByID(accessListID)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ h.ResultResponseJSON(w, r, http.StatusOK, item)
+ }
+ }
+}
+
+// CreateAccessList will create an access list
+// Route: POST /access-lists
+func CreateAccessList() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+
+ var newItem accesslist.Model
+ err := json.Unmarshal(bodyBytes, &newItem)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+ return
+ }
+
+ // Get userID from token
+ userID, _ := r.Context().Value(c.UserIDCtxKey).(uint)
+ newItem.UserID = userID
+
+ if err = newItem.Save(); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, fmt.Sprintf("Unable to save Access List: %s", err.Error()), nil)
+ return
+ }
+
+ h.ResultResponseJSON(w, r, http.StatusOK, newItem)
+ }
+}
+
+// UpdateAccessList is self explanatory
+// Route: PUT /access-lists/{accessListID}
+func UpdateAccessList() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var accessListID uint
+ if accessListID, err = getURLParamInt(r, "accessListID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ item, err := accesslist.GetByID(accessListID)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+ err := json.Unmarshal(bodyBytes, &item)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+ return
+ }
+
+ if err = item.Save(); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ h.ResultResponseJSON(w, r, http.StatusOK, item)
+ }
+ }
+}
+
+// DeleteAccessList is self explanatory
+// Route: DELETE /access-lists/{accessListID}
+func DeleteAccessList() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var accessListID uint
+ if accessListID, err = getURLParamInt(r, "accessListID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ item, err := accesslist.GetByID(accessListID)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ h.ResultResponseJSON(w, r, http.StatusOK, item.Delete())
+ }
+ }
+}
diff --git a/backend/internal/api/handler/auth.go b/backend/internal/api/handler/auth.go
new file mode 100644
index 000000000..54f428c21
--- /dev/null
+++ b/backend/internal/api/handler/auth.go
@@ -0,0 +1,240 @@
+package handler
+
+import (
+ "encoding/json"
+ "net/http"
+ "slices"
+ "time"
+
+ c "npm/internal/api/context"
+ h "npm/internal/api/http"
+ "npm/internal/entity/auth"
+ "npm/internal/entity/setting"
+ "npm/internal/entity/user"
+ "npm/internal/errors"
+ njwt "npm/internal/jwt"
+ "npm/internal/logger"
+
+ "gorm.io/gorm"
+)
+
+// tokenPayload is the structure we expect from a incoming login request
+type tokenPayload struct {
+ Type string `json:"type"`
+ Identity string `json:"identity"`
+ Secret string `json:"secret"`
+}
+
+// GetAuthConfig is anonymous and returns the types of authentication
+// enabled for this site
+func GetAuthConfig() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ val, err := setting.GetAuthMethods()
+ if err == gorm.ErrRecordNotFound {
+ h.ResultResponseJSON(w, r, http.StatusOK, nil)
+ return
+ } else if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+ h.ResultResponseJSON(w, r, http.StatusOK, val)
+ }
+}
+
+// NewToken Also known as a Login, requesting a new token with credentials
+// Route: POST /auth
+func NewToken() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ // Read the bytes from the body
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+
+ var payload tokenPayload
+ err := json.Unmarshal(bodyBytes, &payload)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+ return
+ }
+
+ // Check that this auth type is enabled
+ if authMethods, err := setting.GetAuthMethods(); err == gorm.ErrRecordNotFound {
+ h.ResultResponseJSON(w, r, http.StatusOK, nil)
+ return
+ } else if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ } else if !slices.Contains(authMethods, payload.Type) {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, errors.ErrInvalidAuthType.Error(), nil)
+ return
+ }
+
+ switch payload.Type {
+ case "ldap":
+ newTokenLDAP(w, r, payload)
+ case "local":
+ newTokenLocal(w, r, payload)
+ }
+ }
+}
+
+func newTokenLocal(w http.ResponseWriter, r *http.Request, payload tokenPayload) {
+ // Find user by email
+ userObj, userErr := user.GetByEmail(payload.Identity)
+ if userErr != nil {
+ logger.Debug("%s: %s", errors.ErrInvalidLogin.Error(), userErr.Error())
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, errors.ErrInvalidLogin.Error(), nil)
+ return
+ }
+
+ if userObj.IsDisabled {
+ h.ResultErrorJSON(w, r, http.StatusUnauthorized, errors.ErrUserDisabled.Error(), nil)
+ return
+ }
+
+ // Get Auth
+ authObj, authErr := auth.GetByUserIDType(userObj.ID, payload.Type)
+ if authErr != nil {
+ logger.Debug("%s: %s", errors.ErrInvalidLogin.Error(), authErr.Error())
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, errors.ErrInvalidLogin.Error(), nil)
+ return
+ }
+
+ // Verify Auth
+ validateErr := authObj.ValidateSecret(payload.Secret)
+ if validateErr != nil {
+ logger.Debug("%s: %s", errors.ErrInvalidLogin.Error(), validateErr.Error())
+ // Sleep for 1 second to prevent brute force password guessing
+ time.Sleep(time.Second)
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, errors.ErrInvalidLogin.Error(), nil)
+ return
+ }
+
+ if response, err := njwt.Generate(&userObj, false); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusInternalServerError, err.Error(), nil)
+ } else {
+ h.ResultResponseJSON(w, r, http.StatusOK, response)
+ }
+}
+
+func newTokenLDAP(w http.ResponseWriter, r *http.Request, payload tokenPayload) {
+ // Get LDAP settings
+ ldapSettings, err := setting.GetLDAPSettings()
+ if err != nil {
+ logger.Error("LDAP settings not found", err)
+ h.ResultErrorJSON(w, r, http.StatusInternalServerError, err.Error(), nil)
+ return
+ }
+
+ // Lets try to authenticate with LDAP
+ ldapUser, err := auth.LDAPAuthenticate(payload.Identity, payload.Secret)
+ if err != nil {
+ logger.Error("LDAP Auth Error", err)
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, errors.ErrInvalidLogin.Error(), nil)
+ return
+ }
+
+ // Get Auth by identity
+ authObj, authErr := auth.GetByIdenityType(ldapUser.Username, payload.Type)
+ if authErr == gorm.ErrRecordNotFound {
+ // Auth is not found for this identity. We can create it
+ if !ldapSettings.AutoCreateUser {
+ // LDAP Login was successful, but user does not have an auth record
+ // and auto create is disabled. Showing account disabled error
+ // for the time being
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, errors.ErrUserDisabled.Error(), nil)
+ return
+ }
+
+ // Attempt to find user by email
+ foundUser, err := user.GetByEmail(ldapUser.Email)
+ if err == gorm.ErrRecordNotFound {
+ // User not found, create user
+ foundUser, err = user.CreateFromLDAPUser(ldapUser)
+ if err != nil {
+ logger.Error("user.CreateFromLDAPUser", err)
+ h.ResultErrorJSON(w, r, http.StatusInternalServerError, err.Error(), nil)
+ return
+ }
+ logger.Info("Created user from LDAP: %s, %s", ldapUser.Username, foundUser.Email)
+ } else if err != nil {
+ logger.Error("user.GetByEmail", err)
+ h.ResultErrorJSON(w, r, http.StatusInternalServerError, err.Error(), nil)
+ return
+ }
+
+ // Create auth record and attach to this user
+ authObj = auth.Model{
+ UserID: foundUser.ID,
+ Type: auth.TypeLDAP,
+ Identity: ldapUser.Username,
+ }
+ if err := authObj.Save(); err != nil {
+ logger.Error("auth.Save", err)
+ h.ResultErrorJSON(w, r, http.StatusInternalServerError, err.Error(), nil)
+ return
+ }
+ logger.Info("Created LDAP auth for user: %s, %s", ldapUser.Username, foundUser.Email)
+ } else if authErr != nil {
+ logger.Error("auth.GetByIdenityType", err)
+ h.ResultErrorJSON(w, r, http.StatusInternalServerError, authErr.Error(), nil)
+ return
+ }
+
+ userObj, userErr := user.GetByID(authObj.UserID)
+ if userErr != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, userErr.Error(), nil)
+ return
+ }
+
+ if userObj.IsDisabled {
+ h.ResultErrorJSON(w, r, http.StatusUnauthorized, errors.ErrUserDisabled.Error(), nil)
+ return
+ }
+
+ if response, err := njwt.Generate(&userObj, false); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusInternalServerError, err.Error(), nil)
+ } else {
+ h.ResultResponseJSON(w, r, http.StatusOK, response)
+ }
+}
+
+// RefreshToken an existing token by given them a new one with the same claims
+// Route: POST /auth/refresh
+func RefreshToken() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ // TODO: Use your own methods to verify an existing user is
+ // able to refresh their token and then give them a new one
+ userObj, _ := user.GetByEmail("jc@jc21.com")
+ if response, err := njwt.Generate(&userObj, false); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusInternalServerError, err.Error(), nil)
+ } else {
+ h.ResultResponseJSON(w, r, http.StatusOK, response)
+ }
+ }
+}
+
+// NewSSEToken will generate and return a very short lived token for
+// use by the /sse/* endpoint. It requires an app token to generate this
+// Route: POST /auth/sse
+func NewSSEToken() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ userID := r.Context().Value(c.UserIDCtxKey).(uint)
+
+ // Find user
+ userObj, userErr := user.GetByID(userID)
+ if userErr != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, errors.ErrInvalidLogin.Error(), nil)
+ return
+ }
+
+ if userObj.IsDisabled {
+ h.ResultErrorJSON(w, r, http.StatusUnauthorized, errors.ErrUserDisabled.Error(), nil)
+ return
+ }
+
+ if response, err := njwt.Generate(&userObj, true); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusInternalServerError, err.Error(), nil)
+ } else {
+ h.ResultResponseJSON(w, r, http.StatusOK, response)
+ }
+ }
+}
diff --git a/backend/internal/api/handler/certificate_authorities.go b/backend/internal/api/handler/certificate_authorities.go
new file mode 100644
index 000000000..cdd9e5257
--- /dev/null
+++ b/backend/internal/api/handler/certificate_authorities.go
@@ -0,0 +1,152 @@
+package handler
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+
+ "npm/internal/acme"
+ c "npm/internal/api/context"
+ h "npm/internal/api/http"
+ "npm/internal/api/middleware"
+ "npm/internal/entity/certificateauthority"
+ "npm/internal/logger"
+
+ "gorm.io/gorm"
+)
+
+// GetCertificateAuthorities will return a list of Certificate Authorities
+// Route: GET /certificate-authorities
+func GetCertificateAuthorities() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ pageInfo, err := getPageInfoFromRequest(r)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ certificates, err := certificateauthority.List(pageInfo, middleware.GetFiltersFromContext(r))
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ h.ResultResponseJSON(w, r, http.StatusOK, certificates)
+ }
+ }
+}
+
+// GetCertificateAuthority will return a single Certificate Authority
+// Route: GET /certificate-authorities/{caID}
+func GetCertificateAuthority() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var caID uint
+ if caID, err = getURLParamInt(r, "caID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ item, err := certificateauthority.GetByID(caID)
+ switch err {
+ case gorm.ErrRecordNotFound:
+ h.NotFound(w, r)
+ case nil:
+ h.ResultResponseJSON(w, r, http.StatusOK, item)
+ default:
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ }
+ }
+}
+
+// CreateCertificateAuthority will create a Certificate Authority
+// Route: POST /certificate-authorities
+func CreateCertificateAuthority() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+
+ var newCA certificateauthority.Model
+ err := json.Unmarshal(bodyBytes, &newCA)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+ return
+ }
+
+ if err = newCA.Check(); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ if err = newCA.Save(); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, fmt.Sprintf("Unable to save Certificate Authority: %s", err.Error()), nil)
+ return
+ }
+
+ if err = acme.CreateAccountKey(&newCA); err != nil {
+ logger.Error("CreateAccountKeyError", err)
+ }
+
+ h.ResultResponseJSON(w, r, http.StatusOK, newCA)
+ }
+}
+
+// UpdateCertificateAuthority updates a ca
+// Route: PUT /certificate-authorities/{caID}
+func UpdateCertificateAuthority() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var caID uint
+ if caID, err = getURLParamInt(r, "caID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ ca, err := certificateauthority.GetByID(caID)
+ switch err {
+ case gorm.ErrRecordNotFound:
+ h.NotFound(w, r)
+ case nil:
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+ err := json.Unmarshal(bodyBytes, &ca)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+ return
+ }
+
+ if err = ca.Check(); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ if err = ca.Save(); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ h.ResultResponseJSON(w, r, http.StatusOK, ca)
+ default:
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ }
+ }
+}
+
+// DeleteCertificateAuthority deletes a ca
+// Route: DELETE /certificate-authorities/{caID}
+func DeleteCertificateAuthority() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var caID uint
+ if caID, err = getURLParamInt(r, "caID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ item, err := certificateauthority.GetByID(caID)
+ switch err {
+ case gorm.ErrRecordNotFound:
+ h.NotFound(w, r)
+ case nil:
+ h.ResultResponseJSON(w, r, http.StatusOK, item.Delete())
+ default:
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ }
+ }
+}
diff --git a/backend/internal/api/handler/certificates.go b/backend/internal/api/handler/certificates.go
new file mode 100644
index 000000000..618d6d4fc
--- /dev/null
+++ b/backend/internal/api/handler/certificates.go
@@ -0,0 +1,187 @@
+package handler
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+
+ c "npm/internal/api/context"
+ h "npm/internal/api/http"
+ "npm/internal/api/middleware"
+ "npm/internal/api/schema"
+ "npm/internal/entity/certificate"
+ "npm/internal/entity/host"
+ "npm/internal/jobqueue"
+ "npm/internal/logger"
+
+ "gorm.io/gorm"
+)
+
+// GetCertificates will return a list of Certificates
+// Route: GET /certificates
+func GetCertificates() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ pageInfo, err := getPageInfoFromRequest(r)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ certificates, err := certificate.List(pageInfo, middleware.GetFiltersFromContext(r), middleware.GetExpandFromContext(r))
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ h.ResultResponseJSON(w, r, http.StatusOK, certificates)
+ }
+ }
+}
+
+// GetCertificate will return a single Certificate
+// Route: GET /certificates/{certificateID}
+func GetCertificate() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ if item := getCertificateFromRequest(w, r); item != nil {
+ // nolint: errcheck,gosec
+ item.Expand(middleware.GetExpandFromContext(r))
+ h.ResultResponseJSON(w, r, http.StatusOK, item)
+ }
+ }
+}
+
+// CreateCertificate will create a Certificate
+// Route: POST /certificates
+func CreateCertificate() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var item certificate.Model
+ if fillObjectFromBody(w, r, "", &item) {
+ // Get userID from token
+ userID, _ := r.Context().Value(c.UserIDCtxKey).(uint)
+ item.UserID = userID
+
+ if err := item.Save(); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, fmt.Sprintf("Unable to save Certificate: %s", err.Error()), nil)
+ return
+ }
+
+ configureCertificate(item)
+ h.ResultResponseJSON(w, r, http.StatusOK, item)
+ }
+ }
+}
+
+// UpdateCertificate updates a cert
+// Route: PUT /certificates/{certificateID}
+func UpdateCertificate() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ if item := getCertificateFromRequest(w, r); item != nil {
+ // This is a special endpoint, as it needs to verify the schema payload
+ // based on the certificate type, without being given a type in the payload.
+ // The middleware would normally handle this.
+ if fillObjectFromBody(w, r, schema.UpdateCertificate(item.Type), item) {
+ if err := item.Save(); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ // configureCertificate(*item, item.Request)
+ h.ResultResponseJSON(w, r, http.StatusOK, item)
+ }
+ }
+ }
+}
+
+// DeleteCertificate deletes a cert
+// Route: DELETE /certificates/{certificateID}
+func DeleteCertificate() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ if item := getCertificateFromRequest(w, r); item != nil {
+ cnt := host.GetCertificateUseCount(item.ID)
+ if cnt > 0 {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, "Cannot delete certificate that is in use by at least 1 host", nil)
+ return
+ }
+ h.ResultResponseJSON(w, r, http.StatusOK, item.Delete())
+ }
+ }
+}
+
+// RenewCertificate is self explanatory
+// Route: PUT /certificates/{certificateID}/renew
+func RenewCertificate() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ if item := getCertificateFromRequest(w, r); item != nil {
+ configureCertificate(*item)
+ h.ResultResponseJSON(w, r, http.StatusOK, true)
+ }
+ }
+}
+
+// DownloadCertificate is self explanatory
+// Route: PUT /certificates/{certificateID}/download
+func DownloadCertificate() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ if item := getCertificateFromRequest(w, r); item != nil {
+ // todo
+ h.ResultResponseJSON(w, r, http.StatusOK, "ok")
+ }
+ }
+}
+
+// getCertificateFromRequest has some reusable code for all endpoints that
+// have a certificate id in the url. it will write errors to the output.
+func getCertificateFromRequest(w http.ResponseWriter, r *http.Request) *certificate.Model {
+ var err error
+ var certificateID uint
+ if certificateID, err = getURLParamInt(r, "certificateID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return nil
+ }
+
+ certificateObject, err := certificate.GetByID(certificateID)
+ switch err {
+ case gorm.ErrRecordNotFound:
+ h.NotFound(w, r)
+ case nil:
+ return &certificateObject
+ default:
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ }
+ return nil
+}
+
+// fillObjectFromBody has some reusable code for all endpoints that
+// have a certificate id in the url. it will write errors to the output.
+func fillObjectFromBody(w http.ResponseWriter, r *http.Request, validationSchema string, o any) bool {
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+
+ if validationSchema != "" {
+ schemaErrors, jsonErr := middleware.CheckRequestSchema(r.Context(), validationSchema, bodyBytes)
+ if jsonErr != nil {
+ h.ResultErrorJSON(w, r, http.StatusInternalServerError, fmt.Sprintf("Schema Fatal: %v", jsonErr), nil)
+ return false
+ }
+ if len(schemaErrors) > 0 {
+ h.ResultSchemaErrorJSON(w, r, schemaErrors)
+ return false
+ }
+ }
+
+ err := json.Unmarshal(bodyBytes, o)
+ if err != nil {
+ logger.Debug("Unmarshal Error: %+v", err)
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+ return false
+ }
+
+ return true
+}
+
+func configureCertificate(cert certificate.Model) {
+ err := jobqueue.AddJob(jobqueue.Job{
+ Name: "RequestCertificate",
+ Action: cert.Request,
+ })
+ if err != nil {
+ logger.Error("ConfigureCertificateError", err)
+ }
+}
diff --git a/backend/internal/api/handler/config.go b/backend/internal/api/handler/config.go
new file mode 100644
index 000000000..3f6bc9544
--- /dev/null
+++ b/backend/internal/api/handler/config.go
@@ -0,0 +1,16 @@
+package handler
+
+import (
+ "net/http"
+
+ h "npm/internal/api/http"
+ "npm/internal/config"
+)
+
+// Config returns the entire configuration, for debug purposes
+// Route: GET /config
+func Config() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ h.ResultResponseJSON(w, r, http.StatusOK, config.Configuration)
+ }
+}
diff --git a/backend/internal/api/handler/dns_providers.go b/backend/internal/api/handler/dns_providers.go
new file mode 100644
index 000000000..517d14bee
--- /dev/null
+++ b/backend/internal/api/handler/dns_providers.go
@@ -0,0 +1,173 @@
+package handler
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+
+ c "npm/internal/api/context"
+ h "npm/internal/api/http"
+ "npm/internal/api/middleware"
+ "npm/internal/dnsproviders"
+ "npm/internal/entity/dnsprovider"
+ "npm/internal/errors"
+
+ "gorm.io/gorm"
+)
+
+// GetDNSProviders will return a list of DNS Providers
+// Route: GET /dns-providers
+func GetDNSProviders() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ pageInfo, err := getPageInfoFromRequest(r)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ items, err := dnsprovider.List(pageInfo, middleware.GetFiltersFromContext(r))
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ h.ResultResponseJSON(w, r, http.StatusOK, items)
+ }
+ }
+}
+
+// GetDNSProvider will return a single DNS Provider
+// Route: GET /dns-providers/{providerID}
+func GetDNSProvider() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var providerID uint
+ if providerID, err = getURLParamInt(r, "providerID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ item, err := dnsprovider.GetByID(providerID)
+ switch err {
+ case gorm.ErrRecordNotFound:
+ h.NotFound(w, r)
+ case nil:
+ h.ResultResponseJSON(w, r, http.StatusOK, item)
+ default:
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ }
+ }
+}
+
+// CreateDNSProvider will create a DNS Provider
+// Route: POST /dns-providers
+func CreateDNSProvider() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+
+ var newItem dnsprovider.Model
+ err := json.Unmarshal(bodyBytes, &newItem)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+ return
+ }
+
+ // Get userID from token
+ userID, _ := r.Context().Value(c.UserIDCtxKey).(uint)
+ newItem.UserID = userID
+
+ if err = newItem.Save(); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, fmt.Sprintf("Unable to save DNS Provider: %s", err.Error()), nil)
+ return
+ }
+
+ h.ResultResponseJSON(w, r, http.StatusOK, newItem)
+ }
+}
+
+// UpdateDNSProvider updates a provider
+// Route: PUT /dns-providers/{providerID}
+func UpdateDNSProvider() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var providerID uint
+ if providerID, err = getURLParamInt(r, "providerID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ item, err := dnsprovider.GetByID(providerID)
+ switch err {
+ case gorm.ErrRecordNotFound:
+ h.NotFound(w, r)
+ case nil:
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+ err := json.Unmarshal(bodyBytes, &item)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+ return
+ }
+
+ if err = item.Save(); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ h.ResultResponseJSON(w, r, http.StatusOK, item)
+ default:
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ }
+ }
+}
+
+// DeleteDNSProvider removes a provider
+// Route: DELETE /dns-providers/{providerID}
+func DeleteDNSProvider() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var providerID uint
+ if providerID, err = getURLParamInt(r, "providerID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ item, err := dnsprovider.GetByID(providerID)
+ switch err {
+ case gorm.ErrRecordNotFound:
+ h.NotFound(w, r)
+ case nil:
+ h.ResultResponseJSON(w, r, http.StatusOK, item.Delete())
+ default:
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ }
+ }
+}
+
+// GetAcmeshProviders will return a list of acme.sh providers
+// Route: GET /dns-providers/acmesh
+func GetAcmeshProviders() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ h.ResultResponseJSON(w, r, http.StatusOK, dnsproviders.List())
+ }
+}
+
+// GetAcmeshProvider will return a single acme.sh provider
+// Route: GET /dns-providers/acmesh/{acmeshID}
+func GetAcmeshProvider() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var acmeshID string
+ var err error
+ if acmeshID, err = getURLParamString(r, "acmeshID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ item, getErr := dnsproviders.Get(acmeshID)
+ switch getErr {
+ case errors.ErrProviderNotFound:
+ h.NotFound(w, r)
+ case nil:
+ h.ResultResponseJSON(w, r, http.StatusOK, item)
+ default:
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, getErr.Error(), nil)
+ }
+ }
+}
diff --git a/backend/internal/api/handler/health.go b/backend/internal/api/handler/health.go
new file mode 100644
index 000000000..6f0ac3137
--- /dev/null
+++ b/backend/internal/api/handler/health.go
@@ -0,0 +1,33 @@
+package handler
+
+import (
+ "net/http"
+
+ "npm/internal/acme"
+ h "npm/internal/api/http"
+ "npm/internal/config"
+)
+
+type healthCheckResponse struct {
+ Version string `json:"version"`
+ Commit string `json:"commit"`
+ AcmeShVersion string `json:"acme.sh"`
+ Healthy bool `json:"healthy"`
+ IsSetup bool `json:"setup"`
+}
+
+// Health returns the health of the api
+// Route: GET /health
+func Health() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ health := healthCheckResponse{
+ Version: config.Version,
+ Commit: config.Commit,
+ Healthy: true,
+ IsSetup: config.IsSetup,
+ AcmeShVersion: acme.GetAcmeShVersion(),
+ }
+
+ h.ResultResponseJSON(w, r, http.StatusOK, health)
+ }
+}
diff --git a/backend/internal/api/handler/helpers.go b/backend/internal/api/handler/helpers.go
new file mode 100644
index 000000000..f04b42253
--- /dev/null
+++ b/backend/internal/api/handler/helpers.go
@@ -0,0 +1,104 @@
+package handler
+
+import (
+ "net/http"
+ "strconv"
+
+ "npm/internal/model"
+
+ "github.com/go-chi/chi/v5"
+ "github.com/rotisserie/eris"
+)
+
+const defaultLimit = 10
+
+func getPageInfoFromRequest(r *http.Request) (model.PageInfo, error) {
+ pageInfo := model.PageInfo{}
+ var err error
+
+ pageInfo.Offset, pageInfo.Limit, err = getPagination(r)
+ if err != nil {
+ return pageInfo, err
+ }
+
+ return pageInfo, nil
+}
+
+func getQueryVarString(r *http.Request, varName string, required bool, defaultValue string) (string, error) {
+ queryValues := r.URL.Query()
+ varValue := queryValues.Get(varName)
+
+ if varValue == "" && required {
+ return "", eris.Errorf("%v was not supplied in the request", varName)
+ } else if varValue == "" {
+ return defaultValue, nil
+ }
+
+ return varValue, nil
+}
+
+func getQueryVarInt(r *http.Request, varName string, required bool, defaultValue int) (int, error) {
+ queryValues := r.URL.Query()
+ varValue := queryValues.Get(varName)
+
+ if varValue == "" && required {
+ return 0, eris.Errorf("%v was not supplied in the request", varName)
+ } else if varValue == "" {
+ return defaultValue, nil
+ }
+
+ varInt, intErr := strconv.Atoi(varValue)
+ if intErr != nil {
+ return 0, eris.Wrapf(intErr, "%v is not a valid number", varName)
+ }
+
+ return varInt, nil
+}
+
+func getURLParamInt(r *http.Request, varName string) (uint, error) {
+ var defaultValue uint
+
+ required := true
+ paramStr := chi.URLParam(r, varName)
+
+ if paramStr == "" && required {
+ return 0, eris.Errorf("%v was not supplied in the request", varName)
+ } else if paramStr == "" {
+ return defaultValue, nil
+ }
+
+ paramUint, err := strconv.ParseUint(paramStr, 10, 32)
+ if err != nil {
+ return 0, eris.Wrapf(err, "%v is not a valid number", varName)
+ }
+
+ return uint(paramUint), nil
+}
+
+func getURLParamString(r *http.Request, varName string) (string, error) {
+ required := true
+ defaultValue := ""
+ paramStr := chi.URLParam(r, varName)
+
+ if paramStr == "" && required {
+ return "", eris.Errorf("%v was not supplied in the request", varName)
+ } else if paramStr == "" {
+ return defaultValue, nil
+ }
+
+ return paramStr, nil
+}
+
+func getPagination(r *http.Request) (int, int, error) {
+ var err error
+ offset, err := getQueryVarInt(r, "offset", false, 0)
+ if err != nil {
+ return 0, 0, err
+ }
+ limit, err := getQueryVarInt(r, "limit", false, defaultLimit)
+ if err != nil {
+ return 0, 0, err
+ }
+
+ return offset, limit, nil
+}
diff --git a/backend/internal/api/handler/helpers_test.go b/backend/internal/api/handler/helpers_test.go
new file mode 100644
index 000000000..c8c1fd6c6
--- /dev/null
+++ b/backend/internal/api/handler/helpers_test.go
@@ -0,0 +1,119 @@
+package handler
+
+import (
+ "net/http"
+ "net/http/httptest"
+ "testing"
+
+ "npm/internal/model"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestGetPageInfoFromRequest(t *testing.T) {
+ t.Run("basic test", func(t *testing.T) {
+ r := httptest.NewRequest(http.MethodGet, "/hosts", nil)
+ p, err := getPageInfoFromRequest(r)
+
+ var nilStringSlice []string
+ var nilSortSlice []model.Sort
+ defaultSort := model.Sort{Field: "name", Direction: "asc"}
+
+ assert.Equal(t, nil, err)
+ assert.Equal(t, 0, p.Offset)
+ assert.Equal(t, 10, p.Limit)
+ assert.Equal(t, nilStringSlice, p.Expand)
+ assert.Equal(t, nilSortSlice, p.Sort)
+ assert.Equal(t, []model.Sort{defaultSort}, p.GetSort(defaultSort))
+ })
+}
+
+func TestGetQueryVarInt(t *testing.T) {
+ type want struct {
+ val int
+ err string
+ }
+
+ tests := []struct {
+ name string
+ url string
+ queryVar string
+ required bool
+ defaultValue int
+ want want
+ }{
+ {
+ name: "simple default",
+ url: "/hosts",
+ queryVar: "something",
+ required: false,
+ defaultValue: 100,
+ want: want{
+ val: 100,
+ err: "",
+ },
+ },
+ {
+ name: "required flag",
+ url: "/hosts",
+ queryVar: "something",
+ required: true,
+ want: want{
+ val: 0,
+ err: "something was not supplied in the request",
+ },
+ },
+ {
+ name: "simple get",
+ url: "/hosts?something=50",
+ queryVar: "something",
+ required: true,
+ want: want{
+ val: 50,
+ err: "",
+ },
+ },
+ {
+ name: "invalid number",
+ url: "/hosts?something=aaa",
+ queryVar: "something",
+ required: true,
+ want: want{
+ val: 0,
+ err: "",
+ },
+ },
+ {
+ name: "preceding zeros",
+ url: "/hosts?something=0000050",
+ queryVar: "something",
+ required: true,
+ want: want{
+ val: 50,
+ err: "",
+ },
+ },
+ {
+ name: "decimals",
+ url: "/hosts?something=50.50",
+ queryVar: "something",
+ required: true,
+ want: want{
+ val: 0,
+ err: "",
+ },
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ r := httptest.NewRequest(http.MethodGet, tt.url, nil)
+ val, err := getQueryVarInt(r, tt.queryVar, tt.required, tt.defaultValue)
+ assert.Equal(t, tt.want.val, val)
+ if tt.want.err != "" {
+ assert.NotEqual(t, nil, err)
+ assert.Equal(t, tt.want.err, err.Error())
+ }
+ })
+ }
+}
diff --git a/backend/internal/api/handler/hosts.go b/backend/internal/api/handler/hosts.go
new file mode 100644
index 000000000..c902b57db
--- /dev/null
+++ b/backend/internal/api/handler/hosts.go
@@ -0,0 +1,215 @@
+package handler
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+
+ c "npm/internal/api/context"
+ h "npm/internal/api/http"
+ "npm/internal/api/middleware"
+ "npm/internal/entity/host"
+ "npm/internal/jobqueue"
+ "npm/internal/logger"
+ "npm/internal/nginx"
+ "npm/internal/validator"
+
+ "gorm.io/gorm"
+)
+
+// GetHosts will return a list of Hosts
+// Route: GET /hosts
+func GetHosts() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ pageInfo, err := getPageInfoFromRequest(r)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ hosts, err := host.List(pageInfo, middleware.GetFiltersFromContext(r), middleware.GetExpandFromContext(r))
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ h.ResultResponseJSON(w, r, http.StatusOK, hosts)
+ }
+ }
+}
+
+// GetHost will return a single Host
+// Route: GET /hosts/{hostID}
+func GetHost() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var hostID uint
+ if hostID, err = getURLParamInt(r, "hostID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ item, err := host.GetByID(hostID)
+ switch err {
+ case gorm.ErrRecordNotFound:
+ h.NotFound(w, r)
+ case nil:
+ // nolint: errcheck,gosec
+ item.Expand(middleware.GetExpandFromContext(r))
+ h.ResultResponseJSON(w, r, http.StatusOK, item)
+ default:
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ }
+ }
+}
+
+// CreateHost will create a Host
+// Route: POST /hosts
+func CreateHost() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+
+ var newHost host.Model
+ err := json.Unmarshal(bodyBytes, &newHost)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+ return
+ }
+
+ // Get userID from token
+ userID, _ := r.Context().Value(c.UserIDCtxKey).(uint)
+ newHost.UserID = userID
+
+ if err = validator.ValidateHost(newHost); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ if err = newHost.Save(false); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, fmt.Sprintf("Unable to save Host: %s", err.Error()), nil)
+ return
+ }
+
+ if newHost.UpstreamID.Uint > 0 {
+ // nolint: errcheck, gosec
+ newHost.Expand([]string{"upstream"})
+ }
+
+ configureHost(newHost)
+
+ h.ResultResponseJSON(w, r, http.StatusOK, newHost)
+ }
+}
+
+// UpdateHost updates a host
+// Route: PUT /hosts/{hostID}
+func UpdateHost() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var hostID uint
+ if hostID, err = getURLParamInt(r, "hostID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ hostObject, err := host.GetByID(hostID)
+ switch err {
+ case gorm.ErrRecordNotFound:
+ h.NotFound(w, r)
+ case nil:
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+ err := json.Unmarshal(bodyBytes, &hostObject)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+ return
+ }
+
+ if err = validator.ValidateHost(hostObject); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ if err = hostObject.Save(false); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ // nolint: errcheck,gosec
+ hostObject.Expand(middleware.GetExpandFromContext(r))
+
+ configureHost(hostObject)
+
+ h.ResultResponseJSON(w, r, http.StatusOK, hostObject)
+ default:
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ }
+ }
+}
+
+// DeleteHost removes a host
+// Route: DELETE /hosts/{hostID}
+func DeleteHost() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var hostID uint
+ if hostID, err = getURLParamInt(r, "hostID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ item, err := host.GetByID(hostID)
+ switch err {
+ case gorm.ErrRecordNotFound:
+ h.NotFound(w, r)
+ case nil:
+ h.ResultResponseJSON(w, r, http.StatusOK, item.Delete())
+ configureHost(item)
+ default:
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ }
+ }
+}
+
+// GetHostNginxConfig will return a Host's nginx config from disk
+// Route: GET /hosts/{hostID}/nginx-config
+// Route: GET /hosts/{hostID}/nginx-config.txt
+func GetHostNginxConfig(format string) func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var hostID uint
+ if hostID, err = getURLParamInt(r, "hostID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ item, err := host.GetByID(hostID)
+ switch err {
+ case gorm.ErrRecordNotFound:
+ h.NotFound(w, r)
+ case nil:
+ // Get the config from disk
+ content, nErr := nginx.GetHostConfigContent(item)
+ if nErr != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, nErr.Error(), nil)
+ return
+ }
+ if format == "text" {
+ h.ResultResponseText(w, http.StatusOK, content)
+ return
+ }
+ h.ResultResponseJSON(w, r, http.StatusOK, content)
+ default:
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ }
+ }
+}
+
+func configureHost(hst host.Model) {
+ err := jobqueue.AddJob(jobqueue.Job{
+ Name: "NginxConfigureHost",
+ Action: func() error {
+ return nginx.ConfigureHost(hst)
+ },
+ })
+ if err != nil {
+ logger.Error("ConfigureHostError", err)
+ }
+}
diff --git a/backend/internal/api/handler/nginx_templates.go b/backend/internal/api/handler/nginx_templates.go
new file mode 100644
index 000000000..f5df7e06c
--- /dev/null
+++ b/backend/internal/api/handler/nginx_templates.go
@@ -0,0 +1,142 @@
+package handler
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+
+ c "npm/internal/api/context"
+ h "npm/internal/api/http"
+ "npm/internal/api/middleware"
+ "npm/internal/entity/nginxtemplate"
+
+ "gorm.io/gorm"
+)
+
+// GetNginxTemplates will return a list of Nginx Templates
+// Route: GET /nginx-templates
+func GetNginxTemplates() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ pageInfo, err := getPageInfoFromRequest(r)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ items, err := nginxtemplate.List(pageInfo, middleware.GetFiltersFromContext(r))
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ h.ResultResponseJSON(w, r, http.StatusOK, items)
+ }
+ }
+}
+
+// GetNginxTemplate will return a single Nginx Template
+// Route: GET /nginx-templates/{templateID}
+func GetNginxTemplate() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var templateID uint
+ if templateID, err = getURLParamInt(r, "templateID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ item, err := nginxtemplate.GetByID(templateID)
+ switch err {
+ case gorm.ErrRecordNotFound:
+ h.NotFound(w, r)
+ case nil:
+ h.ResultResponseJSON(w, r, http.StatusOK, item)
+ default:
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ }
+ }
+}
+
+// CreateNginxTemplate will create a Nginx Template
+// Route: POST /nginx-templates
+func CreateNginxTemplate() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+
+ var newNginxTemplate nginxtemplate.Model
+ err := json.Unmarshal(bodyBytes, &newNginxTemplate)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+ return
+ }
+
+ // Get userID from token
+ userID, _ := r.Context().Value(c.UserIDCtxKey).(uint)
+ newNginxTemplate.UserID = userID
+
+ if err = newNginxTemplate.Save(); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, fmt.Sprintf("Unable to save Nginx Template: %s", err.Error()), nil)
+ return
+ }
+
+ h.ResultResponseJSON(w, r, http.StatusOK, newNginxTemplate)
+ }
+}
+
+// UpdateNginxTemplate updates a nginx template
+// Route: PUT /nginx-templates/{templateID}
+func UpdateNginxTemplate() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var templateID uint
+ if templateID, err = getURLParamInt(r, "templateID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ // reconfigure, _ := getQueryVarBool(r, "reconfigure", false, false)
+
+ nginxTemplate, err := nginxtemplate.GetByID(templateID)
+ switch err {
+ case gorm.ErrRecordNotFound:
+ h.NotFound(w, r)
+ case nil:
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+ err := json.Unmarshal(bodyBytes, &nginxTemplate)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+ return
+ }
+
+ if err = nginxTemplate.Save(); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ h.ResultResponseJSON(w, r, http.StatusOK, nginxTemplate)
+ default:
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ }
+ }
+}
+
+// DeleteNginxTemplate removes a nginx template
+// Route: DELETE /nginx-templates/{templateID}
+func DeleteNginxTemplate() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var templateID uint
+ if templateID, err = getURLParamInt(r, "templateID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ item, err := nginxtemplate.GetByID(templateID)
+ switch err {
+ case gorm.ErrRecordNotFound:
+ h.NotFound(w, r)
+ case nil:
+ h.ResultResponseJSON(w, r, http.StatusOK, item.Delete())
+ default:
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ }
+ }
+}
diff --git a/backend/internal/api/handler/not_allowed.go b/backend/internal/api/handler/not_allowed.go
new file mode 100644
index 000000000..966debab3
--- /dev/null
+++ b/backend/internal/api/handler/not_allowed.go
@@ -0,0 +1,14 @@
+package handler
+
+import (
+ "net/http"
+
+ h "npm/internal/api/http"
+)
+
+// NotAllowed is a json error handler for when method is not allowed
+func NotAllowed() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ h.ResultErrorJSON(w, r, http.StatusNotFound, "Not allowed", nil)
+ }
+}
diff --git a/backend/internal/api/handler/not_found.go b/backend/internal/api/handler/not_found.go
new file mode 100644
index 000000000..761260423
--- /dev/null
+++ b/backend/internal/api/handler/not_found.go
@@ -0,0 +1,82 @@
+package handler
+
+import (
+ "io"
+ "io/fs"
+ "mime"
+ "net/http"
+ "path/filepath"
+ "strings"
+
+ "npm/embed"
+ h "npm/internal/api/http"
+
+ "github.com/rotisserie/eris"
+)
+
+var (
+ assetsSub fs.FS
+ errIsDir = eris.New("path is dir")
+)
+
+// NotFound is a json error handler for 404's and method not allowed.
+// It also serves the react frontend as embedded files in the golang binary.
+func NotFound() func(http.ResponseWriter, *http.Request) {
+ assetsSub, _ = fs.Sub(embed.Assets, "assets")
+
+ return func(w http.ResponseWriter, r *http.Request) {
+ defaultFile := "index.html"
+ path := strings.TrimLeft(r.URL.Path, "/")
+
+ isAPI := false
+ if len(path) >= 3 && path[0:3] == "api" {
+ isAPI = true
+ }
+
+ if path == "" {
+ path = defaultFile
+ }
+
+ err := tryRead(assetsSub, path, w)
+ if err == errIsDir {
+ err = tryRead(assetsSub, defaultFile, w)
+ if err != nil {
+ h.NotFound(w, r)
+ }
+ } else if err == nil {
+ return
+ }
+
+ // Check if the path has an extension and not in the "/api" path
+ ext := filepath.Ext(path)
+ if !isAPI && ext == "" {
+ // Not an api endpoint and Not a specific file, return the default index file
+ err := tryRead(assetsSub, defaultFile, w)
+ if err == nil {
+ return
+ }
+ }
+
+ h.NotFound(w, r)
+ }
+}
+
+func tryRead(folder fs.FS, requestedPath string, w http.ResponseWriter) error {
+ f, err := folder.Open(requestedPath)
+ if err != nil {
+ return err
+ }
+
+ // nolint: errcheck
+ defer f.Close()
+
+ stat, _ := f.Stat()
+ if stat.IsDir() {
+ return errIsDir
+ }
+
+ contentType := mime.TypeByExtension(filepath.Ext(requestedPath))
+ w.Header().Set("Content-Type", contentType)
+ _, err = io.Copy(w, f)
+ return err
+}
diff --git a/backend/internal/api/handler/oauth.go b/backend/internal/api/handler/oauth.go
new file mode 100644
index 000000000..f88c5b846
--- /dev/null
+++ b/backend/internal/api/handler/oauth.go
@@ -0,0 +1,156 @@
+package handler
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+ "net/url"
+ "strings"
+
+ h "npm/internal/api/http"
+ "npm/internal/entity/auth"
+ "npm/internal/entity/setting"
+ "npm/internal/entity/user"
+ "npm/internal/errors"
+ njwt "npm/internal/jwt"
+ "npm/internal/logger"
+
+ "gorm.io/gorm"
+)
+
+// getRequestIPAddress will use X-FORWARDED-FOR header if it exists
+// otherwise it will use RemoteAddr
+func getRequestIPAddress(r *http.Request) string {
+ // this Get is case insensitive
+ xff := r.Header.Get("X-FORWARDED-FOR")
+ if xff != "" {
+ ip, _, _ := strings.Cut(xff, ",")
+ return strings.TrimSpace(ip)
+ }
+ return r.RemoteAddr
+}
+
+// OAuthLogin ...
+// Route: GET /oauth/login
+func OAuthLogin() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ if !setting.AuthMethodEnabled(auth.TypeOAuth) {
+ h.ResultErrorJSON(w, r, http.StatusNotFound, "Not found", nil)
+ return
+ }
+
+ redirectBase, _ := getQueryVarString(r, "redirect_base", false, "")
+ url, err := auth.OAuthLogin(redirectBase, getRequestIPAddress(r))
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ h.ResultResponseJSON(w, r, http.StatusOK, url)
+ }
+}
+
+// OAuthRedirect ...
+// Route: GET /oauth/redirect
+func OAuthRedirect() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ if !setting.AuthMethodEnabled(auth.TypeOAuth) {
+ h.ResultErrorJSON(w, r, http.StatusNotFound, "Not found", nil)
+ return
+ }
+
+ code, err := getQueryVarString(r, "code", true, "")
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ ou, err := auth.OAuthReturn(r.Context(), code, getRequestIPAddress(r))
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ if ou.Identifier == "" {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, "User found, but OAuth identifier seems misconfigured", nil)
+ return
+ }
+
+ jwt, err := newTokenOAuth(ou)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusInternalServerError, err.Error(), nil)
+ return
+ }
+
+ // encode jwt to json
+ j, _ := json.Marshal(jwt)
+
+ // Redirect to frontend with success
+ http.Redirect(w, r, fmt.Sprintf("/?token_response=%s", url.QueryEscape(string(j))), http.StatusSeeOther)
+ }
+}
+
+// newTokenOAuth takes a OAuthUser and creates a new token,
+// optionally creating a new user if one does not exist
+func newTokenOAuth(ou *auth.OAuthUser) (*njwt.GeneratedResponse, error) {
+ // Get OAuth settings
+ oAuthSettings, err := setting.GetOAuthSettings()
+ if err != nil {
+ logger.Error("OAuth settings not found", err)
+ return nil, err
+ }
+
+ // Get Auth by identity
+ authObj, authErr := auth.GetByIdenityType(ou.GetID(), auth.TypeOAuth)
+ if authErr == gorm.ErrRecordNotFound {
+ // Auth is not found for this identity. We can create it
+ if !oAuthSettings.AutoCreateUser {
+ // user does not have an auth record
+ // and auto create is disabled. Showing account disabled error
+ // for the time being
+ return nil, errors.ErrUserDisabled
+ }
+
+ // Attempt to find user by email
+ foundUser, err := user.GetByEmail(ou.GetEmail())
+ if err == gorm.ErrRecordNotFound {
+ // User not found, create user
+ foundUser, err = user.CreateFromOAuthUser(ou)
+ if err != nil {
+ logger.Error("user.CreateFromOAuthUser", err)
+ return nil, err
+ }
+ logger.Info("Created user from OAuth: %s, %s", ou.GetID(), foundUser.Email)
+ } else if err != nil {
+ logger.Error("user.GetByEmail", err)
+ return nil, err
+ }
+
+ // Create auth record and attach to this user
+ authObj = auth.Model{
+ UserID: foundUser.ID,
+ Type: auth.TypeOAuth,
+ Identity: ou.GetID(),
+ }
+ if err := authObj.Save(); err != nil {
+ logger.Error("auth.Save", err)
+ return nil, err
+ }
+ logger.Info("Created OAuth auth for user: %s, %s", ou.GetID(), foundUser.Email)
+ } else if authErr != nil {
+ logger.Error("auth.GetByIdenityType", err)
+ return nil, authErr
+ }
+
+ userObj, userErr := user.GetByID(authObj.UserID)
+ if userErr != nil {
+ return nil, userErr
+ }
+
+ if userObj.IsDisabled {
+ return nil, errors.ErrUserDisabled
+ }
+
+ jwt, err := njwt.Generate(&userObj, false)
+ return &jwt, err
+}
diff --git a/backend/internal/api/handler/schema.go b/backend/internal/api/handler/schema.go
new file mode 100644
index 000000000..d7a3f0cf1
--- /dev/null
+++ b/backend/internal/api/handler/schema.go
@@ -0,0 +1,111 @@
+package handler
+
+import (
+ "encoding/json"
+ "fmt"
+ "io/fs"
+ "net/http"
+ "strings"
+
+ "npm/embed"
+ "npm/internal/api/schema"
+ "npm/internal/config"
+ "npm/internal/logger"
+
+ "github.com/jc21/jsref"
+ "github.com/jc21/jsref/provider"
+)
+
+var (
+ swaggerSchema []byte
+ apiDocsSub fs.FS
+)
+
+// Schema simply reads the swagger schema from disk and returns is raw
+// Route: GET /schema
+func Schema() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, _ *http.Request) {
+ w.Header().Set("Content-Type", "application/json; charset=utf-8")
+ w.WriteHeader(http.StatusOK)
+ fmt.Fprint(w, string(getSchema()))
+ }
+}
+
+func getSchema() []byte {
+ if swaggerSchema == nil {
+ apiDocsSub, _ = fs.Sub(embed.APIDocFiles, "api_docs")
+
+ // nolint:gosec
+ swaggerSchema, _ = fs.ReadFile(apiDocsSub, "api.swagger.json")
+
+ // Replace {{VERSION}} with Config Version
+ swaggerSchema = []byte(strings.ReplaceAll(string(swaggerSchema), "{{VERSION}}", config.Version))
+
+ // Dereference the JSON Schema:
+ var sch any
+ if err := json.Unmarshal(swaggerSchema, &sch); err != nil {
+ logger.Error("SwaggerUnmarshalError", err)
+ return nil
+ }
+
+ prov := provider.NewIoFS(apiDocsSub, "")
+ resolver := jsref.New()
+ err := resolver.AddProvider(prov)
+ if err != nil {
+ logger.Error("SchemaProviderError", err)
+ }
+
+ result, err := resolver.Resolve(sch, "", []jsref.Option{jsref.WithRecursiveResolution(true)}...)
+ if err != nil {
+ logger.Error("SwaggerResolveError", err)
+ } else {
+ var marshalErr error
+ swaggerSchema, marshalErr = json.MarshalIndent(result, "", " ")
+ if marshalErr != nil {
+ logger.Error("SwaggerMarshalError", err)
+ }
+ }
+ // End dereference
+
+ // Replace incoming schemas with those we actually use in code
+ swaggerSchema = replaceIncomingSchemas(swaggerSchema)
+ }
+ return swaggerSchema
+}
+
+func replaceIncomingSchemas(swaggerSchema []byte) []byte {
+ str := string(swaggerSchema)
+
+ // Remember to include the double quotes in the replacement!
+ str = strings.ReplaceAll(str, `"{{schema.SetAuth}}"`, schema.SetAuth())
+ str = strings.ReplaceAll(str, `"{{schema.GetToken}}"`, schema.GetToken())
+
+ str = strings.ReplaceAll(str, `"{{schema.CreateCertificateAuthority}}"`, schema.CreateCertificateAuthority())
+ str = strings.ReplaceAll(str, `"{{schema.UpdateCertificateAuthority}}"`, schema.UpdateCertificateAuthority())
+
+ str = strings.ReplaceAll(str, `"{{schema.CreateCertificate}}"`, schema.CreateCertificate())
+ str = strings.ReplaceAll(str, `"{{schema.UpdateCertificate}}"`, schema.UpdateCertificate(""))
+
+ str = strings.ReplaceAll(str, `"{{schema.CreateSetting}}"`, schema.CreateSetting())
+ str = strings.ReplaceAll(str, `"{{schema.UpdateSetting}}"`, schema.UpdateSetting())
+
+ str = strings.ReplaceAll(str, `"{{schema.CreateUser}}"`, schema.CreateUser())
+ str = strings.ReplaceAll(str, `"{{schema.UpdateUser}}"`, schema.UpdateUser())
+
+ str = strings.ReplaceAll(str, `"{{schema.CreateHost}}"`, schema.CreateHost())
+ str = strings.ReplaceAll(str, `"{{schema.UpdateHost}}"`, schema.UpdateHost())
+
+ str = strings.ReplaceAll(str, `"{{schema.CreateNginxTemplate}}"`, schema.CreateNginxTemplate())
+ str = strings.ReplaceAll(str, `"{{schema.UpdateNginxTemplate}}"`, schema.UpdateNginxTemplate())
+
+ str = strings.ReplaceAll(str, `"{{schema.CreateStream}}"`, schema.CreateStream())
+ str = strings.ReplaceAll(str, `"{{schema.UpdateStream}}"`, schema.UpdateStream())
+
+ str = strings.ReplaceAll(str, `"{{schema.CreateDNSProvider}}"`, schema.CreateDNSProvider())
+ str = strings.ReplaceAll(str, `"{{schema.UpdateDNSProvider}}"`, schema.UpdateDNSProvider())
+
+ str = strings.ReplaceAll(str, `"{{schema.CreateUpstream}}"`, schema.CreateUpstream())
+ str = strings.ReplaceAll(str, `"{{schema.UpdateUpstream}}"`, schema.UpdateUpstream())
+
+ return []byte(str)
+}
diff --git a/backend/internal/api/handler/settings.go b/backend/internal/api/handler/settings.go
new file mode 100644
index 000000000..b6c908909
--- /dev/null
+++ b/backend/internal/api/handler/settings.go
@@ -0,0 +1,110 @@
+package handler
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+
+ c "npm/internal/api/context"
+ h "npm/internal/api/http"
+ "npm/internal/api/middleware"
+ "npm/internal/entity/setting"
+
+ "github.com/go-chi/chi/v5"
+ "gorm.io/gorm"
+)
+
+// GetSettings will return a list of Settings
+// Route: GET /settings
+func GetSettings() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ pageInfo, err := getPageInfoFromRequest(r)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ settings, err := setting.List(pageInfo, middleware.GetFiltersFromContext(r))
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ h.ResultResponseJSON(w, r, http.StatusOK, settings)
+ }
+ }
+}
+
+// GetSetting will return a single Setting
+// Route: GET /settings/{name}
+func GetSetting() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ name := chi.URLParam(r, "name")
+
+ item, err := setting.GetByName(name)
+ switch err {
+ case gorm.ErrRecordNotFound:
+ h.NotFound(w, r)
+ case nil:
+ h.ResultResponseJSON(w, r, http.StatusOK, item)
+ default:
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ }
+ }
+}
+
+// CreateSetting will create a Setting
+// Route: POST /settings
+func CreateSetting() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+
+ var newSetting setting.Model
+ err := json.Unmarshal(bodyBytes, &newSetting)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+ return
+ }
+
+ // Check if the setting already exists
+ if _, err := setting.GetByName(newSetting.Name); err == nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, fmt.Sprintf("Setting with name '%s' already exists", newSetting.Name), nil)
+ return
+ }
+
+ if err = newSetting.Save(); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, fmt.Sprintf("Unable to save Setting: %s", err.Error()), nil)
+ return
+ }
+
+ h.ResultResponseJSON(w, r, http.StatusOK, newSetting)
+ }
+}
+
+// UpdateSetting updates a setting
+// Route: PUT /settings/{name}
+// TODO: Add validation for the setting value, for system settings they should be validated against the setting name and type
+func UpdateSetting() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ settingName := chi.URLParam(r, "name")
+
+ setting, err := setting.GetByName(settingName)
+ switch err {
+ case gorm.ErrRecordNotFound:
+ h.NotFound(w, r)
+ case nil:
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+ if err := json.Unmarshal(bodyBytes, &setting); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+ return
+ }
+
+ if err := setting.Save(); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ h.ResultResponseJSON(w, r, http.StatusOK, setting)
+ default:
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ }
+ }
+}
diff --git a/backend/internal/api/handler/sse_notification.go b/backend/internal/api/handler/sse_notification.go
new file mode 100644
index 000000000..998af5f35
--- /dev/null
+++ b/backend/internal/api/handler/sse_notification.go
@@ -0,0 +1,28 @@
+package handler
+
+import (
+ "encoding/json"
+ "net/http"
+
+ c "npm/internal/api/context"
+ h "npm/internal/api/http"
+ "npm/internal/serverevents"
+)
+
+// TestSSENotification specifically fires of a SSE message for testing purposes
+// Route: POST /sse-notification
+func TestSSENotification() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+
+ var msg serverevents.Message
+ err := json.Unmarshal(bodyBytes, &msg)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+ return
+ }
+
+ serverevents.Send(msg, "")
+ h.ResultResponseJSON(w, r, http.StatusOK, true)
+ }
+}
diff --git a/backend/internal/api/handler/streams.go b/backend/internal/api/handler/streams.go
new file mode 100644
index 000000000..2b097705f
--- /dev/null
+++ b/backend/internal/api/handler/streams.go
@@ -0,0 +1,140 @@
+package handler
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+
+ c "npm/internal/api/context"
+ h "npm/internal/api/http"
+ "npm/internal/api/middleware"
+ "npm/internal/entity/stream"
+
+ "gorm.io/gorm"
+)
+
+// GetStreams will return a list of Streams
+// Route: GET /hosts/streams
+func GetStreams() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ pageInfo, err := getPageInfoFromRequest(r)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ hosts, err := stream.List(pageInfo, middleware.GetFiltersFromContext(r))
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ h.ResultResponseJSON(w, r, http.StatusOK, hosts)
+ }
+ }
+}
+
+// GetStream will return a single Streams
+// Route: GET /hosts/streams/{hostID}
+func GetStream() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var hostID uint
+ if hostID, err = getURLParamInt(r, "hostID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ item, err := stream.GetByID(hostID)
+ switch err {
+ case gorm.ErrRecordNotFound:
+ h.NotFound(w, r)
+ case nil:
+ h.ResultResponseJSON(w, r, http.StatusOK, item)
+ default:
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ }
+ }
+}
+
+// CreateStream will create a Stream
+// Route: POST /hosts/steams
+func CreateStream() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+
+ var newHost stream.Model
+ err := json.Unmarshal(bodyBytes, &newHost)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+ return
+ }
+
+ // Get userID from token
+ userID, _ := r.Context().Value(c.UserIDCtxKey).(uint)
+ newHost.UserID = userID
+
+ if err = newHost.Save(); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, fmt.Sprintf("Unable to save Stream: %s", err.Error()), nil)
+ return
+ }
+
+ h.ResultResponseJSON(w, r, http.StatusOK, newHost)
+ }
+}
+
+// UpdateStream updates a stream
+// Route: PUT /hosts/streams/{hostID}
+func UpdateStream() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var hostID uint
+ if hostID, err = getURLParamInt(r, "hostID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ host, err := stream.GetByID(hostID)
+ switch err {
+ case gorm.ErrRecordNotFound:
+ h.NotFound(w, r)
+ case nil:
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+ err := json.Unmarshal(bodyBytes, &host)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+ return
+ }
+
+ if err = host.Save(); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ h.ResultResponseJSON(w, r, http.StatusOK, host)
+ default:
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ }
+ }
+}
+
+// DeleteStream removes a stream
+// Route: DELETE /hosts/streams/{hostID}
+func DeleteStream() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var hostID uint
+ if hostID, err = getURLParamInt(r, "hostID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ item, err := stream.GetByID(hostID)
+ switch err {
+ case gorm.ErrRecordNotFound:
+ h.NotFound(w, r)
+ case nil:
+ h.ResultResponseJSON(w, r, http.StatusOK, item.Delete())
+ default:
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ }
+ }
+}
diff --git a/backend/internal/api/handler/upstreams.go b/backend/internal/api/handler/upstreams.go
new file mode 100644
index 000000000..4b1a4e785
--- /dev/null
+++ b/backend/internal/api/handler/upstreams.go
@@ -0,0 +1,214 @@
+package handler
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+
+ c "npm/internal/api/context"
+ h "npm/internal/api/http"
+ "npm/internal/api/middleware"
+ "npm/internal/entity/host"
+ "npm/internal/entity/upstream"
+ "npm/internal/jobqueue"
+ "npm/internal/logger"
+ "npm/internal/nginx"
+ "npm/internal/validator"
+
+ "gorm.io/gorm"
+)
+
+// GetUpstreams will return a list of Upstreams
+// Route: GET /upstreams
+func GetUpstreams() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ pageInfo, err := getPageInfoFromRequest(r)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ items, err := upstream.List(pageInfo, middleware.GetFiltersFromContext(r), middleware.GetExpandFromContext(r))
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ h.ResultResponseJSON(w, r, http.StatusOK, items)
+ }
+ }
+}
+
+// GetUpstream will return a single Upstream
+// Route: GET /upstreams/{upstreamID}
+func GetUpstream() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var upstreamID uint
+ if upstreamID, err = getURLParamInt(r, "upstreamID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ item, err := upstream.GetByID(upstreamID)
+ switch err {
+ case gorm.ErrRecordNotFound:
+ h.NotFound(w, r)
+ case nil:
+ // nolint: errcheck,gosec
+ item.Expand(middleware.GetExpandFromContext(r))
+ h.ResultResponseJSON(w, r, http.StatusOK, item)
+ default:
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ }
+ }
+}
+
+// CreateUpstream will create a Upstream
+// Route: POST /upstreams
+func CreateUpstream() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+
+ var newUpstream upstream.Model
+ err := json.Unmarshal(bodyBytes, &newUpstream)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+ return
+ }
+
+ // Get userID from token
+ userID, _ := r.Context().Value(c.UserIDCtxKey).(uint)
+ newUpstream.UserID = userID
+
+ if err = validator.ValidateUpstream(newUpstream); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ if err = newUpstream.Save(false); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, fmt.Sprintf("Unable to save Upstream: %s", err.Error()), nil)
+ return
+ }
+
+ configureUpstream(newUpstream)
+
+ h.ResultResponseJSON(w, r, http.StatusOK, newUpstream)
+ }
+}
+
+// UpdateUpstream updates a stream
+// Route: PUT /upstreams/{upstreamID}
+func UpdateUpstream() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var upstreamID uint
+ if upstreamID, err = getURLParamInt(r, "upstreamID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ item, err := upstream.GetByID(upstreamID)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+ err := json.Unmarshal(bodyBytes, &item)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+ return
+ }
+
+ if err = validator.ValidateUpstream(item); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ if err = item.Save(false); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ // nolint: errcheck,gosec
+ // item.Expand(middleware.GetExpandFromContext(r))
+
+ configureUpstream(item)
+
+ h.ResultResponseJSON(w, r, http.StatusOK, item)
+ }
+ }
+}
+
+// DeleteUpstream removes a upstream
+// Route: DELETE /upstreams/{upstreamID}
+func DeleteUpstream() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var upstreamID uint
+ if upstreamID, err = getURLParamInt(r, "upstreamID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ item, err := upstream.GetByID(upstreamID)
+ switch err {
+ case gorm.ErrRecordNotFound:
+ h.NotFound(w, r)
+ case nil:
+ // Ensure that this upstream isn't in use by a host
+ cnt := host.GetUpstreamUseCount(upstreamID)
+ if cnt > 0 {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, "Cannot delete upstream that is in use by at least 1 host", nil)
+ return
+ }
+ h.ResultResponseJSON(w, r, http.StatusOK, item.Delete())
+ configureUpstream(item)
+ default:
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ }
+ }
+}
+
+// GetUpstreamNginxConfig will return a Host's nginx config from disk
+// Route: GET /upstreams/{upstreamID}/nginx-config
+// Route: GET /upstreams/{upstreamID}/nginx-config.txt
+func GetUpstreamNginxConfig(format string) func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var err error
+ var upstreamID uint
+ if upstreamID, err = getURLParamInt(r, "upstreamID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ item, err := upstream.GetByID(upstreamID)
+ switch err {
+ case gorm.ErrRecordNotFound:
+ h.NotFound(w, r)
+ case nil:
+ // Get the config from disk
+ content, nErr := nginx.GetUpstreamConfigContent(item)
+ if nErr != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, nErr.Error(), nil)
+ return
+ }
+ if format == "text" {
+ h.ResultResponseText(w, http.StatusOK, content)
+ return
+ }
+ h.ResultResponseJSON(w, r, http.StatusOK, content)
+ default:
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ }
+ }
+}
+
+func configureUpstream(u upstream.Model) {
+ err := jobqueue.AddJob(jobqueue.Job{
+ Name: "NginxConfigureUpstream",
+ Action: func() error {
+ return nginx.ConfigureUpstream(u)
+ },
+ })
+ if err != nil {
+ logger.Error("ConfigureUpstreamError", err)
+ }
+}
diff --git a/backend/internal/api/handler/users.go b/backend/internal/api/handler/users.go
new file mode 100644
index 000000000..a15848f57
--- /dev/null
+++ b/backend/internal/api/handler/users.go
@@ -0,0 +1,333 @@
+package handler
+
+import (
+ "encoding/json"
+ "net/http"
+ "time"
+
+ c "npm/internal/api/context"
+ h "npm/internal/api/http"
+ "npm/internal/api/middleware"
+ "npm/internal/config"
+ "npm/internal/entity/auth"
+ "npm/internal/entity/user"
+ "npm/internal/errors"
+ "npm/internal/logger"
+
+ "github.com/go-chi/chi/v5"
+ "gorm.io/gorm"
+)
+
+type setAuthModel struct {
+ // The json tags are required, as the change password form decodes into this object
+ Type string `json:"type"`
+ Secret string `json:"secret"`
+ CurrentSecret string `json:"current_secret"`
+}
+
+// GetUsers returns all users
+// Route: GET /users
+func GetUsers() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ pageInfo, err := getPageInfoFromRequest(r)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ users, err := user.List(pageInfo, middleware.GetFiltersFromContext(r), middleware.GetExpandFromContext(r))
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ h.ResultResponseJSON(w, r, http.StatusOK, users)
+ }
+ }
+}
+
+// GetUser returns a specific user
+// Route: GET /users/{userID}
+func GetUser() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ userID, _, userIDErr := getUserIDFromRequest(r)
+ if userIDErr != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, userIDErr.Error(), nil)
+ return
+ }
+
+ item, err := user.GetByID(userID)
+ switch err {
+ case gorm.ErrRecordNotFound:
+ h.NotFound(w, r)
+ case nil:
+ // nolint: errcheck,gosec
+ item.Expand(middleware.GetExpandFromContext(r))
+ h.ResultResponseJSON(w, r, http.StatusOK, item)
+ default:
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ }
+ }
+}
+
+// UpdateUser updates a user
+// Route: PUT /users/{userID}
+func UpdateUser() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ userID, self, userIDErr := getUserIDFromRequest(r)
+ if userIDErr != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, userIDErr.Error(), nil)
+ return
+ }
+
+ userObject, err := user.GetByID(userID)
+ switch err {
+ case gorm.ErrRecordNotFound:
+ h.NotFound(w, r)
+ case nil:
+ // nolint: errcheck,gosec
+ userObject.Expand([]string{"capabilities"})
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+ err := json.Unmarshal(bodyBytes, &userObject)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+ return
+ }
+
+ if userObject.IsDisabled && self {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, "You cannot disable yourself!", nil)
+ return
+ }
+
+ if err = userObject.Save(); err != nil {
+ if err == errors.ErrDuplicateEmailUser || err == errors.ErrSystemUserReadonly {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ logger.Error("UpdateUserError", err)
+ h.ResultErrorJSON(w, r, http.StatusInternalServerError, "Unable to save User", nil)
+ }
+ return
+ }
+
+ if !self {
+ err = userObject.SaveCapabilities()
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+ }
+
+ // nolint: errcheck,gosec
+ userObject.Expand(middleware.GetExpandFromContext(r))
+
+ h.ResultResponseJSON(w, r, http.StatusOK, userObject)
+ default:
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ }
+ }
+}
+
+// DeleteUser removes a user
+// Route: DELETE /users/{userID}
+func DeleteUser() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ var userID uint
+ var err error
+ if userID, err = getURLParamInt(r, "userID"); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ return
+ }
+
+ myUserID, _ := r.Context().Value(c.UserIDCtxKey).(uint)
+ if myUserID == userID {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, "You cannot delete yourself!", nil)
+ return
+ }
+
+ item, err := user.GetByID(userID)
+ switch err {
+ case gorm.ErrRecordNotFound:
+ h.NotFound(w, r)
+ case nil:
+ if err := item.Delete(); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ h.ResultResponseJSON(w, r, http.StatusOK, true)
+ }
+ default:
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ }
+ }
+}
+
+// CreateUser creates a user
+// Route: POST /users
+func CreateUser() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+
+ var newUser user.Model
+ err := json.Unmarshal(bodyBytes, &newUser)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+ return
+ }
+
+ if err = newUser.Save(); err != nil {
+ if err == errors.ErrDuplicateEmailUser || err == errors.ErrSystemUserReadonly {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ logger.Error("UpdateUserError", err)
+ h.ResultErrorJSON(w, r, http.StatusInternalServerError, "Unable to save User", nil)
+ }
+ return
+ }
+
+ // Set the permissions to full-admin for this user
+ if !config.IsSetup {
+ newUser.Capabilities = []string{user.CapabilityFullAdmin}
+ }
+
+ // nolint: errcheck,gosec
+ err = newUser.SaveCapabilities()
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusInternalServerError, err.Error(), nil)
+ return
+ }
+
+ // newUser has been saved, now save their auth
+ if newUser.Auth.Secret != "" && newUser.Auth.ID == 0 {
+ newUser.Auth.UserID = newUser.ID
+ if newUser.Auth.Type == auth.TypeLocal {
+ err = newUser.Auth.SetPassword(newUser.Auth.Secret)
+ if err != nil {
+ logger.Error("SetPasswordError", err)
+ }
+ }
+
+ if err = newUser.Auth.Save(); err != nil {
+ h.ResultErrorJSON(w, r, http.StatusInternalServerError, "Unable to save Authentication for User", nil)
+ return
+ }
+
+ newUser.Auth.Secret = ""
+ }
+
+ if !config.IsSetup {
+ config.IsSetup = true
+ logger.Info("A new user was created, leaving Setup Mode")
+ }
+
+ h.ResultResponseJSON(w, r, http.StatusOK, newUser)
+ }
+}
+
+// DeleteUsers is only available in debug mode for cypress tests
+// Route: DELETE /users
+func DeleteUsers() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ err := user.DeleteAll()
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ } else {
+ // also change setup to true
+ config.IsSetup = false
+ logger.Info("Users have been wiped, entering Setup Mode")
+ h.ResultResponseJSON(w, r, http.StatusOK, true)
+ }
+ }
+}
+
+func getUserIDFromRequest(r *http.Request) (uint, bool, error) {
+ userIDstr := chi.URLParam(r, "userID")
+ selfUserID, _ := r.Context().Value(c.UserIDCtxKey).(uint)
+
+ var userID uint
+ self := false
+ if userIDstr == "me" {
+ // Get user id from Token
+ userID = selfUserID
+ self = true
+ } else {
+ var userIDerr error
+ if userID, userIDerr = getURLParamInt(r, "userID"); userIDerr != nil {
+ return 0, false, userIDerr
+ }
+ self = selfUserID == userID
+ }
+ return userID, self, nil
+}
+
+// SetAuth sets a auth method. This can be used for "me" and `2` for example
+// Route: POST /users/:userID/auth
+func SetAuth() func(http.ResponseWriter, *http.Request) {
+ return func(w http.ResponseWriter, r *http.Request) {
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+
+ var newAuth setAuthModel
+ err := json.Unmarshal(bodyBytes, &newAuth)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, h.ErrInvalidPayload.Error(), nil)
+ return
+ }
+
+ userID, isSelf, userIDErr := getUserIDFromRequest(r)
+ if userIDErr != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, userIDErr.Error(), nil)
+ return
+ }
+
+ // Load user
+ thisUser, thisUserErr := user.GetByID(userID)
+ if thisUserErr == gorm.ErrRecordNotFound {
+ h.NotFound(w, r)
+ return
+ } else if thisUserErr != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, thisUserErr.Error(), nil)
+ return
+ }
+
+ if thisUser.IsSystem {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, "Cannot set password for system user", nil)
+ return
+ }
+
+ // Load existing auth for user
+ userAuth, userAuthErr := auth.GetByUserIDType(userID, newAuth.Type)
+ if userAuthErr != nil {
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, userAuthErr.Error(), nil)
+ return
+ }
+
+ if isSelf {
+ // confirm that the current_secret given is valid for the one stored in the database
+ validateErr := userAuth.ValidateSecret(newAuth.CurrentSecret)
+ if validateErr != nil {
+ logger.Debug("%s: %s", "Password change: current password was incorrect", validateErr.Error())
+ // Sleep for 1 second to prevent brute force password guessing
+ time.Sleep(time.Second)
+
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, errors.ErrCurrentPasswordInvalid.Error(), nil)
+ return
+ }
+ }
+
+ if newAuth.Type == auth.TypeLocal {
+ err := userAuth.SetPassword(newAuth.Secret)
+ if err != nil {
+ logger.Error("SetPasswordError", err)
+ h.ResultErrorJSON(w, r, http.StatusBadRequest, err.Error(), nil)
+ }
+ }
+
+ if err = userAuth.Save(); err != nil {
+ logger.Error("AuthSaveError", err)
+ h.ResultErrorJSON(w, r, http.StatusInternalServerError, "Unable to save Authentication for User", nil)
+ return
+ }
+
+ userAuth.Secret = ""
+
+ // todo: add to audit-log
+
+ h.ResultResponseJSON(w, r, http.StatusOK, userAuth)
+ }
+}
diff --git a/backend/internal/api/http/responses.go b/backend/internal/api/http/responses.go
new file mode 100644
index 000000000..f0aa25c3b
--- /dev/null
+++ b/backend/internal/api/http/responses.go
@@ -0,0 +1,114 @@
+package http
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+ "reflect"
+
+ c "npm/internal/api/context"
+ "npm/internal/errors"
+ "npm/internal/logger"
+
+ "github.com/qri-io/jsonschema"
+ "github.com/rotisserie/eris"
+)
+
+var (
+ // ErrInvalidPayload is an error for invalid incoming data
+ ErrInvalidPayload = eris.New("Payload is invalid")
+)
+
+// Response interface for standard API results
+type Response struct {
+ Result any `json:"result"`
+ Error any `json:"error,omitempty"`
+}
+
+// ErrorResponse interface for errors returned via the API
+type ErrorResponse struct {
+ Code any `json:"code"`
+ Message any `json:"message"`
+ Invalid any `json:"invalid,omitempty"`
+}
+
+// ResultResponseJSON will write the result as json to the http output
+func ResultResponseJSON(w http.ResponseWriter, r *http.Request, status int, result any) {
+ w.Header().Set("Content-Type", "application/json; charset=utf-8")
+ w.WriteHeader(status)
+
+ var response Response
+ resultClass := fmt.Sprintf("%v", reflect.TypeOf(result))
+
+ if resultClass == "http.ErrorResponse" {
+ response = Response{
+ Error: result,
+ }
+ } else {
+ response = Response{
+ Result: result,
+ }
+ }
+
+ var payload []byte
+ var err error
+ if getPrettyPrintFromContext(r) {
+ payload, err = json.MarshalIndent(response, "", " ")
+ } else {
+ payload, err = json.Marshal(response)
+ }
+
+ if err != nil {
+ logger.Error("ResponseMarshalError", err)
+ }
+
+ fmt.Fprint(w, string(payload))
+}
+
+// ResultSchemaErrorJSON will format the result as a standard error object and send it for output
+func ResultSchemaErrorJSON(w http.ResponseWriter, r *http.Request, errs []jsonschema.KeyError) {
+ errorResponse := ErrorResponse{
+ Code: http.StatusBadRequest,
+ Message: errors.ErrValidationFailed,
+ Invalid: errs,
+ }
+
+ ResultResponseJSON(w, r, http.StatusBadRequest, errorResponse)
+}
+
+// ResultErrorJSON will format the result as a standard error object and send it for output
+func ResultErrorJSON(w http.ResponseWriter, r *http.Request, status int, message string, extended any) {
+ errorResponse := ErrorResponse{
+ Code: status,
+ Message: message,
+ Invalid: extended,
+ }
+
+ ResultResponseJSON(w, r, status, errorResponse)
+}
+
+// NotFound will return a 404 response
+func NotFound(w http.ResponseWriter, r *http.Request) {
+ errorResponse := ErrorResponse{
+ Code: http.StatusNotFound,
+ Message: "Not found",
+ }
+
+ ResultResponseJSON(w, r, http.StatusNotFound, errorResponse)
+}
+
+// ResultResponseText will write the result as text to the http output
+func ResultResponseText(w http.ResponseWriter, status int, content string) {
+ w.Header().Set("Content-Type", "text/plain; charset=utf-8")
+ w.WriteHeader(status)
+ fmt.Fprint(w, content)
+}
+
+// getPrettyPrintFromContext returns the PrettyPrint setting
+func getPrettyPrintFromContext(r *http.Request) bool {
+ pretty, ok := r.Context().Value(c.PrettyPrintCtxKey).(bool)
+ if !ok {
+ return false
+ }
+ return pretty
+}
diff --git a/backend/internal/api/http/responses_test.go b/backend/internal/api/http/responses_test.go
new file mode 100644
index 000000000..afcaa4ffd
--- /dev/null
+++ b/backend/internal/api/http/responses_test.go
@@ -0,0 +1,195 @@
+package http
+
+import (
+ "io"
+ "net/http"
+ "net/http/httptest"
+ "testing"
+
+ "npm/internal/entity/user"
+ "npm/internal/model"
+
+ "github.com/qri-io/jsonschema"
+ "github.com/stretchr/testify/assert"
+ "go.uber.org/goleak"
+)
+
+func TestResultResponseJSON(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ tests := []struct {
+ name string
+ status int
+ given any
+ want string
+ }{
+ {
+ name: "simple response",
+ status: http.StatusOK,
+ given: true,
+ want: "{\"result\":true}",
+ },
+ {
+ name: "detailed response",
+ status: http.StatusBadRequest,
+ given: user.Model{
+ Base: model.Base{ID: 10},
+ Email: "me@example.com",
+ Name: "John Doe",
+ },
+ want: "{\"result\":{\"id\":10,\"created_at\":0,\"updated_at\":0,\"name\":\"John Doe\",\"email\":\"me@example.com\",\"is_disabled\":false,\"gravatar_url\":\"\"}}",
+ },
+ {
+ name: "error response",
+ status: http.StatusNotFound,
+ given: ErrorResponse{
+ Code: 404,
+ Message: "Not found",
+ Invalid: []string{"your", "page", "was", "not", "found"},
+ },
+ want: "{\"result\":null,\"error\":{\"code\":404,\"message\":\"Not found\",\"invalid\":[\"your\",\"page\",\"was\",\"not\",\"found\"]}}",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ r := httptest.NewRequest(http.MethodGet, "/anything", nil)
+ w := httptest.NewRecorder()
+ ResultResponseJSON(w, r, tt.status, tt.given)
+ res := w.Result()
+ defer res.Body.Close()
+ body, err := io.ReadAll(res.Body)
+ if err != nil {
+ t.Errorf("expected error to be nil got %v", err)
+ }
+ assert.Equal(t, tt.want, string(body))
+ assert.Equal(t, tt.status, res.StatusCode)
+ assert.Equal(t, "application/json; charset=utf-8", res.Header.Get("Content-Type"))
+ })
+ }
+}
+
+func TestResultSchemaErrorJSON(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ tests := []struct {
+ name string
+ given []jsonschema.KeyError
+ want string
+ }{
+ {
+ name: "case a",
+ given: []jsonschema.KeyError{
+ {
+ PropertyPath: "/something",
+ InvalidValue: "name",
+ Message: "Name cannot be empty",
+ },
+ },
+ want: "{\"result\":null,\"error\":{\"code\":400,\"message\":{},\"invalid\":[{\"propertyPath\":\"/something\",\"invalidValue\":\"name\",\"message\":\"Name cannot be empty\"}]}}",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ r := httptest.NewRequest(http.MethodGet, "/anything", nil)
+ w := httptest.NewRecorder()
+ ResultSchemaErrorJSON(w, r, tt.given)
+ res := w.Result()
+ defer res.Body.Close()
+ body, err := io.ReadAll(res.Body)
+ if err != nil {
+ t.Errorf("expected error to be nil got %v", err)
+ }
+ assert.Equal(t, tt.want, string(body))
+ assert.Equal(t, 400, res.StatusCode)
+ assert.Equal(t, "application/json; charset=utf-8", res.Header.Get("Content-Type"))
+ })
+ }
+}
+
+func TestResultErrorJSON(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ tests := []struct {
+ name string
+ status int
+ message string
+ extended any
+ want string
+ }{
+ {
+ name: "case a",
+ status: http.StatusBadGateway,
+ message: "Oh not something is not acceptable",
+ extended: nil,
+ want: "{\"result\":null,\"error\":{\"code\":502,\"message\":\"Oh not something is not acceptable\"}}",
+ },
+ {
+ name: "case b",
+ status: http.StatusNotAcceptable,
+ message: "Oh not something is not acceptable again",
+ extended: []string{"name is not allowed", "dob is wrong or something"},
+ want: "{\"result\":null,\"error\":{\"code\":406,\"message\":\"Oh not something is not acceptable again\",\"invalid\":[\"name is not allowed\",\"dob is wrong or something\"]}}",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ r := httptest.NewRequest(http.MethodGet, "/anything", nil)
+ w := httptest.NewRecorder()
+ ResultErrorJSON(w, r, tt.status, tt.message, tt.extended)
+ res := w.Result()
+ defer res.Body.Close()
+ body, err := io.ReadAll(res.Body)
+ if err != nil {
+ t.Errorf("expected error to be nil got %v", err)
+ }
+ assert.Equal(t, tt.want, string(body))
+ assert.Equal(t, tt.status, res.StatusCode)
+ assert.Equal(t, "application/json; charset=utf-8", res.Header.Get("Content-Type"))
+ })
+ }
+}
+
+func TestNotFound(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ t.Run("basic test", func(t *testing.T) {
+ r := httptest.NewRequest(http.MethodGet, "/anything", nil)
+ w := httptest.NewRecorder()
+ NotFound(w, r)
+ res := w.Result()
+ defer res.Body.Close()
+ body, err := io.ReadAll(res.Body)
+ if err != nil {
+ t.Errorf("expected error to be nil got %v", err)
+ }
+ assert.Equal(t, "{\"result\":null,\"error\":{\"code\":404,\"message\":\"Not found\"}}", string(body))
+ assert.Equal(t, http.StatusNotFound, res.StatusCode)
+ assert.Equal(t, "application/json; charset=utf-8", res.Header.Get("Content-Type"))
+ })
+}
+
+func TestResultResponseText(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ t.Run("basic test", func(t *testing.T) {
+ w := httptest.NewRecorder()
+ ResultResponseText(w, http.StatusOK, "omg this works")
+ res := w.Result()
+ defer res.Body.Close()
+ body, err := io.ReadAll(res.Body)
+ if err != nil {
+ t.Errorf("expected error to be nil got %v", err)
+ }
+ assert.Equal(t, "omg this works", string(body))
+ assert.Equal(t, http.StatusOK, res.StatusCode)
+ assert.Equal(t, "text/plain; charset=utf-8", res.Header.Get("Content-Type"))
+ })
+}
diff --git a/backend/internal/api/middleware/access_control.go b/backend/internal/api/middleware/access_control.go
new file mode 100644
index 000000000..18bca31b1
--- /dev/null
+++ b/backend/internal/api/middleware/access_control.go
@@ -0,0 +1,13 @@
+package middleware
+
+import (
+ "net/http"
+)
+
+// AccessControl sets http headers for responses
+func AccessControl(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ w.Header().Set("Access-Control-Allow-Origin", "*")
+ next.ServeHTTP(w, r)
+ })
+}
diff --git a/backend/internal/api/middleware/access_control_test.go b/backend/internal/api/middleware/access_control_test.go
new file mode 100644
index 000000000..6f9b09932
--- /dev/null
+++ b/backend/internal/api/middleware/access_control_test.go
@@ -0,0 +1,29 @@
+package middleware_test
+
+import (
+ "net/http"
+ "net/http/httptest"
+ "testing"
+
+ "npm/internal/api/middleware"
+
+ "github.com/stretchr/testify/assert"
+ "go.uber.org/goleak"
+)
+
+func TestAccessControl(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ handler := http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
+ w.WriteHeader(http.StatusOK)
+ })
+
+ rr := httptest.NewRecorder()
+ req, err := http.NewRequest("GET", "/", nil)
+ assert.Nil(t, err)
+ accessControl := middleware.AccessControl(handler)
+ accessControl.ServeHTTP(rr, req)
+ assert.Equal(t, http.StatusOK, rr.Code)
+ assert.Equal(t, "*", rr.Header().Get("Access-Control-Allow-Origin"))
+}
diff --git a/backend/internal/api/middleware/auth.go b/backend/internal/api/middleware/auth.go
new file mode 100644
index 000000000..5d71f5052
--- /dev/null
+++ b/backend/internal/api/middleware/auth.go
@@ -0,0 +1,101 @@
+package middleware
+
+import (
+ "context"
+ "fmt"
+ "net/http"
+ "slices"
+
+ c "npm/internal/api/context"
+ h "npm/internal/api/http"
+ "npm/internal/config"
+ "npm/internal/entity/user"
+ njwt "npm/internal/jwt"
+ "npm/internal/logger"
+
+ "github.com/go-chi/jwtauth/v5"
+)
+
+// DecodeAuth decodes an auth header
+func DecodeAuth() func(http.Handler) http.Handler {
+ privateKey, privateKeyParseErr := njwt.GetPrivateKey()
+ if privateKeyParseErr != nil && privateKey == nil {
+ logger.Error("PrivateKeyParseError", privateKeyParseErr)
+ }
+
+ publicKey, publicKeyParseErr := njwt.GetPublicKey()
+ if publicKeyParseErr != nil && publicKey == nil {
+ logger.Error("PublicKeyParseError", publicKeyParseErr)
+ }
+
+ tokenAuth := jwtauth.New("RS256", privateKey, publicKey)
+ return jwtauth.Verify(tokenAuth, jwtauth.TokenFromHeader, jwtauth.TokenFromQuery)
+}
+
+// Enforce is a authentication middleware to enforce access from the
+// jwtauth.Verifier middleware request context values. The Authenticator sends a 401 Unauthorised
+// response for any unverified tokens and passes the good ones through.
+func Enforce(permissions ...string) func(http.Handler) http.Handler {
+ return func(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ ctx := r.Context()
+
+ if config.IsSetup {
+ token, claims, err := jwtauth.FromContext(ctx)
+
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusUnauthorized, err.Error(), nil)
+ return
+ }
+
+ userID := uint(claims["uid"].(float64))
+ _, enabled, _ := user.IsEnabled(userID)
+ if token == nil || !enabled {
+ h.ResultErrorJSON(w, r, http.StatusUnauthorized, "Unauthorised", nil)
+ return
+ }
+
+ // Check if permissions exist for this user
+ if len(permissions) > 0 {
+ // Since the permission that we require is not on the token, we have to get it from the DB
+ // So we don't go crazy with hits, we will use a memory cache
+ cacheKey := fmt.Sprintf("userCapabilties.%v", userID)
+ cacheItem, found := AuthCache.Get(cacheKey)
+
+ var userCapabilities []string
+ if found {
+ userCapabilities = cacheItem.([]string)
+ } else {
+ // Get from db and store it
+ userCapabilities, err = user.GetCapabilities(userID)
+ if err != nil {
+ AuthCacheSet(cacheKey, userCapabilities)
+ }
+ }
+
+ // Now check that they have the permission in their admin capabilities
+ // full-admin can do anything
+ hasOnePermission := false
+ for _, permission := range permissions {
+ if slices.Contains(userCapabilities, user.CapabilityFullAdmin) || slices.Contains(userCapabilities, permission) {
+ hasOnePermission = true
+ }
+ }
+
+ if !hasOnePermission {
+ // Access denied
+ logger.Debug("Enforce Failed: User has %v but needs %v", userCapabilities, permissions)
+ h.ResultErrorJSON(w, r, http.StatusForbidden, "Forbidden", nil)
+ return
+ }
+ }
+
+ // Add claims to context
+ ctx = context.WithValue(ctx, c.UserIDCtxKey, userID)
+ }
+
+ // Token is authenticated, continue as normal
+ next.ServeHTTP(w, r.WithContext(ctx))
+ })
+ }
+}
diff --git a/backend/internal/api/middleware/auth_cache.go b/backend/internal/api/middleware/auth_cache.go
new file mode 100644
index 000000000..e0ba5a1ee
--- /dev/null
+++ b/backend/internal/api/middleware/auth_cache.go
@@ -0,0 +1,23 @@
+package middleware
+
+import (
+ "time"
+
+ "npm/internal/logger"
+
+ cache "github.com/patrickmn/go-cache"
+)
+
+// AuthCache is a cache item that stores the Admin API data for each admin that has been requesting endpoints
+var AuthCache *cache.Cache
+
+// AuthCacheInit will create a new Memory Cache
+func AuthCacheInit() {
+ logger.Debug("Creating a new AuthCache")
+ AuthCache = cache.New(1*time.Minute, 5*time.Minute)
+}
+
+// AuthCacheSet will store the item in memory for the expiration time
+func AuthCacheSet(k string, x any) {
+ AuthCache.Set(k, x, cache.DefaultExpiration)
+}
diff --git a/backend/internal/api/middleware/body_context.go b/backend/internal/api/middleware/body_context.go
new file mode 100644
index 000000000..53535b285
--- /dev/null
+++ b/backend/internal/api/middleware/body_context.go
@@ -0,0 +1,26 @@
+package middleware
+
+import (
+ "context"
+ "io"
+ "net/http"
+
+ c "npm/internal/api/context"
+)
+
+// BodyContext simply adds the body data to a context item
+func BodyContext() func(http.Handler) http.Handler {
+ return func(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ // Grab the Body Data
+ var body []byte
+ if r.Body != nil {
+ body, _ = io.ReadAll(r.Body)
+ }
+ // Add it to the context
+ ctx := r.Context()
+ ctx = context.WithValue(ctx, c.BodyCtxKey, body)
+ next.ServeHTTP(w, r.WithContext(ctx))
+ })
+ }
+}
diff --git a/backend/internal/api/middleware/body_context_test.go b/backend/internal/api/middleware/body_context_test.go
new file mode 100644
index 000000000..603b0a56f
--- /dev/null
+++ b/backend/internal/api/middleware/body_context_test.go
@@ -0,0 +1,43 @@
+package middleware_test
+
+import (
+ "bytes"
+ "net/http"
+ "net/http/httptest"
+ "testing"
+
+ c "npm/internal/api/context"
+ "npm/internal/api/middleware"
+
+ "github.com/stretchr/testify/assert"
+ "go.uber.org/goleak"
+)
+
+func TestBodyContext(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ // Create a test request with a body
+ body := []byte(`{"name": "John", "age": 30}`)
+ req, err := http.NewRequest("POST", "/test", bytes.NewBuffer(body))
+ assert.Nil(t, err)
+
+ // Create a test response recorder
+ rr := httptest.NewRecorder()
+
+ // Create a test handler that checks the context for the body data
+ handler := http.HandlerFunc(func(_ http.ResponseWriter, r *http.Request) {
+ bodyData := r.Context().Value(c.BodyCtxKey).([]byte)
+ assert.Equal(t, body, bodyData)
+ })
+
+ // Wrap the handler with the BodyContext middleware
+ mw := middleware.BodyContext()(handler)
+
+ // Call the middleware with the test request and response recorder
+ mw.ServeHTTP(rr, req)
+
+ // Check that the response status code is 200
+ status := rr.Code
+ assert.Equal(t, http.StatusOK, status)
+}
diff --git a/backend/internal/api/middleware/cors.go b/backend/internal/api/middleware/cors.go
new file mode 100644
index 000000000..15489b93f
--- /dev/null
+++ b/backend/internal/api/middleware/cors.go
@@ -0,0 +1,82 @@
+package middleware
+
+import (
+ "fmt"
+ "net/http"
+ "strings"
+
+ "github.com/go-chi/chi/v5"
+)
+
+var methodMap = []string{
+ http.MethodGet,
+ http.MethodHead,
+ http.MethodPost,
+ http.MethodPut,
+ http.MethodPatch,
+ http.MethodDelete,
+ http.MethodConnect,
+ http.MethodTrace,
+}
+
+func getRouteMethods(routes chi.Router, path string) []string {
+ var methods []string
+ tctx := chi.NewRouteContext()
+ for _, method := range methodMap {
+ if routes.Match(tctx, method, path) {
+ methods = append(methods, method)
+ }
+ }
+ return methods
+}
+
+var headersAllowedByCORS = []string{
+ "Authorization",
+ "Host",
+ "Content-Type",
+ "Connection",
+ "User-Agent",
+ "Cache-Control",
+ "Accept-Encoding",
+}
+
+// Cors handles cors headers
+func Cors(routes chi.Router) func(http.Handler) http.Handler {
+ return func(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ methods := getRouteMethods(routes, r.URL.Path)
+ if len(methods) == 0 {
+ // no route no cors
+ next.ServeHTTP(w, r)
+ return
+ }
+ methods = append(methods, http.MethodOptions)
+ w.Header().Set("Access-Control-Allow-Methods", strings.Join(methods, ","))
+ w.Header().Set("Access-Control-Allow-Headers",
+ strings.Join(headersAllowedByCORS, ","),
+ )
+ next.ServeHTTP(w, r)
+ })
+ }
+}
+
+// Options handles options requests
+func Options(routes chi.Router) func(http.Handler) http.Handler {
+ return func(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ methods := getRouteMethods(routes, r.URL.Path)
+ if len(methods) == 0 {
+ // no route shouldn't have options
+ next.ServeHTTP(w, r)
+ return
+ }
+ if r.Method == http.MethodOptions {
+ w.Header().Set("Access-Control-Allow-Origin", "*")
+ w.Header().Set("Content-Type", "application/json")
+ fmt.Fprint(w, "{}")
+ return
+ }
+ next.ServeHTTP(w, r)
+ })
+ }
+}
diff --git a/backend/internal/api/middleware/cors_test.go b/backend/internal/api/middleware/cors_test.go
new file mode 100644
index 000000000..da51638c1
--- /dev/null
+++ b/backend/internal/api/middleware/cors_test.go
@@ -0,0 +1,79 @@
+package middleware_test
+
+import (
+ "net/http"
+ "net/http/httptest"
+ "testing"
+
+ "npm/internal/api/middleware"
+
+ "github.com/go-chi/chi/v5"
+ "github.com/stretchr/testify/assert"
+)
+
+func TestCors(t *testing.T) {
+ r := chi.NewRouter()
+ r.Use(middleware.Cors(r))
+
+ r.Get("/test", func(w http.ResponseWriter, _ *http.Request) {
+ w.Write([]byte("test"))
+ })
+
+ req, err := http.NewRequest("GET", "/test", nil)
+ assert.Nil(t, err)
+
+ rr := httptest.NewRecorder()
+ r.ServeHTTP(rr, req)
+
+ assert.Equal(t, "GET,OPTIONS", rr.Header().Get("Access-Control-Allow-Methods"))
+ assert.Equal(t, "Authorization,Host,Content-Type,Connection,User-Agent,Cache-Control,Accept-Encoding", rr.Header().Get("Access-Control-Allow-Headers"))
+ assert.Equal(t, "test", rr.Body.String())
+}
+
+func TestCorsNoRoute(t *testing.T) {
+ r := chi.NewRouter()
+ r.Use(middleware.Cors(r))
+
+ req, err := http.NewRequest("GET", "/test", nil)
+ assert.Nil(t, err)
+
+ rr := httptest.NewRecorder()
+ r.ServeHTTP(rr, req)
+
+ assert.Equal(t, "", rr.Header().Get("Access-Control-Allow-Methods"))
+ assert.Equal(t, "", rr.Header().Get("Access-Control-Allow-Headers"))
+}
+
+func TestOptions(t *testing.T) {
+ r := chi.NewRouter()
+ r.Use(middleware.Options(r))
+
+ r.Get("/test", func(w http.ResponseWriter, _ *http.Request) {
+ w.Write([]byte("test"))
+ })
+
+ req, err := http.NewRequest("OPTIONS", "/test", nil)
+ assert.Nil(t, err)
+
+ rr := httptest.NewRecorder()
+ r.ServeHTTP(rr, req)
+
+ assert.Equal(t, "*", rr.Header().Get("Access-Control-Allow-Origin"))
+ assert.Equal(t, "application/json", rr.Header().Get("Content-Type"))
+ assert.Equal(t, "{}", rr.Body.String())
+}
+
+func TestOptionsNoRoute(t *testing.T) {
+ r := chi.NewRouter()
+ r.Use(middleware.Options(r))
+
+ req, err := http.NewRequest("OPTIONS", "/test", nil)
+ assert.Nil(t, err)
+
+ rr := httptest.NewRecorder()
+ r.ServeHTTP(rr, req)
+
+ assert.Equal(t, "", rr.Header().Get("Access-Control-Allow-Origin"))
+ assert.Equal(t, "", rr.Header().Get("Access-Control-Allow-Methods"))
+ assert.Equal(t, "", rr.Header().Get("Access-Control-Allow-Headers"))
+}
diff --git a/backend/internal/api/middleware/enforce_setup.go b/backend/internal/api/middleware/enforce_setup.go
new file mode 100644
index 000000000..49f267c47
--- /dev/null
+++ b/backend/internal/api/middleware/enforce_setup.go
@@ -0,0 +1,23 @@
+package middleware
+
+import (
+ "net/http"
+
+ h "npm/internal/api/http"
+ "npm/internal/config"
+)
+
+// EnforceSetup will error if the config setup doesn't match what is required
+func EnforceSetup() func(http.Handler) http.Handler {
+ return func(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ if !config.IsSetup {
+ h.ResultErrorJSON(w, r, http.StatusForbidden, "Not available during setup phase", nil)
+ return
+ }
+
+ // All good
+ next.ServeHTTP(w, r)
+ })
+ }
+}
diff --git a/backend/internal/api/middleware/enforce_setup_test.go b/backend/internal/api/middleware/enforce_setup_test.go
new file mode 100644
index 000000000..dba7cf1fb
--- /dev/null
+++ b/backend/internal/api/middleware/enforce_setup_test.go
@@ -0,0 +1,50 @@
+package middleware_test
+
+import (
+ "net/http"
+ "net/http/httptest"
+ "testing"
+
+ "npm/internal/api/middleware"
+ "npm/internal/config"
+
+ "github.com/stretchr/testify/assert"
+ "go.uber.org/goleak"
+)
+
+func TestEnforceSetup(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ tests := []struct {
+ name string
+ isSetup bool
+ expectedCode int
+ }{
+ {
+ name: "should allow request when setup is expected and is setup",
+ isSetup: true,
+ expectedCode: http.StatusOK,
+ },
+ {
+ name: "should error when setup is expected but not setup",
+ isSetup: false,
+ expectedCode: http.StatusForbidden,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ config.IsSetup = tt.isSetup
+
+ handler := middleware.EnforceSetup()(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
+ w.WriteHeader(http.StatusOK)
+ }))
+
+ req := httptest.NewRequest(http.MethodGet, "/", nil)
+ w := httptest.NewRecorder()
+ handler.ServeHTTP(w, req)
+ assert.Equal(t, tt.expectedCode, w.Code)
+ })
+ }
+}
diff --git a/backend/internal/api/middleware/expansion.go b/backend/internal/api/middleware/expansion.go
new file mode 100644
index 000000000..266bfe42f
--- /dev/null
+++ b/backend/internal/api/middleware/expansion.go
@@ -0,0 +1,33 @@
+package middleware
+
+import (
+ "context"
+ "net/http"
+ "strings"
+
+ c "npm/internal/api/context"
+)
+
+// Expansion will determine whether the request should have objects expanded
+// with ?expand=item,item
+func Expansion(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ expandStr := r.URL.Query().Get("expand")
+ if expandStr != "" {
+ ctx := r.Context()
+ ctx = context.WithValue(ctx, c.ExpansionCtxKey, strings.Split(expandStr, ","))
+ next.ServeHTTP(w, r.WithContext(ctx))
+ } else {
+ next.ServeHTTP(w, r)
+ }
+ })
+}
+
+// GetExpandFromContext returns the Expansion setting
+func GetExpandFromContext(r *http.Request) []string {
+ expand, ok := r.Context().Value(c.ExpansionCtxKey).([]string)
+ if !ok {
+ return nil
+ }
+ return expand
+}
diff --git a/backend/internal/api/middleware/expansion_test.go b/backend/internal/api/middleware/expansion_test.go
new file mode 100644
index 000000000..47904d5c0
--- /dev/null
+++ b/backend/internal/api/middleware/expansion_test.go
@@ -0,0 +1,76 @@
+package middleware_test
+
+import (
+ "context"
+ "net/http"
+ "net/http/httptest"
+ "testing"
+
+ c "npm/internal/api/context"
+ "npm/internal/api/middleware"
+
+ "github.com/stretchr/testify/assert"
+ "go.uber.org/goleak"
+)
+
+func TestExpansion(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ t.Run("with expand query param", func(t *testing.T) {
+ req, err := http.NewRequest("GET", "/path?expand=item1,item2", nil)
+ assert.NoError(t, err)
+
+ rr := httptest.NewRecorder()
+
+ handler := http.HandlerFunc(func(_ http.ResponseWriter, r *http.Request) {
+ expand := middleware.GetExpandFromContext(r)
+ assert.Equal(t, []string{"item1", "item2"}, expand)
+ })
+
+ middleware.Expansion(handler).ServeHTTP(rr, req)
+
+ assert.Equal(t, http.StatusOK, rr.Code)
+ })
+
+ t.Run("without expand query param", func(t *testing.T) {
+ req, err := http.NewRequest("GET", "/path", nil)
+ assert.NoError(t, err)
+
+ rr := httptest.NewRecorder()
+
+ handler := http.HandlerFunc(func(_ http.ResponseWriter, r *http.Request) {
+ expand := middleware.GetExpandFromContext(r)
+ assert.Nil(t, expand)
+ })
+
+ middleware.Expansion(handler).ServeHTTP(rr, req)
+
+ assert.Equal(t, http.StatusOK, rr.Code)
+ })
+}
+
+func TestGetExpandFromContext(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ t.Run("with context value", func(t *testing.T) {
+ req, err := http.NewRequest("GET", "/path", nil)
+ assert.NoError(t, err)
+
+ ctx := req.Context()
+ ctx = context.WithValue(ctx, c.ExpansionCtxKey, []string{"item1", "item2"})
+ req = req.WithContext(ctx)
+
+ expand := middleware.GetExpandFromContext(req)
+ assert.Equal(t, []string{"item1", "item2"}, expand)
+ })
+
+ t.Run("without context value", func(t *testing.T) {
+ req, err := http.NewRequest("GET", "/path", nil)
+ assert.NoError(t, err)
+
+ expand := middleware.GetExpandFromContext(req)
+ assert.Nil(t, expand)
+ })
+}
diff --git a/backend/internal/api/middleware/list_query.go b/backend/internal/api/middleware/list_query.go
new file mode 100644
index 000000000..9660f70b8
--- /dev/null
+++ b/backend/internal/api/middleware/list_query.go
@@ -0,0 +1,195 @@
+package middleware
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "net/http"
+ "strings"
+
+ c "npm/internal/api/context"
+ h "npm/internal/api/http"
+ "npm/internal/model"
+ "npm/internal/tags"
+ "npm/internal/util"
+
+ "github.com/qri-io/jsonschema"
+)
+
+// ListQuery will accept a pre-defined schemaData to validate against the GET query params
+// passed in to this endpoint. This will ensure that the filters are not injecting SQL
+// and the sort parameter is valid as well.
+// After we have determined what the Filters are to be, they are saved on the Context
+// to be used later in other endpoints.
+func ListQuery(obj any) func(http.Handler) http.Handler {
+ schemaData := tags.GetFilterSchema(obj)
+ filterMap := tags.GetFilterMap(obj, "")
+
+ return func(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ ctx := r.Context()
+
+ ctx, statusCode, errMsg, errors := listQueryFilters(ctx, r, schemaData)
+ if statusCode > 0 {
+ h.ResultErrorJSON(w, r, statusCode, errMsg, errors)
+ return
+ }
+
+ ctx, statusCode, errMsg = listQuerySort(ctx, r, filterMap)
+ if statusCode > 0 {
+ h.ResultErrorJSON(w, r, statusCode, errMsg, nil)
+ return
+ }
+
+ next.ServeHTTP(w, r.WithContext(ctx))
+ })
+ }
+}
+
+func listQuerySort(
+ ctx context.Context,
+ r *http.Request,
+ filterMap map[string]model.FilterMapValue,
+) (context.Context, int, string) {
+ var sortFields []model.Sort
+
+ sortString := r.URL.Query().Get("sort")
+ if sortString == "" {
+ return ctx, 0, ""
+ }
+
+ // Split sort fields up in to slice
+ sorts := strings.Split(sortString, ",")
+ for _, sortItem := range sorts {
+ if strings.Contains(sortItem, ".") {
+ theseItems := strings.Split(sortItem, ".")
+
+ switch strings.ToLower(theseItems[1]) {
+ case "desc":
+ fallthrough
+ case "descending":
+ theseItems[1] = "DESC"
+ default:
+ theseItems[1] = "ASC"
+ }
+
+ sortFields = append(sortFields, model.Sort{
+ Field: theseItems[0],
+ Direction: theseItems[1],
+ })
+ } else {
+ sortFields = append(sortFields, model.Sort{
+ Field: sortItem,
+ Direction: "ASC",
+ })
+ }
+ }
+
+ // check against filter schema
+ for _, f := range sortFields {
+ if _, exists := filterMap[f.Field]; !exists {
+ return ctx, http.StatusBadRequest, "Invalid sort field"
+ }
+ }
+
+ ctx = context.WithValue(ctx, c.SortCtxKey, sortFields)
+
+ // No problems!
+ return ctx, 0, ""
+}
+
+func listQueryFilters(
+ ctx context.Context,
+ r *http.Request,
+ schemaData string,
+) (context.Context, int, string, any) {
+ reservedFilterKeys := []string{
+ "limit",
+ "offset",
+ "sort",
+ "expand",
+ "t", // This is used as a timestamp paramater in some clients and can be ignored
+ }
+
+ var filters []model.Filter
+ for key, val := range r.URL.Query() {
+ key = strings.ToLower(key)
+
+ // Split out the modifier from the field name and set a default modifier
+ var keyParts []string
+ keyParts = strings.Split(key, ":")
+ if len(keyParts) == 1 {
+ // Default modifier
+ keyParts = append(keyParts, "equals")
+ }
+
+ // Only use this filter if it's not a reserved get param
+ if !util.SliceContainsItem(reservedFilterKeys, keyParts[0]) {
+ for _, valItem := range val {
+ // Check that the val isn't empty
+ if len(strings.TrimSpace(valItem)) > 0 {
+ valSlice := []string{valItem}
+ if keyParts[1] == "in" || keyParts[1] == "notin" {
+ valSlice = strings.Split(valItem, ",")
+ }
+
+ filters = append(filters, model.Filter{
+ Field: keyParts[0],
+ Modifier: keyParts[1],
+ Value: valSlice,
+ })
+ }
+ }
+ }
+ }
+
+ // Only validate schema if there are filters to validate
+ if len(filters) > 0 {
+ // Marshal the Filters in to a JSON string so that the Schema Validation works against it
+ filterData, marshalErr := json.MarshalIndent(filters, "", " ")
+ if marshalErr != nil {
+ return ctx, http.StatusInternalServerError, fmt.Sprintf("Schema Fatal: %v", marshalErr), nil
+ }
+
+ // Create root schema
+ rs := &jsonschema.Schema{}
+ if err := json.Unmarshal([]byte(schemaData), rs); err != nil {
+ return ctx, http.StatusInternalServerError, fmt.Sprintf("Schema Fatal: %v", err), nil
+ }
+
+ // Validate it
+ errors, jsonError := rs.ValidateBytes(ctx, filterData)
+ if jsonError != nil {
+ return ctx, http.StatusBadRequest, jsonError.Error(), nil
+ }
+
+ if len(errors) > 0 {
+ return ctx, http.StatusBadRequest, "Invalid Filters", errors
+ }
+
+ ctx = context.WithValue(ctx, c.FiltersCtxKey, filters)
+ }
+
+ // No problems!
+ return ctx, 0, "", nil
+}
+
+// GetFiltersFromContext returns the Filters
+func GetFiltersFromContext(r *http.Request) []model.Filter {
+ filters, ok := r.Context().Value(c.FiltersCtxKey).([]model.Filter)
+ if !ok {
+ // the assertion failed
+ return nil
+ }
+ return filters
+}
+
+// GetSortFromContext returns the Sort
+func GetSortFromContext(r *http.Request) []model.Sort {
+ sorts, ok := r.Context().Value(c.SortCtxKey).([]model.Sort)
+ if !ok {
+ // the assertion failed
+ return nil
+ }
+ return sorts
+}
diff --git a/backend/internal/api/middleware/list_query_test.go b/backend/internal/api/middleware/list_query_test.go
new file mode 100644
index 000000000..3563c1fe1
--- /dev/null
+++ b/backend/internal/api/middleware/list_query_test.go
@@ -0,0 +1,99 @@
+package middleware_test
+
+import (
+ "context"
+ "net/http"
+ "net/http/httptest"
+ "testing"
+
+ c "npm/internal/api/context"
+ "npm/internal/api/middleware"
+ "npm/internal/entity/user"
+ "npm/internal/model"
+ "npm/internal/tags"
+
+ "github.com/stretchr/testify/assert"
+ "go.uber.org/goleak"
+)
+
+func TestListQuery(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ tests := []struct {
+ name string
+ queryParams string
+ expectedStatus int
+ }{
+ {
+ name: "valid query params",
+ queryParams: "?name:contains=John&sort=name.desc",
+ expectedStatus: http.StatusOK,
+ },
+ {
+ name: "invalid sort field",
+ queryParams: "?name:contains=John&sort=invalid_field",
+ expectedStatus: http.StatusBadRequest,
+ },
+ {
+ name: "invalid filter value",
+ queryParams: "?name=123",
+ expectedStatus: http.StatusOK,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ req, err := http.NewRequest("GET", "/test"+tt.queryParams, nil)
+ assert.NoError(t, err)
+
+ testObj := user.Model{}
+
+ ctx := context.Background()
+ ctx = context.WithValue(ctx, c.FiltersCtxKey, tags.GetFilterSchema(testObj))
+
+ rr := httptest.NewRecorder()
+ handler := middleware.ListQuery(testObj)(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
+ w.WriteHeader(http.StatusOK)
+ }))
+
+ handler.ServeHTTP(rr, req.WithContext(ctx))
+
+ assert.Equal(t, tt.expectedStatus, rr.Code)
+ })
+ }
+}
+
+func TestGetFiltersFromContext(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ req, err := http.NewRequest("GET", "/test", nil)
+ assert.NoError(t, err)
+
+ filters := []model.Filter{
+ {Field: "name", Modifier: "contains", Value: []string{"test"}},
+ }
+ ctx := context.WithValue(req.Context(), c.FiltersCtxKey, filters)
+ req = req.WithContext(ctx)
+
+ result := middleware.GetFiltersFromContext(req)
+ assert.Equal(t, filters, result)
+}
+
+func TestGetSortFromContext(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ req, err := http.NewRequest("GET", "/test", nil)
+ assert.NoError(t, err)
+
+ sorts := []model.Sort{
+ {Field: "name", Direction: "asc"},
+ }
+ ctx := context.WithValue(req.Context(), c.SortCtxKey, sorts)
+ req = req.WithContext(ctx)
+
+ result := middleware.GetSortFromContext(req)
+ assert.Equal(t, sorts, result)
+}
diff --git a/backend/internal/api/middleware/log.go b/backend/internal/api/middleware/log.go
new file mode 100644
index 000000000..12e80dceb
--- /dev/null
+++ b/backend/internal/api/middleware/log.go
@@ -0,0 +1,16 @@
+package middleware
+
+import (
+ "net/http"
+
+ "npm/internal/logger"
+)
+
+// Log will print out route information to the logger
+// only when debug is enabled
+func Log(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ logger.Debug("Request: %s %s", r.Method, r.URL.Path)
+ next.ServeHTTP(w, r)
+ })
+}
diff --git a/backend/internal/api/middleware/pretty_print.go b/backend/internal/api/middleware/pretty_print.go
new file mode 100644
index 000000000..7baa14a26
--- /dev/null
+++ b/backend/internal/api/middleware/pretty_print.go
@@ -0,0 +1,22 @@
+package middleware
+
+import (
+ "context"
+ "net/http"
+
+ c "npm/internal/api/context"
+)
+
+// PrettyPrint will determine whether the request should be pretty printed in output
+// with ?pretty=1 or ?pretty=true
+func PrettyPrint(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ prettyStr := r.URL.Query().Get("pretty")
+ if prettyStr == "1" || prettyStr == "true" {
+ ctx := context.WithValue(r.Context(), c.PrettyPrintCtxKey, true)
+ next.ServeHTTP(w, r.WithContext(ctx))
+ } else {
+ next.ServeHTTP(w, r)
+ }
+ })
+}
diff --git a/backend/internal/api/middleware/schema.go b/backend/internal/api/middleware/schema.go
new file mode 100644
index 000000000..8f09e580f
--- /dev/null
+++ b/backend/internal/api/middleware/schema.go
@@ -0,0 +1,54 @@
+package middleware
+
+import (
+ "context"
+ "encoding/json"
+ "net/http"
+
+ c "npm/internal/api/context"
+ h "npm/internal/api/http"
+
+ "github.com/qri-io/jsonschema"
+ "github.com/rotisserie/eris"
+)
+
+// CheckRequestSchema checks the payload against schema
+func CheckRequestSchema(ctx context.Context, schemaData string, payload []byte) ([]jsonschema.KeyError, error) {
+ // Create root schema
+ rs := &jsonschema.Schema{}
+ if err := json.Unmarshal([]byte(schemaData), rs); err != nil {
+ return nil, eris.Wrapf(err, "Schema Fatal: %v", err)
+ }
+
+ // Validate it
+ schemaErrors, jsonError := rs.ValidateBytes(ctx, payload)
+ if jsonError != nil {
+ return nil, jsonError
+ }
+
+ return schemaErrors, nil
+}
+
+// EnforceRequestSchema accepts a schema and validates the request body against it
+func EnforceRequestSchema(schemaData string) func(http.Handler) http.Handler {
+ return func(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ // Get content from context
+ bodyBytes, _ := r.Context().Value(c.BodyCtxKey).([]byte)
+
+ schemaErrors, err := CheckRequestSchema(r.Context(), schemaData, bodyBytes)
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusInternalServerError, err.Error(), nil)
+ return
+ }
+
+ if len(schemaErrors) > 0 {
+ h.ResultSchemaErrorJSON(w, r, schemaErrors)
+ return
+ }
+
+ // All good
+ next.ServeHTTP(w, r)
+ })
+ }
+}
diff --git a/backend/internal/api/middleware/sse_auth.go b/backend/internal/api/middleware/sse_auth.go
new file mode 100644
index 000000000..9b4a508ec
--- /dev/null
+++ b/backend/internal/api/middleware/sse_auth.go
@@ -0,0 +1,50 @@
+package middleware
+
+import (
+ "net/http"
+
+ h "npm/internal/api/http"
+ "npm/internal/entity/user"
+
+ "github.com/go-chi/jwtauth/v5"
+)
+
+// SSEAuth will validate that the jwt token provided to get this far is a SSE token
+// and that the user is enabled
+func SSEAuth(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ ctx := r.Context()
+ token, claims, err := jwtauth.FromContext(ctx)
+
+ if err != nil {
+ h.ResultErrorJSON(w, r, http.StatusUnauthorized, err.Error(), nil)
+ return
+ }
+
+ if token == nil {
+ h.ResultErrorJSON(w, r, http.StatusUnauthorized, "No token given", nil)
+ return
+ }
+
+ if claims == nil {
+ h.ResultErrorJSON(w, r, http.StatusUnauthorized, "Unauthorised", nil)
+ return
+ }
+
+ userID := uint(claims["uid"].(float64))
+ _, enabled, _ := user.IsEnabled(userID)
+ if !enabled {
+ h.ResultErrorJSON(w, r, http.StatusUnauthorized, "Unauthorised", nil)
+ return
+ }
+
+ iss, _ := token.Get("iss")
+ if iss != "sse" {
+ h.ResultErrorJSON(w, r, http.StatusUnauthorized, "Unauthorised", nil)
+ return
+ }
+
+ // Should be all good now
+ next.ServeHTTP(w, r)
+ })
+}
diff --git a/backend/internal/api/router.go b/backend/internal/api/router.go
new file mode 100644
index 000000000..8928d59d5
--- /dev/null
+++ b/backend/internal/api/router.go
@@ -0,0 +1,375 @@
+package api
+
+import (
+ "net/http"
+ "time"
+
+ "npm/internal/api/handler"
+ "npm/internal/api/middleware"
+ "npm/internal/api/schema"
+ "npm/internal/config"
+ "npm/internal/entity/accesslist"
+ "npm/internal/entity/certificate"
+ "npm/internal/entity/certificateauthority"
+ "npm/internal/entity/dnsprovider"
+ "npm/internal/entity/host"
+ "npm/internal/entity/nginxtemplate"
+ "npm/internal/entity/setting"
+ "npm/internal/entity/stream"
+ "npm/internal/entity/upstream"
+ "npm/internal/entity/user"
+ "npm/internal/logger"
+ "npm/internal/serverevents"
+
+ "github.com/go-chi/chi/v5"
+ chiMiddleware "github.com/go-chi/chi/v5/middleware"
+ "github.com/go-chi/cors"
+)
+
+// NewRouter returns a new router object
+func NewRouter() http.Handler {
+ // Cors
+ corss := cors.New(cors.Options{
+ AllowedOrigins: []string{"*"},
+ AllowedMethods: []string{"GET", "POST", "PUT", "DELETE", "OPTIONS"},
+ AllowedHeaders: []string{"Accept", "Authorization", "Content-Type", "X-Requested-With"},
+ AllowCredentials: true,
+ MaxAge: 300,
+ })
+
+ r := chi.NewRouter()
+ r.Use(
+ middleware.AccessControl,
+ middleware.Cors(r),
+ middleware.Options(r),
+ corss.Handler,
+ chiMiddleware.RealIP,
+ chiMiddleware.Recoverer,
+ chiMiddleware.Throttle(5),
+ middleware.PrettyPrint,
+ middleware.Expansion,
+ middleware.DecodeAuth(),
+ middleware.BodyContext(),
+ middleware.Log,
+ )
+
+ return applyRoutes(r)
+}
+
+// applyRoutes is where the magic happens
+func applyRoutes(r chi.Router) chi.Router {
+ middleware.AuthCacheInit()
+ r.NotFound(handler.NotFound())
+ r.MethodNotAllowed(handler.NotAllowed())
+
+ // OAuth endpoints aren't technically API endpoints
+ r.With(middleware.EnforceSetup()).Route("/oauth", func(r chi.Router) {
+ r.Get("/login", handler.OAuthLogin())
+ r.Get("/redirect", handler.OAuthRedirect())
+ })
+
+ // SSE - requires a sse token as the `jwt` get parameter
+ // Exists inside /api but it's here so that we can skip the Timeout middleware
+ // that applies to other endpoints.
+ r.With(middleware.EnforceSetup(), middleware.SSEAuth).
+ Mount("/api/sse", serverevents.Get())
+
+ // API
+ r.With(chiMiddleware.Timeout(30*time.Second)).Route("/api", func(r chi.Router) {
+ r.Get("/", handler.Health())
+ r.Get("/schema", handler.Schema())
+ r.With(middleware.EnforceSetup(), middleware.Enforce()).
+ Get("/config", handler.Config())
+
+ // Auth
+ r.With(middleware.EnforceSetup()).Route("/auth", func(r chi.Router) {
+ r.Get("/", handler.GetAuthConfig())
+ r.With(middleware.EnforceRequestSchema(schema.GetToken())).
+ Post("/", handler.NewToken())
+ r.With(middleware.Enforce()).
+ Post("/refresh", handler.RefreshToken())
+ r.With(middleware.Enforce()).
+ Post("/sse", handler.NewSSEToken())
+ })
+
+ // Users
+ r.Route("/users", func(r chi.Router) {
+ // Create - can be done in Setup stage as well
+ r.With(
+ middleware.Enforce(user.CapabilityUsersManage),
+ middleware.EnforceRequestSchema(schema.CreateUser()),
+ ).Post("/", handler.CreateUser())
+
+ // Requires Setup stage to be completed
+ r.With(middleware.EnforceSetup()).Route("/", func(r chi.Router) {
+ // Get yourself, requires a login but no other permissions
+ r.With(middleware.Enforce()).
+ Get("/{userID:me}", handler.GetUser())
+
+ // Update yourself, requires a login but no other permissions
+ r.With(
+ middleware.Enforce(),
+ middleware.EnforceRequestSchema(schema.UpdateUser()),
+ ).Put("/{userID:me}", handler.UpdateUser())
+
+ r.With(middleware.Enforce(user.CapabilityUsersManage)).Route("/", func(r chi.Router) {
+ // List
+ r.With(middleware.ListQuery(user.Model{})).Get("/", handler.GetUsers())
+
+ // Specific Item
+ r.Get("/{userID:[0-9]+}", handler.GetUser())
+ r.Delete("/{userID:([0-9]+|me)}", handler.DeleteUser())
+
+ // Update another user
+ r.With(middleware.EnforceRequestSchema(schema.UpdateUser())).
+ Put("/{userID:[0-9]+}", handler.UpdateUser())
+ })
+
+ // Auth - sets passwords
+ r.With(
+ middleware.Enforce(),
+ middleware.EnforceRequestSchema(schema.SetAuth()),
+ ).Post("/{userID:me}/auth", handler.SetAuth())
+ r.With(
+ middleware.Enforce(user.CapabilityUsersManage),
+ middleware.EnforceRequestSchema(schema.SetAuth()),
+ ).Post("/{userID:[0-9]+}/auth", handler.SetAuth())
+ })
+ })
+
+ // Only available in debug mode
+ if config.GetLogLevel() == logger.DebugLevel {
+ // delete users without auth
+ r.Delete("/users", handler.DeleteUsers())
+ // SSE test endpoints
+ r.Post("/sse-notification", handler.TestSSENotification())
+ }
+
+ // Settings
+ r.With(middleware.EnforceSetup(), middleware.Enforce(user.CapabilitySettingsManage)).Route("/settings", func(r chi.Router) {
+ // List
+ r.With(
+ middleware.ListQuery(setting.Model{}),
+ ).Get("/", handler.GetSettings())
+
+ r.Get("/{name}", handler.GetSetting())
+ r.With(middleware.EnforceRequestSchema(schema.CreateSetting())).
+ Post("/", handler.CreateSetting())
+ r.With(middleware.EnforceRequestSchema(schema.UpdateSetting())).
+ Put("/{name}", handler.UpdateSetting())
+ })
+
+ // Access Lists
+ r.With(middleware.EnforceSetup()).Route("/access-lists", func(r chi.Router) {
+ // List
+ r.With(
+ middleware.Enforce(user.CapabilityAccessListsView),
+ middleware.ListQuery(accesslist.Model{}),
+ ).Get("/", handler.GetAccessLists())
+
+ // Create
+ r.With(middleware.Enforce(user.CapabilityAccessListsManage), middleware.EnforceRequestSchema(schema.CreateAccessList())).
+ Post("/", handler.CreateAccessList())
+
+ // Specific Item
+ r.Route("/{accessListID:[0-9]+}", func(r chi.Router) {
+ r.With(middleware.Enforce(user.CapabilityAccessListsView)).
+ Get("/", handler.GetAccessList())
+ r.With(middleware.Enforce(user.CapabilityAccessListsManage)).Route("/", func(r chi.Router) {
+ r.Delete("/{accessListID:[0-9]+}", handler.DeleteAccessList())
+ r.With(middleware.EnforceRequestSchema(schema.UpdateAccessList())).
+ Put("/{accessListID:[0-9]+}", handler.UpdateAccessList())
+ })
+ })
+ })
+
+ // DNS Providers
+ r.With(middleware.EnforceSetup()).Route("/dns-providers", func(r chi.Router) {
+ // List
+ r.With(
+ middleware.Enforce(user.CapabilityDNSProvidersView),
+ middleware.ListQuery(dnsprovider.Model{}),
+ ).Get("/", handler.GetDNSProviders())
+
+ // Create
+ r.With(middleware.Enforce(user.CapabilityDNSProvidersManage), middleware.EnforceRequestSchema(schema.CreateDNSProvider())).
+ Post("/", handler.CreateDNSProvider())
+
+ // Specific Item
+ r.Route("/{providerID:[0-9]+}", func(r chi.Router) {
+ r.With(middleware.Enforce(user.CapabilityDNSProvidersView)).
+ Get("/{providerID:[0-9]+}", handler.GetDNSProvider())
+ r.With(middleware.Enforce(user.CapabilityDNSProvidersManage)).Route("/", func(r chi.Router) {
+ r.Delete("/", handler.DeleteDNSProvider())
+ r.With(middleware.EnforceRequestSchema(schema.UpdateDNSProvider())).
+ Put("/{providerID:[0-9]+}", handler.UpdateDNSProvider())
+ })
+ })
+
+ // List Acme DNS Providers
+ r.With(middleware.Enforce(user.CapabilityDNSProvidersView)).Route("/acmesh", func(r chi.Router) {
+ r.Get("/{acmeshID:[a-z0-9_]+}", handler.GetAcmeshProvider())
+ r.Get("/", handler.GetAcmeshProviders())
+ })
+ })
+
+ // Certificate Authorities
+ r.With(middleware.EnforceSetup()).Route("/certificate-authorities", func(r chi.Router) {
+ // List
+ r.With(
+ middleware.Enforce(user.CapabilityCertificateAuthoritiesView),
+ middleware.ListQuery(certificateauthority.Model{}),
+ ).Get("/", handler.GetCertificateAuthorities())
+
+ // Create
+ r.With(middleware.Enforce(user.CapabilityCertificateAuthoritiesManage), middleware.EnforceRequestSchema(schema.CreateCertificateAuthority())).
+ Post("/", handler.CreateCertificateAuthority())
+
+ // Specific Item
+ r.Route("/{caID:[0-9]+}", func(r chi.Router) {
+ r.With(middleware.Enforce(user.CapabilityCertificateAuthoritiesView)).
+ Get("/", handler.GetCertificateAuthority())
+
+ r.With(middleware.EnforceRequestSchema(schema.UpdateCertificateAuthority())).
+ Put("/", handler.UpdateCertificateAuthority())
+ r.With(middleware.Enforce(user.CapabilityCertificateAuthoritiesManage)).
+ Delete("/", handler.DeleteCertificateAuthority())
+
+ r.With(middleware.Enforce(user.CapabilityCertificateAuthoritiesManage)).Route("/", func(r chi.Router) {
+ r.Delete("/{caID:[0-9]+}", handler.DeleteCertificateAuthority())
+ r.With(middleware.EnforceRequestSchema(schema.UpdateCertificateAuthority())).
+ Put("/{caID:[0-9]+}", handler.UpdateCertificateAuthority())
+ })
+ })
+ })
+
+ // Certificates
+ r.With(middleware.EnforceSetup()).Route("/certificates", func(r chi.Router) {
+ // List
+ r.With(
+ middleware.Enforce(user.CapabilityCertificatesView),
+ middleware.ListQuery(certificate.Model{}),
+ ).Get("/", handler.GetCertificates())
+
+ // Create
+ r.With(middleware.Enforce(user.CapabilityCertificatesManage), middleware.EnforceRequestSchema(schema.CreateCertificate())).
+ Post("/", handler.CreateCertificate())
+
+ // Specific Item
+ r.Route("/{certificateID:[0-9]+}", func(r chi.Router) {
+ r.With(middleware.Enforce(user.CapabilityCertificatesView)).
+ Get("/", handler.GetCertificate())
+ r.With(middleware.Enforce(user.CapabilityCertificatesManage)).Route("/", func(r chi.Router) {
+ r.Delete("/", handler.DeleteCertificate())
+ r.Put("/", handler.UpdateCertificate())
+ // r.With(middleware.EnforceRequestSchema(schema.UpdateCertificate())).
+ // Put("/", handler.UpdateCertificate())
+ r.Post("/renew", handler.RenewCertificate())
+ r.Get("/download", handler.DownloadCertificate())
+ })
+ })
+ })
+
+ // Hosts
+ r.With(middleware.EnforceSetup()).Route("/hosts", func(r chi.Router) {
+ // List
+ r.With(
+ middleware.Enforce(user.CapabilityHostsView),
+ middleware.ListQuery(host.Model{}),
+ ).Get("/", handler.GetHosts())
+
+ // Create
+ r.With(middleware.Enforce(user.CapabilityHostsManage), middleware.EnforceRequestSchema(schema.CreateHost())).
+ Post("/", handler.CreateHost())
+
+ // Specific Item
+ r.Route("/{hostID:[0-9]+}", func(r chi.Router) {
+ r.With(middleware.Enforce(user.CapabilityHostsView)).
+ Get("/", handler.GetHost())
+ r.With(middleware.Enforce(user.CapabilityHostsManage)).Route("/", func(r chi.Router) {
+ r.Delete("/", handler.DeleteHost())
+ r.With(middleware.EnforceRequestSchema(schema.UpdateHost())).
+ Put("/", handler.UpdateHost())
+ r.Get("/nginx-config", handler.GetHostNginxConfig("json"))
+ r.Get("/nginx-config.txt", handler.GetHostNginxConfig("text"))
+ })
+ })
+ })
+
+ // Nginx Templates
+ r.With(middleware.EnforceSetup()).Route("/nginx-templates", func(r chi.Router) {
+ // List
+ r.With(
+ middleware.Enforce(user.CapabilityNginxTemplatesView),
+ middleware.ListQuery(nginxtemplate.Model{}),
+ ).Get("/", handler.GetNginxTemplates())
+
+ // Create
+ r.With(middleware.Enforce(user.CapabilityNginxTemplatesManage), middleware.EnforceRequestSchema(schema.CreateNginxTemplate())).
+ Post("/", handler.CreateNginxTemplate())
+
+ // Specific Item
+ r.Route("/{templateID:[0-9]+}", func(r chi.Router) {
+ r.With(middleware.Enforce(user.CapabilityNginxTemplatesView)).
+ Get("/", handler.GetNginxTemplates())
+ r.With(middleware.Enforce(user.CapabilityHostsManage)).Route("/", func(r chi.Router) {
+ r.Delete("/", handler.DeleteNginxTemplate())
+ r.With(middleware.EnforceRequestSchema(schema.UpdateNginxTemplate())).
+ Put("/", handler.UpdateNginxTemplate())
+ })
+ })
+ })
+
+ // Streams
+ r.With(middleware.EnforceSetup()).Route("/streams", func(r chi.Router) {
+ // List
+ r.With(
+ middleware.Enforce(user.CapabilityStreamsView),
+ middleware.ListQuery(stream.Model{}),
+ ).Get("/", handler.GetStreams())
+
+ // Create
+ r.With(middleware.Enforce(user.CapabilityStreamsManage), middleware.EnforceRequestSchema(schema.CreateStream())).
+ Post("/", handler.CreateStream())
+
+ // Specific Item
+ r.Route("/{hostID:[0-9]+}", func(r chi.Router) {
+ r.With(middleware.Enforce(user.CapabilityStreamsView)).
+ Get("/", handler.GetStream())
+ r.With(middleware.Enforce(user.CapabilityHostsManage)).Route("/", func(r chi.Router) {
+ r.Delete("/", handler.DeleteStream())
+ r.With(middleware.EnforceRequestSchema(schema.UpdateStream())).
+ Put("/", handler.UpdateStream())
+ })
+ })
+ })
+
+ // Upstreams
+ r.With(middleware.EnforceSetup()).Route("/upstreams", func(r chi.Router) {
+ // List
+ r.With(
+ middleware.Enforce(user.CapabilityHostsView),
+ middleware.ListQuery(upstream.Model{}),
+ ).Get("/", handler.GetUpstreams())
+
+ // Create
+ r.With(middleware.Enforce(user.CapabilityHostsManage), middleware.EnforceRequestSchema(schema.CreateUpstream())).
+ Post("/", handler.CreateUpstream())
+
+ // Specific Item
+ r.Route("/{upstreamID:[0-9]+}", func(r chi.Router) {
+ r.With(middleware.Enforce(user.CapabilityHostsView)).
+ Get("/", handler.GetUpstream())
+ r.With(middleware.Enforce(user.CapabilityHostsManage)).Route("/", func(r chi.Router) {
+ r.Delete("/", handler.DeleteUpstream())
+ r.With(middleware.EnforceRequestSchema(schema.UpdateUpstream())).
+ Put("/", handler.UpdateUpstream())
+ r.Get("/nginx-config", handler.GetUpstreamNginxConfig("json"))
+ r.Get("/nginx-config.txt", handler.GetUpstreamNginxConfig("text"))
+ })
+ })
+ })
+ })
+
+ return r
+}
diff --git a/backend/internal/api/router_test.go b/backend/internal/api/router_test.go
new file mode 100644
index 000000000..22ac857fc
--- /dev/null
+++ b/backend/internal/api/router_test.go
@@ -0,0 +1,56 @@
+package api
+
+import (
+ "net/http"
+ "net/http/httptest"
+ "os"
+ "testing"
+
+ "npm/internal/config"
+
+ "github.com/stretchr/testify/assert"
+ "go.uber.org/goleak"
+)
+
+var (
+ r = NewRouter()
+ version = "3.0.0"
+ commit = "abcdefgh"
+)
+
+// Tear up/down
+func TestMain(m *testing.M) {
+ config.Init(&version, &commit)
+ code := m.Run()
+ os.Exit(code)
+}
+
+func TestGetHealthz(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t,
+ goleak.IgnoreAnyFunction("github.com/patrickmn/go-cache.(*janitor).Run"),
+ goleak.IgnoreAnyFunction("github.com/jc21/go-sse.(*Server).dispatch"),
+ )
+
+ respRec := httptest.NewRecorder()
+ req, _ := http.NewRequest("GET", "/api/", nil)
+
+ r.ServeHTTP(respRec, req)
+ assert.Equal(t, http.StatusOK, respRec.Code)
+ assert.Contains(t, respRec.Body.String(), "healthy")
+}
+
+func TestNonExistent(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t,
+ goleak.IgnoreAnyFunction("github.com/patrickmn/go-cache.(*janitor).Run"),
+ goleak.IgnoreAnyFunction("github.com/jc21/go-sse.(*Server).dispatch"),
+ )
+
+ respRec := httptest.NewRecorder()
+ req, _ := http.NewRequest("GET", "/non-existent-endpoint.jpg", nil)
+
+ r.ServeHTTP(respRec, req)
+ assert.Equal(t, http.StatusNotFound, respRec.Code)
+ assert.Equal(t, respRec.Body.String(), `{"result":null,"error":{"code":404,"message":"Not found"}}`, "404 Message should match")
+}
diff --git a/backend/internal/api/schema/certificates.go b/backend/internal/api/schema/certificates.go
new file mode 100644
index 000000000..780b02777
--- /dev/null
+++ b/backend/internal/api/schema/certificates.go
@@ -0,0 +1,205 @@
+package schema
+
+import (
+ "fmt"
+
+ "npm/internal/entity/certificate"
+)
+
+// This validation is strictly for Custom certificates
+// and the combination of values that must be defined
+func createCertificateCustom() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "type",
+ "name",
+ "domain_names"
+ ],
+ "properties": {
+ "type": %s,
+ "name": %s,
+ "domain_names": %s,
+ "meta": {
+ "type": "object"
+ }
+ }
+ }`, strictString("custom"), stringMinMax(1, 100), domainNames())
+}
+
+// This validation is strictly for HTTP certificates
+// and the combination of values that must be defined
+func createCertificateHTTP() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "type",
+ "certificate_authority_id",
+ "name",
+ "domain_names"
+ ],
+ "properties": {
+ "type": %s,
+ "certificate_authority_id": %s,
+ "name": %s,
+ "domain_names": %s,
+ "meta": {
+ "type": "object"
+ },
+ "is_ecc": {
+ "type": "boolean"
+ }
+ }
+ }`, strictString("http"), intMinOne, stringMinMax(1, 100), domainNames())
+}
+
+// This validation is strictly for DNS certificates
+// and the combination of values that must be defined
+func createCertificateDNS() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "type",
+ "certificate_authority_id",
+ "dns_provider_id",
+ "name",
+ "domain_names"
+ ],
+ "properties": {
+ "type": %s,
+ "certificate_authority_id": %s,
+ "dns_provider_id": %s,
+ "name": %s,
+ "domain_names": %s,
+ "meta": {
+ "type": "object"
+ },
+ "is_ecc": {
+ "type": "boolean"
+ }
+ }
+ }`, strictString("dns"), intMinOne, intMinOne, stringMinMax(1, 100), domainNames())
+}
+
+// This validation is strictly for MKCERT certificates
+// and the combination of values that must be defined
+func createCertificateMkcert() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "type",
+ "name",
+ "domain_names"
+ ],
+ "properties": {
+ "type": %s,
+ "name": %s,
+ "domain_names": %s,
+ "meta": {
+ "type": "object"
+ }
+ }
+ }`, strictString("mkcert"), stringMinMax(1, 100), domainNames())
+}
+
+func updateCertificateHTTP() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "minProperties": 1,
+ "properties": {
+ "certificate_authority_id": %s,
+ "name": %s,
+ "domain_names": %s,
+ "meta": {
+ "type": "object"
+ }
+ }
+ }`, intMinOne, stringMinMax(1, 100), domainNames())
+}
+
+func updateCertificateDNS() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "minProperties": 1,
+ "properties": {
+ "certificate_authority_id": %s,
+ "dns_provider_id": %s,
+ "name": %s,
+ "domain_names": %s,
+ "meta": {
+ "type": "object"
+ }
+ }
+ }`, intMinOne, intMinOne, stringMinMax(1, 100), domainNames())
+}
+
+func updateCertificateCustom() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "minProperties": 1,
+ "properties": {
+ "name": %s,
+ "domain_names": %s,
+ "meta": {
+ "type": "object"
+ }
+ }
+ }`, stringMinMax(1, 100), domainNames())
+}
+
+func updateCertificateMkcert() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "minProperties": 1,
+ "properties": {
+ "name": %s,
+ "domain_names": %s,
+ "meta": {
+ "type": "object"
+ }
+ }
+ }`, stringMinMax(1, 100), domainNames())
+}
+
+// CreateCertificate is the schema for incoming data validation
+func CreateCertificate() string {
+ return fmt.Sprintf(`
+ {
+ "oneOf": [%s, %s, %s, %s]
+ }`, createCertificateHTTP(), createCertificateDNS(), createCertificateCustom(), createCertificateMkcert())
+}
+
+// UpdateCertificate is the schema for incoming data validation
+func UpdateCertificate(certificateType string) string {
+ switch certificateType {
+ case certificate.TypeHTTP:
+ return updateCertificateHTTP()
+ case certificate.TypeDNS:
+ return updateCertificateDNS()
+ case certificate.TypeCustom:
+ return updateCertificateCustom()
+ case certificate.TypeMkcert:
+ return updateCertificateMkcert()
+ default:
+ return fmt.Sprintf(`
+ {
+ "oneOf": [%s, %s, %s, %s]
+ }`, updateCertificateHTTP(), updateCertificateDNS(), updateCertificateCustom(), updateCertificateMkcert())
+ }
+}
diff --git a/backend/internal/api/schema/common.go b/backend/internal/api/schema/common.go
new file mode 100644
index 000000000..af02f8eab
--- /dev/null
+++ b/backend/internal/api/schema/common.go
@@ -0,0 +1,73 @@
+package schema
+
+import "fmt"
+
+func strictString(value string) string {
+ return fmt.Sprintf(`{
+ "type": "string",
+ "pattern": "^%s$"
+ }`, value)
+}
+
+const intMinOne = `
+{
+ "type": "integer",
+ "minimum": 1
+}
+`
+
+const boolean = `
+{
+ "type": "boolean"
+}
+`
+
+func stringMinMax(minLength, maxLength int) string {
+ return fmt.Sprintf(`{
+ "type": "string",
+ "minLength": %d,
+ "maxLength": %d
+ }`, minLength, maxLength)
+}
+
+func capabilties() string {
+ return `{
+ "type": "array",
+ "minItems": 1,
+ "items": {
+ "type": "string",
+ "minLength": 1
+ }
+ }`
+}
+
+func domainNames() string {
+ return fmt.Sprintf(`
+ {
+ "type": "array",
+ "minItems": 1,
+ "items": %s
+ }`, stringMinMax(4, 255))
+}
+
+const anyType = `
+{
+ "anyOf": [
+ {
+ "type": "array"
+ },
+ {
+ "type": "boolean"
+ },
+ {
+ "type": "object"
+ },
+ {
+ "type": "integer"
+ },
+ {
+ "type": "string"
+ }
+ ]
+}
+`
diff --git a/backend/internal/api/schema/create_access_list.go b/backend/internal/api/schema/create_access_list.go
new file mode 100644
index 000000000..47ded39df
--- /dev/null
+++ b/backend/internal/api/schema/create_access_list.go
@@ -0,0 +1,21 @@
+package schema
+
+import (
+ "fmt"
+)
+
+// CreateAccessList is the schema for incoming data validation
+func CreateAccessList() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "name"
+ ],
+ "properties": {
+ "name": %s
+ }
+ }
+ `, stringMinMax(2, 100))
+}
diff --git a/backend/internal/api/schema/create_certificate_authority.go b/backend/internal/api/schema/create_certificate_authority.go
new file mode 100644
index 000000000..113c2ce36
--- /dev/null
+++ b/backend/internal/api/schema/create_certificate_authority.go
@@ -0,0 +1,25 @@
+package schema
+
+import "fmt"
+
+// CreateCertificateAuthority is the schema for incoming data validation
+func CreateCertificateAuthority() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "name",
+ "acmesh_server",
+ "max_domains"
+ ],
+ "properties": {
+ "name": %s,
+ "acmesh_server": %s,
+ "max_domains": %s,
+ "ca_bundle": %s,
+ "is_wildcard_supported": %s
+ }
+ }
+ `, stringMinMax(1, 100), stringMinMax(2, 255), intMinOne, stringMinMax(2, 255), boolean)
+}
diff --git a/backend/internal/api/schema/create_dns_provider.go b/backend/internal/api/schema/create_dns_provider.go
new file mode 100644
index 000000000..d51f2dcc0
--- /dev/null
+++ b/backend/internal/api/schema/create_dns_provider.go
@@ -0,0 +1,59 @@
+package schema
+
+import (
+ "fmt"
+ "strings"
+
+ "npm/internal/dnsproviders"
+ "npm/internal/logger"
+ "npm/internal/util"
+
+ "github.com/rotisserie/eris"
+)
+
+// CreateDNSProvider is the schema for incoming data validation
+func CreateDNSProvider() string {
+ allProviders := dnsproviders.GetAll()
+ fmtStr := fmt.Sprintf(`{"oneOf": [%s]}`, strings.TrimRight(strings.Repeat("\n%s,", len(allProviders)), ","))
+
+ allSchemasWrapped := make([]string, 0)
+ for providerName, provider := range allProviders {
+ schema, err := provider.GetJSONSchema()
+ if err != nil {
+ logger.Error("ProviderSchemaError", eris.Wrapf(err, "Invalid Provider Schema for %s: %v", provider.Title, err))
+ } else {
+ allSchemasWrapped = append(allSchemasWrapped, createDNSProviderType(providerName, schema))
+ }
+ }
+
+ return fmt.Sprintf(fmtStr, util.ConvertStringSliceToInterface(allSchemasWrapped)...)
+}
+
+func createDNSProviderType(name, metaSchema string) string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "acmesh_name",
+ "name",
+ "meta"
+ ],
+ "properties": {
+ "acmesh_name": {
+ "type": "string",
+ "pattern": "^%s$"
+ },
+ "name": {
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 100
+ },
+ "dns_sleep": {
+ "type": "integer"
+ },
+ "meta": %s
+ }
+ }
+ `, name, metaSchema)
+}
diff --git a/backend/internal/api/schema/create_host.go b/backend/internal/api/schema/create_host.go
new file mode 100644
index 000000000..819d97643
--- /dev/null
+++ b/backend/internal/api/schema/create_host.go
@@ -0,0 +1,88 @@
+package schema
+
+import "fmt"
+
+// CreateHost is the schema for incoming data validation
+// This schema supports 3 possible types with different data combinations:
+// - proxy
+// - redirection
+// - dead
+func CreateHost() string {
+ return fmt.Sprintf(`
+ {
+ "oneOf": [
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "type",
+ "domain_names",
+ "nginx_template_id",
+ "proxy_scheme"
+ ],
+ "properties": {
+ "type": {
+ "type": "string",
+ "pattern": "^proxy$"
+ },
+ "nginx_template_id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "listen_interface": %s,
+ "domain_names": %s,
+ "upstream_id": {
+ "type": "integer"
+ },
+ "proxy_scheme": {
+ "type": "string",
+ "pattern": "^https?$"
+ },
+ "proxy_host": {
+ "type": "string"
+ },
+ "proxy_port": {
+ "type": "integer"
+ },
+ "certificate_id": {
+ "type": "integer"
+ },
+ "access_list_id": {
+ "type": "integer"
+ },
+ "ssl_forced": {
+ "type": "boolean"
+ },
+ "caching_enabled": {
+ "type": "boolean"
+ },
+ "block_exploits": {
+ "type": "boolean"
+ },
+ "allow_websocket_upgrade": {
+ "type": "boolean"
+ },
+ "http2_support": {
+ "type": "boolean"
+ },
+ "hsts_enabled": {
+ "type": "boolean"
+ },
+ "hsts_subdomains": {
+ "type": "boolean"
+ },
+ "paths": {
+ "type": "string"
+ },
+ "advanced_config": {
+ "type": "string"
+ },
+ "is_disabled": {
+ "type": "boolean"
+ }
+ }
+ }
+ ]
+ }
+ `, stringMinMax(0, 255), domainNames())
+}
diff --git a/backend/internal/api/schema/create_nginx_template.go b/backend/internal/api/schema/create_nginx_template.go
new file mode 100644
index 000000000..7fe3fc6f8
--- /dev/null
+++ b/backend/internal/api/schema/create_nginx_template.go
@@ -0,0 +1,30 @@
+package schema
+
+// CreateNginxTemplate is the schema for incoming data validation
+func CreateNginxTemplate() string {
+ return `
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "name",
+ "type",
+ "template"
+ ],
+ "properties": {
+ "name": {
+ "type": "string",
+ "minLength": 1
+ },
+ "type": {
+ "type": "string",
+ "pattern": "^proxy|redirect|dead|stream|upstream$"
+ },
+ "template": {
+ "type": "string",
+ "minLength": 20
+ }
+ }
+ }
+ `
+}
diff --git a/backend/internal/api/schema/create_setting.go b/backend/internal/api/schema/create_setting.go
new file mode 100644
index 000000000..dca3869c0
--- /dev/null
+++ b/backend/internal/api/schema/create_setting.go
@@ -0,0 +1,21 @@
+package schema
+
+import "fmt"
+
+// CreateSetting is the schema for incoming data validation
+func CreateSetting() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "name",
+ "value"
+ ],
+ "properties": {
+ "name": %s,
+ "value": %s
+ }
+ }
+ `, stringMinMax(2, 100), anyType)
+}
diff --git a/backend/internal/api/schema/create_stream.go b/backend/internal/api/schema/create_stream.go
new file mode 100644
index 000000000..792b8818e
--- /dev/null
+++ b/backend/internal/api/schema/create_stream.go
@@ -0,0 +1,27 @@
+package schema
+
+import "fmt"
+
+// CreateStream is the schema for incoming data validation
+func CreateStream() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "provider",
+ "name",
+ "domain_names"
+ ],
+ "properties": {
+ "provider": %s,
+ "name": %s,
+ "domain_names": %s,
+ "expires_on": %s,
+ "meta": {
+ "type": "object"
+ }
+ }
+ }
+ `, stringMinMax(2, 100), stringMinMax(1, 100), domainNames(), intMinOne)
+}
diff --git a/backend/internal/api/schema/create_upstream.go b/backend/internal/api/schema/create_upstream.go
new file mode 100644
index 000000000..3f2be4648
--- /dev/null
+++ b/backend/internal/api/schema/create_upstream.go
@@ -0,0 +1,73 @@
+package schema
+
+import "fmt"
+
+// CreateUpstream is the schema for incoming data validation
+func CreateUpstream() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "name",
+ "servers",
+ "nginx_template_id"
+ ],
+ "properties": {
+ "name": %s,
+ "nginx_template_id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "advanced_config": %s,
+ "ip_hash": {
+ "type": "boolean"
+ },
+ "ntlm": {
+ "type": "boolean"
+ },
+ "keepalive": {
+ "type": "integer"
+ },
+ "keepalive_requests": {
+ "type": "integer"
+ },
+ "keepalive_time": {
+ "type": "string"
+ },
+ "keepalive_timeout": {
+ "type": "string"
+ },
+ "servers": {
+ "type": "array",
+ "minItems": 1,
+ "items": {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "server"
+ ],
+ "properties": {
+ "server": %s,
+ "weight": {
+ "type": "integer"
+ },
+ "max_conns": {
+ "type": "integer"
+ },
+ "max_fails": {
+ "type": "integer"
+ },
+ "fail_timeout": {
+ "type": "integer"
+ },
+ "backup": {
+ "type": "boolean"
+ }
+ }
+ }
+ }
+ }
+ }
+`, stringMinMax(1, 100), stringMinMax(0, 1024), stringMinMax(2, 255))
+}
diff --git a/backend/internal/api/schema/create_user.go b/backend/internal/api/schema/create_user.go
new file mode 100644
index 000000000..3ad1346f7
--- /dev/null
+++ b/backend/internal/api/schema/create_user.go
@@ -0,0 +1,41 @@
+package schema
+
+import "fmt"
+
+// CreateUser is the schema for incoming data validation
+func CreateUser() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "name",
+ "email",
+ "is_disabled",
+ "capabilities"
+ ],
+ "properties": {
+ "name": %s,
+ "email": %s,
+ "is_disabled": {
+ "type": "boolean"
+ },
+ "auth": {
+ "type": "object",
+ "required": [
+ "type",
+ "secret"
+ ],
+ "properties": {
+ "type": {
+ "type": "string",
+ "pattern": "^local$"
+ },
+ "secret": %s
+ }
+ },
+ "capabilities": %s
+ }
+ }
+ `, stringMinMax(2, 50), stringMinMax(5, 150), stringMinMax(8, 255), capabilties())
+}
diff --git a/backend/internal/api/schema/get_token.go b/backend/internal/api/schema/get_token.go
new file mode 100644
index 000000000..142928924
--- /dev/null
+++ b/backend/internal/api/schema/get_token.go
@@ -0,0 +1,28 @@
+package schema
+
+import "fmt"
+
+// GetToken is the schema for incoming data validation
+// nolint: gosec
+func GetToken() string {
+ stdField := stringMinMax(1, 255)
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "type",
+ "identity",
+ "secret"
+ ],
+ "properties": {
+ "type": {
+ "type": "string",
+ "enum": ["local", "ldap"]
+ },
+ "identity": %s,
+ "secret": %s
+ }
+ }
+ `, stdField, stdField)
+}
diff --git a/backend/internal/api/schema/schema_test.go b/backend/internal/api/schema/schema_test.go
new file mode 100644
index 000000000..4b69b390c
--- /dev/null
+++ b/backend/internal/api/schema/schema_test.go
@@ -0,0 +1,133 @@
+package schema
+
+import (
+ "bytes"
+ "encoding/json"
+ "testing"
+
+ "npm/internal/entity/certificate"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestSchemas(t *testing.T) {
+ tests := []struct {
+ name string
+ schema string
+ }{
+ {
+ name: "CreateCertificate",
+ schema: CreateCertificate(),
+ },
+ {
+ name: "UpdateCertificate TypeHTTP",
+ schema: UpdateCertificate(certificate.TypeHTTP),
+ },
+ {
+ name: "UpdateCertificate TypeDNS",
+ schema: UpdateCertificate(certificate.TypeDNS),
+ },
+ {
+ name: "UpdateCertificate TypeCustom",
+ schema: UpdateCertificate(certificate.TypeCustom),
+ },
+ {
+ name: "UpdateCertificate TypeMkcert",
+ schema: UpdateCertificate(certificate.TypeMkcert),
+ },
+ {
+ name: "UpdateCertificate default",
+ schema: UpdateCertificate(""),
+ },
+ {
+ name: "CreateAccessList",
+ schema: CreateAccessList(),
+ },
+ {
+ name: "CreateCertificateAuthority",
+ schema: CreateCertificateAuthority(),
+ },
+ {
+ name: "CreateDNSProvider",
+ schema: CreateDNSProvider(),
+ },
+ {
+ name: "CreateHost",
+ schema: CreateHost(),
+ },
+ {
+ name: "CreateNginxTemplate",
+ schema: CreateNginxTemplate(),
+ },
+ {
+ name: "CreateSetting",
+ schema: CreateSetting(),
+ },
+ {
+ name: "CreateStream",
+ schema: CreateStream(),
+ },
+ {
+ name: "CreateUpstream",
+ schema: CreateUpstream(),
+ },
+ {
+ name: "CreateUser",
+ schema: CreateUser(),
+ },
+ {
+ name: "GetToken",
+ schema: GetToken(),
+ },
+ {
+ name: "SetAuth",
+ schema: SetAuth(),
+ },
+ {
+ name: "UpdateAccessList",
+ schema: UpdateAccessList(),
+ },
+ {
+ name: "UpdateCertificateAuthority",
+ schema: UpdateCertificateAuthority(),
+ },
+ {
+ name: "UpdateDNSProvider",
+ schema: UpdateDNSProvider(),
+ },
+ {
+ name: "UpdateHost",
+ schema: UpdateHost(),
+ },
+ {
+ name: "UpdateNginxTemplate",
+ schema: UpdateNginxTemplate(),
+ },
+ {
+ name: "UpdateSetting",
+ schema: UpdateSetting(),
+ },
+ {
+ name: "UpdateStream",
+ schema: UpdateStream(),
+ },
+ {
+ name: "UpdateUpstream",
+ schema: UpdateUpstream(),
+ },
+ {
+ name: "UpdateUser",
+ schema: UpdateUser(),
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ byt := []byte(tt.schema)
+ var prettyJSON bytes.Buffer
+ err := json.Indent(&prettyJSON, byt, "", " ")
+ assert.NoError(t, err)
+ assert.Greater(t, len(prettyJSON.String()), 0)
+ })
+ }
+}
diff --git a/backend/internal/api/schema/set_auth.go b/backend/internal/api/schema/set_auth.go
new file mode 100644
index 000000000..2ffdb605e
--- /dev/null
+++ b/backend/internal/api/schema/set_auth.go
@@ -0,0 +1,26 @@
+package schema
+
+import "fmt"
+
+// SetAuth is the schema for incoming data validation
+// Only local auth is supported for setting a password
+func SetAuth() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "type",
+ "secret"
+ ],
+ "properties": {
+ "type": {
+ "type": "string",
+ "pattern": "^local$"
+ },
+ "secret": %s,
+ "current_secret": %s
+ }
+ }
+ `, stringMinMax(8, 225), stringMinMax(8, 225))
+}
diff --git a/backend/internal/api/schema/update_access_list.go b/backend/internal/api/schema/update_access_list.go
new file mode 100644
index 000000000..786ac26bf
--- /dev/null
+++ b/backend/internal/api/schema/update_access_list.go
@@ -0,0 +1,17 @@
+package schema
+
+import "fmt"
+
+// UpdateAccessList is the schema for incoming data validation
+func UpdateAccessList() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "minProperties": 1,
+ "properties": {
+ "name": %s
+ }
+ }
+ `, stringMinMax(2, 100))
+}
diff --git a/backend/internal/api/schema/update_certificate_authority.go b/backend/internal/api/schema/update_certificate_authority.go
new file mode 100644
index 000000000..e53db5c26
--- /dev/null
+++ b/backend/internal/api/schema/update_certificate_authority.go
@@ -0,0 +1,21 @@
+package schema
+
+import "fmt"
+
+// UpdateCertificateAuthority is the schema for incoming data validation
+func UpdateCertificateAuthority() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "minProperties": 1,
+ "properties": {
+ "name": %s,
+ "acmesh_server": %s,
+ "max_domains": %s,
+ "ca_bundle": %s,
+ "is_wildcard_supported": %s
+ }
+ }
+ `, stringMinMax(1, 100), stringMinMax(2, 255), intMinOne, stringMinMax(2, 255), boolean)
+}
diff --git a/backend/internal/api/schema/update_dns_provider.go b/backend/internal/api/schema/update_dns_provider.go
new file mode 100644
index 000000000..b852c88ac
--- /dev/null
+++ b/backend/internal/api/schema/update_dns_provider.go
@@ -0,0 +1,20 @@
+package schema
+
+import "fmt"
+
+// UpdateDNSProvider is the schema for incoming data validation
+func UpdateDNSProvider() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "minProperties": 1,
+ "properties": {
+ "name": %s,
+ "meta": {
+ "type": "object"
+ }
+ }
+ }
+ `, stringMinMax(1, 100))
+}
diff --git a/backend/internal/api/schema/update_host.go b/backend/internal/api/schema/update_host.go
new file mode 100644
index 000000000..21643bbea
--- /dev/null
+++ b/backend/internal/api/schema/update_host.go
@@ -0,0 +1,27 @@
+package schema
+
+import "fmt"
+
+// UpdateHost is the schema for incoming data validation
+func UpdateHost() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "minProperties": 1,
+ "properties": {
+ "nginx_template_id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "provider": %s,
+ "name": %s,
+ "domain_names": %s,
+ "expires_on": %s,
+ "meta": {
+ "type": "object"
+ }
+ }
+ }
+ `, stringMinMax(2, 100), stringMinMax(1, 100), domainNames(), intMinOne)
+}
diff --git a/backend/internal/api/schema/update_nginx_template.go b/backend/internal/api/schema/update_nginx_template.go
new file mode 100644
index 000000000..8f8e14590
--- /dev/null
+++ b/backend/internal/api/schema/update_nginx_template.go
@@ -0,0 +1,22 @@
+package schema
+
+// UpdateNginxTemplate is the schema for incoming data validation
+func UpdateNginxTemplate() string {
+ return `
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "minProperties": 1,
+ "properties": {
+ "name": {
+ "type": "string",
+ "minLength": 1
+ },
+ "template": {
+ "type": "string",
+ "minLength": 20
+ }
+ }
+ }
+ `
+}
diff --git a/backend/internal/api/schema/update_setting.go b/backend/internal/api/schema/update_setting.go
new file mode 100644
index 000000000..e9af221bb
--- /dev/null
+++ b/backend/internal/api/schema/update_setting.go
@@ -0,0 +1,17 @@
+package schema
+
+import "fmt"
+
+// UpdateSetting is the schema for incoming data validation
+func UpdateSetting() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "minProperties": 1,
+ "properties": {
+ "value": %s
+ }
+ }
+ `, anyType)
+}
diff --git a/backend/internal/api/schema/update_stream.go b/backend/internal/api/schema/update_stream.go
new file mode 100644
index 000000000..51d85ff61
--- /dev/null
+++ b/backend/internal/api/schema/update_stream.go
@@ -0,0 +1,23 @@
+package schema
+
+import "fmt"
+
+// UpdateStream is the schema for incoming data validation
+func UpdateStream() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "minProperties": 1,
+ "properties": {
+ "provider": %s,
+ "name": %s,
+ "domain_names": %s,
+ "expires_on": %s,
+ "meta": {
+ "type": "object"
+ }
+ }
+ }
+ `, stringMinMax(2, 100), stringMinMax(1, 100), domainNames(), intMinOne)
+}
diff --git a/backend/internal/api/schema/update_upstream.go b/backend/internal/api/schema/update_upstream.go
new file mode 100644
index 000000000..c259f4208
--- /dev/null
+++ b/backend/internal/api/schema/update_upstream.go
@@ -0,0 +1,69 @@
+package schema
+
+import "fmt"
+
+// UpdateUpstream is the schema for incoming data validation
+func UpdateUpstream() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "minProperties": 1,
+ "properties": {
+ "name": %s,
+ "nginx_template_id": {
+ "type": "integer",
+ "minimum": 1
+ },
+ "advanced_config": %s,
+ "ip_hash": {
+ "type": "boolean"
+ },
+ "ntlm": {
+ "type": "boolean"
+ },
+ "keepalive": {
+ "type": "integer"
+ },
+ "keepalive_requests": {
+ "type": "integer"
+ },
+ "keepalive_time": {
+ "type": "string"
+ },
+ "keepalive_timeout": {
+ "type": "string"
+ },
+ "servers": {
+ "type": "array",
+ "minItems": 1,
+ "items": {
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "server"
+ ],
+ "properties": {
+ "server": %s,
+ "weight": {
+ "type": "integer"
+ },
+ "max_conns": {
+ "type": "integer"
+ },
+ "max_fails": {
+ "type": "integer"
+ },
+ "fail_timeout": {
+ "type": "integer"
+ },
+ "backup": {
+ "type": "boolean"
+ }
+ }
+ }
+ }
+ }
+ }
+`, stringMinMax(1, 100), stringMinMax(0, 1024), stringMinMax(2, 255))
+}
diff --git a/backend/internal/api/schema/update_user.go b/backend/internal/api/schema/update_user.go
new file mode 100644
index 000000000..df4815bf4
--- /dev/null
+++ b/backend/internal/api/schema/update_user.go
@@ -0,0 +1,22 @@
+package schema
+
+import "fmt"
+
+// UpdateUser is the schema for incoming data validation
+func UpdateUser() string {
+ return fmt.Sprintf(`
+ {
+ "type": "object",
+ "additionalProperties": false,
+ "minProperties": 1,
+ "properties": {
+ "name": %s,
+ "email": %s,
+ "is_disabled": {
+ "type": "boolean"
+ },
+ "capabilities": %s
+ }
+ }
+ `, stringMinMax(2, 50), stringMinMax(5, 150), capabilties())
+}
diff --git a/backend/internal/api/server.go b/backend/internal/api/server.go
new file mode 100644
index 000000000..f6593df4e
--- /dev/null
+++ b/backend/internal/api/server.go
@@ -0,0 +1,30 @@
+package api
+
+import (
+ "fmt"
+ "net/http"
+ "time"
+
+ "npm/internal/logger"
+ "npm/internal/serverevents"
+)
+
+const httpPort = 3000
+
+// StartServer creates a http server
+func StartServer() {
+ logger.Info("Server starting on port %v", httpPort)
+
+ server := &http.Server{
+ Addr: fmt.Sprintf(":%v", httpPort),
+ Handler: NewRouter(),
+ ReadHeaderTimeout: 3 * time.Second,
+ }
+
+ defer serverevents.Shutdown()
+
+ err := server.ListenAndServe()
+ if err != nil {
+ logger.Error("HttpListenError", err)
+ }
+}
diff --git a/backend/internal/audit-log.js b/backend/internal/audit-log.js
deleted file mode 100644
index 422b4f467..000000000
--- a/backend/internal/audit-log.js
+++ /dev/null
@@ -1,78 +0,0 @@
-const error = require('../lib/error');
-const auditLogModel = require('../models/audit-log');
-
-const internalAuditLog = {
-
- /**
- * All logs
- *
- * @param {Access} access
- * @param {Array} [expand]
- * @param {String} [search_query]
- * @returns {Promise}
- */
- getAll: (access, expand, search_query) => {
- return access.can('auditlog:list')
- .then(() => {
- let query = auditLogModel
- .query()
- .orderBy('created_on', 'DESC')
- .orderBy('id', 'DESC')
- .limit(100)
- .allowEager('[user]');
-
- // Query is used for searching
- if (typeof search_query === 'string') {
- query.where(function () {
- this.where('meta', 'like', '%' + search_query + '%');
- });
- }
-
- if (typeof expand !== 'undefined' && expand !== null) {
- query.eager('[' + expand.join(', ') + ']');
- }
-
- return query;
- });
- },
-
- /**
- * This method should not be publicly used, it doesn't check certain things. It will be assumed
- * that permission to add to audit log is already considered, however the access token is used for
- * default user id determination.
- *
- * @param {Access} access
- * @param {Object} data
- * @param {String} data.action
- * @param {Number} [data.user_id]
- * @param {Number} [data.object_id]
- * @param {Number} [data.object_type]
- * @param {Object} [data.meta]
- * @returns {Promise}
- */
- add: (access, data) => {
- return new Promise((resolve, reject) => {
- // Default the user id
- if (typeof data.user_id === 'undefined' || !data.user_id) {
- data.user_id = access.token.getUserId(1);
- }
-
- if (typeof data.action === 'undefined' || !data.action) {
- reject(new error.InternalValidationError('Audit log entry must contain an Action'));
- } else {
- // Make sure at least 1 of the IDs are set and action
- resolve(auditLogModel
- .query()
- .insert({
- user_id: data.user_id,
- action: data.action,
- object_type: data.object_type || '',
- object_id: data.object_id || 0,
- meta: data.meta || {}
- }));
- }
- });
- }
-};
-
-module.exports = internalAuditLog;
diff --git a/backend/internal/certificate.js b/backend/internal/certificate.js
deleted file mode 100644
index 7c8fddeea..000000000
--- a/backend/internal/certificate.js
+++ /dev/null
@@ -1,1223 +0,0 @@
-const _ = require('lodash');
-const fs = require('fs');
-const https = require('https');
-const tempWrite = require('temp-write');
-const moment = require('moment');
-const logger = require('../logger').ssl;
-const error = require('../lib/error');
-const utils = require('../lib/utils');
-const certificateModel = require('../models/certificate');
-const dnsPlugins = require('../global/certbot-dns-plugins');
-const internalAuditLog = require('./audit-log');
-const internalNginx = require('./nginx');
-const internalHost = require('./host');
-const letsencryptStaging = process.env.NODE_ENV !== 'production';
-const letsencryptConfig = '/etc/letsencrypt.ini';
-const certbotCommand = 'certbot';
-const archiver = require('archiver');
-const path = require('path');
-const { isArray } = require('lodash');
-
-function omissions() {
- return ['is_deleted'];
-}
-
-const internalCertificate = {
-
- allowedSslFiles: ['certificate', 'certificate_key', 'intermediate_certificate'],
- intervalTimeout: 1000 * 60 * 60, // 1 hour
- interval: null,
- intervalProcessing: false,
-
- initTimer: () => {
- logger.info('Let\'s Encrypt Renewal Timer initialized');
- internalCertificate.interval = setInterval(internalCertificate.processExpiringHosts, internalCertificate.intervalTimeout);
- // And do this now as well
- internalCertificate.processExpiringHosts();
- },
-
- /**
- * Triggered by a timer, this will check for expiring hosts and renew their ssl certs if required
- */
- processExpiringHosts: () => {
- if (!internalCertificate.intervalProcessing) {
- internalCertificate.intervalProcessing = true;
- logger.info('Renewing SSL certs close to expiry...');
-
- const cmd = certbotCommand + ' renew --non-interactive --quiet ' +
- '--config "' + letsencryptConfig + '" ' +
- '--preferred-challenges "dns,http" ' +
- '--disable-hook-validation ' +
- (letsencryptStaging ? '--staging' : '');
-
- return utils.exec(cmd)
- .then((result) => {
- if (result) {
- logger.info('Renew Result: ' + result);
- }
-
- return internalNginx.reload()
- .then(() => {
- logger.info('Renew Complete');
- return result;
- });
- })
- .then(() => {
- // Now go and fetch all the letsencrypt certs from the db and query the files and update expiry times
- return certificateModel
- .query()
- .where('is_deleted', 0)
- .andWhere('provider', 'letsencrypt')
- .then((certificates) => {
- if (certificates && certificates.length) {
- let promises = [];
-
- certificates.map(function (certificate) {
- promises.push(
- internalCertificate.getCertificateInfoFromFile('/etc/letsencrypt/live/npm-' + certificate.id + '/fullchain.pem')
- .then((cert_info) => {
- return certificateModel
- .query()
- .where('id', certificate.id)
- .andWhere('provider', 'letsencrypt')
- .patch({
- expires_on: moment(cert_info.dates.to, 'X').format('YYYY-MM-DD HH:mm:ss')
- });
- })
- .catch((err) => {
- // Don't want to stop the train here, just log the error
- logger.error(err.message);
- })
- );
- });
-
- return Promise.all(promises);
- }
- });
- })
- .then(() => {
- internalCertificate.intervalProcessing = false;
- })
- .catch((err) => {
- logger.error(err);
- internalCertificate.intervalProcessing = false;
- });
- }
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @returns {Promise}
- */
- create: (access, data) => {
- return access.can('certificates:create', data)
- .then(() => {
- data.owner_user_id = access.token.getUserId(1);
-
- if (data.provider === 'letsencrypt') {
- data.nice_name = data.domain_names.join(', ');
- }
-
- return certificateModel
- .query()
- .omit(omissions())
- .insertAndFetch(data);
- })
- .then((certificate) => {
- if (certificate.provider === 'letsencrypt') {
- // Request a new Cert from LE. Let the fun begin.
-
- // 1. Find out any hosts that are using any of the hostnames in this cert
- // 2. Disable them in nginx temporarily
- // 3. Generate the LE config
- // 4. Request cert
- // 5. Remove LE config
- // 6. Re-instate previously disabled hosts
-
- // 1. Find out any hosts that are using any of the hostnames in this cert
- return internalHost.getHostsWithDomains(certificate.domain_names)
- .then((in_use_result) => {
- // 2. Disable them in nginx temporarily
- return internalCertificate.disableInUseHosts(in_use_result)
- .then(() => {
- return in_use_result;
- });
- })
- .then((in_use_result) => {
- // With DNS challenge no config is needed, so skip 3 and 5.
- if (certificate.meta.dns_challenge) {
- return internalNginx.reload().then(() => {
- // 4. Request cert
- return internalCertificate.requestLetsEncryptSslWithDnsChallenge(certificate);
- })
- .then(internalNginx.reload)
- .then(() => {
- // 6. Re-instate previously disabled hosts
- return internalCertificate.enableInUseHosts(in_use_result);
- })
- .then(() => {
- return certificate;
- })
- .catch((err) => {
- // In the event of failure, revert things and throw err back
- return internalCertificate.enableInUseHosts(in_use_result)
- .then(internalNginx.reload)
- .then(() => {
- throw err;
- });
- });
- } else {
- // 3. Generate the LE config
- return internalNginx.generateLetsEncryptRequestConfig(certificate)
- .then(internalNginx.reload)
- .then(async() => await new Promise((r) => setTimeout(r, 5000)))
- .then(() => {
- // 4. Request cert
- return internalCertificate.requestLetsEncryptSsl(certificate);
- })
- .then(() => {
- // 5. Remove LE config
- return internalNginx.deleteLetsEncryptRequestConfig(certificate);
- })
- .then(internalNginx.reload)
- .then(() => {
- // 6. Re-instate previously disabled hosts
- return internalCertificate.enableInUseHosts(in_use_result);
- })
- .then(() => {
- return certificate;
- })
- .catch((err) => {
- // In the event of failure, revert things and throw err back
- return internalNginx.deleteLetsEncryptRequestConfig(certificate)
- .then(() => {
- return internalCertificate.enableInUseHosts(in_use_result);
- })
- .then(internalNginx.reload)
- .then(() => {
- throw err;
- });
- });
- }
- })
- .then(() => {
- // At this point, the letsencrypt cert should exist on disk.
- // Lets get the expiry date from the file and update the row silently
- return internalCertificate.getCertificateInfoFromFile('/etc/letsencrypt/live/npm-' + certificate.id + '/fullchain.pem')
- .then((cert_info) => {
- return certificateModel
- .query()
- .patchAndFetchById(certificate.id, {
- expires_on: moment(cert_info.dates.to, 'X').format('YYYY-MM-DD HH:mm:ss')
- })
- .then((saved_row) => {
- // Add cert data for audit log
- saved_row.meta = _.assign({}, saved_row.meta, {
- letsencrypt_certificate: cert_info
- });
-
- return saved_row;
- });
- });
- }).catch(async (error) => {
- // Delete the certificate from the database if it was not created successfully
- await certificateModel
- .query()
- .deleteById(certificate.id);
-
- throw error;
- });
- } else {
- return certificate;
- }
- }).then((certificate) => {
-
- data.meta = _.assign({}, data.meta || {}, certificate.meta);
-
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'created',
- object_type: 'certificate',
- object_id: certificate.id,
- meta: data
- })
- .then(() => {
- return certificate;
- });
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {String} [data.email]
- * @param {String} [data.name]
- * @return {Promise}
- */
- update: (access, data) => {
- return access.can('certificates:update', data.id)
- .then((/*access_data*/) => {
- return internalCertificate.get(access, {id: data.id});
- })
- .then((row) => {
- if (row.id !== data.id) {
- // Sanity check that something crazy hasn't happened
- throw new error.InternalValidationError('Certificate could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
- }
-
- return certificateModel
- .query()
- .omit(omissions())
- .patchAndFetchById(row.id, data)
- .then((saved_row) => {
- saved_row.meta = internalCertificate.cleanMeta(saved_row.meta);
- data.meta = internalCertificate.cleanMeta(data.meta);
-
- // Add row.nice_name for custom certs
- if (saved_row.provider === 'other') {
- data.nice_name = saved_row.nice_name;
- }
-
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'updated',
- object_type: 'certificate',
- object_id: row.id,
- meta: _.omit(data, ['expires_on']) // this prevents json circular reference because expires_on might be raw
- })
- .then(() => {
- return _.omit(saved_row, omissions());
- });
- });
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {Array} [data.expand]
- * @param {Array} [data.omit]
- * @return {Promise}
- */
- get: (access, data) => {
- if (typeof data === 'undefined') {
- data = {};
- }
-
- return access.can('certificates:get', data.id)
- .then((access_data) => {
- let query = certificateModel
- .query()
- .where('is_deleted', 0)
- .andWhere('id', data.id)
- .allowEager('[owner]')
- .first();
-
- if (access_data.permission_visibility !== 'all') {
- query.andWhere('owner_user_id', access.token.getUserId(1));
- }
-
- // Custom omissions
- if (typeof data.omit !== 'undefined' && data.omit !== null) {
- query.omit(data.omit);
- }
-
- if (typeof data.expand !== 'undefined' && data.expand !== null) {
- query.eager('[' + data.expand.join(', ') + ']');
- }
-
- return query;
- })
- .then((row) => {
- if (row) {
- return _.omit(row, omissions());
- } else {
- throw new error.ItemNotFoundError(data.id);
- }
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @returns {Promise}
- */
- download: (access, data) => {
- return new Promise((resolve, reject) => {
- access.can('certificates:get', data)
- .then(() => {
- return internalCertificate.get(access, data);
- })
- .then((certificate) => {
- if (certificate.provider === 'letsencrypt') {
- const zipDirectory = '/etc/letsencrypt/live/npm-' + data.id;
-
- if (!fs.existsSync(zipDirectory)) {
- throw new error.ItemNotFoundError('Certificate ' + certificate.nice_name + ' does not exists');
- }
-
- let certFiles = fs.readdirSync(zipDirectory)
- .filter((fn) => fn.endsWith('.pem'))
- .map((fn) => fs.realpathSync(path.join(zipDirectory, fn)));
- const downloadName = 'npm-' + data.id + '-' + `${Date.now()}.zip`;
- const opName = '/tmp/' + downloadName;
- internalCertificate.zipFiles(certFiles, opName)
- .then(() => {
- logger.debug('zip completed : ', opName);
- const resp = {
- fileName: opName
- };
- resolve(resp);
- }).catch((err) => reject(err));
- } else {
- throw new error.ValidationError('Only Let\'sEncrypt certificates can be downloaded');
- }
- }).catch((err) => reject(err));
- });
- },
-
- /**
- * @param {String} source
- * @param {String} out
- * @returns {Promise}
- */
- zipFiles(source, out) {
- const archive = archiver('zip', { zlib: { level: 9 } });
- const stream = fs.createWriteStream(out);
-
- return new Promise((resolve, reject) => {
- source
- .map((fl) => {
- let fileName = path.basename(fl);
- logger.debug(fl, 'added to certificate zip');
- archive.file(fl, { name: fileName });
- });
- archive
- .on('error', (err) => reject(err))
- .pipe(stream);
-
- stream.on('close', () => resolve());
- archive.finalize();
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {String} [data.reason]
- * @returns {Promise}
- */
- delete: (access, data) => {
- return access.can('certificates:delete', data.id)
- .then(() => {
- return internalCertificate.get(access, {id: data.id});
- })
- .then((row) => {
- if (!row) {
- throw new error.ItemNotFoundError(data.id);
- }
-
- return certificateModel
- .query()
- .where('id', row.id)
- .patch({
- is_deleted: 1
- })
- .then(() => {
- // Add to audit log
- row.meta = internalCertificate.cleanMeta(row.meta);
-
- return internalAuditLog.add(access, {
- action: 'deleted',
- object_type: 'certificate',
- object_id: row.id,
- meta: _.omit(row, omissions())
- });
- })
- .then(() => {
- if (row.provider === 'letsencrypt') {
- // Revoke the cert
- return internalCertificate.revokeLetsEncryptSsl(row);
- }
- });
- })
- .then(() => {
- return true;
- });
- },
-
- /**
- * All Certs
- *
- * @param {Access} access
- * @param {Array} [expand]
- * @param {String} [search_query]
- * @returns {Promise}
- */
- getAll: (access, expand, search_query) => {
- return access.can('certificates:list')
- .then((access_data) => {
- let query = certificateModel
- .query()
- .where('is_deleted', 0)
- .groupBy('id')
- .omit(['is_deleted'])
- .allowEager('[owner]')
- .orderBy('nice_name', 'ASC');
-
- if (access_data.permission_visibility !== 'all') {
- query.andWhere('owner_user_id', access.token.getUserId(1));
- }
-
- // Query is used for searching
- if (typeof search_query === 'string') {
- query.where(function () {
- this.where('nice_name', 'like', '%' + search_query + '%');
- });
- }
-
- if (typeof expand !== 'undefined' && expand !== null) {
- query.eager('[' + expand.join(', ') + ']');
- }
-
- return query;
- });
- },
-
- /**
- * Report use
- *
- * @param {Number} user_id
- * @param {String} visibility
- * @returns {Promise}
- */
- getCount: (user_id, visibility) => {
- let query = certificateModel
- .query()
- .count('id as count')
- .where('is_deleted', 0);
-
- if (visibility !== 'all') {
- query.andWhere('owner_user_id', user_id);
- }
-
- return query.first()
- .then((row) => {
- return parseInt(row.count, 10);
- });
- },
-
- /**
- * @param {Object} certificate
- * @returns {Promise}
- */
- writeCustomCert: (certificate) => {
- logger.info('Writing Custom Certificate:', certificate);
-
- const dir = '/data/custom_ssl/npm-' + certificate.id;
-
- return new Promise((resolve, reject) => {
- if (certificate.provider === 'letsencrypt') {
- reject(new Error('Refusing to write letsencrypt certs here'));
- return;
- }
-
- let certData = certificate.meta.certificate;
- if (typeof certificate.meta.intermediate_certificate !== 'undefined') {
- certData = certData + '\n' + certificate.meta.intermediate_certificate;
- }
-
- try {
- if (!fs.existsSync(dir)) {
- fs.mkdirSync(dir);
- }
- } catch (err) {
- reject(err);
- return;
- }
-
- fs.writeFile(dir + '/fullchain.pem', certData, function (err) {
- if (err) {
- reject(err);
- } else {
- resolve();
- }
- });
- })
- .then(() => {
- return new Promise((resolve, reject) => {
- fs.writeFile(dir + '/privkey.pem', certificate.meta.certificate_key, function (err) {
- if (err) {
- reject(err);
- } else {
- resolve();
- }
- });
- });
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Array} data.domain_names
- * @param {String} data.meta.letsencrypt_email
- * @param {Boolean} data.meta.letsencrypt_agree
- * @returns {Promise}
- */
- createQuickCertificate: (access, data) => {
- return internalCertificate.create(access, {
- provider: 'letsencrypt',
- domain_names: data.domain_names,
- meta: data.meta
- });
- },
-
- /**
- * Validates that the certs provided are good.
- * No access required here, nothing is changed or stored.
- *
- * @param {Object} data
- * @param {Object} data.files
- * @returns {Promise}
- */
- validate: (data) => {
- return new Promise((resolve) => {
- // Put file contents into an object
- let files = {};
- _.map(data.files, (file, name) => {
- if (internalCertificate.allowedSslFiles.indexOf(name) !== -1) {
- files[name] = file.data.toString();
- }
- });
-
- resolve(files);
- })
- .then((files) => {
- // For each file, create a temp file and write the contents to it
- // Then test it depending on the file type
- let promises = [];
- _.map(files, (content, type) => {
- promises.push(new Promise((resolve) => {
- if (type === 'certificate_key') {
- resolve(internalCertificate.checkPrivateKey(content));
- } else {
- // this should handle `certificate` and intermediate certificate
- resolve(internalCertificate.getCertificateInfo(content, true));
- }
- }).then((res) => {
- return {[type]: res};
- }));
- });
-
- return Promise.all(promises)
- .then((files) => {
- let data = {};
-
- _.each(files, (file) => {
- data = _.assign({}, data, file);
- });
-
- return data;
- });
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {Object} data.files
- * @returns {Promise}
- */
- upload: (access, data) => {
- return internalCertificate.get(access, {id: data.id})
- .then((row) => {
- if (row.provider !== 'other') {
- throw new error.ValidationError('Cannot upload certificates for this type of provider');
- }
-
- return internalCertificate.validate(data)
- .then((validations) => {
- if (typeof validations.certificate === 'undefined') {
- throw new error.ValidationError('Certificate file was not provided');
- }
-
- _.map(data.files, (file, name) => {
- if (internalCertificate.allowedSslFiles.indexOf(name) !== -1) {
- row.meta[name] = file.data.toString();
- }
- });
-
- // TODO: This uses a mysql only raw function that won't translate to postgres
- return internalCertificate.update(access, {
- id: data.id,
- expires_on: moment(validations.certificate.dates.to, 'X').format('YYYY-MM-DD HH:mm:ss'),
- domain_names: [validations.certificate.cn],
- meta: _.clone(row.meta) // Prevent the update method from changing this value that we'll use later
- })
- .then((certificate) => {
- console.log('ROWMETA:', row.meta);
- certificate.meta = row.meta;
- return internalCertificate.writeCustomCert(certificate);
- });
- })
- .then(() => {
- return _.pick(row.meta, internalCertificate.allowedSslFiles);
- });
- });
- },
-
- /**
- * Uses the openssl command to validate the private key.
- * It will save the file to disk first, then run commands on it, then delete the file.
- *
- * @param {String} private_key This is the entire key contents as a string
- */
- checkPrivateKey: (private_key) => {
- return tempWrite(private_key, '/tmp')
- .then((filepath) => {
- return new Promise((resolve, reject) => {
- const failTimeout = setTimeout(() => {
- reject(new error.ValidationError('Result Validation Error: Validation timed out. This could be due to the key being passphrase-protected.'));
- }, 10000);
- utils
- .exec('openssl pkey -in ' + filepath + ' -check -noout 2>&1 ')
- .then((result) => {
- clearTimeout(failTimeout);
- if (!result.toLowerCase().includes('key is valid')) {
- reject(new error.ValidationError('Result Validation Error: ' + result));
- }
- fs.unlinkSync(filepath);
- resolve(true);
- })
- .catch((err) => {
- clearTimeout(failTimeout);
- fs.unlinkSync(filepath);
- reject(new error.ValidationError('Certificate Key is not valid (' + err.message + ')', err));
- });
- });
- });
- },
-
- /**
- * Uses the openssl command to both validate and get info out of the certificate.
- * It will save the file to disk first, then run commands on it, then delete the file.
- *
- * @param {String} certificate This is the entire cert contents as a string
- * @param {Boolean} [throw_expired] Throw when the certificate is out of date
- */
- getCertificateInfo: (certificate, throw_expired) => {
- return tempWrite(certificate, '/tmp')
- .then((filepath) => {
- return internalCertificate.getCertificateInfoFromFile(filepath, throw_expired)
- .then((certData) => {
- fs.unlinkSync(filepath);
- return certData;
- }).catch((err) => {
- fs.unlinkSync(filepath);
- throw err;
- });
- });
- },
-
- /**
- * Uses the openssl command to both validate and get info out of the certificate.
- * It will save the file to disk first, then run commands on it, then delete the file.
- *
- * @param {String} certificate_file The file location on disk
- * @param {Boolean} [throw_expired] Throw when the certificate is out of date
- */
- getCertificateInfoFromFile: (certificate_file, throw_expired) => {
- let certData = {};
-
- return utils.exec('openssl x509 -in ' + certificate_file + ' -subject -noout')
- .then((result) => {
- // subject=CN = something.example.com
- const regex = /(?:subject=)?[^=]+=\s+(\S+)/gim;
- const match = regex.exec(result);
-
- if (typeof match[1] === 'undefined') {
- throw new error.ValidationError('Could not determine subject from certificate: ' + result);
- }
-
- certData['cn'] = match[1];
- })
- .then(() => {
- return utils.exec('openssl x509 -in ' + certificate_file + ' -issuer -noout');
- })
- .then((result) => {
- // issuer=C = US, O = Let's Encrypt, CN = Let's Encrypt Authority X3
- const regex = /^(?:issuer=)?(.*)$/gim;
- const match = regex.exec(result);
-
- if (typeof match[1] === 'undefined') {
- throw new error.ValidationError('Could not determine issuer from certificate: ' + result);
- }
-
- certData['issuer'] = match[1];
- })
- .then(() => {
- return utils.exec('openssl x509 -in ' + certificate_file + ' -dates -noout');
- })
- .then((result) => {
- // notBefore=Jul 14 04:04:29 2018 GMT
- // notAfter=Oct 12 04:04:29 2018 GMT
- let validFrom = null;
- let validTo = null;
-
- const lines = result.split('\n');
- lines.map(function (str) {
- const regex = /^(\S+)=(.*)$/gim;
- const match = regex.exec(str.trim());
-
- if (match && typeof match[2] !== 'undefined') {
- const date = parseInt(moment(match[2], 'MMM DD HH:mm:ss YYYY z').format('X'), 10);
-
- if (match[1].toLowerCase() === 'notbefore') {
- validFrom = date;
- } else if (match[1].toLowerCase() === 'notafter') {
- validTo = date;
- }
- }
- });
-
- if (!validFrom || !validTo) {
- throw new error.ValidationError('Could not determine dates from certificate: ' + result);
- }
-
- if (throw_expired && validTo < parseInt(moment().format('X'), 10)) {
- throw new error.ValidationError('Certificate has expired');
- }
-
- certData['dates'] = {
- from: validFrom,
- to: validTo
- };
-
- return certData;
- }).catch((err) => {
- throw new error.ValidationError('Certificate is not valid (' + err.message + ')', err);
- });
- },
-
- /**
- * Cleans the ssl keys from the meta object and sets them to "true"
- *
- * @param {Object} meta
- * @param {Boolean} [remove]
- * @returns {Object}
- */
- cleanMeta: function (meta, remove) {
- internalCertificate.allowedSslFiles.map((key) => {
- if (typeof meta[key] !== 'undefined' && meta[key]) {
- if (remove) {
- delete meta[key];
- } else {
- meta[key] = true;
- }
- }
- });
-
- return meta;
- },
-
- /**
- * Request a certificate using the http challenge
- * @param {Object} certificate the certificate row
- * @returns {Promise}
- */
- requestLetsEncryptSsl: (certificate) => {
- logger.info('Requesting Let\'sEncrypt certificates for Cert #' + certificate.id + ': ' + certificate.domain_names.join(', '));
-
- const cmd = certbotCommand + ' certonly ' +
- '--config "' + letsencryptConfig + '" ' +
- '--cert-name "npm-' + certificate.id + '" ' +
- '--agree-tos ' +
- '--authenticator webroot ' +
- '--email "' + certificate.meta.letsencrypt_email + '" ' +
- '--preferred-challenges "dns,http" ' +
- '--domains "' + certificate.domain_names.join(',') + '" ' +
- (letsencryptStaging ? '--staging' : '');
-
- logger.info('Command:', cmd);
-
- return utils.exec(cmd)
- .then((result) => {
- logger.success(result);
- return result;
- });
- },
-
- /**
- * @param {Object} certificate the certificate row
- * @param {String} dns_provider the dns provider name (key used in `certbot-dns-plugins.js`)
- * @param {String | null} credentials the content of this providers credentials file
- * @param {String} propagation_seconds the cloudflare api token
- * @returns {Promise}
- */
- requestLetsEncryptSslWithDnsChallenge: (certificate) => {
- const dns_plugin = dnsPlugins[certificate.meta.dns_provider];
-
- if (!dns_plugin) {
- throw Error(`Unknown DNS provider '${certificate.meta.dns_provider}'`);
- }
-
- logger.info(`Requesting Let'sEncrypt certificates via ${dns_plugin.display_name} for Cert #${certificate.id}: ${certificate.domain_names.join(', ')}`);
-
- const credentialsLocation = '/etc/letsencrypt/credentials/credentials-' + certificate.id;
- // Escape single quotes and backslashes
- const escapedCredentials = certificate.meta.dns_provider_credentials.replaceAll('\'', '\\\'').replaceAll('\\', '\\\\');
- const credentialsCmd = 'mkdir -p /etc/letsencrypt/credentials 2> /dev/null; echo \'' + escapedCredentials + '\' > \'' + credentialsLocation + '\' && chmod 600 \'' + credentialsLocation + '\'';
- const prepareCmd = 'pip install ' + dns_plugin.package_name + (dns_plugin.version_requirement || '') + ' ' + dns_plugin.dependencies;
-
- // Whether the plugin has a ---credentials argument
- const hasConfigArg = certificate.meta.dns_provider !== 'route53';
-
- let mainCmd = certbotCommand + ' certonly ' +
- '--config "' + letsencryptConfig + '" ' +
- '--cert-name "npm-' + certificate.id + '" ' +
- '--agree-tos ' +
- '--email "' + certificate.meta.letsencrypt_email + '" ' +
- '--domains "' + certificate.domain_names.join(',') + '" ' +
- '--authenticator ' + dns_plugin.full_plugin_name + ' ' +
- (
- hasConfigArg
- ? '--' + dns_plugin.full_plugin_name + '-credentials "' + credentialsLocation + '"'
- : ''
- ) +
- (
- certificate.meta.propagation_seconds !== undefined
- ? ' --' + dns_plugin.full_plugin_name + '-propagation-seconds ' + certificate.meta.propagation_seconds
- : ''
- ) +
- (letsencryptStaging ? ' --staging' : '');
-
- // Prepend the path to the credentials file as an environment variable
- if (certificate.meta.dns_provider === 'route53') {
- mainCmd = 'AWS_CONFIG_FILE=\'' + credentialsLocation + '\' ' + mainCmd;
- }
-
- logger.info('Command:', `${credentialsCmd} && ${prepareCmd} && ${mainCmd}`);
-
- return utils.exec(credentialsCmd)
- .then(() => {
- return utils.exec(prepareCmd)
- .then(() => {
- return utils.exec(mainCmd)
- .then(async (result) => {
- logger.info(result);
- return result;
- });
- });
- }).catch(async (err) => {
- // Don't fail if file does not exist
- const delete_credentialsCmd = `rm -f '${credentialsLocation}' || true`;
- await utils.exec(delete_credentialsCmd);
- throw err;
- });
- },
-
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @returns {Promise}
- */
- renew: (access, data) => {
- return access.can('certificates:update', data)
- .then(() => {
- return internalCertificate.get(access, data);
- })
- .then((certificate) => {
- if (certificate.provider === 'letsencrypt') {
- const renewMethod = certificate.meta.dns_challenge ? internalCertificate.renewLetsEncryptSslWithDnsChallenge : internalCertificate.renewLetsEncryptSsl;
-
- return renewMethod(certificate)
- .then(() => {
- return internalCertificate.getCertificateInfoFromFile('/etc/letsencrypt/live/npm-' + certificate.id + '/fullchain.pem');
- })
- .then((cert_info) => {
- return certificateModel
- .query()
- .patchAndFetchById(certificate.id, {
- expires_on: moment(cert_info.dates.to, 'X').format('YYYY-MM-DD HH:mm:ss')
- });
- })
- .then((updated_certificate) => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'renewed',
- object_type: 'certificate',
- object_id: updated_certificate.id,
- meta: updated_certificate
- })
- .then(() => {
- return updated_certificate;
- });
- });
- } else {
- throw new error.ValidationError('Only Let\'sEncrypt certificates can be renewed');
- }
- });
- },
-
- /**
- * @param {Object} certificate the certificate row
- * @returns {Promise}
- */
- renewLetsEncryptSsl: (certificate) => {
- logger.info('Renewing Let\'sEncrypt certificates for Cert #' + certificate.id + ': ' + certificate.domain_names.join(', '));
-
- const cmd = certbotCommand + ' renew --force-renewal ' +
- '--config "' + letsencryptConfig + '" ' +
- '--cert-name "npm-' + certificate.id + '" ' +
- '--preferred-challenges "dns,http" ' +
- '--no-random-sleep-on-renew ' +
- '--disable-hook-validation ' +
- (letsencryptStaging ? '--staging' : '');
-
- logger.info('Command:', cmd);
-
- return utils.exec(cmd)
- .then((result) => {
- logger.info(result);
- return result;
- });
- },
-
- /**
- * @param {Object} certificate the certificate row
- * @returns {Promise}
- */
- renewLetsEncryptSslWithDnsChallenge: (certificate) => {
- const dns_plugin = dnsPlugins[certificate.meta.dns_provider];
-
- if (!dns_plugin) {
- throw Error(`Unknown DNS provider '${certificate.meta.dns_provider}'`);
- }
-
- logger.info(`Renewing Let'sEncrypt certificates via ${dns_plugin.display_name} for Cert #${certificate.id}: ${certificate.domain_names.join(', ')}`);
-
- let mainCmd = certbotCommand + ' renew ' +
- '--config "' + letsencryptConfig + '" ' +
- '--cert-name "npm-' + certificate.id + '" ' +
- '--disable-hook-validation ' +
- '--no-random-sleep-on-renew ' +
- (letsencryptStaging ? ' --staging' : '');
-
- // Prepend the path to the credentials file as an environment variable
- if (certificate.meta.dns_provider === 'route53') {
- const credentialsLocation = '/etc/letsencrypt/credentials/credentials-' + certificate.id;
- mainCmd = 'AWS_CONFIG_FILE=\'' + credentialsLocation + '\' ' + mainCmd;
- }
-
- logger.info('Command:', mainCmd);
-
- return utils.exec(mainCmd)
- .then(async (result) => {
- logger.info(result);
- return result;
- });
- },
-
- /**
- * @param {Object} certificate the certificate row
- * @param {Boolean} [throw_errors]
- * @returns {Promise}
- */
- revokeLetsEncryptSsl: (certificate, throw_errors) => {
- logger.info('Revoking Let\'sEncrypt certificates for Cert #' + certificate.id + ': ' + certificate.domain_names.join(', '));
-
- const mainCmd = certbotCommand + ' revoke ' +
- '--config "' + letsencryptConfig + '" ' +
- '--cert-path "/etc/letsencrypt/live/npm-' + certificate.id + '/fullchain.pem" ' +
- '--delete-after-revoke ' +
- (letsencryptStaging ? '--staging' : '');
-
- // Don't fail command if file does not exist
- const delete_credentialsCmd = `rm -f '/etc/letsencrypt/credentials/credentials-${certificate.id}' || true`;
-
- logger.info('Command:', mainCmd + '; ' + delete_credentialsCmd);
-
- return utils.exec(mainCmd)
- .then(async (result) => {
- await utils.exec(delete_credentialsCmd);
- logger.info(result);
- return result;
- })
- .catch((err) => {
- logger.error(err.message);
-
- if (throw_errors) {
- throw err;
- }
- });
- },
-
- /**
- * @param {Object} certificate
- * @returns {Boolean}
- */
- hasLetsEncryptSslCerts: (certificate) => {
- const letsencryptPath = '/etc/letsencrypt/live/npm-' + certificate.id;
-
- return fs.existsSync(letsencryptPath + '/fullchain.pem') && fs.existsSync(letsencryptPath + '/privkey.pem');
- },
-
- /**
- * @param {Object} in_use_result
- * @param {Number} in_use_result.total_count
- * @param {Array} in_use_result.proxy_hosts
- * @param {Array} in_use_result.redirection_hosts
- * @param {Array} in_use_result.dead_hosts
- */
- disableInUseHosts: (in_use_result) => {
- if (in_use_result.total_count) {
- let promises = [];
-
- if (in_use_result.proxy_hosts.length) {
- promises.push(internalNginx.bulkDeleteConfigs('proxy_host', in_use_result.proxy_hosts));
- }
-
- if (in_use_result.redirection_hosts.length) {
- promises.push(internalNginx.bulkDeleteConfigs('redirection_host', in_use_result.redirection_hosts));
- }
-
- if (in_use_result.dead_hosts.length) {
- promises.push(internalNginx.bulkDeleteConfigs('dead_host', in_use_result.dead_hosts));
- }
-
- return Promise.all(promises);
-
- } else {
- return Promise.resolve();
- }
- },
-
- /**
- * @param {Object} in_use_result
- * @param {Number} in_use_result.total_count
- * @param {Array} in_use_result.proxy_hosts
- * @param {Array} in_use_result.redirection_hosts
- * @param {Array} in_use_result.dead_hosts
- */
- enableInUseHosts: (in_use_result) => {
- if (in_use_result.total_count) {
- let promises = [];
-
- if (in_use_result.proxy_hosts.length) {
- promises.push(internalNginx.bulkGenerateConfigs('proxy_host', in_use_result.proxy_hosts));
- }
-
- if (in_use_result.redirection_hosts.length) {
- promises.push(internalNginx.bulkGenerateConfigs('redirection_host', in_use_result.redirection_hosts));
- }
-
- if (in_use_result.dead_hosts.length) {
- promises.push(internalNginx.bulkGenerateConfigs('dead_host', in_use_result.dead_hosts));
- }
-
- return Promise.all(promises);
-
- } else {
- return Promise.resolve();
- }
- },
-
- testHttpsChallenge: async (access, domains) => {
- await access.can('certificates:list');
-
- if (!isArray(domains)) {
- throw new error.InternalValidationError('Domains must be an array of strings');
- }
- if (domains.length === 0) {
- throw new error.InternalValidationError('No domains provided');
- }
-
- // Create a test challenge file
- const testChallengeDir = '/data/letsencrypt-acme-challenge/.well-known/acme-challenge';
- const testChallengeFile = testChallengeDir + '/test-challenge';
- fs.mkdirSync(testChallengeDir, {recursive: true});
- fs.writeFileSync(testChallengeFile, 'Success', {encoding: 'utf8'});
-
- async function performTestForDomain (domain) {
- logger.info('Testing http challenge for ' + domain);
- const url = `http://${domain}/.well-known/acme-challenge/test-challenge`;
- const formBody = `method=G&url=${encodeURI(url)}&bodytype=T&requestbody=&headername=User-Agent&headervalue=None&locationid=1&ch=false&cc=false`;
- const options = {
- method: 'POST',
- headers: {
- 'Content-Type': 'application/x-www-form-urlencoded',
- 'Content-Length': Buffer.byteLength(formBody)
- }
- };
-
- const result = await new Promise((resolve) => {
-
- const req = https.request('https://www.site24x7.com/tools/restapi-tester', options, function (res) {
- let responseBody = '';
-
- res.on('data', (chunk) => responseBody = responseBody + chunk);
- res.on('end', function () {
- const parsedBody = JSON.parse(responseBody + '');
- if (res.statusCode !== 200) {
- logger.warn(`Failed to test HTTP challenge for domain ${domain}`, res);
- resolve(undefined);
- }
- resolve(parsedBody);
- });
- });
-
- // Make sure to write the request body.
- req.write(formBody);
- req.end();
- req.on('error', function (e) { logger.warn(`Failed to test HTTP challenge for domain ${domain}`, e);
- resolve(undefined); });
- });
-
- if (!result) {
- // Some error occurred while trying to get the data
- return 'failed';
- } else if (`${result.responsecode}` === '200' && result.htmlresponse === 'Success') {
- // Server exists and has responded with the correct data
- return 'ok';
- } else if (`${result.responsecode}` === '200') {
- // Server exists but has responded with wrong data
- logger.info(`HTTP challenge test failed for domain ${domain} because of invalid returned data:`, result.htmlresponse);
- return 'wrong-data';
- } else if (`${result.responsecode}` === '404') {
- // Server exists but responded with a 404
- logger.info(`HTTP challenge test failed for domain ${domain} because code 404 was returned`);
- return '404';
- } else if (`${result.responsecode}` === '0' || (typeof result.reason === 'string' && result.reason.toLowerCase() === 'host unavailable')) {
- // Server does not exist at domain
- logger.info(`HTTP challenge test failed for domain ${domain} the host was not found`);
- return 'no-host';
- } else {
- // Other errors
- logger.info(`HTTP challenge test failed for domain ${domain} because code ${result.responsecode} was returned`);
- return `other:${result.responsecode}`;
- }
- }
-
- const results = {};
-
- for (const domain of domains){
- results[domain] = await performTestForDomain(domain);
- }
-
- // Remove the test challenge file
- fs.unlinkSync(testChallengeFile);
-
- return results;
- }
-};
-
-module.exports = internalCertificate;
diff --git a/backend/internal/config/args.go b/backend/internal/config/args.go
new file mode 100644
index 000000000..dffb3131a
--- /dev/null
+++ b/backend/internal/config/args.go
@@ -0,0 +1,29 @@
+package config
+
+import (
+ "fmt"
+ "os"
+
+ "github.com/alexflint/go-arg"
+)
+
+// ArgConfig is the settings for passing arguments to the command
+type ArgConfig struct {
+ Version bool `arg:"-v" help:"print version and exit"`
+}
+
+var (
+ appArguments ArgConfig
+)
+
+// InitArgs will parse arg vars
+func InitArgs(version, commit *string) {
+ // nolint: errcheck, gosec
+ arg.MustParse(&appArguments)
+
+ if appArguments.Version {
+ fmt.Printf("v%s (%s)\n", *version, *commit)
+ // nolint: revive
+ os.Exit(0)
+ }
+}
diff --git a/backend/internal/config/config.go b/backend/internal/config/config.go
new file mode 100644
index 000000000..11c39ad96
--- /dev/null
+++ b/backend/internal/config/config.go
@@ -0,0 +1,61 @@
+package config
+
+import (
+ "fmt"
+ golog "log"
+
+ "npm/internal/logger"
+
+ "github.com/vrischmann/envconfig"
+)
+
+// Init will parse environment variables into the Env struct
+func Init(version, commit *string) {
+ Version = *version
+ Commit = *commit
+
+ if err := envconfig.InitWithPrefix(&Configuration, "NPM"); err != nil {
+ fmt.Printf("%+v\n", err)
+ }
+
+ if err := initLogger(); err != nil {
+ logger.Error("LoggerConfigurationError", err)
+ }
+}
+
+// InitIPRanges will initialise the config for the ipranges command
+func InitIPRanges(version, commit *string) error {
+ Version = *version
+ Commit = *commit
+ err := envconfig.InitWithPrefix(&Configuration, "NPM")
+ // nolint: errcheck, gosec
+ initLogger()
+ return err
+}
+
+// Init initialises the Log object and return it
+func initLogger() error {
+ // this removes timestamp prefixes from logs
+ golog.SetFlags(0)
+
+ switch Configuration.Log.Level {
+ case "debug":
+ logLevel = logger.DebugLevel
+ case "warn":
+ logLevel = logger.WarnLevel
+ case "error":
+ logLevel = logger.ErrorLevel
+ default:
+ logLevel = logger.InfoLevel
+ }
+
+ return logger.Configure(&logger.Config{
+ LogThreshold: logLevel,
+ Formatter: Configuration.Log.Format,
+ })
+}
+
+// GetLogLevel returns the logger const level
+func GetLogLevel() logger.Level {
+ return logLevel
+}
diff --git a/backend/internal/config/config_test.go b/backend/internal/config/config_test.go
new file mode 100644
index 000000000..542d22817
--- /dev/null
+++ b/backend/internal/config/config_test.go
@@ -0,0 +1,140 @@
+package config
+
+import (
+ "os"
+ "strings"
+ "testing"
+
+ "npm/internal/logger"
+
+ "github.com/stretchr/testify/assert"
+ "go.uber.org/goleak"
+)
+
+func TestInit(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ t.Setenv("NPM_DATA_FOLDER", "/path/to/some/data/folder")
+ t.Setenv("NPM_LOG_LEVEL", "warn")
+ t.Setenv("NPM_DB_DRIVER", "postgres")
+ t.Setenv("NPM_DB_HOST", "1.1.1.1")
+ t.Setenv("NPM_DB_PORT", "5432")
+ t.Setenv("NPM_DB_USERNAME", "rootuser")
+ t.Setenv("NPM_DB_PASSWORD", "4metoremember")
+ t.Setenv("NPM_DB_NAME", "npm")
+ t.Setenv("NPM_DISABLE_IPV4", "false")
+ t.Setenv("NPM_DISABLE_IPV6", "true")
+
+ version := "999.999.999"
+ commit := "abcd124"
+ Init(&version, &commit)
+ err := InitIPRanges(&version, &commit)
+ assert.Nil(t, err)
+
+ assert.Equal(t, "/path/to/some/data/folder", Configuration.DataFolder)
+ assert.Equal(t, false, Configuration.DisableIPV4)
+ assert.Equal(t, true, Configuration.DisableIPV6)
+ assert.Equal(t, "/data/.acme.sh", Configuration.Acmesh.Home)
+ assert.Equal(t, "disable", Configuration.DB.SSLMode)
+ assert.Equal(t, logger.WarnLevel, logger.GetLogLevel())
+
+ assert.Equal(t, "postgres", Configuration.DB.Driver)
+ assert.Equal(t, "1.1.1.1", Configuration.DB.Host)
+ assert.Equal(t, 5432, Configuration.DB.Port)
+ assert.Equal(t, "rootuser", Configuration.DB.Username)
+ assert.Equal(t, "4metoremember", Configuration.DB.Password)
+ assert.Equal(t, "npm", Configuration.DB.Name)
+ assert.Equal(t, "postgres", Configuration.DB.GetDriver())
+}
+
+func TestConnectURLs(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ type want struct {
+ gorm string
+ dbmate string
+ }
+
+ tests := []struct {
+ name string
+ envs []string
+ want want
+ }{
+ {
+ name: "sqlite",
+ envs: []string{
+ "NPM_DB_DRIVER=sqlite",
+ "NPM_DATA_FOLDER=/path/to/data",
+ },
+ want: want{
+ gorm: "/path/to/data/nginxproxymanager.db",
+ dbmate: "sqlite:/path/to/data/nginxproxymanager.db",
+ },
+ },
+ {
+ name: "postgres",
+ envs: []string{
+ "NPM_DB_DRIVER=postgres",
+ "NPM_DB_HOST=2.2.2.2",
+ "NPM_DB_PORT=9824",
+ "NPM_DB_USERNAME=postgresuser",
+ "NPM_DB_PASSWORD=pgpass",
+ "NPM_DB_SSLMODE=strict",
+ "NPM_DB_NAME=npm",
+ },
+ want: want{
+ gorm: "host=2.2.2.2 user=postgresuser password=pgpass dbname=npm port=9824 sslmode=strict TimeZone=UTC",
+ dbmate: "postgres://postgresuser:pgpass@2.2.2.2:9824/npm?sslmode=strict",
+ },
+ },
+ {
+ name: "mysql",
+ envs: []string{
+ "NPM_DB_DRIVER=mysql",
+ "NPM_DB_HOST=3.3.3.3",
+ "NPM_DB_PORT=3307",
+ "NPM_DB_USERNAME=mysqluser",
+ "NPM_DB_PASSWORD=mypass",
+ "NPM_DB_NAME=npm",
+ },
+ want: want{
+ gorm: "mysqluser:mypass@tcp(3.3.3.3:3307)/npm?charset=utf8mb4&parseTime=True&loc=Local",
+ dbmate: "mysql://mysqluser:mypass@3.3.3.3:3307/npm",
+ },
+ },
+ }
+
+ version := "888.888.888"
+ commit := "abcd125"
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ for _, env := range tt.envs {
+ parts := strings.Split(env, "=")
+ if len(parts) == 2 {
+ t.Setenv(parts[0], parts[1])
+ }
+ }
+ Init(&version, &commit)
+ assert.Equal(t, tt.want.gorm, Configuration.DB.GetGormConnectURL())
+ assert.Equal(t, tt.want.dbmate, Configuration.DB.GetDBMateConnectURL())
+ })
+ }
+}
+
+func TestCreateDataFolders(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ t.Setenv("NPM_DATA_FOLDER", "/tmp/npmtest")
+
+ version := "777.777.777"
+ commit := "abcd123"
+ Init(&version, &commit)
+ CreateDataFolders()
+
+ _, err := os.Stat("/tmp/npmtest/nginx/hosts")
+ assert.Nil(t, err)
+}
diff --git a/backend/internal/config/db.go b/backend/internal/config/db.go
new file mode 100644
index 000000000..0b0832718
--- /dev/null
+++ b/backend/internal/config/db.go
@@ -0,0 +1,79 @@
+package config
+
+import (
+ "fmt"
+ "strings"
+)
+
+const (
+ DatabaseSqlite = "sqlite"
+ DatabasePostgres = "postgres"
+ DatabaseMysql = "mysql"
+)
+
+type db struct {
+ Driver string `json:"driver" envconfig:"optional,default=sqlite"`
+ Host string `json:"host" envconfig:"optional,default="`
+ Port int `json:"port" envconfig:"optional,default="`
+ Username string `json:"username" envconfig:"optional,default="`
+ Password string `json:"password" envconfig:"optional,default="`
+ Name string `json:"name" envconfig:"optional,default="`
+ SSLMode string `json:"sslmode" envconfig:"optional,default=disable"`
+}
+
+// GetDriver returns the lowercase driver name
+func (d *db) GetDriver() string {
+ return strings.ToLower(d.Driver)
+}
+
+// GetGormConnectURL is used by Gorm
+func (d *db) GetGormConnectURL() string {
+ switch d.GetDriver() {
+ case DatabaseSqlite:
+ return fmt.Sprintf("%s/nginxproxymanager.db", Configuration.DataFolder)
+ case DatabasePostgres:
+ return fmt.Sprintf("host=%s user=%s password=%s dbname=%s port=%d sslmode=%s TimeZone=UTC",
+ d.Host,
+ d.Username,
+ d.Password,
+ d.Name,
+ d.Port,
+ d.SSLMode,
+ )
+ case DatabaseMysql:
+ return fmt.Sprintf("%s:%s@tcp(%s:%d)/%s?charset=utf8mb4&parseTime=True&loc=Local",
+ d.Username,
+ d.Password,
+ d.Host,
+ d.Port,
+ d.Name,
+ )
+ }
+ return ""
+}
+
+// GetDBMateConnectURL is used by Dbmate
+func (d *db) GetDBMateConnectURL() string {
+ switch d.GetDriver() {
+ case DatabaseSqlite:
+ return fmt.Sprintf("sqlite:%s/nginxproxymanager.db", Configuration.DataFolder)
+ case DatabasePostgres:
+ return fmt.Sprintf("postgres://%s:%s@%s:%d/%s?sslmode=%s",
+ d.Username,
+ d.Password,
+ d.Host,
+ d.Port,
+ d.Name,
+ d.SSLMode,
+ )
+ case DatabaseMysql:
+ return fmt.Sprintf("mysql://%s:%s@%s:%d/%s",
+ d.Username,
+ d.Password,
+ d.Host,
+ d.Port,
+ d.Name,
+ )
+ }
+ return ""
+}
diff --git a/backend/internal/config/folders.go b/backend/internal/config/folders.go
new file mode 100644
index 000000000..1268910e6
--- /dev/null
+++ b/backend/internal/config/folders.go
@@ -0,0 +1,37 @@
+package config
+
+import (
+ "fmt"
+ "os"
+
+ "npm/internal/logger"
+)
+
+// CreateDataFolders will recursively create these folders within the
+// data folder defined in configuration.
+func CreateDataFolders() {
+ folders := []string{
+ "access",
+ "certificates",
+ "logs",
+ // Acme.sh:
+ Configuration.Acmesh.GetWellknown(),
+ // Nginx:
+ "nginx/hosts",
+ "nginx/streams",
+ "nginx/temp",
+ "nginx/upstreams",
+ }
+
+ for _, folder := range folders {
+ path := folder
+ if path[0:1] != "/" {
+ path = fmt.Sprintf("%s/%s", Configuration.DataFolder, folder)
+ }
+ logger.Debug("Creating folder: %s", path)
+ // nolint: gosec
+ if err := os.MkdirAll(path, os.ModePerm); err != nil {
+ logger.Error("CreateDataFolderError", err)
+ }
+ }
+}
diff --git a/backend/internal/config/vars.go b/backend/internal/config/vars.go
new file mode 100644
index 000000000..8d243ecb6
--- /dev/null
+++ b/backend/internal/config/vars.go
@@ -0,0 +1,44 @@
+package config
+
+import (
+ "fmt"
+
+ "npm/internal/logger"
+)
+
+// Version is the version set by ldflags
+var Version string
+
+// Commit is the git commit set by ldflags
+var Commit string
+
+// IsSetup defines whether we have an admin user or not
+var IsSetup bool
+
+var logLevel logger.Level
+
+// Configuration is the main configuration object
+var Configuration struct {
+ DataFolder string `json:"data_folder" envconfig:"optional,default=/data"`
+ DisableIPV4 bool `json:"disable_ipv4" envconfig:"optional"`
+ DisableIPV6 bool `json:"disable_ipv6" envconfig:"optional"`
+ Acmesh acmesh `json:"acmesh"`
+ DB db `json:"db"`
+ Log log `json:"log"`
+}
+
+type log struct {
+ Level string `json:"level" envconfig:"optional,default=info"`
+ Format string `json:"format" envconfig:"optional,default=nice"`
+}
+
+type acmesh struct {
+ Home string `json:"home" envconfig:"optional,default=/data/.acme.sh"`
+ ConfigHome string `json:"config_home" envconfig:"optional,default=/data/.acme.sh/config"`
+ CertHome string `json:"cert_home" envconfig:"optional,default=/data/.acme.sh/certs"`
+}
+
+// GetWellknown returns the well known path
+func (a *acmesh) GetWellknown() string {
+ return fmt.Sprintf("%s/.well-known", a.Home)
+}
diff --git a/backend/internal/config/vars_test.go b/backend/internal/config/vars_test.go
new file mode 100644
index 000000000..be3ae00de
--- /dev/null
+++ b/backend/internal/config/vars_test.go
@@ -0,0 +1,18 @@
+package config
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "go.uber.org/goleak"
+)
+
+func TestAcmeshGetWellknown(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ a := acmesh{
+ Home: "/data/.acme.sh",
+ }
+ assert.Equal(t, "/data/.acme.sh/.well-known", a.GetWellknown())
+}
diff --git a/backend/internal/database/db.go b/backend/internal/database/db.go
new file mode 100644
index 000000000..7f4d429ad
--- /dev/null
+++ b/backend/internal/database/db.go
@@ -0,0 +1,80 @@
+package database
+
+import (
+ "fmt"
+ "strings"
+
+ "npm/internal/config"
+ "npm/internal/logger"
+
+ "github.com/glebarez/sqlite"
+ "github.com/rotisserie/eris"
+ "gorm.io/driver/mysql"
+ "gorm.io/driver/postgres"
+ "gorm.io/gorm"
+ gormlogger "gorm.io/gorm/logger"
+ "gorm.io/gorm/schema"
+)
+
+var dbInstance *gorm.DB
+
+// NewDB creates a new connection
+func NewDB() {
+ logger.Info("Creating new DB instance using %s", strings.ToLower(config.Configuration.DB.Driver))
+ db, err := connect()
+ if err != nil {
+ logger.Error("DatabaseConnectError", err)
+ } else if db != nil {
+ dbInstance = db
+ }
+}
+
+// GetDB returns an existing or new instance
+func GetDB() *gorm.DB {
+ if dbInstance == nil {
+ NewDB()
+ }
+ return dbInstance
+}
+
+// SetDB will set the dbInstance to this
+// Used by unit testing to set the db to a mock database
+func SetDB(db *gorm.DB) {
+ dbInstance = db
+}
+
+func connect() (*gorm.DB, error) {
+ var d gorm.Dialector
+ dsn := config.Configuration.DB.GetGormConnectURL()
+
+ switch strings.ToLower(config.Configuration.DB.Driver) {
+ case config.DatabaseSqlite:
+ // autocreate(dsn)
+ d = sqlite.Open(dsn)
+
+ case config.DatabasePostgres:
+ d = postgres.Open(dsn)
+
+ case config.DatabaseMysql:
+ d = mysql.Open(dsn)
+
+ default:
+ return nil, eris.New(fmt.Sprintf("Database driver %s is not supported. Valid options are: %s, %s or %s", config.Configuration.DB.Driver, config.DatabaseSqlite, config.DatabasePostgres, config.DatabaseMysql))
+ }
+
+ // see: https://gorm.io/docs/gorm_config.html
+ cfg := gorm.Config{
+ NamingStrategy: schema.NamingStrategy{
+ SingularTable: true,
+ NoLowerCase: true,
+ },
+ PrepareStmt: false,
+ }
+
+ // Silence gorm query errors unless when not in debug mode
+ if config.GetLogLevel() != logger.DebugLevel {
+ cfg.Logger = gormlogger.Default.LogMode(gormlogger.Silent)
+ }
+
+ return gorm.Open(d, &cfg)
+}
diff --git a/backend/internal/database/helpers.go b/backend/internal/database/helpers.go
new file mode 100644
index 000000000..c34208bd7
--- /dev/null
+++ b/backend/internal/database/helpers.go
@@ -0,0 +1,40 @@
+package database
+
+import (
+ "fmt"
+ "strings"
+
+ "npm/internal/config"
+)
+
+const (
+ // DateFormat for DateFormat
+ DateFormat = "2006-01-02"
+ // DateTimeFormat for DateTimeFormat
+ DateTimeFormat = "2006-01-02T15:04:05"
+)
+
+// QuoteTableName is a special function that will quote a table
+// name based on the driver. Gorm normally handles this but this
+// is for special cases where we run raw sql
+func QuoteTableName(tbl string) string {
+ switch strings.ToLower(config.Configuration.DB.Driver) {
+ case config.DatabaseMysql:
+ // backticks for mysql
+ return fmt.Sprintf("`%s`", tbl)
+ default:
+ // double quotes for everything else
+ return fmt.Sprintf(`"%s"`, tbl)
+ }
+}
+
+// GetCaseInsensitiveLike returns a different operator based on
+// the db driver
+func GetCaseInsensitiveLike() string {
+ switch strings.ToLower(config.Configuration.DB.Driver) {
+ case config.DatabasePostgres:
+ return "ILIKE"
+ default:
+ return "LIKE"
+ }
+}
diff --git a/backend/internal/database/migrator.go b/backend/internal/database/migrator.go
new file mode 100644
index 000000000..5d5555be9
--- /dev/null
+++ b/backend/internal/database/migrator.go
@@ -0,0 +1,48 @@
+package database
+
+import (
+ "fmt"
+ "net/url"
+
+ "npm/embed"
+ "npm/internal/config"
+ "npm/internal/logger"
+
+ "github.com/amacneil/dbmate/v2/pkg/dbmate"
+
+ // Drivers:
+ _ "github.com/amacneil/dbmate/v2/pkg/driver/mysql"
+ _ "github.com/amacneil/dbmate/v2/pkg/driver/postgres"
+ _ "github.com/amacneil/dbmate/v2/pkg/driver/sqlite"
+)
+
+type afterMigrationComplete func()
+
+// Migrate will bring the db up to date
+func Migrate(followup afterMigrationComplete) bool {
+ dbURL := config.Configuration.DB.GetDBMateConnectURL()
+ u, _ := url.Parse(dbURL)
+ db := dbmate.New(u)
+ db.AutoDumpSchema = false
+ db.FS = embed.MigrationFiles
+ db.MigrationsDir = []string{fmt.Sprintf("./migrations/%s", config.Configuration.DB.GetDriver())}
+
+ migrations, err := db.FindMigrations()
+ if err != nil {
+ logger.Error("MigrationError", err)
+ return false
+ }
+
+ for _, m := range migrations {
+ logger.Debug("%s: %s", m.Version, m.FilePath)
+ }
+
+ err = db.CreateAndMigrate()
+ if err != nil {
+ logger.Error("MigrationError", err)
+ return false
+ }
+
+ followup()
+ return true
+}
diff --git a/backend/internal/dead-host.js b/backend/internal/dead-host.js
deleted file mode 100644
index d35fec257..000000000
--- a/backend/internal/dead-host.js
+++ /dev/null
@@ -1,461 +0,0 @@
-const _ = require('lodash');
-const error = require('../lib/error');
-const deadHostModel = require('../models/dead_host');
-const internalHost = require('./host');
-const internalNginx = require('./nginx');
-const internalAuditLog = require('./audit-log');
-const internalCertificate = require('./certificate');
-
-function omissions () {
- return ['is_deleted'];
-}
-
-const internalDeadHost = {
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @returns {Promise}
- */
- create: (access, data) => {
- let create_certificate = data.certificate_id === 'new';
-
- if (create_certificate) {
- delete data.certificate_id;
- }
-
- return access.can('dead_hosts:create', data)
- .then((/*access_data*/) => {
- // Get a list of the domain names and check each of them against existing records
- let domain_name_check_promises = [];
-
- data.domain_names.map(function (domain_name) {
- domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name));
- });
-
- return Promise.all(domain_name_check_promises)
- .then((check_results) => {
- check_results.map(function (result) {
- if (result.is_taken) {
- throw new error.ValidationError(result.hostname + ' is already in use');
- }
- });
- });
- })
- .then(() => {
- // At this point the domains should have been checked
- data.owner_user_id = access.token.getUserId(1);
- data = internalHost.cleanSslHstsData(data);
-
- return deadHostModel
- .query()
- .omit(omissions())
- .insertAndFetch(data);
- })
- .then((row) => {
- if (create_certificate) {
- return internalCertificate.createQuickCertificate(access, data)
- .then((cert) => {
- // update host with cert id
- return internalDeadHost.update(access, {
- id: row.id,
- certificate_id: cert.id
- });
- })
- .then(() => {
- return row;
- });
- } else {
- return row;
- }
- })
- .then((row) => {
- // re-fetch with cert
- return internalDeadHost.get(access, {
- id: row.id,
- expand: ['certificate', 'owner']
- });
- })
- .then((row) => {
- // Configure nginx
- return internalNginx.configure(deadHostModel, 'dead_host', row)
- .then(() => {
- return row;
- });
- })
- .then((row) => {
- data.meta = _.assign({}, data.meta || {}, row.meta);
-
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'created',
- object_type: 'dead-host',
- object_id: row.id,
- meta: data
- })
- .then(() => {
- return row;
- });
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @return {Promise}
- */
- update: (access, data) => {
- let create_certificate = data.certificate_id === 'new';
-
- if (create_certificate) {
- delete data.certificate_id;
- }
-
- return access.can('dead_hosts:update', data.id)
- .then((/*access_data*/) => {
- // Get a list of the domain names and check each of them against existing records
- let domain_name_check_promises = [];
-
- if (typeof data.domain_names !== 'undefined') {
- data.domain_names.map(function (domain_name) {
- domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name, 'dead', data.id));
- });
-
- return Promise.all(domain_name_check_promises)
- .then((check_results) => {
- check_results.map(function (result) {
- if (result.is_taken) {
- throw new error.ValidationError(result.hostname + ' is already in use');
- }
- });
- });
- }
- })
- .then(() => {
- return internalDeadHost.get(access, {id: data.id});
- })
- .then((row) => {
- if (row.id !== data.id) {
- // Sanity check that something crazy hasn't happened
- throw new error.InternalValidationError('404 Host could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
- }
-
- if (create_certificate) {
- return internalCertificate.createQuickCertificate(access, {
- domain_names: data.domain_names || row.domain_names,
- meta: _.assign({}, row.meta, data.meta)
- })
- .then((cert) => {
- // update host with cert id
- data.certificate_id = cert.id;
- })
- .then(() => {
- return row;
- });
- } else {
- return row;
- }
- })
- .then((row) => {
- // Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
- data = _.assign({}, {
- domain_names: row.domain_names
- }, data);
-
- data = internalHost.cleanSslHstsData(data, row);
-
- return deadHostModel
- .query()
- .where({id: data.id})
- .patch(data)
- .then((saved_row) => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'updated',
- object_type: 'dead-host',
- object_id: row.id,
- meta: data
- })
- .then(() => {
- return _.omit(saved_row, omissions());
- });
- });
- })
- .then(() => {
- return internalDeadHost.get(access, {
- id: data.id,
- expand: ['owner', 'certificate']
- })
- .then((row) => {
- // Configure nginx
- return internalNginx.configure(deadHostModel, 'dead_host', row)
- .then((new_meta) => {
- row.meta = new_meta;
- row = internalHost.cleanRowCertificateMeta(row);
- return _.omit(row, omissions());
- });
- });
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {Array} [data.expand]
- * @param {Array} [data.omit]
- * @return {Promise}
- */
- get: (access, data) => {
- if (typeof data === 'undefined') {
- data = {};
- }
-
- return access.can('dead_hosts:get', data.id)
- .then((access_data) => {
- let query = deadHostModel
- .query()
- .where('is_deleted', 0)
- .andWhere('id', data.id)
- .allowEager('[owner,certificate]')
- .first();
-
- if (access_data.permission_visibility !== 'all') {
- query.andWhere('owner_user_id', access.token.getUserId(1));
- }
-
- // Custom omissions
- if (typeof data.omit !== 'undefined' && data.omit !== null) {
- query.omit(data.omit);
- }
-
- if (typeof data.expand !== 'undefined' && data.expand !== null) {
- query.eager('[' + data.expand.join(', ') + ']');
- }
-
- return query;
- })
- .then((row) => {
- if (row) {
- row = internalHost.cleanRowCertificateMeta(row);
- return _.omit(row, omissions());
- } else {
- throw new error.ItemNotFoundError(data.id);
- }
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {String} [data.reason]
- * @returns {Promise}
- */
- delete: (access, data) => {
- return access.can('dead_hosts:delete', data.id)
- .then(() => {
- return internalDeadHost.get(access, {id: data.id});
- })
- .then((row) => {
- if (!row) {
- throw new error.ItemNotFoundError(data.id);
- }
-
- return deadHostModel
- .query()
- .where('id', row.id)
- .patch({
- is_deleted: 1
- })
- .then(() => {
- // Delete Nginx Config
- return internalNginx.deleteConfig('dead_host', row)
- .then(() => {
- return internalNginx.reload();
- });
- })
- .then(() => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'deleted',
- object_type: 'dead-host',
- object_id: row.id,
- meta: _.omit(row, omissions())
- });
- });
- })
- .then(() => {
- return true;
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {String} [data.reason]
- * @returns {Promise}
- */
- enable: (access, data) => {
- return access.can('dead_hosts:update', data.id)
- .then(() => {
- return internalDeadHost.get(access, {
- id: data.id,
- expand: ['certificate', 'owner']
- });
- })
- .then((row) => {
- if (!row) {
- throw new error.ItemNotFoundError(data.id);
- } else if (row.enabled) {
- throw new error.ValidationError('Host is already enabled');
- }
-
- row.enabled = 1;
-
- return deadHostModel
- .query()
- .where('id', row.id)
- .patch({
- enabled: 1
- })
- .then(() => {
- // Configure nginx
- return internalNginx.configure(deadHostModel, 'dead_host', row);
- })
- .then(() => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'enabled',
- object_type: 'dead-host',
- object_id: row.id,
- meta: _.omit(row, omissions())
- });
- });
- })
- .then(() => {
- return true;
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {String} [data.reason]
- * @returns {Promise}
- */
- disable: (access, data) => {
- return access.can('dead_hosts:update', data.id)
- .then(() => {
- return internalDeadHost.get(access, {id: data.id});
- })
- .then((row) => {
- if (!row) {
- throw new error.ItemNotFoundError(data.id);
- } else if (!row.enabled) {
- throw new error.ValidationError('Host is already disabled');
- }
-
- row.enabled = 0;
-
- return deadHostModel
- .query()
- .where('id', row.id)
- .patch({
- enabled: 0
- })
- .then(() => {
- // Delete Nginx Config
- return internalNginx.deleteConfig('dead_host', row)
- .then(() => {
- return internalNginx.reload();
- });
- })
- .then(() => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'disabled',
- object_type: 'dead-host',
- object_id: row.id,
- meta: _.omit(row, omissions())
- });
- });
- })
- .then(() => {
- return true;
- });
- },
-
- /**
- * All Hosts
- *
- * @param {Access} access
- * @param {Array} [expand]
- * @param {String} [search_query]
- * @returns {Promise}
- */
- getAll: (access, expand, search_query) => {
- return access.can('dead_hosts:list')
- .then((access_data) => {
- let query = deadHostModel
- .query()
- .where('is_deleted', 0)
- .groupBy('id')
- .omit(['is_deleted'])
- .allowEager('[owner,certificate]')
- .orderBy('domain_names', 'ASC');
-
- if (access_data.permission_visibility !== 'all') {
- query.andWhere('owner_user_id', access.token.getUserId(1));
- }
-
- // Query is used for searching
- if (typeof search_query === 'string') {
- query.where(function () {
- this.where('domain_names', 'like', '%' + search_query + '%');
- });
- }
-
- if (typeof expand !== 'undefined' && expand !== null) {
- query.eager('[' + expand.join(', ') + ']');
- }
-
- return query;
- })
- .then((rows) => {
- if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) {
- return internalHost.cleanAllRowsCertificateMeta(rows);
- }
-
- return rows;
- });
- },
-
- /**
- * Report use
- *
- * @param {Number} user_id
- * @param {String} visibility
- * @returns {Promise}
- */
- getCount: (user_id, visibility) => {
- let query = deadHostModel
- .query()
- .count('id as count')
- .where('is_deleted', 0);
-
- if (visibility !== 'all') {
- query.andWhere('owner_user_id', user_id);
- }
-
- return query.first()
- .then((row) => {
- return parseInt(row.count, 10);
- });
- }
-};
-
-module.exports = internalDeadHost;
diff --git a/backend/internal/dnsproviders/common.go b/backend/internal/dnsproviders/common.go
new file mode 100644
index 000000000..fef52e16e
--- /dev/null
+++ b/backend/internal/dnsproviders/common.go
@@ -0,0 +1,116 @@
+package dnsproviders
+
+import (
+ "encoding/json"
+
+ "npm/internal/errors"
+)
+
+// providerField should mimick jsonschema, so that
+// the ui can render a field and validate it
+// before we do.
+// See: https://json-schema.org/draft/2020-12/json-schema-validation.html
+type providerField struct {
+ Title string `json:"title"`
+ Type string `json:"type"`
+ AdditionalProperties bool `json:"additionalProperties"`
+ Minimum int `json:"minimum,omitempty"`
+ Maximum int `json:"maximum,omitempty"`
+ MinLength int `json:"minLength,omitempty"`
+ MaxLength int `json:"maxLength,omitempty"`
+ Pattern string `json:"pattern,omitempty"`
+ IsSecret bool `json:"-"` // Not valid jsonschema
+}
+
+// Provider is a simple struct
+type Provider struct {
+ Title string `json:"title"`
+ Type string `json:"type"` // Should always be "object"
+ AdditionalProperties bool `json:"additionalProperties"`
+ MinProperties int `json:"minProperties,omitempty"`
+ Required []string `json:"required,omitempty"`
+ Properties map[string]providerField `json:"properties"`
+}
+
+// GetJSONSchema encodes this object as JSON string
+func (p *Provider) GetJSONSchema() (string, error) {
+ b, err := json.Marshal(p)
+ return string(b), err
+}
+
+// ConvertToUpdatable will manipulate this object so that it returns
+// an updatable json schema
+func (p *Provider) ConvertToUpdatable() {
+ p.MinProperties = 1
+ p.Required = nil
+}
+
+// List returns an array of providers
+func List() []Provider {
+ return []Provider{
+ getDNSAcmeDNS(),
+ getDNSAd(),
+ getDNSAli(),
+ getDNSAws(),
+ getDNSAutoDNS(),
+ getDNSAzure(),
+ getDNSCf(),
+ getDNSCloudns(),
+ getDNSConoha(),
+ getDNSCx(),
+ getDNSCyon(),
+ getDNSDgon(),
+ getDNSMe(),
+ getDNSDNSimple(),
+ getDNSDa(),
+ getDNSDp(),
+ getDNSDpi(),
+ getDNSDreamhost(),
+ getDNSDuckDNS(),
+ getDNSDyn(),
+ getDNSDynu(),
+ getDNSEuserv(),
+ getDNSFreeDNS(),
+ getDNSGandiLiveDNS(),
+ getDNSGd(),
+ getDNSHe(),
+ getDNSInfoblox(),
+ getDNSInwx(),
+ getDNSIspconfig(),
+ getDNSKinghost(),
+ getDNSLinodeV4(),
+ getDNSLoopia(),
+ getDNSLua(),
+ getDNSNamecom(),
+ getDNSNamesilo(),
+ getDNSOne(),
+ getDNSYandex(),
+ getDNSSelectel(),
+ getDNSServercow(),
+ getDNSTele3(),
+ getDNSPDNS(),
+ getDNSUnoeuro(),
+ getDNSVscale(),
+ getDNSDNZilore(),
+ getDNSZonomi(),
+ }
+}
+
+// GetAll returns all the configured providers
+func GetAll() map[string]Provider {
+ mp := make(map[string]Provider)
+ items := List()
+ for _, item := range items {
+ mp[item.Title] = item
+ }
+ return mp
+}
+
+// Get returns a single provider by name
+func Get(provider string) (Provider, error) {
+ all := GetAll()
+ if item, found := all[provider]; found {
+ return item, nil
+ }
+ return Provider{}, errors.ErrProviderNotFound
+}
diff --git a/backend/internal/dnsproviders/common_test.go b/backend/internal/dnsproviders/common_test.go
new file mode 100644
index 000000000..80bde44f1
--- /dev/null
+++ b/backend/internal/dnsproviders/common_test.go
@@ -0,0 +1,40 @@
+package dnsproviders
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "go.uber.org/goleak"
+)
+
+func TestGetAll(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ providers := GetAll()
+ // This number will have to (annoyingly) be updated
+ // when adding new dns providers to the list
+ assert.Equal(t, 45, len(providers))
+
+ _, dynuExists := providers["dns_dynu"]
+ assert.Equal(t, true, dynuExists)
+ _, duckDNSExists := providers["dns_duckdns"]
+ assert.Equal(t, true, duckDNSExists)
+ _, cfExists := providers["dns_cf"]
+ assert.Equal(t, true, cfExists)
+ _, randomExists := providers["dns_shouldnotexist"]
+ assert.Equal(t, false, randomExists)
+}
+
+func TestGet(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ provider, err := Get("dns_duckdns")
+ assert.Nil(t, err)
+ assert.Equal(t, "dns_duckdns", provider.Title)
+
+ provider, err = Get("dns_shouldnotexist")
+ assert.NotNil(t, err)
+ assert.Equal(t, "provider_not_found", err.Error())
+}
diff --git a/backend/internal/dnsproviders/dns_acmedns.go b/backend/internal/dnsproviders/dns_acmedns.go
new file mode 100644
index 000000000..6730240dc
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_acmedns.go
@@ -0,0 +1,34 @@
+package dnsproviders
+
+func getDNSAcmeDNS() Provider {
+ return Provider{
+ Title: "dns_acmedns",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "ACMEDNS_BASE_URL",
+ "ACMEDNS_SUBDOMAIN",
+ "ACMEDNS_USERNAME",
+ "ACMEDNS_PASSWORD",
+ },
+ Properties: map[string]providerField{
+ "ACMEDNS_BASE_URL": {
+ Title: "base-url",
+ Type: "string",
+ },
+ "ACMEDNS_SUBDOMAIN": {
+ Title: "subdomain",
+ Type: "string",
+ },
+ "ACMEDNS_USERNAME": {
+ Title: "username",
+ Type: "string",
+ },
+ "ACMEDNS_PASSWORD": {
+ Title: "password",
+ Type: "string",
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_acmedns_test.go b/backend/internal/dnsproviders/dns_acmedns_test.go
new file mode 100644
index 000000000..6c5f25db4
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_acmedns_test.go
@@ -0,0 +1,52 @@
+package dnsproviders
+
+import (
+ "testing"
+
+ "npm/internal/util"
+
+ "github.com/stretchr/testify/assert"
+ "go.uber.org/goleak"
+)
+
+func TestAcmeDNSProvider(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ provider := getDNSAcmeDNS()
+ json, err := provider.GetJSONSchema()
+ assert.Nil(t, err)
+ assert.Equal(t, `{
+ "title": "dns_acmedns",
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "ACMEDNS_BASE_URL",
+ "ACMEDNS_SUBDOMAIN",
+ "ACMEDNS_USERNAME",
+ "ACMEDNS_PASSWORD"
+ ],
+ "properties": {
+ "ACMEDNS_BASE_URL": {
+ "title": "base-url",
+ "type": "string",
+ "additionalProperties": false
+ },
+ "ACMEDNS_PASSWORD": {
+ "title": "password",
+ "type": "string",
+ "additionalProperties": false
+ },
+ "ACMEDNS_SUBDOMAIN": {
+ "title": "subdomain",
+ "type": "string",
+ "additionalProperties": false
+ },
+ "ACMEDNS_USERNAME": {
+ "title": "username",
+ "type": "string",
+ "additionalProperties": false
+ }
+ }
+}`, util.PrettyPrintJSON(json))
+}
diff --git a/backend/internal/dnsproviders/dns_ad.go b/backend/internal/dnsproviders/dns_ad.go
new file mode 100644
index 000000000..243f23ca7
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_ad.go
@@ -0,0 +1,19 @@
+package dnsproviders
+
+func getDNSAd() Provider {
+ return Provider{
+ Title: "dns_ad",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "AD_API_KEY",
+ },
+ Properties: map[string]providerField{
+ "AD_API_KEY": {
+ Title: "api-key",
+ Type: "string",
+ MinLength: 1,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_ad_test.go b/backend/internal/dnsproviders/dns_ad_test.go
new file mode 100644
index 000000000..4bf3b4716
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_ad_test.go
@@ -0,0 +1,34 @@
+package dnsproviders
+
+import (
+ "testing"
+
+ "npm/internal/util"
+
+ "github.com/stretchr/testify/assert"
+ "go.uber.org/goleak"
+)
+
+func TestAdProvider(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ provider := getDNSAd()
+ provider.ConvertToUpdatable()
+ json, err := provider.GetJSONSchema()
+ assert.Nil(t, err)
+ assert.Equal(t, `{
+ "title": "dns_ad",
+ "type": "object",
+ "additionalProperties": false,
+ "minProperties": 1,
+ "properties": {
+ "AD_API_KEY": {
+ "title": "api-key",
+ "type": "string",
+ "additionalProperties": false,
+ "minLength": 1
+ }
+ }
+}`, util.PrettyPrintJSON(json))
+}
diff --git a/backend/internal/dnsproviders/dns_ali.go b/backend/internal/dnsproviders/dns_ali.go
new file mode 100644
index 000000000..e338f2768
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_ali.go
@@ -0,0 +1,26 @@
+package dnsproviders
+
+func getDNSAli() Provider {
+ return Provider{
+ Title: "dns_ali",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "Ali_Key",
+ "Ali_Secret",
+ },
+ Properties: map[string]providerField{
+ "Ali_Key": {
+ Title: "api-key",
+ Type: "string",
+ MinLength: 1,
+ },
+ "Ali_Secret": {
+ Title: "secret",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_ali_test.go b/backend/internal/dnsproviders/dns_ali_test.go
new file mode 100644
index 000000000..857837398
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_ali_test.go
@@ -0,0 +1,42 @@
+package dnsproviders
+
+import (
+ "testing"
+
+ "npm/internal/util"
+
+ "github.com/stretchr/testify/assert"
+ "go.uber.org/goleak"
+)
+
+func TestAliProvider(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ provider := getDNSAli()
+ json, err := provider.GetJSONSchema()
+ assert.Nil(t, err)
+ assert.Equal(t, `{
+ "title": "dns_ali",
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "Ali_Key",
+ "Ali_Secret"
+ ],
+ "properties": {
+ "Ali_Key": {
+ "title": "api-key",
+ "type": "string",
+ "additionalProperties": false,
+ "minLength": 1
+ },
+ "Ali_Secret": {
+ "title": "secret",
+ "type": "string",
+ "additionalProperties": false,
+ "minLength": 1
+ }
+ }
+}`, util.PrettyPrintJSON(json))
+}
diff --git a/backend/internal/dnsproviders/dns_autodns.go b/backend/internal/dnsproviders/dns_autodns.go
new file mode 100644
index 000000000..bb04f7c4e
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_autodns.go
@@ -0,0 +1,32 @@
+package dnsproviders
+
+func getDNSAutoDNS() Provider {
+ return Provider{
+ Title: "dns_autodns",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "AUTODNS_USER",
+ "AUTODNS_PASSWORD",
+ "AUTODNS_CONTEXT",
+ },
+ Properties: map[string]providerField{
+ "AUTODNS_USER": {
+ Title: "user",
+ Type: "string",
+ MinLength: 1,
+ },
+ "AUTODNS_PASSWORD": {
+ Title: "password",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ "AUTODNS_CONTEXT": {
+ Title: "context",
+ Type: "string",
+ MinLength: 1,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_aws.go b/backend/internal/dnsproviders/dns_aws.go
new file mode 100644
index 000000000..aed069452
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_aws.go
@@ -0,0 +1,30 @@
+package dnsproviders
+
+func getDNSAws() Provider {
+ return Provider{
+ Title: "dns_aws",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "AWS_ACCESS_KEY_ID",
+ "AWS_SECRET_ACCESS_KEY",
+ },
+ Properties: map[string]providerField{
+ "AWS_ACCESS_KEY_ID": {
+ Title: "access-key-id",
+ Type: "string",
+ MinLength: 10,
+ },
+ "AWS_SECRET_ACCESS_KEY": {
+ Title: "secret-access-key",
+ Type: "string",
+ MinLength: 10,
+ IsSecret: true,
+ },
+ "AWS_DNS_SLOWRATE": {
+ Title: "slow-rate",
+ Type: "integer",
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_azure.go b/backend/internal/dnsproviders/dns_azure.go
new file mode 100644
index 000000000..a8c5a4377
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_azure.go
@@ -0,0 +1,38 @@
+package dnsproviders
+
+func getDNSAzure() Provider {
+ return Provider{
+ Title: "dns_azure",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "AZUREDNS_SUBSCRIPTIONID",
+ "AZUREDNS_TENANTID",
+ "AZUREDNS_APPID",
+ "AZUREDNS_CLIENTSECRET",
+ },
+ Properties: map[string]providerField{
+ "AZUREDNS_SUBSCRIPTIONID": {
+ Title: "subscription-id",
+ Type: "string",
+ MinLength: 1,
+ },
+ "AZUREDNS_TENANTID": {
+ Title: "tenant-id",
+ Type: "string",
+ MinLength: 1,
+ },
+ "AZUREDNS_APPID": {
+ Title: "app-id",
+ Type: "string",
+ MinLength: 1,
+ },
+ "AZUREDNS_CLIENTSECRET": {
+ Title: "client-secret",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_cf.go b/backend/internal/dnsproviders/dns_cf.go
new file mode 100644
index 000000000..b35695c28
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_cf.go
@@ -0,0 +1,43 @@
+package dnsproviders
+
+func getDNSCf() Provider {
+ return Provider{
+ Title: "dns_cf",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "CF_Key",
+ "CF_Email",
+ "CF_Token",
+ "CF_Account_ID",
+ },
+ Properties: map[string]providerField{
+ "CF_Key": {
+ Title: "api-key",
+ Type: "string",
+ MinLength: 1,
+ },
+ "CF_Email": {
+ Title: "email",
+ Type: "string",
+ MinLength: 5,
+ },
+ "CF_Token": {
+ Title: "token",
+ Type: "string",
+ MinLength: 5,
+ IsSecret: true,
+ },
+ "CF_Account_ID": {
+ Title: "account-id",
+ Type: "string",
+ MinLength: 1,
+ },
+ "CF_Zone_ID": {
+ Title: "zone-id",
+ Type: "string",
+ MinLength: 1,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_cloudns.go b/backend/internal/dnsproviders/dns_cloudns.go
new file mode 100644
index 000000000..297de58de
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_cloudns.go
@@ -0,0 +1,32 @@
+package dnsproviders
+
+func getDNSCloudns() Provider {
+ return Provider{
+ Title: "dns_cloudns",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "CLOUDNS_AUTH_ID",
+ "CLOUDNS_SUB_AUTH_ID",
+ "CLOUDNS_AUTH_PASSWORD",
+ },
+ Properties: map[string]providerField{
+ "CLOUDNS_AUTH_ID": {
+ Title: "auth-id",
+ Type: "string",
+ MinLength: 1,
+ },
+ "CLOUDNS_SUB_AUTH_ID": {
+ Title: "sub-auth-id",
+ Type: "string",
+ MinLength: 1,
+ },
+ "CLOUDNS_AUTH_PASSWORD": {
+ Title: "password",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_conoha.go b/backend/internal/dnsproviders/dns_conoha.go
new file mode 100644
index 000000000..d84aacd41
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_conoha.go
@@ -0,0 +1,38 @@
+package dnsproviders
+
+func getDNSConoha() Provider {
+ return Provider{
+ Title: "dns_conoha",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "CONOHA_IdentityServiceApi",
+ "CONOHA_Username",
+ "CONOHA_Password",
+ "CONOHA_TenantId",
+ },
+ Properties: map[string]providerField{
+ "CONOHA_IdentityServiceApi": {
+ Title: "api-url",
+ Type: "string",
+ MinLength: 4,
+ },
+ "CONOHA_Username": {
+ Title: "username",
+ Type: "string",
+ MinLength: 1,
+ },
+ "CONOHA_Password": {
+ Title: "password",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ "CONOHA_TenantId": {
+ Title: "tenant-id",
+ Type: "string",
+ MinLength: 1,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_cx.go b/backend/internal/dnsproviders/dns_cx.go
new file mode 100644
index 000000000..2750124cf
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_cx.go
@@ -0,0 +1,24 @@
+package dnsproviders
+
+func getDNSCx() Provider {
+ return Provider{
+ Title: "dns_cx",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "CX_Key",
+ "CX_Secret",
+ },
+ Properties: map[string]providerField{
+ "CX_Key": {
+ Title: "key",
+ Type: "string",
+ },
+ "CX_Secret": {
+ Title: "secret",
+ Type: "string",
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_cyon.go b/backend/internal/dnsproviders/dns_cyon.go
new file mode 100644
index 000000000..87a5ab379
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_cyon.go
@@ -0,0 +1,33 @@
+package dnsproviders
+
+func getDNSCyon() Provider {
+ return Provider{
+ Title: "dns_cyon",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "CY_Username",
+ "CY_Password",
+ "CY_OTP_Secret",
+ },
+ Properties: map[string]providerField{
+ "CY_Username": {
+ Title: "user",
+ Type: "string",
+ MinLength: 1,
+ },
+ "CY_Password": {
+ Title: "password",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ "CY_OTP_Secret": {
+ Title: "otp-secret",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_da.go b/backend/internal/dnsproviders/dns_da.go
new file mode 100644
index 000000000..ee9e0f613
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_da.go
@@ -0,0 +1,23 @@
+package dnsproviders
+
+func getDNSDa() Provider {
+ return Provider{
+ Title: "dns_da",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "DA_Api",
+ },
+ Properties: map[string]providerField{
+ "DA_Api": {
+ Title: "api-url",
+ Type: "string",
+ MinLength: 4,
+ },
+ "DA_Api_Insecure": {
+ Title: "insecure",
+ Type: "boolean",
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_dgon.go b/backend/internal/dnsproviders/dns_dgon.go
new file mode 100644
index 000000000..91113cceb
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_dgon.go
@@ -0,0 +1,19 @@
+package dnsproviders
+
+func getDNSDgon() Provider {
+ return Provider{
+ Title: "dns_dgon",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "DO_API_KEY",
+ },
+ Properties: map[string]providerField{
+ "DO_API_KEY": {
+ Title: "api-key",
+ Type: "string",
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_dnsimple.go b/backend/internal/dnsproviders/dns_dnsimple.go
new file mode 100644
index 000000000..a4a02ae16
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_dnsimple.go
@@ -0,0 +1,20 @@
+package dnsproviders
+
+func getDNSDNSimple() Provider {
+ return Provider{
+ Title: "dns_dnsimple",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "DNSimple_OAUTH_TOKEN",
+ },
+ Properties: map[string]providerField{
+ "DNSimple_OAUTH_TOKEN": {
+ Title: "oauth-token",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_dp.go b/backend/internal/dnsproviders/dns_dp.go
new file mode 100644
index 000000000..cece1a013
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_dp.go
@@ -0,0 +1,26 @@
+package dnsproviders
+
+func getDNSDp() Provider {
+ return Provider{
+ Title: "dns_dp",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "DP_Id",
+ "DP_Key",
+ },
+ Properties: map[string]providerField{
+ "DP_Id": {
+ Title: "id",
+ Type: "string",
+ MinLength: 1,
+ },
+ "DP_Key": {
+ Title: "key",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_dpi.go b/backend/internal/dnsproviders/dns_dpi.go
new file mode 100644
index 000000000..0b5255c58
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_dpi.go
@@ -0,0 +1,26 @@
+package dnsproviders
+
+func getDNSDpi() Provider {
+ return Provider{
+ Title: "dns_dpi",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "DPI_Id",
+ "DPI_Key",
+ },
+ Properties: map[string]providerField{
+ "DPI_Id": {
+ Title: "id",
+ Type: "string",
+ MinLength: 1,
+ },
+ "DPI_Key": {
+ Title: "key",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_dreamhost.go b/backend/internal/dnsproviders/dns_dreamhost.go
new file mode 100644
index 000000000..037c5f84e
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_dreamhost.go
@@ -0,0 +1,20 @@
+package dnsproviders
+
+func getDNSDreamhost() Provider {
+ return Provider{
+ Title: "dns_dreamhost",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "DH_API_KEY",
+ },
+ Properties: map[string]providerField{
+ "DH_API_KEY": {
+ Title: "api-key",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_duckdns.go b/backend/internal/dnsproviders/dns_duckdns.go
new file mode 100644
index 000000000..3407ca0cb
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_duckdns.go
@@ -0,0 +1,20 @@
+package dnsproviders
+
+func getDNSDuckDNS() Provider {
+ return Provider{
+ Title: "dns_duckdns",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "DuckDNS_Token",
+ },
+ Properties: map[string]providerField{
+ "DuckDNS_Token": {
+ Title: "token",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_dyn.go b/backend/internal/dnsproviders/dns_dyn.go
new file mode 100644
index 000000000..be595ce31
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_dyn.go
@@ -0,0 +1,32 @@
+package dnsproviders
+
+func getDNSDyn() Provider {
+ return Provider{
+ Title: "dns_dyn",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "DYN_Customer",
+ "DYN_Username",
+ "DYN_Password",
+ },
+ Properties: map[string]providerField{
+ "DYN_Customer": {
+ Title: "customer",
+ Type: "string",
+ MinLength: 1,
+ },
+ "DYN_Username": {
+ Title: "username",
+ Type: "string",
+ MinLength: 1,
+ },
+ "DYN_Password": {
+ Title: "password",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_dynu.go b/backend/internal/dnsproviders/dns_dynu.go
new file mode 100644
index 000000000..1f3024c19
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_dynu.go
@@ -0,0 +1,25 @@
+package dnsproviders
+
+func getDNSDynu() Provider {
+ return Provider{
+ Title: "dns_dynu",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "Dynu_ClientId",
+ },
+ Properties: map[string]providerField{
+ "Dynu_ClientId": {
+ Title: "client-id",
+ Type: "string",
+ MinLength: 1,
+ },
+ "Dynu_Secret": {
+ Title: "secret",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_euserv.go b/backend/internal/dnsproviders/dns_euserv.go
new file mode 100644
index 000000000..be500480c
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_euserv.go
@@ -0,0 +1,26 @@
+package dnsproviders
+
+func getDNSEuserv() Provider {
+ return Provider{
+ Title: "dns_euserv",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "EUSERV_Username",
+ "EUSERV_Password",
+ },
+ Properties: map[string]providerField{
+ "EUSERV_Username": {
+ Title: "user",
+ Type: "string",
+ MinLength: 1,
+ },
+ "EUSERV_Password": {
+ Title: "password",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_freedns.go b/backend/internal/dnsproviders/dns_freedns.go
new file mode 100644
index 000000000..7c6849b10
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_freedns.go
@@ -0,0 +1,26 @@
+package dnsproviders
+
+func getDNSFreeDNS() Provider {
+ return Provider{
+ Title: "dns_freedns",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "FREEDNS_User",
+ "FREEDNS_Password",
+ },
+ Properties: map[string]providerField{
+ "FREEDNS_User": {
+ Title: "user",
+ Type: "string",
+ MinLength: 1,
+ },
+ "FREEDNS_Password": {
+ Title: "password",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_gandi_livedns.go b/backend/internal/dnsproviders/dns_gandi_livedns.go
new file mode 100644
index 000000000..a22ebbff6
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_gandi_livedns.go
@@ -0,0 +1,19 @@
+package dnsproviders
+
+func getDNSGandiLiveDNS() Provider {
+ return Provider{
+ Title: "dns_gandi_livedns",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "GANDI_LIVEDNS_KEY",
+ },
+ Properties: map[string]providerField{
+ "GANDI_LIVEDNS_KEY": {
+ Title: "key",
+ Type: "string",
+ MinLength: 1,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_gd.go b/backend/internal/dnsproviders/dns_gd.go
new file mode 100644
index 000000000..11248948f
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_gd.go
@@ -0,0 +1,26 @@
+package dnsproviders
+
+func getDNSGd() Provider {
+ return Provider{
+ Title: "dns_gd",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "GD_Key",
+ "GD_Secret",
+ },
+ Properties: map[string]providerField{
+ "GD_Key": {
+ Title: "key",
+ Type: "string",
+ MinLength: 1,
+ },
+ "GD_Secret": {
+ Title: "secret",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_he.go b/backend/internal/dnsproviders/dns_he.go
new file mode 100644
index 000000000..f4aed32bc
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_he.go
@@ -0,0 +1,26 @@
+package dnsproviders
+
+func getDNSHe() Provider {
+ return Provider{
+ Title: "dns_he",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "HE_Username",
+ "HE_Password",
+ },
+ Properties: map[string]providerField{
+ "HE_Username": {
+ Title: "username",
+ Type: "string",
+ MinLength: 1,
+ },
+ "HE_Password": {
+ Title: "password",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_infoblox.go b/backend/internal/dnsproviders/dns_infoblox.go
new file mode 100644
index 000000000..f356f36c5
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_infoblox.go
@@ -0,0 +1,26 @@
+package dnsproviders
+
+func getDNSInfoblox() Provider {
+ return Provider{
+ Title: "dns_infoblox",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "Infoblox_Creds",
+ "Infoblox_Server",
+ },
+ Properties: map[string]providerField{
+ "Infoblox_Creds": {
+ Title: "credentials",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ "Infoblox_Server": {
+ Title: "server",
+ Type: "string",
+ MinLength: 1,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_inwx.go b/backend/internal/dnsproviders/dns_inwx.go
new file mode 100644
index 000000000..c0f75cca2
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_inwx.go
@@ -0,0 +1,26 @@
+package dnsproviders
+
+func getDNSInwx() Provider {
+ return Provider{
+ Title: "dns_inwx",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "INWX_User",
+ "INWX_Password",
+ },
+ Properties: map[string]providerField{
+ "INWX_User": {
+ Title: "user",
+ Type: "string",
+ MinLength: 1,
+ },
+ "INWX_Password": {
+ Title: "password",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_ispconfig.go b/backend/internal/dnsproviders/dns_ispconfig.go
new file mode 100644
index 000000000..f10e0f799
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_ispconfig.go
@@ -0,0 +1,36 @@
+package dnsproviders
+
+func getDNSIspconfig() Provider {
+ return Provider{
+ Title: "dns_ispconfig",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "ISPC_User",
+ "ISPC_Password",
+ "ISPC_Api",
+ },
+ Properties: map[string]providerField{
+ "ISPC_User": {
+ Title: "user",
+ Type: "string",
+ MinLength: 1,
+ },
+ "ISPC_Password": {
+ Title: "password",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ "ISPC_Api": {
+ Title: "api-url",
+ Type: "string",
+ MinLength: 1,
+ },
+ "ISPC_Api_Insecure": {
+ Title: "insecure",
+ Type: "boolean",
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_kinghost.go b/backend/internal/dnsproviders/dns_kinghost.go
new file mode 100644
index 000000000..a497acc8c
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_kinghost.go
@@ -0,0 +1,26 @@
+package dnsproviders
+
+func getDNSKinghost() Provider {
+ return Provider{
+ Title: "dns_kinghost",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "KINGHOST_Username",
+ "KINGHOST_Password",
+ },
+ Properties: map[string]providerField{
+ "KINGHOST_Username": {
+ Title: "user",
+ Type: "string",
+ MinLength: 1,
+ },
+ "KINGHOST_Password": {
+ Title: "password",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_linode_v4.go b/backend/internal/dnsproviders/dns_linode_v4.go
new file mode 100644
index 000000000..ddafeca17
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_linode_v4.go
@@ -0,0 +1,22 @@
+package dnsproviders
+
+// Note: https://github.com/acmesh-official/acme.sh/wiki/dnsapi#14-use-linode-domain-api
+// needs 15 minute sleep, not currently implemented
+func getDNSLinodeV4() Provider {
+ return Provider{
+ Title: "dns_linode_v4",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "LINODE_V4_API_KEY",
+ },
+ Properties: map[string]providerField{
+ "LINODE_V4_API_KEY": {
+ Title: "api-key",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_loopia.go b/backend/internal/dnsproviders/dns_loopia.go
new file mode 100644
index 000000000..64d22d329
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_loopia.go
@@ -0,0 +1,32 @@
+package dnsproviders
+
+func getDNSLoopia() Provider {
+ return Provider{
+ Title: "dns_loopia",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "LOOPIA_Api",
+ "LOOPIA_User",
+ "LOOPIA_Password",
+ },
+ Properties: map[string]providerField{
+ "LOOPIA_Api": {
+ Title: "api-url",
+ Type: "string",
+ MinLength: 4,
+ },
+ "LOOPIA_User": {
+ Title: "user",
+ Type: "string",
+ MinLength: 1,
+ },
+ "LOOPIA_Password": {
+ Title: "password",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_lua.go b/backend/internal/dnsproviders/dns_lua.go
new file mode 100644
index 000000000..b79a87a7b
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_lua.go
@@ -0,0 +1,26 @@
+package dnsproviders
+
+func getDNSLua() Provider {
+ return Provider{
+ Title: "dns_lua",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "LUA_Key",
+ "LUA_Email",
+ },
+ Properties: map[string]providerField{
+ "LUA_Key": {
+ Title: "key",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ "LUA_Email": {
+ Title: "email",
+ Type: "string",
+ MinLength: 5,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_me.go b/backend/internal/dnsproviders/dns_me.go
new file mode 100644
index 000000000..96b3d4a2a
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_me.go
@@ -0,0 +1,26 @@
+package dnsproviders
+
+func getDNSMe() Provider {
+ return Provider{
+ Title: "dns_me",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "ME_Key",
+ "ME_Secret",
+ },
+ Properties: map[string]providerField{
+ "ME_Key": {
+ Title: "key",
+ Type: "string",
+ MinLength: 1,
+ },
+ "ME_Secret": {
+ Title: "secret",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_namecom.go b/backend/internal/dnsproviders/dns_namecom.go
new file mode 100644
index 000000000..9a72de6cb
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_namecom.go
@@ -0,0 +1,26 @@
+package dnsproviders
+
+func getDNSNamecom() Provider {
+ return Provider{
+ Title: "dns_namecom",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "Namecom_Username",
+ "Namecom_Token",
+ },
+ Properties: map[string]providerField{
+ "Namecom_Username": {
+ Title: "username",
+ Type: "string",
+ MinLength: 1,
+ },
+ "Namecom_Token": {
+ Title: "token",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_namesilo.go b/backend/internal/dnsproviders/dns_namesilo.go
new file mode 100644
index 000000000..d44482131
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_namesilo.go
@@ -0,0 +1,20 @@
+package dnsproviders
+
+func getDNSNamesilo() Provider {
+ return Provider{
+ Title: "dns_namesilo",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "Namesilo_Key",
+ },
+ Properties: map[string]providerField{
+ "Namesilo_Key": {
+ Title: "api-key",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_nsone.go b/backend/internal/dnsproviders/dns_nsone.go
new file mode 100644
index 000000000..36c4594ff
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_nsone.go
@@ -0,0 +1,20 @@
+package dnsproviders
+
+func getDNSOne() Provider {
+ return Provider{
+ Title: "dns_nsone",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "NS1_Key",
+ },
+ Properties: map[string]providerField{
+ "NS1_Key": {
+ Title: "key",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_pdns.go b/backend/internal/dnsproviders/dns_pdns.go
new file mode 100644
index 000000000..ee3802839
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_pdns.go
@@ -0,0 +1,37 @@
+package dnsproviders
+
+func getDNSPDNS() Provider {
+ return Provider{
+ Title: "dns_pdns",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "PDNS_Url",
+ "PDNS_ServerId",
+ "PDNS_Token",
+ "PDNS_Ttl",
+ },
+ Properties: map[string]providerField{
+ "PDNS_Url": {
+ Title: "url",
+ Type: "string",
+ MinLength: 1,
+ },
+ "PDNS_ServerId": {
+ Title: "server-id",
+ Type: "string",
+ MinLength: 1,
+ },
+ "PDNS_Token": {
+ Title: "token",
+ Type: "string",
+ MinLength: 1,
+ },
+ "PDNS_Ttl": {
+ Title: "ttl",
+ Type: "integer",
+ Minimum: 1,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_selectel.go b/backend/internal/dnsproviders/dns_selectel.go
new file mode 100644
index 000000000..7e97dce12
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_selectel.go
@@ -0,0 +1,20 @@
+package dnsproviders
+
+func getDNSSelectel() Provider {
+ return Provider{
+ Title: "dns_selectel",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "SL_Key",
+ },
+ Properties: map[string]providerField{
+ "SL_Key": {
+ Title: "api-key",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_servercow.go b/backend/internal/dnsproviders/dns_servercow.go
new file mode 100644
index 000000000..007d49d13
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_servercow.go
@@ -0,0 +1,26 @@
+package dnsproviders
+
+func getDNSServercow() Provider {
+ return Provider{
+ Title: "dns_servercow",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "SERVERCOW_API_Username",
+ "SERVERCOW_API_Password",
+ },
+ Properties: map[string]providerField{
+ "SERVERCOW_API_Username": {
+ Title: "user",
+ Type: "string",
+ MinLength: 1,
+ },
+ "SERVERCOW_API_Password": {
+ Title: "password",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_tele3.go b/backend/internal/dnsproviders/dns_tele3.go
new file mode 100644
index 000000000..c40b8c7be
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_tele3.go
@@ -0,0 +1,26 @@
+package dnsproviders
+
+func getDNSTele3() Provider {
+ return Provider{
+ Title: "dns_tele3",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "TELE3_Key",
+ "TELE3_Secret",
+ },
+ Properties: map[string]providerField{
+ "TELE3_Key": {
+ Title: "key",
+ Type: "string",
+ MinLength: 1,
+ },
+ "TELE3_Secret": {
+ Title: "secret",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_unoeuro.go b/backend/internal/dnsproviders/dns_unoeuro.go
new file mode 100644
index 000000000..63b6fa3fc
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_unoeuro.go
@@ -0,0 +1,27 @@
+package dnsproviders
+
+func getDNSUnoeuro() Provider {
+ return Provider{
+ Title: "dns_unoeuro",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "UNO_Key",
+ "UNO_User",
+ },
+ Properties: map[string]providerField{
+ "UNO_Key": {
+ Title: "key",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ "UNO_User": {
+ Title: "user",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_vscale.go b/backend/internal/dnsproviders/dns_vscale.go
new file mode 100644
index 000000000..86f1cbf5d
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_vscale.go
@@ -0,0 +1,19 @@
+package dnsproviders
+
+func getDNSVscale() Provider {
+ return Provider{
+ Title: "dns_vscale",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "VSCALE_API_KEY",
+ },
+ Properties: map[string]providerField{
+ "VSCALE_API_KEY": {
+ Title: "api-key",
+ Type: "string",
+ MinLength: 1,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_yandex.go b/backend/internal/dnsproviders/dns_yandex.go
new file mode 100644
index 000000000..88ab7b470
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_yandex.go
@@ -0,0 +1,20 @@
+package dnsproviders
+
+func getDNSYandex() Provider {
+ return Provider{
+ Title: "dns_yandex",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "PDD_Token",
+ },
+ Properties: map[string]providerField{
+ "PDD_Token": {
+ Title: "token",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_zilore.go b/backend/internal/dnsproviders/dns_zilore.go
new file mode 100644
index 000000000..b35f21591
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_zilore.go
@@ -0,0 +1,20 @@
+package dnsproviders
+
+func getDNSDNZilore() Provider {
+ return Provider{
+ Title: "dns_zilore",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "Zilore_Key",
+ },
+ Properties: map[string]providerField{
+ "Zilore_Key": {
+ Title: "api-key",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/dnsproviders/dns_zonomi.go b/backend/internal/dnsproviders/dns_zonomi.go
new file mode 100644
index 000000000..f93f3ef84
--- /dev/null
+++ b/backend/internal/dnsproviders/dns_zonomi.go
@@ -0,0 +1,20 @@
+package dnsproviders
+
+func getDNSZonomi() Provider {
+ return Provider{
+ Title: "dns_zonomi",
+ Type: "object",
+ AdditionalProperties: false,
+ Required: []string{
+ "ZM_Key",
+ },
+ Properties: map[string]providerField{
+ "ZM_Key": {
+ Title: "api-key",
+ Type: "string",
+ MinLength: 1,
+ IsSecret: true,
+ },
+ },
+ }
+}
diff --git a/backend/internal/entity/accesslist/methods.go b/backend/internal/entity/accesslist/methods.go
new file mode 100644
index 000000000..b0c9ad892
--- /dev/null
+++ b/backend/internal/entity/accesslist/methods.go
@@ -0,0 +1,50 @@
+package accesslist
+
+import (
+ "npm/internal/entity"
+ "npm/internal/model"
+)
+
+// GetByID finds a row by ID
+func GetByID(id uint) (Model, error) {
+ var m Model
+ err := m.LoadByID(id)
+ return m, err
+}
+
+// List will return a list of access lists
+func List(pageInfo model.PageInfo, filters []model.Filter) (entity.ListResponse, error) {
+ var result entity.ListResponse
+
+ defaultSort := model.Sort{
+ Field: "name",
+ Direction: "ASC",
+ }
+
+ dbo := entity.ListQueryBuilder(&pageInfo, filters, entity.GetFilterMap(Model{}, true))
+
+ // Get count of items in this search
+ var totalRows int64
+ if res := dbo.Model(&Model{}).Count(&totalRows); res.Error != nil {
+ return result, res.Error
+ }
+
+ // Get rows
+ dbo = entity.AddOffsetLimitToList(dbo, &pageInfo)
+ dbo = entity.AddOrderToList(dbo, pageInfo.Sort, defaultSort)
+ items := make([]Model, 0)
+ if res := dbo.Find(&items); res.Error != nil {
+ return result, res.Error
+ }
+
+ result = entity.ListResponse{
+ Items: items,
+ Total: totalRows,
+ Limit: pageInfo.Limit,
+ Offset: pageInfo.Offset,
+ Sort: pageInfo.GetSort(defaultSort),
+ Filter: filters,
+ }
+
+ return result, nil
+}
diff --git a/backend/internal/entity/accesslist/model.go b/backend/internal/entity/accesslist/model.go
new file mode 100644
index 000000000..64e0a0587
--- /dev/null
+++ b/backend/internal/entity/accesslist/model.go
@@ -0,0 +1,54 @@
+package accesslist
+
+import (
+ "npm/internal/database"
+ "npm/internal/entity/user"
+ "npm/internal/model"
+ "npm/internal/types"
+
+ "github.com/rotisserie/eris"
+)
+
+// Model is the model
+type Model struct {
+ model.Base
+ UserID uint `json:"user_id" gorm:"column:user_id" filter:"user_id,integer"`
+ Name string `json:"name" gorm:"column:name" filter:"name,string"`
+ Meta types.JSONB `json:"meta" gorm:"column:meta"`
+ // Expansions
+ User *user.Model `json:"user,omitempty" gorm:"-"`
+}
+
+// TableName overrides the table name used by gorm
+func (Model) TableName() string {
+ return "access_list"
+}
+
+// LoadByID will load from an ID
+func (m *Model) LoadByID(id uint) error {
+ db := database.GetDB()
+ result := db.First(&m, id)
+ return result.Error
+}
+
+// Save will save this model to the DB
+func (m *Model) Save() error {
+ if m.UserID == 0 {
+ return eris.Errorf("User ID must be specified")
+ }
+
+ db := database.GetDB()
+ result := db.Save(m)
+ return result.Error
+}
+
+// Delete will mark row as deleted
+func (m *Model) Delete() bool {
+ if m.ID == 0 {
+ // Can't delete a new object
+ return false
+ }
+ db := database.GetDB()
+ result := db.Delete(m)
+ return result.Error == nil
+}
diff --git a/backend/internal/entity/auth/entity_test.go b/backend/internal/entity/auth/entity_test.go
new file mode 100644
index 000000000..b4b6f8d42
--- /dev/null
+++ b/backend/internal/entity/auth/entity_test.go
@@ -0,0 +1,166 @@
+package auth
+
+import (
+ "regexp"
+ "testing"
+
+ "npm/internal/test"
+
+ "github.com/DATA-DOG/go-sqlmock"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+ "github.com/stretchr/testify/suite"
+ "go.uber.org/goleak"
+)
+
+// +------------+
+// | Setup |
+// +------------+
+
+type testsuite struct {
+ suite.Suite
+ mock sqlmock.Sqlmock
+ singleRow *sqlmock.Rows
+}
+
+// SetupTest is executed before each test
+func (s *testsuite) SetupTest() {
+ var err error
+ s.mock, err = test.Setup()
+ require.NoError(s.T(), err)
+
+ // These rows need to be intantiated for each test as they are
+ // read in the db object, and their row position is not resettable
+ // between tests.
+ s.singleRow = sqlmock.NewRows([]string{
+ "id",
+ "user_id",
+ "type",
+ "secret",
+ }).AddRow(
+ 10,
+ 100,
+ TypeLocal,
+ "abc123",
+ )
+}
+
+// In order for 'go test' to run this suite, we need to create
+// a normal test function and pass our suite to suite.Run
+func TestExampleTestSuite(t *testing.T) {
+ suite.Run(t, new(testsuite))
+}
+
+func assertModel(t *testing.T, m Model) {
+ assert.Equal(t, uint(10), m.ID)
+ assert.Equal(t, uint(100), m.UserID)
+ assert.Equal(t, TypeLocal, m.Type)
+ assert.Equal(t, "abc123", m.Secret)
+}
+
+// +------------+
+// | Tests |
+// +------------+
+
+func (s *testsuite) TestGetByID() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ s.mock.
+ ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "auth" WHERE "auth"."id" = $1 AND "auth"."is_deleted" = $2 ORDER BY "auth"."id" LIMIT $3`)).
+ WithArgs(10, 0, 1).
+ WillReturnRows(s.singleRow)
+
+ m, err := GetByID(10)
+ require.NoError(s.T(), err)
+ require.NoError(s.T(), s.mock.ExpectationsWereMet())
+ assertModel(s.T(), m)
+}
+
+func (s *testsuite) TestGetByUserIDType() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ s.mock.
+ ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "auth" WHERE user_id = $1 AND type = $2 AND "auth"."is_deleted" = $3 ORDER BY "auth"."id" LIMIT $4`)).
+ WithArgs(100, TypeLocal, 0, 1).
+ WillReturnRows(s.singleRow)
+
+ m, err := GetByUserIDType(100, TypeLocal)
+ require.NoError(s.T(), err)
+ require.NoError(s.T(), s.mock.ExpectationsWereMet())
+ assertModel(s.T(), m)
+}
+
+func (s *testsuite) TestGetByIdenityType() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ s.mock.
+ ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "auth" WHERE identity = $1 AND type = $2 AND "auth"."is_deleted" = $3 ORDER BY "auth"."id" LIMIT $4`)).
+ WithArgs("johndoe", TypeLocal, 0, 1).
+ WillReturnRows(s.singleRow)
+
+ m, err := GetByIdenityType("johndoe", TypeLocal)
+ require.NoError(s.T(), err)
+ require.NoError(s.T(), s.mock.ExpectationsWereMet())
+ assertModel(s.T(), m)
+}
+
+func (s *testsuite) TestSave() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ s.mock.ExpectBegin()
+ s.mock.ExpectQuery(regexp.QuoteMeta(`INSERT INTO "auth" ("created_at","updated_at","is_deleted","user_id","type","identity","secret") VALUES ($1,$2,$3,$4,$5,$6,$7) RETURNING "id"`)).
+ WithArgs(
+ sqlmock.AnyArg(),
+ sqlmock.AnyArg(),
+ 0,
+ 100,
+ TypeLocal,
+ "",
+ "abc123",
+ ).
+ WillReturnRows(sqlmock.NewRows([]string{"id"}).AddRow("11"))
+ s.mock.ExpectCommit()
+
+ // New model
+ m := Model{
+ UserID: 100,
+ Type: TypeLocal,
+ Secret: "abc123",
+ }
+ err := m.Save()
+ require.NoError(s.T(), err)
+ require.NoError(s.T(), s.mock.ExpectationsWereMet())
+}
+
+func (s *testsuite) TestSetPassword() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+ m := Model{UserID: 100}
+ err := m.SetPassword("abc123")
+ require.NoError(s.T(), err)
+ assert.Equal(s.T(), TypeLocal, m.Type)
+ assert.Greater(s.T(), len(m.Secret), 15)
+}
+
+func (s *testsuite) TestValidateSecret() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ m := Model{UserID: 100}
+ m.SetPassword("abc123")
+
+ err := m.ValidateSecret("abc123")
+ require.NoError(s.T(), err)
+ err = m.ValidateSecret("this is not the password")
+ assert.NotNil(s.T(), err)
+ assert.Equal(s.T(), "Invalid Credentials", err.Error())
+
+ m.Type = "not a valid type"
+ err = m.ValidateSecret("abc123")
+ assert.NotNil(s.T(), err)
+ assert.Equal(s.T(), "Could not validate Secret, auth type is not Local", err.Error())
+}
diff --git a/backend/internal/entity/auth/ldap.go b/backend/internal/entity/auth/ldap.go
new file mode 100644
index 000000000..9a72f0cff
--- /dev/null
+++ b/backend/internal/entity/auth/ldap.go
@@ -0,0 +1,96 @@
+package auth
+
+import (
+ "encoding/json"
+ "fmt"
+ "strings"
+
+ "npm/internal/entity/setting"
+ "npm/internal/logger"
+
+ ldap3 "github.com/go-ldap/ldap/v3"
+ "github.com/rotisserie/eris"
+)
+
+// LDAPUser is the LDAP User
+type LDAPUser struct {
+ Username string `json:"username"`
+ Name string `json:"name"`
+ Email string `json:"email"`
+}
+
+// LDAPAuthenticate will use ldap to authenticate with user/pass
+func LDAPAuthenticate(identity, password string) (*LDAPUser, error) {
+ ldapSettings, err := setting.GetLDAPSettings()
+ if err != nil {
+ return nil, err
+ }
+
+ dn := strings.Replace(ldapSettings.UserDN, "{{USERNAME}}", identity, 1)
+ conn, err := ldapConnect(ldapSettings.Host, dn, password)
+ if err != nil {
+ return nil, err
+ }
+ // nolint: errcheck, gosec
+ defer conn.Close()
+ return ldapSearchUser(conn, ldapSettings, identity)
+}
+
+// Attempt ldap connection
+func ldapConnect(host, dn, password string) (*ldap3.Conn, error) {
+ var conn *ldap3.Conn
+ var err error
+
+ if conn, err = ldap3.DialURL(fmt.Sprintf("ldap://%s", host)); err != nil {
+ logger.Error("LdapError", err)
+ return nil, err
+ }
+
+ logger.Debug("LDAP Logging in with: %s", dn)
+ if err := conn.Bind(dn, password); err != nil {
+ if !strings.Contains(err.Error(), "Invalid Credentials") {
+ logger.Error("LDAPAuthError", err)
+ }
+ // nolint: gosec, errcheck
+ conn.Close()
+ return nil, err
+ }
+
+ logger.Debug("LDAP Login Successful")
+ return conn, nil
+}
+
+func ldapSearchUser(l *ldap3.Conn, ldapSettings setting.LDAPSettings, username string) (*LDAPUser, error) {
+ // Search for the given username
+ searchRequest := ldap3.NewSearchRequest(
+ ldapSettings.BaseDN,
+ ldap3.ScopeWholeSubtree,
+ ldap3.NeverDerefAliases,
+ 0,
+ 0,
+ false,
+ strings.Replace(ldapSettings.SelfFilter, "{{USERNAME}}", username, 1),
+ nil,
+ nil,
+ )
+
+ sr, err := l.Search(searchRequest)
+ if err != nil {
+ logger.Error("LdapError", err)
+ return nil, err
+ }
+
+ if len(sr.Entries) < 1 {
+ return nil, eris.New("No user found in LDAP search")
+ } else if len(sr.Entries) > 1 {
+ j, _ := json.Marshal(sr)
+ logger.Debug("LDAP Search Results: %s", j)
+ return nil, eris.Errorf("Too many LDAP results returned in LDAP search: %d", len(sr.Entries))
+ }
+
+ return &LDAPUser{
+ Username: strings.ToLower(username),
+ Name: sr.Entries[0].GetAttributeValue(ldapSettings.NameProperty),
+ Email: strings.ToLower(sr.Entries[0].GetAttributeValue(ldapSettings.EmailProperty)),
+ }, nil
+}
diff --git a/backend/internal/entity/auth/methods.go b/backend/internal/entity/auth/methods.go
new file mode 100644
index 000000000..8cce3f332
--- /dev/null
+++ b/backend/internal/entity/auth/methods.go
@@ -0,0 +1,34 @@
+package auth
+
+import (
+ "npm/internal/database"
+)
+
+// GetByID finds a auth by ID
+func GetByID(id int) (Model, error) {
+ var m Model
+ err := m.LoadByID(id)
+ return m, err
+}
+
+// GetByUserIDType finds a user by id and type
+func GetByUserIDType(userID uint, authType string) (Model, error) {
+ var auth Model
+ db := database.GetDB()
+ result := db.
+ Where("user_id = ?", userID).
+ Where("type = ?", authType).
+ First(&auth)
+ return auth, result.Error
+}
+
+// GetByIdenityType finds a user by identity and type
+func GetByIdenityType(identity string, authType string) (Model, error) {
+ var auth Model
+ db := database.GetDB()
+ result := db.
+ Where("identity = ?", identity).
+ Where("type = ?", authType).
+ First(&auth)
+ return auth, result.Error
+}
diff --git a/backend/internal/entity/auth/model.go b/backend/internal/entity/auth/model.go
new file mode 100644
index 000000000..38a556278
--- /dev/null
+++ b/backend/internal/entity/auth/model.go
@@ -0,0 +1,71 @@
+package auth
+
+import (
+ "npm/internal/database"
+ "npm/internal/model"
+
+ "github.com/rotisserie/eris"
+ "golang.org/x/crypto/bcrypt"
+)
+
+// Auth types
+const (
+ TypeLocal = "local"
+ TypeLDAP = "ldap"
+ TypeOAuth = "oauth"
+)
+
+// Model is the model
+type Model struct {
+ model.Base
+ UserID uint `json:"user_id" gorm:"column:user_id"`
+ Type string `json:"type" gorm:"column:type;default:local"`
+ Identity string `json:"identity,omitempty" gorm:"column:identity"`
+ Secret string `json:"secret,omitempty" gorm:"column:secret"`
+}
+
+// TableName overrides the table name used by gorm
+func (Model) TableName() string {
+ return "auth"
+}
+
+// LoadByID will load from an ID
+func (m *Model) LoadByID(id int) error {
+ db := database.GetDB()
+ result := db.First(&m, id)
+ return result.Error
+}
+
+// Save will save this model to the DB
+func (m *Model) Save() error {
+ db := database.GetDB()
+ result := db.Save(m)
+ return result.Error
+}
+
+// SetPassword will generate a hashed password based on given string
+func (m *Model) SetPassword(password string) error {
+ hash, err := bcrypt.GenerateFromPassword([]byte(password), bcrypt.MinCost+2)
+ if err != nil {
+ return err
+ }
+
+ m.Type = TypeLocal
+ m.Secret = string(hash)
+
+ return nil
+}
+
+// ValidateSecret will check if a given secret matches the encrypted secret
+func (m *Model) ValidateSecret(secret string) error {
+ if m.Type != TypeLocal {
+ return eris.New("Could not validate Secret, auth type is not Local")
+ }
+
+ err := bcrypt.CompareHashAndPassword([]byte(m.Secret), []byte(secret))
+ if err != nil {
+ return eris.New("Invalid Credentials")
+ }
+
+ return nil
+}
diff --git a/backend/internal/entity/auth/oauth.go b/backend/internal/entity/auth/oauth.go
new file mode 100644
index 000000000..438eb87ea
--- /dev/null
+++ b/backend/internal/entity/auth/oauth.go
@@ -0,0 +1,218 @@
+package auth
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "io"
+ "strings"
+ "time"
+
+ "npm/internal/entity/setting"
+ "npm/internal/logger"
+
+ cache "github.com/patrickmn/go-cache"
+ "github.com/rotisserie/eris"
+ "golang.org/x/oauth2"
+)
+
+// AuthCache is a cache item that stores the Admin API data for each admin that has been requesting endpoints
+var (
+ OAuthCache *cache.Cache
+ settingGetOAuthSettings = setting.GetOAuthSettings
+)
+
+// OAuthCacheInit will create a new Memory Cache
+func OAuthCacheInit() {
+ if OAuthCache == nil {
+ logger.Debug("Creating a new OAuthCache")
+ OAuthCache = cache.New(5*time.Minute, 5*time.Minute)
+ }
+}
+
+// OAuthUser is the OAuth User
+type OAuthUser struct {
+ Identifier string `json:"identifier"`
+ Token string `json:"token"`
+ Resource map[string]any `json:"resource"`
+}
+
+// GetResourceField will attempt to get a field from the resource
+func (m *OAuthUser) GetResourceField(field string) string {
+ if m.Resource != nil {
+ if value, ok := m.Resource[field]; ok {
+ return value.(string)
+ }
+ }
+ return ""
+}
+
+// GetID attempts to get an ID from the resource
+func (m *OAuthUser) GetID() string {
+ if m.Identifier != "" {
+ return m.Identifier
+ }
+
+ fields := []string{
+ "uid",
+ "user_id",
+ "username",
+ "preferred_username",
+ "email",
+ "mail",
+ }
+
+ for _, field := range fields {
+ if val := m.GetResourceField(field); val != "" {
+ return val
+ }
+ }
+
+ return ""
+}
+
+// GetName attempts to get a name from the resource
+// using different fields
+func (m *OAuthUser) GetName() string {
+ fields := []string{
+ "nickname",
+ "given_name",
+ "name",
+ "preferred_username",
+ "username",
+ }
+
+ for _, field := range fields {
+ if name := m.GetResourceField(field); name != "" {
+ return name
+ }
+ }
+
+ // Fallback:
+ return m.Identifier
+}
+
+// GetEmail will return an email address even if it can't be known in the
+// Resource
+func (m *OAuthUser) GetEmail() string {
+ // See if there's an email field first
+ if email := m.GetResourceField("email"); email != "" {
+ return email
+ }
+
+ // Return the identifier if it looks like an email
+ if m.Identifier != "" {
+ if strings.Contains(m.Identifier, "@") {
+ return m.Identifier
+ }
+ return fmt.Sprintf("%s@oauth", m.Identifier)
+ }
+ return ""
+}
+
+func getOAuth2Config() (*oauth2.Config, *setting.OAuthSettings, error) {
+ oauthSettings, err := settingGetOAuthSettings()
+ if err != nil {
+ return nil, nil, err
+ }
+
+ if oauthSettings.ClientID == "" || oauthSettings.ClientSecret == "" || oauthSettings.AuthURL == "" || oauthSettings.TokenURL == "" {
+ return nil, nil, eris.New("oauth-settings-incorrect")
+ }
+
+ return &oauth2.Config{
+ ClientID: oauthSettings.ClientID,
+ ClientSecret: oauthSettings.ClientSecret,
+ Scopes: oauthSettings.Scopes,
+ Endpoint: oauth2.Endpoint{
+ AuthURL: oauthSettings.AuthURL,
+ TokenURL: oauthSettings.TokenURL,
+ },
+ }, &oauthSettings, nil
+}
+
+// OAuthLogin is hit by the client to generate a URL to redirect to
+// and start the oauth process
+func OAuthLogin(redirectBase, ipAddress string) (string, error) {
+ OAuthCacheInit()
+
+ conf, _, err := getOAuth2Config()
+ if err != nil {
+ return "", err
+ }
+
+ verifier := oauth2.GenerateVerifier()
+ OAuthCache.Set(getCacheKey(ipAddress), verifier, cache.DefaultExpiration)
+
+ // todo: state should be unique to the incoming IP address of the requester, I guess
+ url := conf.AuthCodeURL("state", oauth2.AccessTypeOnline, oauth2.S256ChallengeOption(verifier))
+
+ if redirectBase != "" {
+ url = url + "&redirect_uri=" + redirectBase + "/oauth/redirect"
+ }
+
+ logger.Debug("URL: %s", url)
+ return url, nil
+}
+
+// OAuthReturn ...
+func OAuthReturn(ctx context.Context, code, ipAddress string) (*OAuthUser, error) {
+ // Just in case...
+ OAuthCacheInit()
+
+ conf, oauthSettings, err := getOAuth2Config()
+ if err != nil {
+ return nil, err
+ }
+
+ verifier, found := OAuthCache.Get(getCacheKey(ipAddress))
+ if !found {
+ return nil, eris.New("oauth-verifier-not-found")
+ }
+
+ // Use the authorization code that is pushed to the redirect
+ // URL. Exchange will do the handshake to retrieve the
+ // initial access token. The HTTP Client returned by
+ // conf.Client will refresh the token as necessary.
+ tok, err := conf.Exchange(ctx, code, oauth2.VerifierOption(verifier.(string)))
+ if err != nil {
+ return nil, err
+ }
+
+ // At this stage, the token is the JWT as given by the oauth server.
+ // we need to use that to get more info about this user,
+ // and then we'll create our own jwt for use later.
+
+ client := conf.Client(ctx, tok)
+ resp, err := client.Get(oauthSettings.ResourceURL)
+ if err != nil {
+ return nil, err
+ }
+
+ // nolint: errcheck, gosec
+ defer resp.Body.Close()
+ body, err := io.ReadAll(resp.Body)
+ if err != nil {
+ return nil, err
+ }
+
+ ou := OAuthUser{
+ Token: tok.AccessToken,
+ }
+
+ // unmarshal the body into a interface
+ if err := json.Unmarshal(body, &ou.Resource); err != nil {
+ return nil, err
+ }
+
+ // Attempt to get the identifier from the resource
+ if oauthSettings.Identifier != "" {
+ ou.Identifier = ou.GetResourceField(oauthSettings.Identifier)
+ }
+
+ return &ou, nil
+}
+
+func getCacheKey(ipAddress string) string {
+ return fmt.Sprintf("oauth-%s", ipAddress)
+}
diff --git a/backend/internal/entity/auth/oauth_test.go b/backend/internal/entity/auth/oauth_test.go
new file mode 100644
index 000000000..46efaea8b
--- /dev/null
+++ b/backend/internal/entity/auth/oauth_test.go
@@ -0,0 +1,430 @@
+package auth
+
+import (
+ "context"
+ "testing"
+
+ "npm/internal/entity/setting"
+
+ cache "github.com/patrickmn/go-cache"
+ "github.com/rotisserie/eris"
+ "github.com/stretchr/testify/assert"
+)
+
+func TestGetOAuth2Config(t *testing.T) {
+ tests := []struct {
+ name string
+ mockSettings setting.OAuthSettings
+ expectedError error
+ }{
+ {
+ name: "Valid settings",
+ mockSettings: setting.OAuthSettings{
+ ClientID: "valid-client-id",
+ ClientSecret: "valid-client-secret",
+ AuthURL: "https://auth.url",
+ TokenURL: "https://token.url",
+ Scopes: []string{"scope1", "scope2"},
+ },
+ expectedError: nil,
+ },
+ {
+ name: "Missing ClientID",
+ mockSettings: setting.OAuthSettings{
+ ClientSecret: "valid-client-secret",
+ AuthURL: "https://auth.url",
+ TokenURL: "https://token.url",
+ Scopes: []string{"scope1", "scope2"},
+ },
+ expectedError: eris.New("oauth-settings-incorrect"),
+ },
+ {
+ name: "Missing ClientSecret",
+ mockSettings: setting.OAuthSettings{
+ ClientID: "valid-client-id",
+ AuthURL: "https://auth.url",
+ TokenURL: "https://token.url",
+ Scopes: []string{"scope1", "scope2"},
+ },
+ expectedError: eris.New("oauth-settings-incorrect"),
+ },
+ {
+ name: "Missing AuthURL",
+ mockSettings: setting.OAuthSettings{
+ ClientID: "valid-client-id",
+ ClientSecret: "valid-client-secret",
+ TokenURL: "https://token.url",
+ Scopes: []string{"scope1", "scope2"},
+ },
+ expectedError: eris.New("oauth-settings-incorrect"),
+ },
+ {
+ name: "Missing TokenURL",
+ mockSettings: setting.OAuthSettings{
+ ClientID: "valid-client-id",
+ ClientSecret: "valid-client-secret",
+ AuthURL: "https://auth.url",
+ Scopes: []string{"scope1", "scope2"},
+ },
+ expectedError: eris.New("oauth-settings-incorrect"),
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ // Mock the GetOAuthSettings function
+ settingGetOAuthSettings = func() (setting.OAuthSettings, error) {
+ return tt.mockSettings, nil
+ }
+
+ config, settings, err := getOAuth2Config()
+
+ if tt.expectedError != nil {
+ assert.Error(t, err)
+ assert.Equal(t, tt.expectedError.Error(), err.Error())
+ } else {
+ assert.NoError(t, err)
+ assert.NotNil(t, config)
+ assert.NotNil(t, settings)
+ assert.Equal(t, tt.mockSettings.ClientID, config.ClientID)
+ assert.Equal(t, tt.mockSettings.ClientSecret, config.ClientSecret)
+ assert.Equal(t, tt.mockSettings.AuthURL, config.Endpoint.AuthURL)
+ assert.Equal(t, tt.mockSettings.TokenURL, config.Endpoint.TokenURL)
+ assert.Equal(t, tt.mockSettings.Scopes, config.Scopes)
+ }
+ })
+ }
+}
+
+func TestGetEmail(t *testing.T) {
+ tests := []struct {
+ name string
+ oauthUser OAuthUser
+ expected string
+ }{
+ {
+ name: "Email in resource",
+ oauthUser: OAuthUser{
+ Resource: map[string]any{
+ "email": "user@example.com",
+ },
+ },
+ expected: "user@example.com",
+ },
+ {
+ name: "Identifier is email",
+ oauthUser: OAuthUser{
+ Identifier: "user@example.com",
+ },
+ expected: "user@example.com",
+ },
+ {
+ name: "Identifier is not email",
+ oauthUser: OAuthUser{
+ Identifier: "user123",
+ },
+ expected: "user123@oauth",
+ },
+ {
+ name: "No email or identifier",
+ oauthUser: OAuthUser{
+ Resource: map[string]any{},
+ },
+ expected: "",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ email := tt.oauthUser.GetEmail()
+ assert.Equal(t, tt.expected, email)
+ })
+ }
+}
+
+func TestGetName(t *testing.T) {
+ tests := []struct {
+ name string
+ oauthUser OAuthUser
+ expected string
+ }{
+ {
+ name: "Nickname in resource",
+ oauthUser: OAuthUser{
+ Resource: map[string]any{
+ "nickname": "user_nick",
+ },
+ },
+ expected: "user_nick",
+ },
+ {
+ name: "Given name in resource",
+ oauthUser: OAuthUser{
+ Resource: map[string]any{
+ "given_name": "User Given",
+ },
+ },
+ expected: "User Given",
+ },
+ {
+ name: "Name in resource",
+ oauthUser: OAuthUser{
+ Resource: map[string]any{
+ "name": "User Name",
+ },
+ },
+ expected: "User Name",
+ },
+ {
+ name: "Preferred username in resource",
+ oauthUser: OAuthUser{
+ Resource: map[string]any{
+ "preferred_username": "preferred_user",
+ },
+ },
+ expected: "preferred_user",
+ },
+ {
+ name: "Username in resource",
+ oauthUser: OAuthUser{
+ Resource: map[string]any{
+ "username": "user123",
+ },
+ },
+ expected: "user123",
+ },
+ {
+ name: "No name fields in resource, fallback to identifier",
+ oauthUser: OAuthUser{
+ Identifier: "fallback_identifier",
+ Resource: map[string]any{},
+ },
+ expected: "fallback_identifier",
+ },
+ {
+ name: "No name fields and no identifier",
+ oauthUser: OAuthUser{
+ Resource: map[string]any{},
+ },
+ expected: "",
+ },
+ {
+ name: "All fields",
+ oauthUser: OAuthUser{
+ Identifier: "fallback_identifier",
+ Resource: map[string]any{
+ "nickname": "user_nick",
+ "given_name": "User Given",
+ "name": "User Name",
+ "preferred_username": "preferred_user",
+ "username": "user123",
+ },
+ },
+ expected: "user_nick",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ name := tt.oauthUser.GetName()
+ assert.Equal(t, tt.expected, name)
+ })
+ }
+}
+
+func TestGetID(t *testing.T) {
+ tests := []struct {
+ name string
+ oauthUser OAuthUser
+ expected string
+ }{
+ {
+ name: "Identifier is set",
+ oauthUser: OAuthUser{
+ Identifier: "user123",
+ },
+ expected: "user123",
+ },
+ {
+ name: "UID in resource",
+ oauthUser: OAuthUser{
+ Resource: map[string]any{
+ "uid": "uid123",
+ },
+ },
+ expected: "uid123",
+ },
+ {
+ name: "User ID in resource",
+ oauthUser: OAuthUser{
+ Resource: map[string]any{
+ "user_id": "user_id123",
+ },
+ },
+ expected: "user_id123",
+ },
+ {
+ name: "Username in resource",
+ oauthUser: OAuthUser{
+ Resource: map[string]any{
+ "username": "username123",
+ },
+ },
+ expected: "username123",
+ },
+ {
+ name: "Preferred username in resource",
+ oauthUser: OAuthUser{
+ Resource: map[string]any{
+ "preferred_username": "preferred_user",
+ },
+ },
+ expected: "preferred_user",
+ },
+ {
+ name: "Email in resource",
+ oauthUser: OAuthUser{
+ Resource: map[string]any{
+ "email": "user@example.com",
+ },
+ },
+ expected: "user@example.com",
+ },
+ {
+ name: "Mail in resource",
+ oauthUser: OAuthUser{
+ Resource: map[string]any{
+ "mail": "mail@example.com",
+ },
+ },
+ expected: "mail@example.com",
+ },
+ {
+ name: "No identifier or resource fields",
+ oauthUser: OAuthUser{
+ Resource: map[string]any{},
+ },
+ expected: "",
+ },
+ {
+ name: "All fields",
+ oauthUser: OAuthUser{
+ Identifier: "user123",
+ Resource: map[string]any{
+ "uid": "uid123",
+ "user_id": "user_id123",
+ "username": "username123",
+ "preferred_username": "preferred_user",
+ "mail": "mail@example.com",
+ "email": "email@example.com",
+ },
+ },
+ expected: "user123",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ id := tt.oauthUser.GetID()
+ assert.Equal(t, tt.expected, id)
+ })
+ }
+}
+
+func TestOAuthLogin(t *testing.T) {
+ tests := []struct {
+ name string
+ redirectBase string
+ ipAddress string
+ expectedError error
+ }{
+ {
+ name: "Valid redirect base",
+ redirectBase: "https://redirect.base",
+ ipAddress: "127.0.0.1",
+ expectedError: nil,
+ },
+ {
+ name: "Empty redirect base",
+ redirectBase: "",
+ ipAddress: "127.0.0.1",
+ expectedError: nil,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ // Mock the GetOAuthSettings function
+ settingGetOAuthSettings = func() (setting.OAuthSettings, error) {
+ return setting.OAuthSettings{
+ ClientID: "valid-client-id",
+ ClientSecret: "valid-client-secret",
+ AuthURL: "https://auth.url",
+ TokenURL: "https://token.url",
+ Scopes: []string{"scope1", "scope2"},
+ }, nil
+ }
+
+ url, err := OAuthLogin(tt.redirectBase, tt.ipAddress)
+
+ if tt.expectedError != nil {
+ assert.Error(t, err)
+ assert.Equal(t, tt.expectedError.Error(), err.Error())
+ } else {
+ assert.NoError(t, err)
+ assert.NotEmpty(t, url)
+ }
+ })
+ }
+}
+
+func TestOAuthReturn(t *testing.T) {
+ var errNotFound = eris.New("oauth-verifier-not-found")
+ tests := []struct {
+ name string
+ code string
+ ipAddress string
+ expectedError error
+ }{
+ {
+ name: "Invalid code",
+ code: "invalid-code",
+ ipAddress: "127.0.0.100",
+ expectedError: errNotFound,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ // Mock the GetOAuthSettings function
+ settingGetOAuthSettings = func() (setting.OAuthSettings, error) {
+ return setting.OAuthSettings{
+ ClientID: "valid-client-id",
+ ClientSecret: "valid-client-secret",
+ AuthURL: "https://auth.url",
+ TokenURL: "https://token.url",
+ Scopes: []string{"scope1", "scope2"},
+ ResourceURL: "https://resource.url",
+ Identifier: "id",
+ }, nil
+ }
+
+ // Initialise the cache and set a verifier
+ OAuthCacheInit()
+ if tt.expectedError != errNotFound {
+ OAuthCache.Set(getCacheKey(tt.ipAddress), "valid-verifier", cache.DefaultExpiration)
+ }
+
+ ctx := context.Background()
+ user, err := OAuthReturn(ctx, tt.code, tt.ipAddress)
+
+ if tt.expectedError != nil {
+ assert.Error(t, err)
+ assert.Equal(t, tt.expectedError.Error(), err.Error())
+ } else {
+ assert.NoError(t, err)
+ assert.NotNil(t, user)
+ }
+ })
+ }
+}
diff --git a/backend/internal/entity/capability.go b/backend/internal/entity/capability.go
new file mode 100644
index 000000000..8b9d7f08a
--- /dev/null
+++ b/backend/internal/entity/capability.go
@@ -0,0 +1,11 @@
+package entity
+
+// Capability is the db model
+type Capability struct {
+ Name string `json:"name" gorm:"column:name;primaryKey" filter:"name,string"`
+}
+
+// TableName overrides the table name used by gorm
+func (Capability) TableName() string {
+ return "capability"
+}
diff --git a/backend/internal/entity/certificate/methods.go b/backend/internal/entity/certificate/methods.go
new file mode 100644
index 000000000..bf76976c1
--- /dev/null
+++ b/backend/internal/entity/certificate/methods.go
@@ -0,0 +1,97 @@
+package certificate
+
+import (
+ "npm/internal/database"
+ "npm/internal/entity"
+ "npm/internal/jobqueue"
+ "npm/internal/logger"
+ "npm/internal/model"
+)
+
+// GetByID finds a row by ID
+func GetByID(id uint) (Model, error) {
+ var m Model
+ err := m.LoadByID(id)
+ return m, err
+}
+
+// GetByStatus will select rows that are ready for requesting
+func GetByStatus(status string) ([]Model, error) {
+ items := make([]Model, 0)
+ db := database.GetDB()
+ result := db.
+ Joins("INNER JOIN certificate_authority ON certificate_authority.id = certificate.certificate_authority_id AND certificate_authority.is_deleted = ?", 0).
+ Where("type IN ?", []string{"http", "dns"}).
+ Where("status = ?", status).
+ Where("certificate_authority_id > ?", 0).
+ Find(&items)
+ return items, result.Error
+}
+
+// List will return a list of certificates
+func List(pageInfo model.PageInfo, filters []model.Filter, expand []string) (entity.ListResponse, error) {
+ var result entity.ListResponse
+
+ defaultSort := model.Sort{
+ Field: "name",
+ Direction: "ASC",
+ }
+
+ dbo := entity.ListQueryBuilder(&pageInfo, filters, entity.GetFilterMap(Model{}, true))
+
+ // Get count of items in this search
+ var totalRows int64
+ if res := dbo.Model(&Model{}).Count(&totalRows); res.Error != nil {
+ return result, res.Error
+ }
+
+ // Get rows
+ dbo = entity.AddOffsetLimitToList(dbo, &pageInfo)
+ dbo = entity.AddOrderToList(dbo, pageInfo.Sort, defaultSort)
+ items := make([]Model, 0)
+ if res := dbo.Find(&items); res.Error != nil {
+ return result, res.Error
+ }
+
+ if expand != nil {
+ for idx := range items {
+ expandErr := items[idx].Expand(expand)
+ if expandErr != nil {
+ logger.Error("CertificatesExpansionError", expandErr)
+ }
+ }
+ }
+
+ result = entity.ListResponse{
+ Items: items,
+ Total: totalRows,
+ Limit: pageInfo.Limit,
+ Offset: pageInfo.Offset,
+ Sort: pageInfo.GetSort(defaultSort),
+ Filter: filters,
+ }
+
+ return result, nil
+}
+
+// AddPendingJobs is intended to be used at startup to add
+// anything pending to the JobQueue just once, based on
+// the database row status
+func AddPendingJobs() {
+ rows, err := GetByStatus(StatusReady)
+ if err != nil {
+ logger.Error("AddPendingJobsError", err)
+ return
+ }
+
+ for _, row := range rows {
+ logger.Debug("Adding RequestCertificate job: %+v", row)
+ err := jobqueue.AddJob(jobqueue.Job{
+ Name: "RequestCertificate",
+ Action: row.Request,
+ })
+ if err != nil {
+ logger.Error("AddPendingJobsError", err)
+ }
+ }
+}
diff --git a/backend/internal/entity/certificate/model.go b/backend/internal/entity/certificate/model.go
new file mode 100644
index 000000000..589814fa2
--- /dev/null
+++ b/backend/internal/entity/certificate/model.go
@@ -0,0 +1,313 @@
+package certificate
+
+import (
+ "fmt"
+ "os"
+ "regexp"
+ "strings"
+ "time"
+
+ "npm/internal/acme"
+ "npm/internal/config"
+ "npm/internal/database"
+ "npm/internal/entity/certificateauthority"
+ "npm/internal/entity/dnsprovider"
+ "npm/internal/entity/user"
+ "npm/internal/logger"
+ "npm/internal/model"
+ "npm/internal/serverevents"
+ "npm/internal/types"
+ "npm/internal/util"
+
+ "github.com/rotisserie/eris"
+)
+
+const (
+ // TypeCustom custom cert type
+ TypeCustom = "custom"
+ // TypeHTTP http cert type
+ TypeHTTP = "http"
+ // TypeDNS dns cert type
+ TypeDNS = "dns"
+ // TypeMkcert mkcert cert type
+ TypeMkcert = "mkcert"
+
+ // StatusReady is ready for certificate to be requested
+ StatusReady = "ready"
+ // StatusRequesting is process of being requested
+ StatusRequesting = "requesting"
+ // StatusFailed is a certicifate that failed to request
+ StatusFailed = "failed"
+ // StatusProvided is a certificate provided and ready for actual use
+ StatusProvided = "provided"
+)
+
+// Model is the model
+type Model struct {
+ model.Base
+ UserID uint `json:"user_id" gorm:"column:user_id" filter:"user_id,integer"`
+ Type string `json:"type" gorm:"column:type" filter:"type,string"`
+ CertificateAuthorityID types.NullableDBUint `json:"certificate_authority_id" gorm:"column:certificate_authority_id" filter:"certificate_authority_id,integer"`
+ DNSProviderID types.NullableDBUint `json:"dns_provider_id" gorm:"column:dns_provider_id" filter:"dns_provider_id,integer"`
+ Name string `json:"name" gorm:"column:name" filter:"name,string"`
+ DomainNames types.JSONB `json:"domain_names" gorm:"column:domain_names" filter:"domain_names,string"`
+ ExpiresOn int64 `json:"expires_on" gorm:"column:expires_on" filter:"expires_on,integer"`
+ Status string `json:"status" gorm:"column:status" filter:"status,string"`
+ ErrorMessage string `json:"error_message" gorm:"column:error_message" filter:"error_message,string"`
+ Meta types.JSONB `json:"-" gorm:"column:meta"`
+ IsECC bool `json:"is_ecc" gorm:"column:is_ecc" filter:"is_ecc,bool"`
+ // Expansions:
+ CertificateAuthority *certificateauthority.Model `json:"certificate_authority,omitempty" gorm:"-"`
+ DNSProvider *dnsprovider.Model `json:"dns_provider,omitempty" gorm:"-"`
+ User *user.Model `json:"user,omitempty" gorm:"-"`
+}
+
+// TableName overrides the table name used by gorm
+func (Model) TableName() string {
+ return "certificate"
+}
+
+// LoadByID will load from an ID
+func (m *Model) LoadByID(id uint) error {
+ db := database.GetDB()
+ result := db.First(&m, id)
+ return result.Error
+}
+
+// Save will save this model to the DB
+func (m *Model) Save() error {
+ if m.UserID == 0 {
+ return eris.Errorf("User ID must be specified")
+ }
+
+ if !m.Validate() {
+ return eris.Errorf("Certificate data is incorrect or incomplete for this type")
+ }
+
+ if !m.ValidateWildcardSupport() {
+ return eris.Errorf("Cannot use Wildcard domains with this CA")
+ }
+
+ m.setDefaultStatus()
+
+ // ensure name is trimmed of whitespace
+ m.Name = strings.TrimSpace(m.Name)
+
+ db := database.GetDB()
+ result := db.Save(m)
+ return result.Error
+}
+
+// Delete will mark row as deleted
+func (m *Model) Delete() bool {
+ if m.ID == 0 {
+ // Can't delete a new object
+ return false
+ }
+ db := database.GetDB()
+ result := db.Delete(m)
+ return result.Error == nil
+
+ // todo: delete from acme.sh as well
+}
+
+// Validate will make sure the data given is expected. This object is a bit complicated,
+// as there could be multiple combinations of values.
+func (m *Model) Validate() bool {
+ switch m.Type {
+ case TypeCustom:
+ // TODO: make sure meta contains required fields
+ return m.DNSProviderID.Uint == 0 && m.CertificateAuthorityID.Uint == 0
+
+ case TypeHTTP:
+ return m.DNSProviderID.Uint == 0 && m.CertificateAuthorityID.Uint > 0
+
+ case TypeDNS:
+ return m.DNSProviderID.Uint > 0 && m.CertificateAuthorityID.Uint > 0
+
+ case TypeMkcert:
+ return true
+
+ default:
+ return false
+ }
+}
+
+// ValidateWildcardSupport will ensure that the CA given supports wildcards,
+// only if the domains on this object have at least 1 wildcard
+func (m *Model) ValidateWildcardSupport() bool {
+ domains, err := m.DomainNames.AsStringArray()
+ if err != nil {
+ logger.Error("ValidateWildcardSupportError", err)
+ return false
+ }
+
+ hasWildcard := false
+ for _, domain := range domains {
+ if strings.Contains(domain, "*") {
+ hasWildcard = true
+ }
+ }
+
+ if hasWildcard {
+ // nolint: errcheck, gosec
+ m.Expand([]string{"certificate-authority", "dns-provider"})
+ if !m.CertificateAuthority.IsWildcardSupported {
+ return false
+ }
+ }
+
+ return true
+}
+
+func (m *Model) setDefaultStatus() {
+ if m.ID == 0 {
+ // It's a new certificate
+ if m.Type == TypeCustom {
+ m.Status = StatusProvided
+ } else {
+ m.Status = StatusReady
+ }
+ }
+}
+
+// Expand will populate attached objects for the model
+func (m *Model) Expand(items []string) error {
+ var err error
+
+ if util.SliceContainsItem(items, "certificate-authority") && m.CertificateAuthorityID.Uint > 0 {
+ var certificateAuthority certificateauthority.Model
+ certificateAuthority, err = certificateauthority.GetByID(m.CertificateAuthorityID.Uint)
+ m.CertificateAuthority = &certificateAuthority
+ }
+
+ if util.SliceContainsItem(items, "dns-provider") && m.DNSProviderID.Uint > 0 {
+ var dnsProvider dnsprovider.Model
+ dnsProvider, err = dnsprovider.GetByID(m.DNSProviderID.Uint)
+ m.DNSProvider = &dnsProvider
+ }
+
+ if util.SliceContainsItem(items, "user") && m.ID > 0 {
+ var usr user.Model
+ usr, err = user.GetByID(m.UserID)
+ m.User = &usr
+ }
+
+ return err
+}
+
+// GetCertificateLocations will return the paths on disk where the SSL
+// certs should or would be.
+// Returns: (key, fullchain, certFolder)
+func (m *Model) GetCertificateLocations() (string, string, string) {
+ if m.ID == 0 {
+ logger.Error("GetCertificateLocationsError", eris.New("GetCertificateLocations called before certificate was saved"))
+ return "", "", ""
+ }
+
+ certFolder := fmt.Sprintf("%s/certificates", config.Configuration.DataFolder)
+
+ // Generate a unique folder name for this cert
+ m1 := regexp.MustCompile(`[^A-Za-z0-9\.]`)
+
+ niceName := m1.ReplaceAllString(m.Name, "_")
+ if len(niceName) > 20 {
+ niceName = niceName[:20]
+ }
+ folderName := fmt.Sprintf("%d-%s", m.ID, niceName)
+
+ return fmt.Sprintf("%s/%s/key.pem", certFolder, folderName),
+ fmt.Sprintf("%s/%s/fullchain.pem", certFolder, folderName),
+ fmt.Sprintf("%s/%s", certFolder, folderName)
+}
+
+// Request makes a certificate request
+func (m *Model) Request() error {
+ logger.Info("Requesting certificate for: #%d %v", m.ID, m.Name)
+ serverevents.SendChange("certificates")
+
+ // nolint: errcheck, gosec
+ m.Expand([]string{"certificate-authority", "dns-provider"})
+ m.Status = StatusRequesting
+ if err := m.Save(); err != nil {
+ logger.Error("CertificateSaveError", err)
+ return err
+ }
+
+ // do request
+ domains, err := m.DomainNames.AsStringArray()
+ if err != nil {
+ logger.Error("CertificateRequestError", err)
+ return err
+ }
+
+ certKeyFile, certFullchainFile, certFolder := m.GetCertificateLocations()
+
+ // ensure certFolder is created
+ // nolint: gosec
+ if err := os.MkdirAll(certFolder, os.ModePerm); err != nil {
+ logger.Error("CreateFolderError", err)
+ return err
+ }
+
+ errMsg, err := acme.RequestCert(domains, m.Type, certFullchainFile, certKeyFile, m.DNSProvider, m.CertificateAuthority, true)
+ if err != nil {
+ m.Status = StatusFailed
+ m.ErrorMessage = errMsg
+ if err := m.Save(); err != nil {
+ logger.Error("CertificateSaveError", err)
+ return err
+ }
+ return nil
+ }
+
+ // If done
+ m.Status = StatusProvided
+ m.ExpiresOn = time.Now().UnixMilli()
+ if err := m.Save(); err != nil {
+ logger.Error("CertificateSaveError", err)
+ return err
+ }
+
+ serverevents.SendChange("certificates")
+ logger.Info("Request for certificate for: #%d %v was completed", m.ID, m.Name)
+ return nil
+}
+
+// GetTemplate will convert the Model to a Template
+func (m *Model) GetTemplate() Template {
+ if m.ID == 0 {
+ // No or empty certificate object, happens when the host has no cert
+ return Template{}
+ }
+
+ domainNames, _ := m.DomainNames.AsStringArray()
+
+ return Template{
+ ID: m.ID,
+ CreatedAt: fmt.Sprintf("%d", m.CreatedAt), // todo: nice date string
+ UpdatedAt: fmt.Sprintf("%d", m.UpdatedAt), // todo: nice date string
+ ExpiresOn: util.UnixMilliToNiceFormat(m.ExpiresOn),
+ Type: m.Type,
+ UserID: m.UserID,
+ CertificateAuthorityID: m.CertificateAuthorityID.Uint,
+ DNSProviderID: m.DNSProviderID.Uint,
+ Name: m.Name,
+ DomainNames: domainNames,
+ Status: m.Status,
+ IsECC: m.IsECC,
+ // These are helpers for template generation
+ IsCustom: m.Type == TypeCustom,
+ IsAcme: m.Type != TypeCustom,
+ IsProvided: m.ID > 0 && m.Status == StatusProvided,
+ Folder: m.GetFolder(),
+ }
+}
+
+// GetFolder returns the folder where these certs should exist
+func (m *Model) GetFolder() string {
+ if m.Type == TypeCustom {
+ return fmt.Sprintf("%s/custom_ssl/npm-%d", config.Configuration.DataFolder, m.ID)
+ }
+ return fmt.Sprintf("%s/npm-%d", config.Configuration.Acmesh.CertHome, m.ID)
+}
diff --git a/backend/internal/entity/certificate/template.go b/backend/internal/entity/certificate/template.go
new file mode 100644
index 000000000..2e50257fd
--- /dev/null
+++ b/backend/internal/entity/certificate/template.go
@@ -0,0 +1,22 @@
+package certificate
+
+// Template is the model given to the template parser, converted from the Model
+type Template struct {
+ ID uint
+ CreatedAt string
+ UpdatedAt string
+ ExpiresOn string
+ Type string
+ UserID uint
+ CertificateAuthorityID uint
+ DNSProviderID uint
+ Name string
+ DomainNames []string
+ Status string
+ IsECC bool
+ // These are helpers for template generation
+ IsCustom bool
+ IsAcme bool // non-custom
+ IsProvided bool
+ Folder string
+}
diff --git a/backend/internal/entity/certificateauthority/entity_test.go b/backend/internal/entity/certificateauthority/entity_test.go
new file mode 100644
index 000000000..3aa52f03d
--- /dev/null
+++ b/backend/internal/entity/certificateauthority/entity_test.go
@@ -0,0 +1,233 @@
+package certificateauthority
+
+import (
+ "regexp"
+ "testing"
+
+ "npm/internal/errors"
+ "npm/internal/model"
+ "npm/internal/test"
+
+ "github.com/DATA-DOG/go-sqlmock"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+ "github.com/stretchr/testify/suite"
+ "go.uber.org/goleak"
+)
+
+// +------------+
+// | Setup |
+// +------------+
+
+type testsuite struct {
+ suite.Suite
+ mock sqlmock.Sqlmock
+ testCA *sqlmock.Rows
+ listCountRows *sqlmock.Rows
+ listRows *sqlmock.Rows
+}
+
+// SetupTest is executed before each test
+func (s *testsuite) SetupTest() {
+ var err error
+ s.mock, err = test.Setup()
+ require.NoError(s.T(), err)
+
+ // These rows need to be intantiated for each test as they are
+ // read in the db object, and their row position is not resettable
+ // between tests.
+ s.testCA = sqlmock.NewRows([]string{
+ "id",
+ "name",
+ "acmesh_server",
+ "ca_bundle",
+ "is_wildcard_supported",
+ "max_domains",
+ }).AddRow(
+ 10,
+ "Test CA",
+ "https://ca.internal/acme/acme/directory",
+ "/etc/ssl/certs/NginxProxyManager.crt",
+ true,
+ 2,
+ )
+
+ s.listCountRows = sqlmock.NewRows([]string{
+ "count(*)",
+ }).AddRow(
+ 2,
+ )
+
+ s.listRows = sqlmock.NewRows([]string{
+ "id",
+ "name",
+ "acmesh_server",
+ "ca_bundle",
+ "is_wildcard_supported",
+ "max_domains",
+ }).AddRow(
+ 10,
+ "Test CA",
+ "https://ca.internal/acme/acme/directory",
+ "/etc/ssl/certs/NginxProxyManager.crt",
+ true,
+ 2,
+ ).AddRow(
+ 11,
+ "Test CA 2",
+ "https://ca2.internal/acme/acme/directory",
+ "/etc/ssl/certs/NginxProxyManager.crt",
+ true,
+ 5,
+ )
+}
+
+// In order for 'go test' to run this suite, we need to create
+// a normal test function and pass our suite to suite.Run
+func TestExampleTestSuite(t *testing.T) {
+ suite.Run(t, new(testsuite))
+}
+
+func assertModel(t *testing.T, m Model) {
+ assert.Equal(t, uint(10), m.ID)
+ assert.Equal(t, "Test CA", m.Name)
+ assert.Equal(t, "https://ca.internal/acme/acme/directory", m.AcmeshServer)
+ assert.Equal(t, "/etc/ssl/certs/NginxProxyManager.crt", m.CABundle)
+ assert.Equal(t, 2, m.MaxDomains)
+ assert.Equal(t, true, m.IsWildcardSupported)
+ assert.Equal(t, false, m.IsReadonly)
+}
+
+// +------------+
+// | Tests |
+// +------------+
+
+func (s *testsuite) TestGetByID() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ s.mock.
+ ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "certificate_authority" WHERE "certificate_authority"."id" = $1 AND "certificate_authority"."is_deleted" = $2 ORDER BY "certificate_authority"."id" LIMIT $3`)).
+ WithArgs(10, 0, 1).
+ WillReturnRows(s.testCA)
+
+ m, err := GetByID(10)
+ require.NoError(s.T(), err)
+ require.NoError(s.T(), s.mock.ExpectationsWereMet())
+ assertModel(s.T(), m)
+}
+
+func (s *testsuite) TestList() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ s.mock.
+ ExpectQuery(regexp.QuoteMeta(`SELECT count(*) FROM "certificate_authority" WHERE "certificate_authority"."name" LIKE $1 AND "certificate_authority"."is_deleted" = $2`)).
+ WithArgs("%test%", 0).
+ WillReturnRows(s.listCountRows)
+
+ s.mock.
+ ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "certificate_authority" WHERE "certificate_authority"."name" LIKE $1 AND "certificate_authority"."is_deleted" = $2 ORDER BY name asc LIMIT $3`)).
+ WithArgs("%test%", 0, 8).
+ WillReturnRows(s.listRows)
+
+ p := model.PageInfo{
+ Offset: 0,
+ Limit: 8,
+ Sort: []model.Sort{
+ {
+ Field: "name",
+ Direction: "asc",
+ },
+ },
+ }
+
+ f := []model.Filter{
+ {
+ Field: "name",
+ Modifier: "contains",
+ Value: []string{"test"},
+ },
+ }
+
+ resp, err := List(p, f)
+ require.NoError(s.T(), err)
+ assert.Equal(s.T(), int64(2), resp.Total)
+ assert.Equal(s.T(), p.Offset, resp.Offset)
+ assert.Equal(s.T(), p.Limit, resp.Limit)
+ assert.Equal(s.T(), p.Limit, resp.Limit)
+ assert.Equal(s.T(), p.Sort, resp.Sort)
+ assert.Equal(s.T(), f, resp.Filter)
+
+ require.NoError(s.T(), s.mock.ExpectationsWereMet())
+}
+
+func (s *testsuite) TestSave() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ s.mock.ExpectBegin()
+ s.mock.ExpectQuery(regexp.QuoteMeta(`INSERT INTO "certificate_authority" ("created_at","updated_at","is_deleted","name","acmesh_server","ca_bundle","max_domains","is_wildcard_supported","is_readonly") VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9) RETURNING "id"`)).
+ WithArgs(
+ sqlmock.AnyArg(),
+ sqlmock.AnyArg(),
+ 0,
+ "Test CA",
+ "https://ca.internal/acme/acme/directory",
+ "/etc/ssl/certs/NginxProxyManager.crt",
+ 2,
+ true,
+ false,
+ ).
+ WillReturnRows(sqlmock.NewRows([]string{"id"}).AddRow("11"))
+ s.mock.ExpectCommit()
+
+ m := Model{
+ Name: "Test CA",
+ AcmeshServer: "https://ca.internal/acme/acme/directory",
+ CABundle: "/etc/ssl/certs/NginxProxyManager.crt",
+ MaxDomains: 2,
+ IsWildcardSupported: true,
+ }
+ err := m.Save()
+ require.NoError(s.T(), err)
+ require.NoError(s.T(), s.mock.ExpectationsWereMet())
+}
+
+func (s *testsuite) TestDelete() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ s.mock.ExpectBegin()
+ s.mock.
+ ExpectExec(regexp.QuoteMeta(`UPDATE "certificate_authority" SET "is_deleted"=$1 WHERE "certificate_authority"."id" = $2 AND "certificate_authority"."is_deleted" = $3`)).
+ WithArgs(1, 10, 0).
+ WillReturnResult(sqlmock.NewResult(0, 1))
+ s.mock.ExpectCommit()
+
+ m := Model{}
+ err := m.Delete()
+ assert.Equal(s.T(), "Unable to delete a new object", err.Error())
+
+ m2 := Model{
+ Base: model.Base{
+ ID: 10,
+ },
+ }
+ err2 := m2.Delete()
+ require.NoError(s.T(), err2)
+ require.NoError(s.T(), s.mock.ExpectationsWereMet())
+}
+
+func (s *testsuite) TestCheck() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ m := Model{}
+ err := m.Check()
+ assert.Nil(s.T(), err)
+
+ m.CABundle = "/tmp/doesnotexist"
+ err = m.Check()
+ assert.Equal(s.T(), errors.ErrCABundleDoesNotExist.Error(), err.Error())
+}
diff --git a/backend/internal/entity/certificateauthority/methods.go b/backend/internal/entity/certificateauthority/methods.go
new file mode 100644
index 000000000..d97f12f5a
--- /dev/null
+++ b/backend/internal/entity/certificateauthority/methods.go
@@ -0,0 +1,50 @@
+package certificateauthority
+
+import (
+ "npm/internal/entity"
+ "npm/internal/model"
+)
+
+// GetByID finds a row by ID
+func GetByID(id uint) (Model, error) {
+ var m Model
+ err := m.LoadByID(id)
+ return m, err
+}
+
+// List will return a list of certificates
+func List(pageInfo model.PageInfo, filters []model.Filter) (entity.ListResponse, error) {
+ var result entity.ListResponse
+
+ defaultSort := model.Sort{
+ Field: "name",
+ Direction: "ASC",
+ }
+
+ dbo := entity.ListQueryBuilder(&pageInfo, filters, entity.GetFilterMap(Model{}, true))
+
+ // Get count of items in this search
+ var totalRows int64
+ if res := dbo.Model(&Model{}).Count(&totalRows); res.Error != nil {
+ return result, res.Error
+ }
+
+ // Get rows
+ dbo = entity.AddOffsetLimitToList(dbo, &pageInfo)
+ dbo = entity.AddOrderToList(dbo, pageInfo.Sort, defaultSort)
+ items := make([]Model, 0)
+ if res := dbo.Find(&items); res.Error != nil {
+ return result, res.Error
+ }
+
+ result = entity.ListResponse{
+ Items: items,
+ Total: totalRows,
+ Limit: pageInfo.Limit,
+ Offset: pageInfo.Offset,
+ Sort: pageInfo.GetSort(defaultSort),
+ Filter: filters,
+ }
+
+ return result, nil
+}
diff --git a/backend/internal/entity/certificateauthority/model.go b/backend/internal/entity/certificateauthority/model.go
new file mode 100644
index 000000000..a370a59a5
--- /dev/null
+++ b/backend/internal/entity/certificateauthority/model.go
@@ -0,0 +1,66 @@
+package certificateauthority
+
+import (
+ "os"
+ "path/filepath"
+
+ "npm/internal/database"
+ "npm/internal/errors"
+ "npm/internal/model"
+
+ "github.com/rotisserie/eris"
+)
+
+// Model is the model
+type Model struct {
+ model.Base
+ Name string `json:"name" gorm:"column:name" filter:"name,string"`
+ AcmeshServer string `json:"acmesh_server" gorm:"column:acmesh_server" filter:"acmesh_server,string"`
+ CABundle string `json:"ca_bundle" gorm:"column:ca_bundle" filter:"ca_bundle,string"`
+ MaxDomains int `json:"max_domains" gorm:"column:max_domains" filter:"max_domains,integer"`
+ IsWildcardSupported bool `json:"is_wildcard_supported" gorm:"column:is_wildcard_supported" filter:"is_wildcard_supported,boolean"`
+ IsReadonly bool `json:"is_readonly" gorm:"column:is_readonly" filter:"is_readonly,boolean"`
+}
+
+// TableName overrides the table name used by gorm
+func (Model) TableName() string {
+ return "certificate_authority"
+}
+
+// LoadByID will load from an ID
+func (m *Model) LoadByID(id uint) error {
+ db := database.GetDB()
+ result := db.First(&m, id)
+ return result.Error
+}
+
+// Save will save this model to the DB
+func (m *Model) Save() error {
+ db := database.GetDB()
+ result := db.Save(m)
+ return result.Error
+}
+
+// Delete will mark a row as deleted
+func (m *Model) Delete() error {
+ if m.ID == 0 {
+ // Can't delete a new object
+ return eris.New("Unable to delete a new object")
+ }
+ db := database.GetDB()
+ result := db.Delete(m)
+ return result.Error
+}
+
+// Check will ensure the ca bundle path exists if it's set
+func (m *Model) Check() error {
+ var err error
+
+ if m.CABundle != "" {
+ if _, fileerr := os.Stat(filepath.Clean(m.CABundle)); eris.Is(fileerr, os.ErrNotExist) {
+ err = errors.ErrCABundleDoesNotExist
+ }
+ }
+
+ return err
+}
diff --git a/backend/internal/entity/dnsprovider/entity_test.go b/backend/internal/entity/dnsprovider/entity_test.go
new file mode 100644
index 000000000..40c34eabe
--- /dev/null
+++ b/backend/internal/entity/dnsprovider/entity_test.go
@@ -0,0 +1,306 @@
+package dnsprovider
+
+import (
+ "encoding/json"
+ "regexp"
+ "testing"
+
+ "npm/internal/model"
+ "npm/internal/test"
+ "npm/internal/types"
+
+ "github.com/DATA-DOG/go-sqlmock"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+ "github.com/stretchr/testify/suite"
+ "go.uber.org/goleak"
+)
+
+// +------------+
+// | Setup |
+// +------------+
+
+type testsuite struct {
+ suite.Suite
+ mock sqlmock.Sqlmock
+ singleRow *sqlmock.Rows
+ listCountRows *sqlmock.Rows
+ listRows *sqlmock.Rows
+}
+
+// SetupTest is executed before each test
+func (s *testsuite) SetupTest() {
+ var err error
+ s.mock, err = test.Setup()
+ require.NoError(s.T(), err)
+
+ // These rows need to be intantiated for each test as they are
+ // read in the db object, and their row position is not resettable
+ // between tests.
+ s.singleRow = sqlmock.NewRows([]string{
+ "id",
+ "user_id",
+ "name",
+ "acmesh_name",
+ "dns_sleep",
+ "meta",
+ }).AddRow(
+ 10,
+ 100,
+ "Route53",
+ "dns_aws",
+ 10,
+ getMeta().Encoded,
+ )
+
+ s.listCountRows = sqlmock.NewRows([]string{
+ "count(*)",
+ }).AddRow(
+ 2,
+ )
+
+ s.listRows = sqlmock.NewRows([]string{
+ "id",
+ "user_id",
+ "name",
+ "acmesh_name",
+ "dns_sleep",
+ "meta",
+ }).AddRow(
+ 10,
+ 100,
+ "Route53",
+ "dns_aws",
+ 10,
+ getMeta().Encoded,
+ ).AddRow(
+ 11,
+ 100,
+ "ClouDNS",
+ "dns_cloudns",
+ 8,
+ types.JSONB{},
+ )
+}
+
+// In order for 'go test' to run this suite, we need to create
+// a normal test function and pass our suite to suite.Run
+func TestExampleTestSuite(t *testing.T) {
+ suite.Run(t, new(testsuite))
+}
+
+func getMeta() types.JSONB {
+ m := types.JSONB{}
+ m.UnmarshalJSON([]byte(`{"access_key_id": "BKINOTLNEREALYBL52W2I", "access_key": "NOTAREALKEY+9qSca7R9U6vUuetR8sh"}`))
+ return m
+}
+
+func assertModel(t *testing.T, m Model) {
+ assert.Equal(t, uint(10), m.ID, "ID not expected value")
+ assert.Equal(t, uint(100), m.UserID, "UserID not expected value")
+ assert.Equal(t, "Route53", m.Name, "Name not expected value")
+ assert.Equal(t, "dns_aws", m.AcmeshName, "AcmeshName not expected value")
+ assert.Equal(t, 10, m.DNSSleep, "DNSSleep not expected value")
+ assert.Equal(t, getMeta(), m.Meta, "Meta not expected value")
+}
+
+// +------------+
+// | Tests |
+// +------------+
+
+func (s *testsuite) TestGetByID() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ s.mock.
+ ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "dns_provider" WHERE "dns_provider"."id" = $1 AND "dns_provider"."is_deleted" = $2 ORDER BY "dns_provider"."id" LIMIT $3`)).
+ WithArgs(10, 0, 1).
+ WillReturnRows(s.singleRow)
+
+ m, err := GetByID(10)
+ require.NoError(s.T(), err)
+ require.NoError(s.T(), s.mock.ExpectationsWereMet())
+ assertModel(s.T(), m)
+}
+
+func (s *testsuite) TestGetAcmeShEnvVars() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ type want struct {
+ envs []string
+ err error
+ }
+
+ tests := []struct {
+ name string
+ dnsProvider Model
+ metaJSON string
+ want want
+ }{
+ {
+ name: "dns_aws",
+ dnsProvider: Model{
+ AcmeshName: "dns_aws",
+ },
+ metaJSON: `{"AWS_ACCESS_KEY_ID":"sdfsdfsdfljlbjkljlkjsdfoiwje","AWS_SECRET_ACCESS_KEY":"xxxxxxx"}`,
+ want: want{
+ envs: []string{
+ `AWS_ACCESS_KEY_ID=sdfsdfsdfljlbjkljlkjsdfoiwje`,
+ `AWS_SECRET_ACCESS_KEY=xxxxxxx`,
+ },
+ err: nil,
+ },
+ },
+ {
+ name: "dns_cf",
+ dnsProvider: Model{
+ AcmeshName: "dns_cf",
+ },
+ metaJSON: `{"CF_Key":"sdfsdfsdfljlbjkljlkjsdfoiwje","CF_Email":"me@example.com","CF_Token":"dkfjghdk","CF_Account_ID":"hgbdjfg","CF_Zone_ID":"ASDASD"}`,
+ want: want{
+ envs: []string{
+ `CF_Token=dkfjghdk`,
+ `CF_Account_ID=hgbdjfg`,
+ `CF_Zone_ID=ASDASD`,
+ `CF_Key=sdfsdfsdfljlbjkljlkjsdfoiwje`,
+ `CF_Email=me@example.com`,
+ },
+ err: nil,
+ },
+ },
+ {
+ name: "dns_duckdns",
+ dnsProvider: Model{
+ AcmeshName: "dns_duckdns",
+ },
+ metaJSON: `{"DuckDNS_Token":"aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"}`,
+ want: want{
+ envs: []string{
+ `DuckDNS_Token=aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee`,
+ },
+ err: nil,
+ },
+ },
+ }
+
+ for _, tt := range tests {
+ s.T().Run(tt.name, func(t *testing.T) {
+ var meta types.JSONB
+ err := json.Unmarshal([]byte(tt.metaJSON), &meta.Decoded)
+ assert.Equal(t, nil, err)
+ tt.dnsProvider.Meta = meta
+ envs, err := tt.dnsProvider.GetAcmeShEnvVars()
+ assert.Equal(t, tt.want.err, err)
+ for _, i := range tt.want.envs {
+ assert.Contains(t, envs, i)
+ }
+ })
+ }
+}
+
+func (s *testsuite) TestList() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ s.mock.
+ ExpectQuery(regexp.QuoteMeta(`SELECT count(*) FROM "dns_provider" WHERE "dns_provider"."acmesh_name" LIKE $1 AND "dns_provider"."is_deleted" = $2`)).
+ WithArgs("dns%", 0).
+ WillReturnRows(s.listCountRows)
+
+ s.mock.
+ ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "dns_provider" WHERE "dns_provider"."acmesh_name" LIKE $1 AND "dns_provider"."is_deleted" = $2 ORDER BY name asc LIMIT $3`)).
+ WithArgs("dns%", 0, 8).
+ WillReturnRows(s.listRows)
+
+ p := model.PageInfo{
+ Offset: 0,
+ Limit: 8,
+ Sort: []model.Sort{
+ {
+ Field: "name",
+ Direction: "asc",
+ },
+ },
+ }
+
+ f := []model.Filter{
+ {
+ Field: "acmesh_name",
+ Modifier: "starts",
+ Value: []string{"dns"},
+ },
+ }
+
+ resp, err := List(p, f)
+ require.NoError(s.T(), err)
+ assert.Equal(s.T(), int64(2), resp.Total)
+ assert.Equal(s.T(), p.Offset, resp.Offset)
+ assert.Equal(s.T(), p.Limit, resp.Limit)
+ assert.Equal(s.T(), p.Limit, resp.Limit)
+ assert.Equal(s.T(), p.Sort, resp.Sort)
+ assert.Equal(s.T(), f, resp.Filter)
+
+ require.NoError(s.T(), s.mock.ExpectationsWereMet())
+}
+
+func (s *testsuite) TestSave() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ s.mock.ExpectBegin()
+ s.mock.ExpectQuery(regexp.QuoteMeta(`INSERT INTO "dns_provider" ("created_at","updated_at","is_deleted","user_id","name","acmesh_name","dns_sleep","meta") VALUES ($1,$2,$3,$4,$5,$6,$7,$8) RETURNING "id"`)).
+ WithArgs(
+ sqlmock.AnyArg(),
+ sqlmock.AnyArg(),
+ 0,
+ 100,
+ "Route53",
+ "dns_route53",
+ 10,
+ sqlmock.AnyArg(),
+ ).
+ WillReturnRows(sqlmock.NewRows([]string{"id"}).AddRow("11"))
+ s.mock.ExpectCommit()
+
+ // New model, no user
+ m := Model{
+ Name: "Route53",
+ AcmeshName: "dns_route53",
+ DNSSleep: 10,
+ Meta: getMeta(),
+ }
+ err := m.Save()
+ assert.Equal(s.T(), "User ID must be specified", err.Error())
+
+ // Success
+ m.UserID = 100
+ err = m.Save()
+ require.NoError(s.T(), err)
+ require.NoError(s.T(), s.mock.ExpectationsWereMet())
+}
+
+func (s *testsuite) TestDelete() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ s.mock.ExpectBegin()
+ s.mock.
+ ExpectExec(regexp.QuoteMeta(`UPDATE "dns_provider" SET "is_deleted"=$1 WHERE "dns_provider"."id" = $2 AND "dns_provider"."is_deleted" = $3`)).
+ WithArgs(1, 10, 0).
+ WillReturnResult(sqlmock.NewResult(0, 1))
+ s.mock.ExpectCommit()
+
+ m := Model{}
+ err := m.Delete()
+ assert.Equal(s.T(), "Unable to delete a new object", err.Error())
+
+ m2 := Model{
+ Base: model.Base{
+ ID: 10,
+ },
+ }
+ err2 := m2.Delete()
+ require.NoError(s.T(), err2)
+ require.NoError(s.T(), s.mock.ExpectationsWereMet())
+}
diff --git a/backend/internal/entity/dnsprovider/methods.go b/backend/internal/entity/dnsprovider/methods.go
new file mode 100644
index 000000000..450f15e8e
--- /dev/null
+++ b/backend/internal/entity/dnsprovider/methods.go
@@ -0,0 +1,50 @@
+package dnsprovider
+
+import (
+ "npm/internal/entity"
+ "npm/internal/model"
+)
+
+// GetByID finds a row by ID
+func GetByID(id uint) (Model, error) {
+ var m Model
+ err := m.LoadByID(id)
+ return m, err
+}
+
+// List will return a list of certificates
+func List(pageInfo model.PageInfo, filters []model.Filter) (entity.ListResponse, error) {
+ var result entity.ListResponse
+
+ defaultSort := model.Sort{
+ Field: "name",
+ Direction: "ASC",
+ }
+
+ dbo := entity.ListQueryBuilder(&pageInfo, filters, entity.GetFilterMap(Model{}, true))
+
+ // Get count of items in this search
+ var totalRows int64
+ if res := dbo.Model(&Model{}).Count(&totalRows); res.Error != nil {
+ return result, res.Error
+ }
+
+ // Get rows
+ dbo = entity.AddOffsetLimitToList(dbo, &pageInfo)
+ dbo = entity.AddOrderToList(dbo, pageInfo.Sort, defaultSort)
+ items := make([]Model, 0)
+ if res := dbo.Find(&items); res.Error != nil {
+ return result, res.Error
+ }
+
+ result = entity.ListResponse{
+ Items: items,
+ Total: totalRows,
+ Limit: pageInfo.Limit,
+ Offset: pageInfo.Offset,
+ Sort: pageInfo.GetSort(defaultSort),
+ Filter: filters,
+ }
+
+ return result, nil
+}
diff --git a/backend/internal/entity/dnsprovider/model.go b/backend/internal/entity/dnsprovider/model.go
new file mode 100644
index 000000000..e128922b9
--- /dev/null
+++ b/backend/internal/entity/dnsprovider/model.go
@@ -0,0 +1,88 @@
+package dnsprovider
+
+import (
+ "fmt"
+
+ "npm/internal/database"
+ "npm/internal/dnsproviders"
+ "npm/internal/logger"
+ "npm/internal/model"
+ "npm/internal/types"
+
+ "github.com/rotisserie/eris"
+)
+
+// Model is the model
+type Model struct {
+ model.Base
+ UserID uint `json:"user_id" gorm:"column:user_id" filter:"user_id,integer"`
+ Name string `json:"name" gorm:"column:name" filter:"name,string"`
+ AcmeshName string `json:"acmesh_name" gorm:"column:acmesh_name" filter:"acmesh_name,string"`
+ DNSSleep int `json:"dns_sleep" gorm:"column:dns_sleep" filter:"dns_sleep,integer"`
+ Meta types.JSONB `json:"meta" gorm:"column:meta"`
+}
+
+// TableName overrides the table name used by gorm
+func (Model) TableName() string {
+ return "dns_provider"
+}
+
+// LoadByID will load from an ID
+func (m *Model) LoadByID(id uint) error {
+ db := database.GetDB()
+ result := db.First(&m, id)
+ return result.Error
+}
+
+// Save will save this model to the DB
+func (m *Model) Save() error {
+ if m.UserID == 0 {
+ return eris.Errorf("User ID must be specified")
+ }
+
+ db := database.GetDB()
+ result := db.Save(m)
+ return result.Error
+}
+
+// Delete will mark a row as deleted
+func (m *Model) Delete() error {
+ if m.ID == 0 {
+ // Can't delete a new object
+ return eris.New("Unable to delete a new object")
+ }
+ db := database.GetDB()
+ result := db.Delete(m)
+ return result.Error
+}
+
+// GetAcmeShEnvVars returns the env vars required for acme.sh dns cert requests
+func (m *Model) GetAcmeShEnvVars() ([]string, error) {
+ // First, fetch the provider obj with this AcmeShName
+ _, err := dnsproviders.Get(m.AcmeshName)
+ if err != nil {
+ logger.Error("GetAcmeShEnvVarsError", err)
+ return nil, err
+ }
+
+ // Convert the meta interface to envs slice for use by acme.sh
+ envs := getEnvsFromMeta(m.Meta.Decoded)
+ return envs, nil
+}
+
+func getEnvsFromMeta(meta any) []string {
+ if rec, ok := meta.(map[string]any); ok {
+ envs := make([]string, 0)
+ for key, val := range rec {
+ if f, ok := val.(string); ok {
+ envs = append(envs, fmt.Sprintf(`%s=%v`, key, f))
+ } else if f, ok := val.(int); ok {
+ envs = append(envs, fmt.Sprintf(`%s=%d`, key, f))
+ }
+ }
+ return envs
+ }
+
+ logger.Debug("getEnvsFromMeta: meta is not an map of strings")
+ return nil
+}
diff --git a/backend/internal/entity/filters.go b/backend/internal/entity/filters.go
new file mode 100644
index 000000000..906a25e54
--- /dev/null
+++ b/backend/internal/entity/filters.go
@@ -0,0 +1,19 @@
+package entity
+
+import (
+ "npm/internal/model"
+ "npm/internal/tags"
+)
+
+// GetFilterMap returns the filter map
+// _ was called `includeBaseEntity`
+func GetFilterMap(m any, _ bool) map[string]model.FilterMapValue {
+ filterMap := tags.GetFilterMap(m, "")
+
+ // TODO: this is done in GetFilterMap isn't it?
+ // if includeBaseEntity {
+ // return mergeFilterMaps(tags.GetFilterMap(model.Base{}, ""), filterMap)
+ // }
+
+ return filterMap
+}
diff --git a/backend/internal/entity/host/entity_test.go b/backend/internal/entity/host/entity_test.go
new file mode 100644
index 000000000..031ec860f
--- /dev/null
+++ b/backend/internal/entity/host/entity_test.go
@@ -0,0 +1,233 @@
+package host
+
+import (
+ "regexp"
+ "testing"
+ "time"
+
+ "npm/internal/model"
+ "npm/internal/status"
+ "npm/internal/test"
+ "npm/internal/types"
+
+ "github.com/DATA-DOG/go-sqlmock"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+ "github.com/stretchr/testify/suite"
+ "go.uber.org/goleak"
+)
+
+// +------------+
+// | Setup |
+// +------------+
+
+type testsuite struct {
+ suite.Suite
+ mock sqlmock.Sqlmock
+ singleRow *sqlmock.Rows
+}
+
+func makeJSONB(str string) types.JSONB {
+ m := types.JSONB{}
+ m.UnmarshalJSON([]byte(str))
+ return m
+}
+
+// SetupTest is executed before each test
+func (s *testsuite) SetupTest() {
+ var err error
+ s.mock, err = test.Setup()
+ require.NoError(s.T(), err)
+
+ // These rows need to be intantiated for each test as they are
+ // read in the db object, and their row position is not resettable
+ // between tests.
+ s.singleRow = sqlmock.NewRows([]string{
+ "id",
+ "user_id",
+ "type",
+ "nginx_template_id",
+ "listen_interface",
+ "domain_names",
+ "upstream_id",
+ "proxy_scheme",
+ "proxy_host",
+ "proxy_port",
+ "certificate_id",
+ "access_list_id",
+ "ssl_forced",
+ "caching_enabled",
+ "block_exploits",
+ "allow_websocket_upgrade",
+ "http2_support",
+ "hsts_enabled",
+ "hsts_subdomains",
+ "paths",
+ "advanced_config",
+ "status",
+ "error_message",
+ "is_disabled",
+ }).AddRow(
+ 10, // ID
+ 100, // UserID
+ "proxy", // Type
+ 20, // NginxTemplateID
+ "", // ListenInterface
+ makeJSONB("[\"example.com\"]"), // DomainNames
+ 0, // UpstreamID
+ "http", // ProxyScheme
+ "127.0.0.1", // ProxyHost
+ 3000, // ProxyPort
+ types.NullableDBUint{Uint: 0}, // CertificateID
+ types.NullableDBUint{Uint: 0}, // AccessListID
+ false, // SSLForced
+ false, // CachingEnabled
+ false, // BlockExploits
+ false, // AllowWebsocketUpgrade
+ false, // HTTP2Support
+ false, // HSTSEnabled
+ false, // HSTSSubdomains
+ "", // Paths
+ "", // AdvancedConfig
+ status.StatusReady, // Status
+ "", // ErrorMessage
+ false, // IsDisabled
+ )
+}
+
+// In order for 'go test' to run this suite, we need to create
+// a normal test function and pass our suite to suite.Run
+func TestExampleTestSuite(t *testing.T) {
+ suite.Run(t, new(testsuite))
+}
+
+// +------------+
+// | Tests |
+// +------------+
+
+func (s *testsuite) TestGetByID() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ s.mock.
+ ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "host" WHERE "host"."id" = $1 AND "host"."is_deleted" = $2 ORDER BY "host"."id" LIMIT $3`)).
+ WithArgs(10, 0, 1).
+ WillReturnRows(s.singleRow)
+
+ m, err := GetByID(10)
+ require.NoError(s.T(), err)
+ require.NoError(s.T(), s.mock.ExpectationsWereMet())
+ assert.Equal(s.T(), uint(10), m.ID)
+}
+
+func (s *testsuite) TestSave() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ s.mock.ExpectBegin()
+ s.mock.ExpectQuery(regexp.QuoteMeta(`INSERT INTO "host" ("created_at","updated_at","is_deleted","user_id","type","nginx_template_id","listen_interface","domain_names","upstream_id","proxy_scheme","proxy_host","proxy_port","certificate_id","access_list_id","ssl_forced","caching_enabled","block_exploits","allow_websocket_upgrade","http2_support","hsts_enabled","hsts_subdomains","paths","advanced_config","status","error_message","is_disabled") VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26) RETURNING "id"`)).
+ WithArgs(
+ sqlmock.AnyArg(),
+ sqlmock.AnyArg(),
+ 0,
+ 100,
+ "proxy",
+ 20,
+ "",
+ "[\"example.com\"]",
+ nil,
+ "http",
+ "127.0.0.1",
+ 3000, // proxy_port
+ nil,
+ nil,
+ false,
+ false,
+ false,
+ false,
+ false,
+ false,
+ false,
+ "",
+ "",
+ status.StatusReady,
+ "",
+ false,
+ ).
+ WillReturnRows(sqlmock.NewRows([]string{"id"}).AddRow("11"))
+ s.mock.ExpectCommit()
+
+ // New model, as system
+ m := Model{
+ UserID: 100,
+ Type: "proxy",
+ NginxTemplateID: 20,
+ DomainNames: makeJSONB("[\"example.com\"]"),
+ ProxyScheme: "http",
+ ProxyHost: "127.0.0.1",
+ ProxyPort: 3000,
+ Status: status.StatusReady,
+ }
+ err := m.Save(true)
+ require.NoError(s.T(), err)
+ require.NoError(s.T(), s.mock.ExpectationsWereMet())
+}
+
+func (s *testsuite) TestDelete() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ s.mock.ExpectBegin()
+ s.mock.
+ ExpectExec(regexp.QuoteMeta(`UPDATE "host" SET "is_deleted"=$1 WHERE "host"."id" = $2 AND "host"."is_deleted" = $3`)).
+ WithArgs(1, 10, 0).
+ WillReturnResult(sqlmock.NewResult(0, 1))
+ s.mock.ExpectCommit()
+
+ m := Model{}
+ err := m.Delete()
+ assert.Equal(s.T(), "Unable to delete a new object", err.Error())
+
+ m2 := Model{
+ Base: model.Base{
+ ID: 10,
+ },
+ }
+ err2 := m2.Delete()
+ require.NoError(s.T(), err2)
+ require.NoError(s.T(), s.mock.ExpectationsWereMet())
+}
+
+func (s *testsuite) TestGetTemplate() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ m := Model{
+ Base: model.Base{
+ ID: 10,
+ CreatedAt: time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC).UnixMilli(),
+ UpdatedAt: time.Date(2018, 8, 12, 7, 30, 24, 16, time.UTC).UnixMilli(),
+ },
+ UserID: 100,
+ Type: "proxy",
+ NginxTemplateID: 20,
+ DomainNames: makeJSONB("[\"example.com\"]"),
+ ProxyScheme: "http",
+ ProxyHost: "127.0.0.1",
+ ProxyPort: 3000,
+ Status: status.StatusReady,
+ }
+
+ t := m.GetTemplate()
+ assert.Equal(s.T(), uint(10), t.ID)
+ assert.Equal(s.T(), "Mon, 01 Jan 2018 10:00:00 AEST", t.CreatedAt)
+ assert.Equal(s.T(), "Sun, 12 Aug 2018 17:30:24 AEST", t.UpdatedAt)
+ assert.Equal(s.T(), uint(100), t.UserID)
+ assert.Equal(s.T(), "proxy", t.Type)
+ assert.Equal(s.T(), uint(20), t.NginxTemplateID)
+ assert.Equal(s.T(), "http", t.ProxyScheme)
+ assert.Equal(s.T(), "127.0.0.1", t.ProxyHost)
+ assert.Equal(s.T(), 3000, t.ProxyPort)
+ assert.Equal(s.T(), []string{"example.com"}, t.DomainNames)
+ assert.Equal(s.T(), status.StatusReady, t.Status)
+}
diff --git a/backend/internal/entity/host/methods.go b/backend/internal/entity/host/methods.go
new file mode 100644
index 000000000..b73f6f017
--- /dev/null
+++ b/backend/internal/entity/host/methods.go
@@ -0,0 +1,94 @@
+package host
+
+import (
+ "npm/internal/database"
+ "npm/internal/entity"
+ "npm/internal/logger"
+ "npm/internal/model"
+)
+
+// GetByID finds a Host by ID
+func GetByID(id uint) (Model, error) {
+ var m Model
+ err := m.LoadByID(id)
+ return m, err
+}
+
+// List will return a list of hosts
+func List(pageInfo model.PageInfo, filters []model.Filter, expand []string) (entity.ListResponse, error) {
+ var result entity.ListResponse
+
+ defaultSort := model.Sort{
+ Field: "domain_names",
+ Direction: "ASC",
+ }
+
+ dbo := entity.ListQueryBuilder(&pageInfo, filters, entity.GetFilterMap(Model{}, true))
+
+ // Get count of items in this search
+ var totalRows int64
+ if res := dbo.Model(&Model{}).Count(&totalRows); res.Error != nil {
+ return result, res.Error
+ }
+
+ // Get rows
+ dbo = entity.AddOffsetLimitToList(dbo, &pageInfo)
+ dbo = entity.AddOrderToList(dbo, pageInfo.Sort, defaultSort)
+ items := make([]Model, 0)
+ if res := dbo.Find(&items); res.Error != nil {
+ return result, res.Error
+ }
+
+ if expand != nil {
+ for idx := range items {
+ expandErr := items[idx].Expand(expand)
+ if expandErr != nil {
+ logger.Error("HostsExpansionError", expandErr)
+ }
+ }
+ }
+
+ result = entity.ListResponse{
+ Items: items,
+ Total: totalRows,
+ Limit: pageInfo.Limit,
+ Offset: pageInfo.Offset,
+ Sort: pageInfo.GetSort(defaultSort),
+ Filter: filters,
+ }
+
+ return result, nil
+}
+
+// GetUpstreamUseCount returns the number of hosts that are using
+// an upstream, and have not been deleted.
+func GetUpstreamUseCount(upstreamID uint) int64 {
+ db := database.GetDB()
+
+ var count int64
+ if result := db.Model(&Model{}).Where("upstream_id = ?", upstreamID).Count(&count); result.Error != nil {
+ logger.Debug("GetUpstreamUseCount Error: %v", result.Error)
+ return 0
+ }
+ return count
+}
+
+// GetCertificateUseCount returns the number of hosts that are using
+// a certificate, and have not been deleted.
+func GetCertificateUseCount(certificateID uint) int64 {
+ db := database.GetDB()
+
+ var count int64
+ if result := db.Model(&Model{}).Where("certificate_id = ?", certificateID).Count(&count); result.Error != nil {
+ logger.Debug("GetUpstreamUseCount Error: %v", result.Error)
+ return 0
+ }
+ return count
+}
+
+// AddPendingJobs is intended to be used at startup to add
+// anything pending to the JobQueue just once, based on
+// the database row status
+func AddPendingJobs() {
+ // todo
+}
diff --git a/backend/internal/entity/host/model.go b/backend/internal/entity/host/model.go
new file mode 100644
index 000000000..8907eda2c
--- /dev/null
+++ b/backend/internal/entity/host/model.go
@@ -0,0 +1,172 @@
+package host
+
+import (
+ "time"
+
+ "npm/internal/database"
+ "npm/internal/entity/certificate"
+ "npm/internal/entity/nginxtemplate"
+ "npm/internal/entity/upstream"
+ "npm/internal/entity/user"
+ "npm/internal/model"
+ "npm/internal/status"
+ "npm/internal/types"
+ "npm/internal/util"
+
+ "github.com/rotisserie/eris"
+)
+
+const (
+ // ProxyHostType is self explanatory
+ ProxyHostType = "proxy"
+ // RedirectionHostType is self explanatory
+ RedirectionHostType = "redirection"
+ // DeadHostType is self explanatory
+ DeadHostType = "dead"
+)
+
+// Model is the model
+type Model struct {
+ model.Base
+ UserID uint `json:"user_id" gorm:"column:user_id" filter:"user_id,integer"`
+ Type string `json:"type" gorm:"column:type" filter:"type,string"`
+ NginxTemplateID uint `json:"nginx_template_id" gorm:"column:nginx_template_id" filter:"nginx_template_id,integer"`
+ ListenInterface string `json:"listen_interface" gorm:"column:listen_interface" filter:"listen_interface,string"`
+ DomainNames types.JSONB `json:"domain_names" gorm:"column:domain_names" filter:"domain_names,string"`
+ UpstreamID types.NullableDBUint `json:"upstream_id" gorm:"column:upstream_id" filter:"upstream_id,integer"`
+ ProxyScheme string `json:"proxy_scheme" gorm:"column:proxy_scheme" filter:"proxy_scheme,string"`
+ ProxyHost string `json:"proxy_host" gorm:"column:proxy_host" filter:"proxy_host,string"`
+ ProxyPort int `json:"proxy_port" gorm:"column:proxy_port" filter:"proxy_port,integer"`
+ CertificateID types.NullableDBUint `json:"certificate_id" gorm:"column:certificate_id" filter:"certificate_id,integer"`
+ AccessListID types.NullableDBUint `json:"access_list_id" gorm:"column:access_list_id" filter:"access_list_id,integer"`
+ SSLForced bool `json:"ssl_forced" gorm:"column:ssl_forced" filter:"ssl_forced,boolean"`
+ CachingEnabled bool `json:"caching_enabled" gorm:"column:caching_enabled" filter:"caching_enabled,boolean"`
+ BlockExploits bool `json:"block_exploits" gorm:"column:block_exploits" filter:"block_exploits,boolean"`
+ AllowWebsocketUpgrade bool `json:"allow_websocket_upgrade" gorm:"column:allow_websocket_upgrade" filter:"allow_websocket_upgrade,boolean"`
+ HTTP2Support bool `json:"http2_support" gorm:"column:http2_support" filter:"http2_support,boolean"`
+ HSTSEnabled bool `json:"hsts_enabled" gorm:"column:hsts_enabled" filter:"hsts_enabled,boolean"`
+ HSTSSubdomains bool `json:"hsts_subdomains" gorm:"column:hsts_subdomains" filter:"hsts_subdomains,boolean"`
+ Paths string `json:"paths" gorm:"column:paths" filter:"paths,string"`
+ AdvancedConfig string `json:"advanced_config" gorm:"column:advanced_config" filter:"advanced_config,string"`
+ Status string `json:"status" gorm:"column:status" filter:"status,string"`
+ ErrorMessage string `json:"error_message" gorm:"column:error_message" filter:"error_message,string"`
+ IsDisabled bool `json:"is_disabled" gorm:"column:is_disabled" filter:"is_disabled,boolean"`
+ // Expansions
+ Certificate *certificate.Model `json:"certificate,omitempty" gorm:"-"`
+ NginxTemplate *nginxtemplate.Model `json:"nginx_template,omitempty" gorm:"-"`
+ User *user.Model `json:"user,omitempty" gorm:"-"`
+ Upstream *upstream.Model `json:"upstream,omitempty" gorm:"-"`
+}
+
+// TableName overrides the table name used by gorm
+func (Model) TableName() string {
+ return "host"
+}
+
+// LoadByID will load from an ID
+func (m *Model) LoadByID(id uint) error {
+ db := database.GetDB()
+ result := db.First(&m, id)
+ return result.Error
+}
+
+// Save will save this model to the DB
+func (m *Model) Save(skipConfiguration bool) error {
+ if m.UserID == 0 {
+ return eris.Errorf("User ID must be specified")
+ }
+
+ if !skipConfiguration {
+ // Set this host as requiring reconfiguration
+ m.Status = status.StatusReady
+ }
+
+ db := database.GetDB()
+ result := db.Save(m)
+ return result.Error
+}
+
+// Delete will mark a row as deleted
+func (m *Model) Delete() error {
+ if m.ID == 0 {
+ // Can't delete a new object
+ return eris.New("Unable to delete a new object")
+ }
+ db := database.GetDB()
+ result := db.Delete(m)
+ return result.Error
+}
+
+// Expand will fill in more properties
+func (m *Model) Expand(items []string) error {
+ var err error
+
+ // Always expand the upstream
+ if m.UpstreamID.Uint > 0 {
+ var u upstream.Model
+ u, err = upstream.GetByID(m.UpstreamID.Uint)
+ m.Upstream = &u
+ }
+
+ if util.SliceContainsItem(items, "user") && m.ID > 0 {
+ var usr user.Model
+ usr, err = user.GetByID(m.UserID)
+ m.User = &usr
+ }
+
+ if util.SliceContainsItem(items, "certificate") && m.CertificateID.Uint > 0 {
+ var cert certificate.Model
+ cert, err = certificate.GetByID(m.CertificateID.Uint)
+ m.Certificate = &cert
+ }
+
+ if util.SliceContainsItem(items, "nginxtemplate") && m.NginxTemplateID > 0 {
+ var templ nginxtemplate.Model
+ templ, err = nginxtemplate.GetByID(m.NginxTemplateID)
+ m.NginxTemplate = &templ
+ }
+
+ if util.SliceContainsItem(items, "upstream") && m.UpstreamID.Uint > 0 {
+ var ups upstream.Model
+ ups, err = upstream.GetByID(m.UpstreamID.Uint)
+ m.Upstream = &ups
+ }
+
+ return err
+}
+
+// GetTemplate will convert the Model to a Template
+func (m *Model) GetTemplate() Template {
+ domainNames, _ := m.DomainNames.AsStringArray()
+
+ t := Template{
+ ID: m.ID,
+ CreatedAt: time.UnixMilli(m.CreatedAt).Format(time.RFC1123),
+ UpdatedAt: time.UnixMilli(m.UpdatedAt).Format(time.RFC1123),
+ UserID: m.UserID,
+ Type: m.Type,
+ NginxTemplateID: m.NginxTemplateID,
+ ProxyScheme: m.ProxyScheme,
+ ProxyHost: m.ProxyHost,
+ ProxyPort: m.ProxyPort,
+ ListenInterface: m.ListenInterface,
+ DomainNames: domainNames,
+ UpstreamID: m.UpstreamID.Uint,
+ CertificateID: m.CertificateID.Uint,
+ AccessListID: m.AccessListID.Uint,
+ SSLForced: m.SSLForced,
+ CachingEnabled: m.CachingEnabled,
+ BlockExploits: m.BlockExploits,
+ AllowWebsocketUpgrade: m.AllowWebsocketUpgrade,
+ HTTP2Support: m.HTTP2Support,
+ HSTSEnabled: m.HSTSEnabled,
+ HSTSSubdomains: m.HSTSSubdomains,
+ Paths: m.Paths,
+ AdvancedConfig: m.AdvancedConfig,
+ Status: m.Status,
+ ErrorMessage: m.ErrorMessage,
+ IsDisabled: m.IsDisabled,
+ }
+
+ return t
+}
diff --git a/backend/internal/entity/host/template.go b/backend/internal/entity/host/template.go
new file mode 100644
index 000000000..640ee5d1d
--- /dev/null
+++ b/backend/internal/entity/host/template.go
@@ -0,0 +1,34 @@
+package host
+
+import "npm/internal/entity/upstream"
+
+// Template is the model given to the template parser, converted from the Model
+type Template struct {
+ ID uint
+ CreatedAt string
+ UpdatedAt string
+ UserID uint
+ Type string
+ NginxTemplateID uint
+ ProxyScheme string
+ ProxyHost string
+ ProxyPort int
+ ListenInterface string
+ DomainNames []string
+ UpstreamID uint
+ CertificateID uint
+ AccessListID uint
+ SSLForced bool
+ CachingEnabled bool
+ BlockExploits bool
+ AllowWebsocketUpgrade bool
+ HTTP2Support bool
+ HSTSEnabled bool
+ HSTSSubdomains bool
+ IsDisabled bool
+ Paths string
+ AdvancedConfig string
+ Status string
+ ErrorMessage string
+ Upstream upstream.Model
+}
diff --git a/backend/internal/entity/lists.go b/backend/internal/entity/lists.go
new file mode 100644
index 000000000..186253b7f
--- /dev/null
+++ b/backend/internal/entity/lists.go
@@ -0,0 +1,47 @@
+package entity
+
+import (
+ "npm/internal/database"
+ "npm/internal/model"
+
+ "gorm.io/gorm"
+)
+
+// ListResponse is the JSON response for users list
+type ListResponse struct {
+ Total int64 `json:"total"`
+ Offset int `json:"offset"`
+ Limit int `json:"limit"`
+ Sort []model.Sort `json:"sort"`
+ Filter []model.Filter `json:"filter,omitempty"`
+ Items any `json:"items,omitempty"`
+}
+
+// ListQueryBuilder is used to setup queries for lists
+func ListQueryBuilder(
+ _ *model.PageInfo,
+ filters []model.Filter,
+ filterMap map[string]model.FilterMapValue,
+) *gorm.DB {
+ scopes := make([]func(*gorm.DB) *gorm.DB, 0)
+ scopes = append(scopes, ScopeFilters(filters, filterMap))
+ return database.GetDB().Scopes(scopes...)
+}
+
+// AddOrderToList is used after query above is used for counting
+// Postgres in particular doesn't like count(*) when ordering at the same time
+func AddOrderToList(
+ dbo *gorm.DB,
+ sort []model.Sort,
+ defaultSort model.Sort,
+) *gorm.DB {
+ return dbo.Scopes(ScopeOrderBy(sort, defaultSort))
+}
+
+// AddOffsetLimitToList is used after query above is used for pagination
+func AddOffsetLimitToList(
+ dbo *gorm.DB,
+ pageInfo *model.PageInfo,
+) *gorm.DB {
+ return dbo.Scopes(ScopeOffsetLimit(pageInfo))
+}
diff --git a/backend/internal/entity/nginxtemplate/methods.go b/backend/internal/entity/nginxtemplate/methods.go
new file mode 100644
index 000000000..67644aec9
--- /dev/null
+++ b/backend/internal/entity/nginxtemplate/methods.go
@@ -0,0 +1,50 @@
+package nginxtemplate
+
+import (
+ "npm/internal/entity"
+ "npm/internal/model"
+)
+
+// GetByID finds a Host by ID
+func GetByID(id uint) (Model, error) {
+ var m Model
+ err := m.LoadByID(id)
+ return m, err
+}
+
+// List will return a list of hosts
+func List(pageInfo model.PageInfo, filters []model.Filter) (entity.ListResponse, error) {
+ var result entity.ListResponse
+
+ defaultSort := model.Sort{
+ Field: "created_at",
+ Direction: "ASC",
+ }
+
+ dbo := entity.ListQueryBuilder(&pageInfo, filters, entity.GetFilterMap(Model{}, true))
+
+ // Get count of items in this search
+ var totalRows int64
+ if res := dbo.Model(&Model{}).Count(&totalRows); res.Error != nil {
+ return result, res.Error
+ }
+
+ // Get rows
+ dbo = entity.AddOffsetLimitToList(dbo, &pageInfo)
+ dbo = entity.AddOrderToList(dbo, pageInfo.Sort, defaultSort)
+ items := make([]Model, 0)
+ if res := dbo.Find(&items); res.Error != nil {
+ return result, res.Error
+ }
+
+ result = entity.ListResponse{
+ Items: items,
+ Total: totalRows,
+ Limit: pageInfo.Limit,
+ Offset: pageInfo.Offset,
+ Sort: pageInfo.GetSort(defaultSort),
+ Filter: filters,
+ }
+
+ return result, nil
+}
diff --git a/backend/internal/entity/nginxtemplate/model.go b/backend/internal/entity/nginxtemplate/model.go
new file mode 100644
index 000000000..1d04a06ad
--- /dev/null
+++ b/backend/internal/entity/nginxtemplate/model.go
@@ -0,0 +1,51 @@
+package nginxtemplate
+
+import (
+ "npm/internal/database"
+ "npm/internal/model"
+
+ "github.com/rotisserie/eris"
+)
+
+// Model is the model
+type Model struct {
+ model.Base
+ UserID uint `json:"user_id" gorm:"column:user_id" filter:"user_id,integer"`
+ Name string `json:"name" gorm:"column:name" filter:"name,string"`
+ Type string `json:"type" gorm:"column:type" filter:"type,string"`
+ Template string `json:"template" gorm:"column:template" filter:"template,string"`
+}
+
+// TableName overrides the table name used by gorm
+func (Model) TableName() string {
+ return "nginx_template"
+}
+
+// LoadByID will load from an ID
+func (m *Model) LoadByID(id uint) error {
+ db := database.GetDB()
+ result := db.First(&m, id)
+ return result.Error
+}
+
+// Save will save this model to the DB
+func (m *Model) Save() error {
+ if m.UserID == 0 {
+ return eris.Errorf("User ID must be specified")
+ }
+
+ db := database.GetDB()
+ result := db.Save(m)
+ return result.Error
+}
+
+// Delete will mark row as deleted
+func (m *Model) Delete() bool {
+ if m.ID == 0 {
+ // Can't delete a new object
+ return false
+ }
+ db := database.GetDB()
+ result := db.Delete(m)
+ return result.Error == nil
+}
diff --git a/backend/internal/entity/scopes.go b/backend/internal/entity/scopes.go
new file mode 100644
index 000000000..4c1922efe
--- /dev/null
+++ b/backend/internal/entity/scopes.go
@@ -0,0 +1,127 @@
+package entity
+
+import (
+ "fmt"
+ "strings"
+
+ "npm/internal/database"
+ "npm/internal/model"
+
+ "gorm.io/gorm"
+)
+
+// ScopeOffsetLimit ...
+func ScopeOffsetLimit(pageInfo *model.PageInfo) func(db *gorm.DB) *gorm.DB {
+ return func(db *gorm.DB) *gorm.DB {
+ if pageInfo.Offset > 0 || pageInfo.Limit > 0 {
+ return db.Limit(pageInfo.Limit).Offset(pageInfo.Offset)
+ }
+ return db
+ }
+}
+
+// ScopeOrderBy ...
+func ScopeOrderBy(sort []model.Sort, defaultSort model.Sort) func(db *gorm.DB) *gorm.DB {
+ return func(db *gorm.DB) *gorm.DB {
+ if sort != nil {
+ // Sort by items in slice
+ return db.Order(sortToOrderString(sort))
+ } else if defaultSort.Field != "" {
+ // Default to this sort
+ str := defaultSort.Field
+ if defaultSort.Direction != "" {
+ str = str + " " + defaultSort.Direction
+ }
+ return db.Order(str)
+ }
+ return db
+ }
+}
+
+// ScopeFilters ...
+func ScopeFilters(filters []model.Filter, filterMap map[string]model.FilterMapValue) func(db *gorm.DB) *gorm.DB {
+ return func(db *gorm.DB) *gorm.DB {
+ like := database.GetCaseInsensitiveLike()
+ for _, f := range filters {
+ // Lookup this filter field from the name map
+ if _, ok := filterMap[f.Field]; ok {
+ f.Field = filterMap[f.Field].Field
+ }
+
+ // For boolean fields, the value needs tweaking
+ if filterMap[f.Field].Type == "boolean" {
+ f.Value = parseBoolValue(f.Value[0])
+ }
+
+ // Quick adjustments for commonalities
+ if f.Modifier == "in" && len(f.Value) == 1 {
+ f.Modifier = "equals"
+ } else if f.Modifier == "notin" && len(f.Value) == 1 {
+ f.Modifier = "not"
+ }
+
+ switch strings.ToLower(f.Modifier) {
+ case "not":
+ db.Where(fmt.Sprintf("%s != ?", f.Field), f.Value)
+ case "min":
+ db.Where(fmt.Sprintf("%s >= ?", f.Field), f.Value)
+ case "max":
+ db.Where(fmt.Sprintf("%s <= ?", f.Field), f.Value)
+ case "greater":
+ db.Where(fmt.Sprintf("%s > ?", f.Field), f.Value)
+ case "lesser":
+ db.Where(fmt.Sprintf("%s < ?", f.Field), f.Value)
+
+ // LIKE modifiers:
+ case "contains":
+ db.Where(fmt.Sprintf("%s %s ?", f.Field, like), `%`+f.Value[0]+`%`)
+ case "starts":
+ db.Where(fmt.Sprintf("%s %s ?", f.Field, like), f.Value[0]+`%`)
+ case "ends":
+ db.Where(fmt.Sprintf("%s %s ?", f.Field, like), `%`+f.Value[0])
+
+ // Array parameter modifiers:
+ case "in":
+ db.Where(fmt.Sprintf("%s IN ?", f.Field), f.Value)
+ case "notin":
+ db.Where(fmt.Sprintf("%s NOT IN ?", f.Field), f.Value)
+
+ // Default: equals
+ default:
+ db.Where(fmt.Sprintf("%s = ?", f.Field), f.Value)
+ }
+ }
+ return db
+ }
+}
+
+func sortToOrderString(sorts []model.Sort) string {
+ strs := make([]string, 0)
+ for _, i := range sorts {
+ str := i.Field
+ if i.Direction != "" {
+ str = str + " " + i.Direction
+ }
+ strs = append(strs, str)
+ }
+ return strings.Join(strs, ", ")
+}
+
+func parseBoolValue(v string) []string {
+ bVal := "0"
+ switch strings.ToLower(v) {
+ case "yes":
+ fallthrough
+ case "true":
+ fallthrough
+ case "on":
+ fallthrough
+ case "t":
+ fallthrough
+ case "1":
+ fallthrough
+ case "y":
+ bVal = "1"
+ }
+ return []string{bVal}
+}
diff --git a/backend/internal/entity/scopes_test.go b/backend/internal/entity/scopes_test.go
new file mode 100644
index 000000000..b0c61813e
--- /dev/null
+++ b/backend/internal/entity/scopes_test.go
@@ -0,0 +1,33 @@
+package entity
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestParseBoolValue(t *testing.T) {
+ tests := []struct {
+ input string
+ expected []string
+ }{
+ {"yes", []string{"1"}},
+ {"true", []string{"1"}},
+ {"on", []string{"1"}},
+ {"t", []string{"1"}},
+ {"1", []string{"1"}},
+ {"y", []string{"1"}},
+ {"no", []string{"0"}},
+ {"false", []string{"0"}},
+ {"off", []string{"0"}},
+ {"f", []string{"0"}},
+ {"0", []string{"0"}},
+ {"n", []string{"0"}},
+ {"random", []string{"0"}},
+ }
+
+ for _, test := range tests {
+ result := parseBoolValue(test.input)
+ assert.Equal(t, test.expected, result, "Input: %s", test.input)
+ }
+}
diff --git a/backend/internal/entity/setting/auth_methods.go b/backend/internal/entity/setting/auth_methods.go
new file mode 100644
index 000000000..3d54f2958
--- /dev/null
+++ b/backend/internal/entity/setting/auth_methods.go
@@ -0,0 +1,32 @@
+package setting
+
+import (
+ "encoding/json"
+ "slices"
+)
+
+// GetAuthMethods returns the authentication methods enabled for this site
+func GetAuthMethods() ([]string, error) {
+ var m Model
+ if err := m.LoadByName("auth-methods"); err != nil {
+ return nil, err
+ }
+
+ var r []string
+ if err := json.Unmarshal([]byte(m.Value.String()), &r); err != nil {
+ return nil, err
+ }
+
+ return r, nil
+}
+
+// AuthMethodEnabled checks that the auth method given is
+// enabled in the db setting
+func AuthMethodEnabled(method string) bool {
+ r, err := GetAuthMethods()
+ if err != nil {
+ return false
+ }
+
+ return slices.Contains(r, method)
+}
diff --git a/backend/internal/entity/setting/ldap.go b/backend/internal/entity/setting/ldap.go
new file mode 100644
index 000000000..d29e63ca1
--- /dev/null
+++ b/backend/internal/entity/setting/ldap.go
@@ -0,0 +1,32 @@
+package setting
+
+import (
+ "encoding/json"
+)
+
+// LDAPSettings are the settings for LDAP that come from
+// the `ldap-auth` setting value
+type LDAPSettings struct {
+ Host string `json:"host"`
+ BaseDN string `json:"base_dn"`
+ UserDN string `json:"user_dn"`
+ EmailProperty string `json:"email_property"`
+ NameProperty string `json:"name_property"`
+ SelfFilter string `json:"self_filter"`
+ AutoCreateUser bool `json:"auto_create_user"`
+}
+
+// GetLDAPSettings will return the LDAP settings
+func GetLDAPSettings() (LDAPSettings, error) {
+ var l LDAPSettings
+ var m Model
+ if err := m.LoadByName("ldap-auth"); err != nil {
+ return l, err
+ }
+
+ if err := json.Unmarshal([]byte(m.Value.String()), &l); err != nil {
+ return l, err
+ }
+
+ return l, nil
+}
diff --git a/backend/internal/entity/setting/methods.go b/backend/internal/entity/setting/methods.go
new file mode 100644
index 000000000..4fa03a0b8
--- /dev/null
+++ b/backend/internal/entity/setting/methods.go
@@ -0,0 +1,57 @@
+package setting
+
+import (
+ "npm/internal/entity"
+ "npm/internal/model"
+)
+
+// GetByID finds a setting by ID
+func GetByID(id int) (Model, error) {
+ var m Model
+ err := m.LoadByID(id)
+ return m, err
+}
+
+// GetByName finds a setting by name
+func GetByName(name string) (Model, error) {
+ var m Model
+ err := m.LoadByName(name)
+ return m, err
+}
+
+// List will return a list of settings
+func List(pageInfo model.PageInfo, filters []model.Filter) (entity.ListResponse, error) {
+ var result entity.ListResponse
+
+ defaultSort := model.Sort{
+ Field: "name",
+ Direction: "ASC",
+ }
+
+ dbo := entity.ListQueryBuilder(&pageInfo, filters, entity.GetFilterMap(Model{}, true))
+
+ // Get count of items in this search
+ var totalRows int64
+ if res := dbo.Model(&Model{}).Count(&totalRows); res.Error != nil {
+ return result, res.Error
+ }
+
+ // Get rows
+ dbo = entity.AddOffsetLimitToList(dbo, &pageInfo)
+ dbo = entity.AddOrderToList(dbo, pageInfo.Sort, defaultSort)
+ items := make([]Model, 0)
+ if res := dbo.Find(&items); res.Error != nil {
+ return result, res.Error
+ }
+
+ result = entity.ListResponse{
+ Items: items,
+ Total: totalRows,
+ Limit: pageInfo.Limit,
+ Offset: pageInfo.Offset,
+ Sort: pageInfo.GetSort(defaultSort),
+ Filter: filters,
+ }
+
+ return result, nil
+}
diff --git a/backend/internal/entity/setting/model.go b/backend/internal/entity/setting/model.go
new file mode 100644
index 000000000..7cf246183
--- /dev/null
+++ b/backend/internal/entity/setting/model.go
@@ -0,0 +1,50 @@
+package setting
+
+import (
+ "strings"
+
+ "npm/internal/database"
+ "npm/internal/model"
+
+ "gorm.io/datatypes"
+)
+
+// Model is the model
+type Model struct {
+ model.Base
+ Name string `json:"name" gorm:"column:name" filter:"name,string"`
+ Description string `json:"description" gorm:"column:description" filter:"description,string"`
+ Value datatypes.JSON `json:"value" gorm:"column:value"`
+}
+
+// TableName overrides the table name used by gorm
+func (Model) TableName() string {
+ return "setting"
+}
+
+// LoadByID will load from an ID
+func (m *Model) LoadByID(id int) error {
+ db := database.GetDB()
+ result := db.First(&m, id)
+ return result.Error
+}
+
+// LoadByName will load from a Name
+func (m *Model) LoadByName(name string) error {
+ db := database.GetDB()
+ result := db.Where("name = ?", strings.ToLower(name)).First(&m)
+ return result.Error
+}
+
+// Save will save this model to the DB
+func (m *Model) Save() error {
+ // ensure name is trimmed of whitespace
+ m.Name = strings.ToLower(strings.TrimSpace(m.Name))
+
+ db := database.GetDB()
+ if result := db.Save(m); result.Error != nil {
+ return result.Error
+ }
+
+ return nil
+}
diff --git a/backend/internal/entity/setting/oauth.go b/backend/internal/entity/setting/oauth.go
new file mode 100644
index 000000000..bfc5e0489
--- /dev/null
+++ b/backend/internal/entity/setting/oauth.go
@@ -0,0 +1,42 @@
+package setting
+
+import (
+ "encoding/json"
+)
+
+// OAuthSettings are the settings for OAuth that come from
+// the `oauth-auth` setting value
+type OAuthSettings struct {
+ AutoCreateUser bool `json:"auto_create_user"`
+ ClientID string `json:"client_id"`
+ ClientSecret string `json:"client_secret"`
+ AuthURL string `json:"authorization_url"`
+ TokenURL string `json:"token_url"`
+ Identifier string `json:"identifier"`
+ LogoutURL string `json:"logout_url"`
+ Scopes []string `json:"scopes"`
+ ResourceURL string `json:"resource_url"`
+}
+
+// GetOAuthSettings will return the OAuth settings
+func GetOAuthSettings() (OAuthSettings, error) {
+ var o OAuthSettings
+ var m Model
+ if err := m.LoadByName("oauth-auth"); err != nil {
+ return o, err
+ }
+
+ if err := json.Unmarshal([]byte(m.Value.String()), &o); err != nil {
+ return o, err
+ }
+
+ o.ApplyDefaults()
+ return o, nil
+}
+
+// ApplyDefaults will ensure there are defaults set
+func (m *OAuthSettings) ApplyDefaults() {
+ if m.Identifier == "" {
+ m.Identifier = "email"
+ }
+}
diff --git a/backend/internal/entity/stream/methods.go b/backend/internal/entity/stream/methods.go
new file mode 100644
index 000000000..d23cc8ee4
--- /dev/null
+++ b/backend/internal/entity/stream/methods.go
@@ -0,0 +1,50 @@
+package stream
+
+import (
+ "npm/internal/entity"
+ "npm/internal/model"
+)
+
+// GetByID finds a auth by ID
+func GetByID(id uint) (Model, error) {
+ var m Model
+ err := m.LoadByID(id)
+ return m, err
+}
+
+// List will return a list of hosts
+func List(pageInfo model.PageInfo, filters []model.Filter) (entity.ListResponse, error) {
+ var result entity.ListResponse
+
+ defaultSort := model.Sort{
+ Field: "name",
+ Direction: "ASC",
+ }
+
+ dbo := entity.ListQueryBuilder(&pageInfo, filters, entity.GetFilterMap(Model{}, true))
+
+ // Get count of items in this search
+ var totalRows int64
+ if res := dbo.Model(&Model{}).Count(&totalRows); res.Error != nil {
+ return result, res.Error
+ }
+
+ // Get rows
+ dbo = entity.AddOffsetLimitToList(dbo, &pageInfo)
+ dbo = entity.AddOrderToList(dbo, pageInfo.Sort, defaultSort)
+ items := make([]Model, 0)
+ if res := dbo.Find(&items); res.Error != nil {
+ return result, res.Error
+ }
+
+ result = entity.ListResponse{
+ Items: items,
+ Total: totalRows,
+ Limit: pageInfo.Limit,
+ Offset: pageInfo.Offset,
+ Sort: pageInfo.GetSort(defaultSort),
+ Filter: filters,
+ }
+
+ return result, nil
+}
diff --git a/backend/internal/entity/stream/model.go b/backend/internal/entity/stream/model.go
new file mode 100644
index 000000000..206b91ea4
--- /dev/null
+++ b/backend/internal/entity/stream/model.go
@@ -0,0 +1,54 @@
+package stream
+
+import (
+ "npm/internal/database"
+ "npm/internal/model"
+ "npm/internal/types"
+
+ "github.com/rotisserie/eris"
+)
+
+// Model is the model
+type Model struct {
+ model.Base
+ ExpiresOn types.DBDate `json:"expires_on" gorm:"column:expires_on" filter:"expires_on,integer"`
+ UserID uint `json:"user_id" gorm:"column:user_id" filter:"user_id,integer"`
+ Provider string `json:"provider" gorm:"column:provider" filter:"provider,string"`
+ Name string `json:"name" gorm:"column:name" filter:"name,string"`
+ DomainNames types.JSONB `json:"domain_names" gorm:"column:domain_names" filter:"domain_names,string"`
+ Meta types.JSONB `json:"-" gorm:"column:meta"`
+}
+
+// TableName overrides the table name used by gorm
+func (Model) TableName() string {
+ return "stream"
+}
+
+// LoadByID will load from an ID
+func (m *Model) LoadByID(id uint) error {
+ db := database.GetDB()
+ result := db.First(&m, id)
+ return result.Error
+}
+
+// Save will save this model to the DB
+func (m *Model) Save() error {
+ if m.UserID == 0 {
+ return eris.Errorf("User ID must be specified")
+ }
+
+ db := database.GetDB()
+ result := db.Save(m)
+ return result.Error
+}
+
+// Delete will mark row as deleted
+func (m *Model) Delete() bool {
+ if m.ID == 0 {
+ // Can't delete a new object
+ return false
+ }
+ db := database.GetDB()
+ result := db.Delete(m)
+ return result.Error == nil
+}
diff --git a/backend/internal/entity/upstream/methods.go b/backend/internal/entity/upstream/methods.go
new file mode 100644
index 000000000..45f38878d
--- /dev/null
+++ b/backend/internal/entity/upstream/methods.go
@@ -0,0 +1,56 @@
+package upstream
+
+import (
+ "npm/internal/entity"
+ "npm/internal/model"
+)
+
+// GetByID finds a Upstream by ID
+func GetByID(id uint) (Model, error) {
+ var m Model
+ err := m.LoadByID(id)
+ return m, err
+}
+
+// List will return a list of Upstreams
+func List(pageInfo model.PageInfo, filters []model.Filter, expand []string) (entity.ListResponse, error) {
+ var result entity.ListResponse
+
+ defaultSort := model.Sort{
+ Field: "name",
+ Direction: "ASC",
+ }
+
+ dbo := entity.ListQueryBuilder(&pageInfo, filters, entity.GetFilterMap(Model{}, true))
+
+ // Get count of items in this search
+ var totalRows int64
+ if res := dbo.Model(&Model{}).Count(&totalRows); res.Error != nil {
+ return result, res.Error
+ }
+
+ // Get rows
+ dbo = entity.AddOffsetLimitToList(dbo, &pageInfo)
+ dbo = entity.AddOrderToList(dbo, pageInfo.Sort, defaultSort)
+ items := make([]Model, 0)
+ if res := dbo.Find(&items); res.Error != nil {
+ return result, res.Error
+ }
+
+ // Expand to get servers, at a minimum
+ for idx := range items {
+ // nolint: errcheck, gosec
+ items[idx].Expand(expand)
+ }
+
+ result = entity.ListResponse{
+ Items: items,
+ Total: totalRows,
+ Limit: pageInfo.Limit,
+ Offset: pageInfo.Offset,
+ Sort: pageInfo.GetSort(defaultSort),
+ Filter: filters,
+ }
+
+ return result, nil
+}
diff --git a/backend/internal/entity/upstream/model.go b/backend/internal/entity/upstream/model.go
new file mode 100644
index 000000000..c817d2720
--- /dev/null
+++ b/backend/internal/entity/upstream/model.go
@@ -0,0 +1,116 @@
+package upstream
+
+import (
+ "strings"
+
+ "npm/internal/database"
+ "npm/internal/entity/nginxtemplate"
+ "npm/internal/entity/upstreamserver"
+ "npm/internal/entity/user"
+ "npm/internal/model"
+ "npm/internal/status"
+ "npm/internal/util"
+
+ "github.com/rotisserie/eris"
+)
+
+// Model is the model
+// See: http://nginx.org/en/docs/http/ngx_http_upstream_module.html#upstream
+type Model struct {
+ model.Base
+ UserID uint `json:"user_id" gorm:"column:user_id" filter:"user_id,integer"`
+ Name string `json:"name" gorm:"column:name" filter:"name,string"`
+ NginxTemplateID uint `json:"nginx_template_id" gorm:"column:nginx_template_id" filter:"nginx_template_id,integer"`
+ IPHash bool `json:"ip_hash" gorm:"column:ip_hash" filter:"ip_hash,boolean"`
+ NTLM bool `json:"ntlm" gorm:"column:ntlm" filter:"ntlm,boolean"`
+ Keepalive int `json:"keepalive" gorm:"column:keepalive" filter:"keepalive,integer"`
+ KeepaliveRequests int `json:"keepalive_requests" gorm:"column:keepalive_requests" filter:"keepalive_requests,integer"`
+ KeepaliveTime string `json:"keepalive_time" gorm:"column:keepalive_time" filter:"keepalive_time,string"`
+ KeepaliveTimeout string `json:"keepalive_timeout" gorm:"column:keepalive_timeout" filter:"keepalive_timeout,string"`
+ AdvancedConfig string `json:"advanced_config" gorm:"column:advanced_config" filter:"advanced_config,string"`
+ Status string `json:"status" gorm:"column:status" filter:"status,string"`
+ ErrorMessage string `json:"error_message" gorm:"column:error_message" filter:"error_message,string"`
+ // Expansions
+ Servers []upstreamserver.Model `json:"servers" gorm:"-"`
+ NginxTemplate *nginxtemplate.Model `json:"nginx_template,omitempty" gorm:"-"`
+ User *user.Model `json:"user,omitempty" gorm:"-"`
+}
+
+// TableName overrides the table name used by gorm
+func (Model) TableName() string {
+ return "upstream"
+}
+
+// LoadByID will load from an ID
+func (m *Model) LoadByID(id uint) error {
+ db := database.GetDB()
+ result := db.First(&m, id)
+ return result.Error
+}
+
+// Save will save this model to the DB
+func (m *Model) Save(skipConfiguration bool) error {
+ if m.UserID == 0 {
+ return eris.Errorf("User ID must be specified")
+ }
+
+ // ensure name is trimmed of whitespace
+ m.Name = strings.TrimSpace(m.Name)
+
+ if !skipConfiguration {
+ // Set this upstream as requiring reconfiguration
+ m.Status = status.StatusReady
+ }
+
+ db := database.GetDB()
+ if result := db.Save(m); result.Error != nil {
+ return result.Error
+ }
+
+ // Save Servers
+ var err error
+ for idx := range m.Servers {
+ // Continue if previous iteration didn't cause an error
+ if err == nil {
+ m.Servers[idx].UpstreamID = m.ID
+ err = m.Servers[idx].Save()
+ }
+ }
+
+ return err
+}
+
+// Delete will mark row as deleted
+func (m *Model) Delete() bool {
+ if m.ID == 0 {
+ // Can't delete a new object
+ return false
+ }
+ db := database.GetDB()
+ result := db.Delete(m)
+ return result.Error == nil
+}
+
+// Expand will fill in more properties
+func (m *Model) Expand(items []string) error {
+ var err error
+
+ // Always expand servers, if not done already
+ if len(m.Servers) == 0 {
+ m.Servers, err = upstreamserver.GetByUpstreamID(m.ID)
+ }
+
+ if util.SliceContainsItem(items, "user") && m.ID > 0 {
+ var usr user.Model
+ usr, err = user.GetByID(m.UserID)
+ m.User = &usr
+ }
+
+ if util.SliceContainsItem(items, "nginxtemplate") && m.NginxTemplateID > 0 {
+ var templ nginxtemplate.Model
+ templ, err = nginxtemplate.GetByID(m.NginxTemplateID)
+ m.NginxTemplate = &templ
+ }
+
+ return err
+}
diff --git a/backend/internal/entity/upstreamserver/methods.go b/backend/internal/entity/upstreamserver/methods.go
new file mode 100644
index 000000000..2aa207410
--- /dev/null
+++ b/backend/internal/entity/upstreamserver/methods.go
@@ -0,0 +1,59 @@
+package upstreamserver
+
+import (
+ "npm/internal/database"
+ "npm/internal/entity"
+ "npm/internal/model"
+)
+
+// GetByID finds a Upstream Server by ID
+func GetByID(id int) (Model, error) {
+ var m Model
+ err := m.LoadByID(id)
+ return m, err
+}
+
+// GetByUpstreamID finds all servers in the upstream
+func GetByUpstreamID(upstreamID uint) ([]Model, error) {
+ items := make([]Model, 0)
+ db := database.GetDB()
+ result := db.Where("upstream_id = ?", upstreamID).Order("server ASC").Find(&items)
+ return items, result.Error
+}
+
+// List will return a list of Upstreams
+func List(pageInfo model.PageInfo, filters []model.Filter) (entity.ListResponse, error) {
+ var result entity.ListResponse
+
+ defaultSort := model.Sort{
+ Field: "server",
+ Direction: "ASC",
+ }
+
+ dbo := entity.ListQueryBuilder(&pageInfo, filters, entity.GetFilterMap(Model{}, true))
+
+ // Get count of items in this search
+ var totalRows int64
+ if res := dbo.Model(&Model{}).Count(&totalRows); res.Error != nil {
+ return result, res.Error
+ }
+
+ // Get rows
+ dbo = entity.AddOffsetLimitToList(dbo, &pageInfo)
+ dbo = entity.AddOrderToList(dbo, pageInfo.Sort, defaultSort)
+ items := make([]Model, 0)
+ if res := dbo.Find(&items); res.Error != nil {
+ return result, res.Error
+ }
+
+ result = entity.ListResponse{
+ Items: items,
+ Total: totalRows,
+ Limit: pageInfo.Limit,
+ Offset: pageInfo.Offset,
+ Sort: pageInfo.GetSort(defaultSort),
+ Filter: filters,
+ }
+
+ return result, nil
+}
diff --git a/backend/internal/entity/upstreamserver/model.go b/backend/internal/entity/upstreamserver/model.go
new file mode 100644
index 000000000..d572bdc71
--- /dev/null
+++ b/backend/internal/entity/upstreamserver/model.go
@@ -0,0 +1,48 @@
+package upstreamserver
+
+import (
+ "npm/internal/database"
+ "npm/internal/model"
+)
+
+// Model is the model
+type Model struct {
+ model.Base
+ UpstreamID uint `json:"upstream_id" gorm:"column:upstream_id" filter:"upstream_id,integer"`
+ Server string `json:"server" gorm:"column:server" filter:"server,string"`
+ Weight int `json:"weight" gorm:"column:weight" filter:"weight,integer"`
+ MaxConns int `json:"max_conns" gorm:"column:max_conns" filter:"max_conns,integer"`
+ MaxFails int `json:"max_fails" gorm:"column:max_fails" filter:"max_fails,integer"`
+ FailTimeout int `json:"fail_timeout" gorm:"column:fail_timeout" filter:"fail_timeout,integer"`
+ Backup bool `json:"backup" gorm:"column:is_backup" filter:"backup,boolean"`
+}
+
+// TableName overrides the table name used by gorm
+func (Model) TableName() string {
+ return "upstream_server"
+}
+
+// LoadByID will load from an ID
+func (m *Model) LoadByID(id int) error {
+ db := database.GetDB()
+ result := db.First(&m, id)
+ return result.Error
+}
+
+// Save will save this model to the DB
+func (m *Model) Save() error {
+ db := database.GetDB()
+ result := db.Save(m)
+ return result.Error
+}
+
+// Delete will mark row as deleted
+func (m *Model) Delete() bool {
+ if m.ID == 0 {
+ // Can't delete a new object
+ return false
+ }
+ db := database.GetDB()
+ result := db.Delete(m)
+ return result.Error == nil
+}
diff --git a/backend/internal/entity/user/capabilities.go b/backend/internal/entity/user/capabilities.go
new file mode 100644
index 000000000..a4c74b89a
--- /dev/null
+++ b/backend/internal/entity/user/capabilities.go
@@ -0,0 +1,40 @@
+package user
+
+const (
+ // CapabilityFullAdmin can do anything
+ CapabilityFullAdmin = "full-admin"
+ // CapabilityAccessListsView access lists view
+ CapabilityAccessListsView = "access-lists.view"
+ // CapabilityAccessListsManage access lists manage
+ CapabilityAccessListsManage = "access-lists.manage"
+ // CapabilityAuditLogView audit log view
+ CapabilityAuditLogView = "audit-log.view"
+ // CapabilityCertificatesView certificates view
+ CapabilityCertificatesView = "certificates.view"
+ // CapabilityCertificatesManage certificates manage
+ CapabilityCertificatesManage = "certificates.manage"
+ // CapabilityCertificateAuthoritiesView certificate authorities view
+ CapabilityCertificateAuthoritiesView = "certificate-authorities.view"
+ // CapabilityCertificateAuthoritiesManage certificate authorities manage
+ CapabilityCertificateAuthoritiesManage = "certificate-authorities.manage"
+ // CapabilityDNSProvidersView dns providers view
+ CapabilityDNSProvidersView = "dns-providers.view"
+ // CapabilityDNSProvidersManage dns providers manage
+ CapabilityDNSProvidersManage = "dns-providers.manage"
+ // CapabilityHostsView hosts view
+ CapabilityHostsView = "hosts.view"
+ // CapabilityHostsManage hosts manage
+ CapabilityHostsManage = "hosts.manage"
+ // CapabilityNginxTemplatesView nginx-templates view
+ CapabilityNginxTemplatesView = "nginx-templates.view"
+ // CapabilityNginxTemplatesManage nginx-templates manage
+ CapabilityNginxTemplatesManage = "nginx-templates.manage"
+ // CapabilitySettingsManage settings manage
+ CapabilitySettingsManage = "settings.manage"
+ // CapabilityStreamsView streams view
+ CapabilityStreamsView = "streams.view"
+ // CapabilityStreamsManage streams manage
+ CapabilityStreamsManage = "streams.manage"
+ // CapabilityUsersManage users manage
+ CapabilityUsersManage = "users.manage"
+)
diff --git a/backend/internal/entity/user/entity_test.go b/backend/internal/entity/user/entity_test.go
new file mode 100644
index 000000000..7045b4d08
--- /dev/null
+++ b/backend/internal/entity/user/entity_test.go
@@ -0,0 +1,454 @@
+package user
+
+import (
+ goerrors "errors"
+ "regexp"
+ "testing"
+
+ "npm/internal/errors"
+ "npm/internal/model"
+ "npm/internal/test"
+
+ "github.com/DATA-DOG/go-sqlmock"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+ "github.com/stretchr/testify/suite"
+ "go.uber.org/goleak"
+)
+
+// +------------+
+// | Setup |
+// +------------+
+
+type testsuite struct {
+ suite.Suite
+ mock sqlmock.Sqlmock
+ singleRow *sqlmock.Rows
+ capabilitiesRows *sqlmock.Rows
+ listCountRows *sqlmock.Rows
+ listRows *sqlmock.Rows
+}
+
+// SetupTest is executed before each test
+func (s *testsuite) SetupTest() {
+ var err error
+ s.mock, err = test.Setup()
+ require.NoError(s.T(), err)
+
+ // These rows need to be intantiated for each test as they are
+ // read in the db object, and their row position is not resettable
+ // between tests.
+ s.singleRow = sqlmock.NewRows([]string{
+ "id",
+ "name",
+ "email",
+ "is_disabled",
+ "is_system",
+ }).AddRow(
+ 10,
+ "John Doe",
+ "jon@example.com",
+ false,
+ false,
+ )
+
+ s.capabilitiesRows = sqlmock.NewRows([]string{
+ "user_id",
+ "capability_name",
+ }).AddRow(
+ 10,
+ "hosts.view",
+ ).AddRow(
+ 10,
+ "hosts.manage",
+ )
+
+ s.listCountRows = sqlmock.NewRows([]string{
+ "count(*)",
+ }).AddRow(
+ 2,
+ )
+
+ s.listRows = sqlmock.NewRows([]string{
+ "id",
+ "name",
+ "email",
+ "is_disabled",
+ "is_system",
+ }).AddRow(
+ 10,
+ "John Doe",
+ "jon@example.com",
+ false,
+ false,
+ ).AddRow(
+ 11,
+ "Jane Doe",
+ "jane@example.com",
+ true,
+ false,
+ )
+}
+
+// In order for 'go test' to run this suite, we need to create
+// a normal test function and pass our suite to suite.Run
+func TestExampleTestSuite(t *testing.T) {
+ suite.Run(t, new(testsuite))
+}
+
+func assertModel(t *testing.T, m Model) {
+ assert.Equal(t, uint(10), m.ID)
+ assert.Equal(t, "John Doe", m.Name)
+ assert.Equal(t, "jon@example.com", m.Email)
+ assert.Equal(t, false, m.IsDisabled)
+ assert.Equal(t, false, m.IsSystem)
+}
+
+// +------------+
+// | Tests |
+// +------------+
+
+func (s *testsuite) TestGetByID() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ s.mock.
+ ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "user" WHERE "user"."id" = $1 AND "user"."is_deleted" = $2 ORDER BY "user"."id" LIMIT $3`)).
+ WithArgs(10, 0, 1).
+ WillReturnRows(s.singleRow)
+
+ m, err := GetByID(10)
+ require.NoError(s.T(), err)
+ require.NoError(s.T(), s.mock.ExpectationsWereMet())
+ assertModel(s.T(), m)
+}
+
+func (s *testsuite) TestLoadByEmail() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ s.mock.
+ ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "user" WHERE email = $1 AND is_system = $2 AND "user"."is_deleted" = $3 ORDER BY "user"."id" LIMIT $4`)).
+ WithArgs("jon@example.com", false, 0, 1).
+ WillReturnRows(s.singleRow)
+
+ m, err := GetByEmail("jon@example.com")
+ require.NoError(s.T(), err)
+ require.NoError(s.T(), s.mock.ExpectationsWereMet())
+ assertModel(s.T(), m)
+}
+
+func (s *testsuite) TestIsEnabled() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ s.mock.
+ ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "user" WHERE "user"."id" = $1 AND "user"."is_deleted" = $2 ORDER BY "user"."id" LIMIT $3`)).
+ WithArgs(10, 0, 1).
+ WillReturnRows(s.singleRow)
+
+ s.mock.
+ ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "user" WHERE "user"."id" = $1 AND "user"."is_deleted" = $2 ORDER BY "user"."id" LIMIT $3`)).
+ WithArgs(999, 0, 1).
+ WillReturnError(goerrors.New("record not found"))
+
+ // user that exists
+ exists, enabled, err := IsEnabled(10)
+ require.NoError(s.T(), err)
+ assert.Equal(s.T(), true, exists)
+ assert.Equal(s.T(), true, enabled)
+ // that that doesn't exist
+ exists, enabled, err = IsEnabled(999)
+ assert.Equal(s.T(), "record not found", err.Error())
+ assert.Equal(s.T(), false, exists)
+ assert.Equal(s.T(), false, enabled)
+
+ require.NoError(s.T(), s.mock.ExpectationsWereMet())
+}
+
+func (s *testsuite) TestSave() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ s.mock.
+ ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "user" WHERE email = $1 AND is_system = $2 AND "user"."is_deleted" = $3 ORDER BY "user"."id" LIMIT $4`)).
+ WithArgs("jon@example.com", false, 0, 1).
+ WillReturnRows(s.singleRow)
+
+ s.mock.ExpectBegin()
+ s.mock.ExpectQuery(regexp.QuoteMeta(`INSERT INTO "user" ("created_at","updated_at","is_deleted","name","email","is_disabled","is_system") VALUES ($1,$2,$3,$4,$5,$6,$7) RETURNING "id"`)).
+ WithArgs(
+ sqlmock.AnyArg(),
+ sqlmock.AnyArg(),
+ 0,
+ "John Doe",
+ "sarah@example.com",
+ false,
+ false,
+ ).
+ WillReturnRows(sqlmock.NewRows([]string{"id"}).AddRow("11"))
+ s.mock.ExpectCommit()
+
+ // New model, as system
+ m := Model{
+ Name: "John Doe",
+ Email: "JON@example.com", // mixed case on purpose
+ IsSystem: true,
+ }
+ err := m.Save()
+ assert.Equal(s.T(), errors.ErrSystemUserReadonly.Error(), err.Error())
+
+ // Remove system and try again. Expect error due to duplicate email
+ m.IsSystem = false
+ err = m.Save()
+ assert.Equal(s.T(), errors.ErrDuplicateEmailUser.Error(), err.Error())
+
+ // Change email and try again. Expect success
+ m.Email = "sarah@example.com"
+ err = m.Save()
+ require.NoError(s.T(), err)
+ require.NoError(s.T(), s.mock.ExpectationsWereMet())
+}
+
+func (s *testsuite) TestDelete() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ s.mock.ExpectBegin()
+ s.mock.
+ ExpectExec(regexp.QuoteMeta(`UPDATE "user" SET "is_deleted"=$1 WHERE "user"."id" = $2 AND "user"."is_deleted" = $3`)).
+ WithArgs(1, 10, 0).
+ WillReturnResult(sqlmock.NewResult(0, 1))
+ s.mock.ExpectCommit()
+
+ m := Model{}
+ err := m.Delete()
+ assert.Equal(s.T(), "Unable to delete a new object", err.Error())
+
+ m2 := Model{
+ Base: model.Base{
+ ID: 10,
+ },
+ Name: "John Doe",
+ }
+ err2 := m2.Delete()
+ require.NoError(s.T(), err2)
+ require.NoError(s.T(), s.mock.ExpectationsWereMet())
+}
+
+func (s *testsuite) TestGenerateGravatar() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ m := Model{Email: "jon@example.com"}
+ m.generateGravatar()
+ assert.Equal(s.T(), "https://www.gravatar.com/avatar/dc36565cc2376197358fa27ed4c47253?d=mm&r=pg&s=128", m.GravatarURL)
+}
+
+func (s *testsuite) TestDeleteAll() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ s.mock.
+ ExpectExec(regexp.QuoteMeta(`DELETE FROM "user" WHERE is_system = $1`)).
+ WithArgs(false).
+ WillReturnResult(sqlmock.NewResult(0, 1))
+
+ s.mock.
+ ExpectExec(regexp.QuoteMeta(`DELETE FROM "auth"`)).
+ WillReturnResult(sqlmock.NewResult(0, 1))
+
+ err := DeleteAll()
+ require.NoError(s.T(), err)
+ require.NoError(s.T(), s.mock.ExpectationsWereMet())
+}
+
+func (s *testsuite) TestGetCapabilities() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ s.mock.
+ ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "user_has_capability" WHERE user_id = $1`)).
+ WithArgs(10).
+ WillReturnRows(s.capabilitiesRows)
+
+ s.mock.
+ ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "user_has_capability" WHERE user_id = $1`)).
+ WithArgs(999).
+ WillReturnRows(sqlmock.NewRows([]string{}))
+
+ s.mock.
+ ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "user_has_capability" WHERE user_id = $1`)).
+ WithArgs(1000).
+ WillReturnError(goerrors.New("some other error"))
+
+ // user that exists
+ caps, err := GetCapabilities(10)
+ require.NoError(s.T(), err)
+ assert.Equal(s.T(), 2, len(caps))
+ // user that doesn't exist
+ caps, err = GetCapabilities(999)
+ require.NoError(s.T(), err)
+ assert.Equal(s.T(), 0, len(caps))
+ // some other error
+ caps, err = GetCapabilities(1000)
+ assert.Equal(s.T(), "some other error", err.Error())
+ assert.Equal(s.T(), 0, len(caps))
+
+ require.NoError(s.T(), s.mock.ExpectationsWereMet())
+}
+
+func (s *testsuite) TestList() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ s.mock.
+ ExpectQuery(regexp.QuoteMeta(`SELECT count(*) FROM "user" WHERE "user"."name" LIKE $1 AND "user"."is_deleted" = $2`)).
+ WithArgs("%jon%", 0).
+ WillReturnRows(s.listCountRows)
+
+ s.mock.
+ ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "user" WHERE "user"."name" LIKE $1 AND "user"."is_deleted" = $2 ORDER BY name asc LIMIT $3`)).
+ WithArgs("%jon%", 0, 8).
+ WillReturnRows(s.listRows)
+
+ s.mock.
+ ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "user_has_capability" WHERE user_id = $1`)).
+ WithArgs(10).
+ WillReturnRows(s.capabilitiesRows)
+
+ s.mock.
+ ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "user_has_capability" WHERE user_id = $1`)).
+ WithArgs(11).
+ WillReturnRows(sqlmock.NewRows([]string{}))
+
+ p := model.PageInfo{
+ Offset: 0,
+ Limit: 8,
+ Sort: []model.Sort{
+ {
+ Field: "name",
+ Direction: "asc",
+ },
+ },
+ }
+
+ f := []model.Filter{
+ {
+ Field: "name",
+ Modifier: "contains",
+ Value: []string{"jon"},
+ },
+ }
+
+ e := []string{"capabilities"}
+
+ resp, err := List(p, f, e)
+ require.NoError(s.T(), err)
+ assert.Equal(s.T(), int64(2), resp.Total)
+ assert.Equal(s.T(), p.Offset, resp.Offset)
+ assert.Equal(s.T(), p.Limit, resp.Limit)
+ assert.Equal(s.T(), p.Limit, resp.Limit)
+ assert.Equal(s.T(), p.Sort, resp.Sort)
+ assert.Equal(s.T(), f, resp.Filter)
+
+ require.NoError(s.T(), s.mock.ExpectationsWereMet())
+}
+
+func (s *testsuite) TestSetPermissions() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ s.mock.ExpectBegin()
+ s.mock.
+ ExpectExec(regexp.QuoteMeta(`DELETE FROM "user_has_capability" WHERE user_id = $1`)).
+ WithArgs(10).
+ WillReturnResult(sqlmock.NewResult(0, 1))
+ s.mock.ExpectCommit()
+
+ s.mock.ExpectBegin()
+ s.mock.
+ ExpectExec(regexp.QuoteMeta(`INSERT INTO "user_has_capability" ("user_id","capability_name") VALUES ($1,$2),($3,$4)`)).
+ WithArgs(10, "hosts.view", 10, "hosts.manage").
+ WillReturnResult(sqlmock.NewResult(88, 0))
+ s.mock.ExpectCommit()
+
+ // Empty model returns error
+ m := Model{}
+ err := m.SetPermissions([]string{"hosts.view", "hosts.manage"})
+ assert.Equal(s.T(), "Cannot set permissions without first saving the User", err.Error())
+
+ // Defined user
+ m.ID = 10
+ err = m.SetPermissions([]string{"hosts.view", "hosts.manage"})
+ require.NoError(s.T(), err)
+
+ require.NoError(s.T(), s.mock.ExpectationsWereMet())
+}
+
+func (s *testsuite) TestSaveCapabilities() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ s.mock.
+ ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "capability"`)).
+ WillReturnRows(sqlmock.NewRows([]string{"name"}).
+ AddRow("full-admin").
+ AddRow("hosts.view").
+ AddRow("hosts.manage"))
+
+ s.mock.ExpectBegin()
+ s.mock.
+ ExpectExec(regexp.QuoteMeta(`DELETE FROM "user_has_capability" WHERE user_id = $1`)).
+ WithArgs(10).
+ WillReturnResult(sqlmock.NewResult(0, 1))
+ s.mock.ExpectCommit()
+
+ s.mock.ExpectBegin()
+ s.mock.
+ ExpectExec(regexp.QuoteMeta(`INSERT INTO "user_has_capability" ("user_id","capability_name") VALUES ($1,$2),($3,$4)`)).
+ WithArgs(10, "hosts.view", 10, "hosts.manage").
+ WillReturnResult(sqlmock.NewResult(88, 0))
+ s.mock.ExpectCommit()
+
+ // Empty model returns error
+ m := Model{}
+ err := m.SaveCapabilities()
+ assert.Equal(s.T(), "Cannot save capabilities on unsaved user", err.Error())
+
+ // Empty model returns error
+ m.ID = 10
+ err = m.SaveCapabilities()
+ assert.Equal(s.T(), "At least 1 capability required for a user", err.Error())
+
+ // With some caps
+ m.Capabilities = []string{"hosts.view", "hosts.manage"}
+ err = m.SaveCapabilities()
+ require.NoError(s.T(), err)
+
+ require.NoError(s.T(), s.mock.ExpectationsWereMet())
+}
+
+func (s *testsuite) TestSaveCapabilitiesInvalid() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ s.mock.
+ ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "capability"`)).
+ WillReturnRows(sqlmock.NewRows([]string{"name"}).
+ AddRow("full-admin").
+ AddRow("hosts.view").
+ AddRow("hosts.manage"))
+
+ // Empty model returns error
+ m := Model{
+ Base: model.Base{
+ ID: 10,
+ },
+ Capabilities: []string{"doesnotexist", "hosts.manage"},
+ }
+ err := m.SaveCapabilities()
+ assert.Equal(s.T(), "Capability `doesnotexist` is not valid", err.Error())
+
+ require.NoError(s.T(), s.mock.ExpectationsWereMet())
+}
diff --git a/backend/internal/entity/user/methods.go b/backend/internal/entity/user/methods.go
new file mode 100644
index 000000000..1002d1855
--- /dev/null
+++ b/backend/internal/entity/user/methods.go
@@ -0,0 +1,142 @@
+package user
+
+import (
+ "fmt"
+
+ "npm/internal/database"
+ "npm/internal/entity"
+ "npm/internal/entity/auth"
+ "npm/internal/logger"
+ "npm/internal/model"
+)
+
+// GetByID finds a user by ID
+func GetByID(id uint) (Model, error) {
+ var m Model
+ err := m.LoadByID(id)
+ m.generateGravatar()
+ return m, err
+}
+
+// GetByEmail finds a user by email
+func GetByEmail(email string) (Model, error) {
+ var m Model
+ err := m.LoadByEmail(email)
+ return m, err
+}
+
+// IsEnabled is used by middleware to ensure the user is still enabled
+// returns (userExist, isEnabled, error)
+func IsEnabled(userID uint) (bool, bool, error) {
+ var user Model
+ db := database.GetDB()
+ if result := db.First(&user, userID); result.Error != nil {
+ return false, false, result.Error
+ }
+ return true, !user.IsDisabled, nil
+}
+
+// List will return a list of users
+func List(pageInfo model.PageInfo, filters []model.Filter, expand []string) (entity.ListResponse, error) {
+ var result entity.ListResponse
+
+ defaultSort := model.Sort{
+ Field: "name",
+ Direction: "ASC",
+ }
+
+ dbo := entity.ListQueryBuilder(&pageInfo, filters, entity.GetFilterMap(Model{}, true))
+
+ // Get count of items in this search
+ var totalRows int64
+ if res := dbo.Model(&Model{}).Count(&totalRows); res.Error != nil {
+ return result, res.Error
+ }
+
+ // Get rows
+ dbo = entity.AddOffsetLimitToList(dbo, &pageInfo)
+ dbo = entity.AddOrderToList(dbo, pageInfo.Sort, defaultSort)
+ items := make([]Model, 0)
+ if res := dbo.Find(&items); res.Error != nil {
+ return result, res.Error
+ }
+
+ for idx := range items {
+ items[idx].generateGravatar()
+ }
+
+ if expand != nil {
+ for idx := range items {
+ expandErr := items[idx].Expand(expand)
+ if expandErr != nil {
+ logger.Error("UsersExpansionError", expandErr)
+ }
+ }
+ }
+
+ result = entity.ListResponse{
+ Items: items,
+ Total: totalRows,
+ Limit: pageInfo.Limit,
+ Offset: pageInfo.Offset,
+ Sort: pageInfo.GetSort(defaultSort),
+ Filter: filters,
+ }
+
+ return result, nil
+}
+
+// DeleteAll will do just that, and should only be used for testing purposes.
+func DeleteAll() error {
+ db := database.GetDB()
+ if result := db.Exec(
+ fmt.Sprintf(`DELETE FROM %s WHERE is_system = ?`, database.QuoteTableName("user")),
+ false,
+ ); result.Error != nil {
+ return result.Error
+ }
+
+ if result := db.Exec(
+ fmt.Sprintf(`DELETE FROM %s`, database.QuoteTableName("auth")),
+ ); result.Error != nil {
+ return result.Error
+ }
+
+ return nil
+}
+
+// GetCapabilities gets capabilities for a user
+func GetCapabilities(userID uint) ([]string, error) {
+ capabilities := make([]string, 0)
+ var hasCapabilities []HasCapabilityModel
+ db := database.GetDB()
+ if result := db.Where("user_id = ?", userID).Find(&hasCapabilities); result.Error != nil {
+ return nil, result.Error
+ }
+ for _, obj := range hasCapabilities {
+ capabilities = append(capabilities, obj.CapabilityName)
+ }
+ return capabilities, nil
+}
+
+// CreateFromLDAPUser will create a user from an LDAP user object
+func CreateFromLDAPUser(ldapUser *auth.LDAPUser) (Model, error) {
+ user := Model{
+ Email: ldapUser.Email,
+ Name: ldapUser.Name,
+ }
+ err := user.Save()
+ user.generateGravatar()
+ return user, err
+}
+
+// CreateFromOAuthUser will create a user from an OAuth user object
+func CreateFromOAuthUser(ou *auth.OAuthUser) (Model, error) {
+ user := Model{
+ Email: ou.GetEmail(),
+ Name: ou.GetName(),
+ }
+ err := user.Save()
+ user.generateGravatar()
+ return user, err
+}
diff --git a/backend/internal/entity/user/model.go b/backend/internal/entity/user/model.go
new file mode 100644
index 000000000..cb49457da
--- /dev/null
+++ b/backend/internal/entity/user/model.go
@@ -0,0 +1,172 @@
+package user
+
+import (
+ "strings"
+
+ "npm/internal/database"
+ "npm/internal/entity"
+ "npm/internal/entity/auth"
+ "npm/internal/errors"
+ "npm/internal/model"
+ "npm/internal/util"
+
+ "github.com/drexedam/gravatar"
+ "github.com/rotisserie/eris"
+)
+
+// Model is the model
+type Model struct {
+ model.Base
+ Name string `json:"name" gorm:"column:name" filter:"name,string"`
+ Email string `json:"email" gorm:"column:email" filter:"email,email"`
+ IsDisabled bool `json:"is_disabled" gorm:"column:is_disabled" filter:"is_disabled,boolean"`
+ IsSystem bool `json:"is_system,omitempty" gorm:"column:is_system" filter:"is_system,boolean"`
+ // Other
+ GravatarURL string `json:"gravatar_url" gorm:"-"`
+ // Expansions
+ Auth *auth.Model `json:"auth,omitempty" gorm:"-"`
+ Capabilities []string `json:"capabilities,omitempty" gorm:"-"`
+}
+
+// TableName overrides the table name used by gorm
+func (Model) TableName() string {
+ return "user"
+}
+
+// HasCapabilityModel is the model
+type HasCapabilityModel struct {
+ UserID uint `json:"user_id" gorm:"column:user_id"`
+ CapabilityName string `json:"name" gorm:"column:capability_name"`
+}
+
+// TableName overrides the table name used by gorm
+func (HasCapabilityModel) TableName() string {
+ return "user_has_capability"
+}
+
+// LoadByID will load from an ID
+func (m *Model) LoadByID(id uint) error {
+ db := database.GetDB()
+ result := db.First(&m, id)
+ return result.Error
+}
+
+// LoadByEmail will load from an Email
+func (m *Model) LoadByEmail(email string) error {
+ db := database.GetDB()
+ result := db.
+ Where("email = ?", strings.TrimSpace(strings.ToLower(email))).
+ Where("is_system = ?", false).
+ First(&m)
+ return result.Error
+}
+
+// Save will save this model to the DB
+func (m *Model) Save() error {
+ if m.IsSystem {
+ return errors.ErrSystemUserReadonly
+ }
+
+ // Ensure email is nice
+ m.Email = strings.TrimSpace(strings.ToLower(m.Email))
+
+ // Check if an existing user with this email exists
+ if m2, err := GetByEmail(m.Email); err == nil && m.ID != m2.ID {
+ return errors.ErrDuplicateEmailUser
+ }
+
+ db := database.GetDB()
+ result := db.Save(m)
+ return result.Error
+}
+
+// Delete will mark a user as deleted
+func (m *Model) Delete() error {
+ if m.ID == 0 {
+ // Can't delete a new object
+ return eris.New("Unable to delete a new object")
+ }
+ db := database.GetDB()
+ result := db.Delete(m)
+ return result.Error
+}
+
+// SetPermissions will wipe out any existing permissions and add new ones for this user
+func (m *Model) SetPermissions(permissions []string) error {
+ if m.ID == 0 {
+ return eris.Errorf("Cannot set permissions without first saving the User")
+ }
+
+ db := database.GetDB()
+ // Wipe out previous permissions
+ if result := db.Where("user_id = ?", m.ID).Delete(&HasCapabilityModel{}); result.Error != nil {
+ return result.Error
+ }
+
+ if len(permissions) > 0 {
+ // Add new permissions
+ objs := []*HasCapabilityModel{}
+ for _, permission := range permissions {
+ objs = append(objs, &HasCapabilityModel{UserID: m.ID, CapabilityName: permission})
+ }
+ if result := db.Create(objs); result.Error != nil {
+ return result.Error
+ }
+ }
+
+ return nil
+}
+
+// Expand will fill in more properties
+func (m *Model) Expand(items []string) error {
+ var err error
+
+ if util.SliceContainsItem(items, "capabilities") && m.ID > 0 {
+ m.Capabilities, err = GetCapabilities(m.ID)
+ }
+
+ return err
+}
+
+func (m *Model) generateGravatar() {
+ m.GravatarURL = gravatar.New(m.Email).
+ Size(128).
+ Default(gravatar.MysteryMan).
+ Rating(gravatar.Pg).
+ AvatarURL()
+}
+
+// SaveCapabilities will save the capabilities of the user.
+func (m *Model) SaveCapabilities() error {
+ // m.Capabilities
+ if m.ID == 0 {
+ return eris.Errorf("Cannot save capabilities on unsaved user")
+ }
+
+ // there must be at least 1 capability
+ if len(m.Capabilities) == 0 {
+ return eris.New("At least 1 capability required for a user")
+ }
+
+ db := database.GetDB()
+ // Get a full list of capabilities
+ var capabilities []entity.Capability
+ if result := db.Find(&capabilities); result.Error != nil {
+ return result.Error
+ }
+
+ // Check that the capabilities defined exist in the db
+ for _, cap := range m.Capabilities {
+ found := false
+ for _, a := range capabilities {
+ if a.Name == cap {
+ found = true
+ }
+ }
+ if !found {
+ return eris.Errorf("Capability `%s` is not valid", cap)
+ }
+ }
+
+ return m.SetPermissions(m.Capabilities)
+}
diff --git a/backend/internal/errors/errors.go b/backend/internal/errors/errors.go
new file mode 100644
index 000000000..16be50d24
--- /dev/null
+++ b/backend/internal/errors/errors.go
@@ -0,0 +1,20 @@
+package errors
+
+import (
+ "github.com/rotisserie/eris"
+)
+
+// All error messages used by the service package to report
+// problems back to calling clients
+var (
+ ErrDatabaseUnavailable = eris.New("database-unavailable")
+ ErrDuplicateEmailUser = eris.New("email-already-exists")
+ ErrInvalidLogin = eris.New("invalid-login-credentials")
+ ErrInvalidAuthType = eris.New("invalid-auth-type")
+ ErrUserDisabled = eris.New("user-disabled")
+ ErrSystemUserReadonly = eris.New("cannot-save-system-users")
+ ErrValidationFailed = eris.New("request-failed-validation")
+ ErrCurrentPasswordInvalid = eris.New("current-password-invalid")
+ ErrCABundleDoesNotExist = eris.New("ca-bundle-does-not-exist")
+ ErrProviderNotFound = eris.New("provider_not_found")
+)
diff --git a/backend/internal/host.js b/backend/internal/host.js
deleted file mode 100644
index 58e1d09a4..000000000
--- a/backend/internal/host.js
+++ /dev/null
@@ -1,235 +0,0 @@
-const _ = require('lodash');
-const proxyHostModel = require('../models/proxy_host');
-const redirectionHostModel = require('../models/redirection_host');
-const deadHostModel = require('../models/dead_host');
-
-const internalHost = {
-
- /**
- * Makes sure that the ssl_* and hsts_* fields play nicely together.
- * ie: if there is no cert, then force_ssl is off.
- * if force_ssl is off, then hsts_enabled is definitely off.
- *
- * @param {object} data
- * @param {object} [existing_data]
- * @returns {object}
- */
- cleanSslHstsData: function (data, existing_data) {
- existing_data = existing_data === undefined ? {} : existing_data;
-
- let combined_data = _.assign({}, existing_data, data);
-
- if (!combined_data.certificate_id) {
- combined_data.ssl_forced = false;
- combined_data.http2_support = false;
- }
-
- if (!combined_data.ssl_forced) {
- combined_data.hsts_enabled = false;
- }
-
- if (!combined_data.hsts_enabled) {
- combined_data.hsts_subdomains = false;
- }
-
- return combined_data;
- },
-
- /**
- * used by the getAll functions of hosts, this removes the certificate meta if present
- *
- * @param {Array} rows
- * @returns {Array}
- */
- cleanAllRowsCertificateMeta: function (rows) {
- rows.map(function (row, idx) {
- if (typeof rows[idx].certificate !== 'undefined' && rows[idx].certificate) {
- rows[idx].certificate.meta = {};
- }
- });
-
- return rows;
- },
-
- /**
- * used by the get/update functions of hosts, this removes the certificate meta if present
- *
- * @param {Object} row
- * @returns {Object}
- */
- cleanRowCertificateMeta: function (row) {
- if (typeof row.certificate !== 'undefined' && row.certificate) {
- row.certificate.meta = {};
- }
-
- return row;
- },
-
- /**
- * This returns all the host types with any domain listed in the provided domain_names array.
- * This is used by the certificates to temporarily disable any host that is using the domain
- *
- * @param {Array} domain_names
- * @returns {Promise}
- */
- getHostsWithDomains: function (domain_names) {
- let promises = [
- proxyHostModel
- .query()
- .where('is_deleted', 0),
- redirectionHostModel
- .query()
- .where('is_deleted', 0),
- deadHostModel
- .query()
- .where('is_deleted', 0)
- ];
-
- return Promise.all(promises)
- .then((promises_results) => {
- let response_object = {
- total_count: 0,
- dead_hosts: [],
- proxy_hosts: [],
- redirection_hosts: []
- };
-
- if (promises_results[0]) {
- // Proxy Hosts
- response_object.proxy_hosts = internalHost._getHostsWithDomains(promises_results[0], domain_names);
- response_object.total_count += response_object.proxy_hosts.length;
- }
-
- if (promises_results[1]) {
- // Redirection Hosts
- response_object.redirection_hosts = internalHost._getHostsWithDomains(promises_results[1], domain_names);
- response_object.total_count += response_object.redirection_hosts.length;
- }
-
- if (promises_results[2]) {
- // Dead Hosts
- response_object.dead_hosts = internalHost._getHostsWithDomains(promises_results[2], domain_names);
- response_object.total_count += response_object.dead_hosts.length;
- }
-
- return response_object;
- });
- },
-
- /**
- * Internal use only, checks to see if the domain is already taken by any other record
- *
- * @param {String} hostname
- * @param {String} [ignore_type] 'proxy', 'redirection', 'dead'
- * @param {Integer} [ignore_id] Must be supplied if type was also supplied
- * @returns {Promise}
- */
- isHostnameTaken: function (hostname, ignore_type, ignore_id) {
- let promises = [
- proxyHostModel
- .query()
- .where('is_deleted', 0)
- .andWhere('domain_names', 'like', '%' + hostname + '%'),
- redirectionHostModel
- .query()
- .where('is_deleted', 0)
- .andWhere('domain_names', 'like', '%' + hostname + '%'),
- deadHostModel
- .query()
- .where('is_deleted', 0)
- .andWhere('domain_names', 'like', '%' + hostname + '%')
- ];
-
- return Promise.all(promises)
- .then((promises_results) => {
- let is_taken = false;
-
- if (promises_results[0]) {
- // Proxy Hosts
- if (internalHost._checkHostnameRecordsTaken(hostname, promises_results[0], ignore_type === 'proxy' && ignore_id ? ignore_id : 0)) {
- is_taken = true;
- }
- }
-
- if (promises_results[1]) {
- // Redirection Hosts
- if (internalHost._checkHostnameRecordsTaken(hostname, promises_results[1], ignore_type === 'redirection' && ignore_id ? ignore_id : 0)) {
- is_taken = true;
- }
- }
-
- if (promises_results[2]) {
- // Dead Hosts
- if (internalHost._checkHostnameRecordsTaken(hostname, promises_results[2], ignore_type === 'dead' && ignore_id ? ignore_id : 0)) {
- is_taken = true;
- }
- }
-
- return {
- hostname: hostname,
- is_taken: is_taken
- };
- });
- },
-
- /**
- * Private call only
- *
- * @param {String} hostname
- * @param {Array} existing_rows
- * @param {Integer} [ignore_id]
- * @returns {Boolean}
- */
- _checkHostnameRecordsTaken: function (hostname, existing_rows, ignore_id) {
- let is_taken = false;
-
- if (existing_rows && existing_rows.length) {
- existing_rows.map(function (existing_row) {
- existing_row.domain_names.map(function (existing_hostname) {
- // Does this domain match?
- if (existing_hostname.toLowerCase() === hostname.toLowerCase()) {
- if (!ignore_id || ignore_id !== existing_row.id) {
- is_taken = true;
- }
- }
- });
- });
- }
-
- return is_taken;
- },
-
- /**
- * Private call only
- *
- * @param {Array} hosts
- * @param {Array} domain_names
- * @returns {Array}
- */
- _getHostsWithDomains: function (hosts, domain_names) {
- let response = [];
-
- if (hosts && hosts.length) {
- hosts.map(function (host) {
- let host_matches = false;
-
- domain_names.map(function (domain_name) {
- host.domain_names.map(function (host_domain_name) {
- if (domain_name.toLowerCase() === host_domain_name.toLowerCase()) {
- host_matches = true;
- }
- });
- });
-
- if (host_matches) {
- response.push(host);
- }
- });
- }
-
- return response;
- }
-
-};
-
-module.exports = internalHost;
diff --git a/backend/internal/ip_ranges.js b/backend/internal/ip_ranges.js
deleted file mode 100644
index 40e63ea40..000000000
--- a/backend/internal/ip_ranges.js
+++ /dev/null
@@ -1,150 +0,0 @@
-const https = require('https');
-const fs = require('fs');
-const logger = require('../logger').ip_ranges;
-const error = require('../lib/error');
-const internalNginx = require('./nginx');
-const { Liquid } = require('liquidjs');
-
-const CLOUDFRONT_URL = 'https://ip-ranges.amazonaws.com/ip-ranges.json';
-const CLOUDFARE_V4_URL = 'https://www.cloudflare.com/ips-v4';
-const CLOUDFARE_V6_URL = 'https://www.cloudflare.com/ips-v6';
-
-const regIpV4 = /^(\d+\.?){4}\/\d+/;
-const regIpV6 = /^(([\da-fA-F]+)?:)+\/\d+/;
-
-const internalIpRanges = {
-
- interval_timeout: 1000 * 60 * 60 * 6, // 6 hours
- interval: null,
- interval_processing: false,
- iteration_count: 0,
-
- initTimer: () => {
- logger.info('IP Ranges Renewal Timer initialized');
- internalIpRanges.interval = setInterval(internalIpRanges.fetch, internalIpRanges.interval_timeout);
- },
-
- fetchUrl: (url) => {
- return new Promise((resolve, reject) => {
- logger.info('Fetching ' + url);
- return https.get(url, (res) => {
- res.setEncoding('utf8');
- let raw_data = '';
- res.on('data', (chunk) => {
- raw_data += chunk;
- });
-
- res.on('end', () => {
- resolve(raw_data);
- });
- }).on('error', (err) => {
- reject(err);
- });
- });
- },
-
- /**
- * Triggered at startup and then later by a timer, this will fetch the ip ranges from services and apply them to nginx.
- */
- fetch: () => {
- if (!internalIpRanges.interval_processing) {
- internalIpRanges.interval_processing = true;
- logger.info('Fetching IP Ranges from online services...');
-
- let ip_ranges = [];
-
- return internalIpRanges.fetchUrl(CLOUDFRONT_URL)
- .then((cloudfront_data) => {
- let data = JSON.parse(cloudfront_data);
-
- if (data && typeof data.prefixes !== 'undefined') {
- data.prefixes.map((item) => {
- if (item.service === 'CLOUDFRONT') {
- ip_ranges.push(item.ip_prefix);
- }
- });
- }
-
- if (data && typeof data.ipv6_prefixes !== 'undefined') {
- data.ipv6_prefixes.map((item) => {
- if (item.service === 'CLOUDFRONT') {
- ip_ranges.push(item.ipv6_prefix);
- }
- });
- }
- })
- .then(() => {
- return internalIpRanges.fetchUrl(CLOUDFARE_V4_URL);
- })
- .then((cloudfare_data) => {
- let items = cloudfare_data.split('\n').filter((line) => regIpV4.test(line));
- ip_ranges = [... ip_ranges, ... items];
- })
- .then(() => {
- return internalIpRanges.fetchUrl(CLOUDFARE_V6_URL);
- })
- .then((cloudfare_data) => {
- let items = cloudfare_data.split('\n').filter((line) => regIpV6.test(line));
- ip_ranges = [... ip_ranges, ... items];
- })
- .then(() => {
- let clean_ip_ranges = [];
- ip_ranges.map((range) => {
- if (range) {
- clean_ip_ranges.push(range);
- }
- });
-
- return internalIpRanges.generateConfig(clean_ip_ranges)
- .then(() => {
- if (internalIpRanges.iteration_count) {
- // Reload nginx
- return internalNginx.reload();
- }
- });
- })
- .then(() => {
- internalIpRanges.interval_processing = false;
- internalIpRanges.iteration_count++;
- })
- .catch((err) => {
- logger.error(err.message);
- internalIpRanges.interval_processing = false;
- });
- }
- },
-
- /**
- * @param {Array} ip_ranges
- * @returns {Promise}
- */
- generateConfig: (ip_ranges) => {
- let renderEngine = new Liquid({
- root: __dirname + '/../templates/'
- });
-
- return new Promise((resolve, reject) => {
- let template = null;
- let filename = '/etc/nginx/conf.d/include/ip_ranges.conf';
- try {
- template = fs.readFileSync(__dirname + '/../templates/ip_ranges.conf', {encoding: 'utf8'});
- } catch (err) {
- reject(new error.ConfigurationError(err.message));
- return;
- }
-
- renderEngine
- .parseAndRender(template, {ip_ranges: ip_ranges})
- .then((config_text) => {
- fs.writeFileSync(filename, config_text, {encoding: 'utf8'});
- resolve(true);
- })
- .catch((err) => {
- logger.warn('Could not write ' + filename + ':', err.message);
- reject(new error.ConfigurationError(err.message));
- });
- });
- }
-};
-
-module.exports = internalIpRanges;
diff --git a/backend/internal/jobqueue/main.go b/backend/internal/jobqueue/main.go
new file mode 100644
index 000000000..22793186e
--- /dev/null
+++ b/backend/internal/jobqueue/main.go
@@ -0,0 +1,49 @@
+package jobqueue
+
+import (
+ "context"
+
+ "github.com/rotisserie/eris"
+)
+
+var (
+ ctx context.Context
+ cancel context.CancelFunc
+ worker *Worker
+)
+
+// Start will intantiate the queue and start doing work
+func Start() {
+ ctx, cancel = context.WithCancel(context.Background())
+ q := &Queue{
+ jobs: make(chan Job, 50),
+ ctx: ctx,
+ cancel: cancel,
+ }
+
+ // Defines a queue worker, which will execute our queue.
+ worker = newWorker(q)
+
+ // Execute jobs in queue.
+ go worker.doWork()
+}
+
+// Shutdown will gracefully stop the queue
+func Shutdown() error {
+ if cancel == nil {
+ return eris.New("Unable to shutdown, jobqueue has not been started")
+ }
+ cancel()
+ worker = nil
+ cancel = nil
+ return nil
+}
+
+// AddJob adds a job to the queue for processing
+func AddJob(j Job) error {
+ if worker == nil {
+ return eris.New("Unable to add job, jobqueue has not been started")
+ }
+ worker.Queue.AddJob(j)
+ return nil
+}
diff --git a/backend/internal/jobqueue/main_test.go b/backend/internal/jobqueue/main_test.go
new file mode 100644
index 000000000..bebdc7d1c
--- /dev/null
+++ b/backend/internal/jobqueue/main_test.go
@@ -0,0 +1,67 @@
+package jobqueue
+
+import (
+ "context"
+ "fmt"
+ "testing"
+ "time"
+
+ "github.com/rotisserie/eris"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+type MockJob struct {
+ done chan bool
+}
+
+func (m *MockJob) Execute() {
+ time.Sleep(1 * time.Second)
+ m.done <- true
+}
+
+func TestStart(t *testing.T) {
+ Start()
+ assert.NotNil(t, ctx, "Context should not be nil after Start")
+ assert.NotNil(t, cancel, "Cancel function should not be nil after Start")
+ assert.NotNil(t, worker, "Worker should not be nil after Start")
+ Shutdown()
+}
+
+func TestShutdown(t *testing.T) {
+ Start()
+ err := Shutdown()
+ require.Nil(t, err, "Shutdown should not return an error when jobqueue is started")
+
+ // nolint: gosimple
+ select {
+ case <-ctx.Done():
+ switch ctx.Err() {
+ case context.DeadlineExceeded:
+ fmt.Println("context timeout exceeded")
+ case context.Canceled:
+ fmt.Println("context cancelled by force. whole process is complete")
+ default:
+ require.Nil(t, ctx.Err(), "Context done state has unexpected value")
+ }
+ }
+
+ require.Nil(t, cancel, "Cancel function should be nil after Shutdown")
+ require.Nil(t, worker, "Worker should be nil after Shutdown")
+
+ err = Shutdown()
+ require.NotNil(t, err, "Shutdown should return an error when jobqueue is not started")
+ require.Equal(t, eris.New("Unable to shutdown, jobqueue has not been started").Error(), err.Error())
+}
+
+func TestAddJobWithoutStart(t *testing.T) {
+ mockJob := Job{
+ Name: "mockJob",
+ Action: func() error {
+ return nil
+ },
+ }
+ err := AddJob(mockJob)
+ assert.NotNil(t, err, "AddJob should return an error when jobqueue is not started")
+ assert.Equal(t, eris.New("Unable to add job, jobqueue has not been started").Error(), err.Error())
+}
diff --git a/backend/internal/jobqueue/models.go b/backend/internal/jobqueue/models.go
new file mode 100644
index 000000000..410a788fb
--- /dev/null
+++ b/backend/internal/jobqueue/models.go
@@ -0,0 +1,52 @@
+package jobqueue
+
+import (
+ "context"
+ "sync"
+)
+
+// Queue holds name, list of jobs and context with cancel.
+type Queue struct {
+ jobs chan Job
+ ctx context.Context
+ cancel context.CancelFunc
+ mu sync.Mutex
+}
+
+// Job - holds logic to perform some operations during queue execution.
+type Job struct {
+ Name string
+ Action func() error // A function that should be executed when the job is running.
+}
+
+// AddJobs adds jobs to the queue and cancels channel.
+func (q *Queue) AddJobs(jobs []Job) {
+ var wg sync.WaitGroup
+ wg.Add(len(jobs))
+
+ for _, job := range jobs {
+ // Goroutine which adds job to the queue.
+ go func(job Job) {
+ q.AddJob(job)
+ wg.Done()
+ }(job)
+ }
+
+ go func() {
+ wg.Wait()
+ // Cancel queue channel, when all goroutines were done.
+ q.cancel()
+ }()
+}
+
+// AddJob sends job to the channel.
+func (q *Queue) AddJob(job Job) {
+ q.mu.Lock()
+ defer q.mu.Unlock()
+ q.jobs <- job
+}
+
+// Run performs job execution.
+func (j *Job) Run() error {
+ return j.Action()
+}
diff --git a/backend/internal/jobqueue/worker.go b/backend/internal/jobqueue/worker.go
new file mode 100644
index 000000000..f431b82a6
--- /dev/null
+++ b/backend/internal/jobqueue/worker.go
@@ -0,0 +1,37 @@
+package jobqueue
+
+import (
+ "fmt"
+
+ "npm/internal/logger"
+)
+
+// Worker responsible for queue serving.
+type Worker struct {
+ Queue *Queue
+}
+
+func newWorker(queue *Queue) *Worker {
+ return &Worker{
+ Queue: queue,
+ }
+}
+
+// doWork processes jobs from the queue (jobs channel).
+func (w *Worker) doWork() bool {
+ for {
+ select {
+ // if context was canceled.
+ case <-w.Queue.ctx.Done():
+ logger.Info("JobQueue worker graceful shutdown")
+ return true
+ // if job received.
+ case job := <-w.Queue.jobs:
+ err := job.Run()
+ if err != nil {
+ logger.Error(fmt.Sprintf("%sError", job.Name), err)
+ continue
+ }
+ }
+ }
+}
diff --git a/backend/internal/jwt/jwt.go b/backend/internal/jwt/jwt.go
new file mode 100644
index 000000000..acc365fa0
--- /dev/null
+++ b/backend/internal/jwt/jwt.go
@@ -0,0 +1,65 @@
+package jwt
+
+import (
+ "time"
+
+ "npm/internal/entity/user"
+ "npm/internal/logger"
+
+ "github.com/dgrijalva/jwt-go"
+ "github.com/rotisserie/eris"
+)
+
+// UserJWTClaims is the structure of a JWT for a User
+type UserJWTClaims struct {
+ UserID uint `json:"uid"`
+ Roles []string `json:"roles"`
+ jwt.StandardClaims
+}
+
+// GeneratedResponse is the response of a generated token, usually used in http response
+type GeneratedResponse struct {
+ Expires int64 `json:"expires"`
+ Token string `json:"token"`
+}
+
+// Generate will create a JWT
+func Generate(userObj *user.Model, forSSE bool) (GeneratedResponse, error) {
+ var response GeneratedResponse
+
+ key, _ := GetPrivateKey()
+ expires := time.Now().AddDate(0, 0, 1) // 1 day
+ issuer := "api"
+
+ if forSSE {
+ issuer = "sse"
+ }
+
+ // Create the Claims
+ claims := UserJWTClaims{
+ userObj.ID,
+ []string{"user"},
+ jwt.StandardClaims{
+ IssuedAt: time.Now().Unix(),
+ ExpiresAt: expires.Unix(),
+ Issuer: issuer,
+ },
+ }
+
+ // Create a new token object, specifying signing method and the claims
+ // you would like it to contain.
+ token := jwt.NewWithClaims(jwt.SigningMethodRS256, claims)
+ var err error
+ token.Signature, err = token.SignedString(key)
+ if err != nil {
+ logger.Error("JWTError", eris.Wrapf(err, "Error signing token: %v", err))
+ return response, err
+ }
+
+ response = GeneratedResponse{
+ Expires: expires.Unix(),
+ Token: token.Signature,
+ }
+
+ return response, nil
+}
diff --git a/backend/internal/jwt/keys.go b/backend/internal/jwt/keys.go
new file mode 100644
index 000000000..b082c4a4e
--- /dev/null
+++ b/backend/internal/jwt/keys.go
@@ -0,0 +1,135 @@
+package jwt
+
+import (
+ "bytes"
+ "crypto/rand"
+ "crypto/rsa"
+ "crypto/x509"
+ "encoding/asn1"
+ "encoding/pem"
+
+ "npm/internal/logger"
+
+ "github.com/rotisserie/eris"
+)
+
+var (
+ privateKey *rsa.PrivateKey
+ publicKey *rsa.PublicKey
+)
+
+// GetPrivateKey will load the key from config package and return a usable object
+// It should only load from file once per program execution
+func GetPrivateKey() (*rsa.PrivateKey, error) {
+ if privateKey == nil {
+ var blankKey *rsa.PrivateKey
+
+ if currentKeys.PrivateKey == "" {
+ return blankKey, eris.New("Could not get Private Key from configuration")
+ }
+
+ var err error
+ privateKey, err = LoadPemPrivateKey(currentKeys.PrivateKey)
+ if err != nil {
+ return blankKey, err
+ }
+ }
+
+ pub, pubErr := GetPublicKey()
+ if pubErr != nil {
+ return privateKey, pubErr
+ }
+
+ privateKey.PublicKey = *pub
+
+ return privateKey, pubErr
+}
+
+// GetPublicKey will load the key from config package and return a usable object
+// It should only load once per program execution
+func GetPublicKey() (*rsa.PublicKey, error) {
+ if publicKey == nil {
+ var blankKey *rsa.PublicKey
+
+ if currentKeys.PublicKey == "" {
+ return blankKey, eris.New("Could not get Public Key from configuration")
+ }
+
+ var err error
+ publicKey, err = LoadPemPublicKey(currentKeys.PublicKey)
+ if err != nil {
+ return blankKey, err
+ }
+ }
+
+ return publicKey, nil
+}
+
+// LoadPemPrivateKey reads a key from a PEM encoded string and returns a private key
+func LoadPemPrivateKey(content string) (*rsa.PrivateKey, error) {
+ var key *rsa.PrivateKey
+ data, _ := pem.Decode([]byte(content))
+ var err error
+ key, err = x509.ParsePKCS1PrivateKey(data.Bytes)
+ if err != nil {
+ return key, err
+ }
+ return key, nil
+}
+
+// LoadPemPublicKey reads a key from a PEM encoded string and returns a public key
+func LoadPemPublicKey(content string) (*rsa.PublicKey, error) {
+ var key *rsa.PublicKey
+ data, _ := pem.Decode([]byte(content))
+ publicKeyFileImported, err := x509.ParsePKCS1PublicKey(data.Bytes)
+ if err != nil {
+ return key, err
+ }
+
+ return publicKeyFileImported, nil
+}
+
+func generateKeys() (KeysModel, error) {
+ m := KeysModel{}
+ reader := rand.Reader
+ bitSize := 4096
+
+ key, err := rsa.GenerateKey(reader, bitSize)
+ if err != nil {
+ return m, err
+ }
+
+ privateKey := &pem.Block{
+ Type: "PRIVATE KEY",
+ Bytes: x509.MarshalPKCS1PrivateKey(key),
+ }
+
+ privateKeyBuffer := new(bytes.Buffer)
+ err = pem.Encode(privateKeyBuffer, privateKey)
+ if err != nil {
+ return m, err
+ }
+
+ asn1Bytes, err := asn1.Marshal(key.PublicKey)
+ if err != nil {
+ return m, err
+ }
+
+ publicKey := &pem.Block{
+ Type: "PUBLIC KEY",
+ Bytes: asn1Bytes,
+ }
+
+ publicKeyBuffer := new(bytes.Buffer)
+ err = pem.Encode(publicKeyBuffer, publicKey)
+ if err != nil {
+ return m, err
+ }
+
+ m.PublicKey = publicKeyBuffer.String()
+ m.PrivateKey = privateKeyBuffer.String()
+
+ logger.Info("Generated new RSA keys")
+
+ return m, nil
+}
diff --git a/backend/internal/jwt/keys_db.go b/backend/internal/jwt/keys_db.go
new file mode 100644
index 000000000..e3ecd55dc
--- /dev/null
+++ b/backend/internal/jwt/keys_db.go
@@ -0,0 +1,52 @@
+package jwt
+
+import (
+ "npm/internal/database"
+ "npm/internal/model"
+)
+
+var currentKeys KeysModel
+
+// KeysModel is the model
+type KeysModel struct {
+ model.Base
+ PublicKey string `gorm:"column:public_key"`
+ PrivateKey string `gorm:"column:private_key"`
+}
+
+// TableName overrides the table name used by gorm
+func (KeysModel) TableName() string {
+ return "jwt_keys"
+}
+
+// LoadLatest will load the latest keys
+func (m *KeysModel) LoadLatest() error {
+ db := database.GetDB()
+ result := db.Order("created_at DESC").First(&m)
+ return result.Error
+}
+
+// Save will save this model to the DB
+func (m *KeysModel) Save() error {
+ db := database.GetDB()
+ result := db.Save(m)
+ return result.Error
+}
+
+// LoadKeys will load from the database, or generate and save new ones
+func LoadKeys() error {
+ // Try to find in db
+ if err := currentKeys.LoadLatest(); err != nil {
+ // Keys probably don't exist, so we need to generate some
+ if currentKeys, err = generateKeys(); err != nil {
+ return err
+ }
+
+ // and save them
+ if err = currentKeys.Save(); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
diff --git a/backend/internal/jwt/suite_test.go b/backend/internal/jwt/suite_test.go
new file mode 100644
index 000000000..b17f5d677
--- /dev/null
+++ b/backend/internal/jwt/suite_test.go
@@ -0,0 +1,255 @@
+package jwt
+
+import (
+ "regexp"
+ "testing"
+
+ "npm/internal/entity/user"
+ "npm/internal/model"
+ "npm/internal/test"
+
+ "github.com/DATA-DOG/go-sqlmock"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+ "github.com/stretchr/testify/suite"
+ "go.uber.org/goleak"
+ "gorm.io/gorm"
+)
+
+// +------------+
+// | Setup |
+// +------------+
+
+type testsuite struct {
+ suite.Suite
+ mock sqlmock.Sqlmock
+ keysRow *sqlmock.Rows
+ privateKeyString string
+ publicKeyString string
+}
+
+// SetupTest is executed before each test
+func (s *testsuite) SetupTest() {
+ var err error
+ s.mock, err = test.Setup()
+ require.NoError(s.T(), err)
+ test.InitConfig(s.T())
+
+ s.publicKeyString = `-----BEGIN PUBLIC KEY-----
+MIICCgKCAgEAmjPv7Bnb02rdcMqPIK6EMt7hIYzobgmIwoEtNVP6IaVzPdTo0l5V
+prxVH9J2oeJzZPmUjgsru+1db/RqOT4QYma7FGjoVi/AZvGGbiJENOK87K0d3byM
+CZ1bridvVOWKU92EvK+uTBfG9wtEpyS4pTC6mt6jKnKJrlRA7pKbHB7jmgBDU+6C
+CniYIkXpmHKQu1Q4Mpa5oYzgNMiAnxRmps8BwkVNwzMc7mwdhSn+M+qkVJJO4Q5A
+BgHoieR6y5+P2oieX4Z8HwnzxKS79FmTo0JhSVadPxYTRKWLAcVgM7lo/d+KQy7o
+v+jFVYNiaeSdAPgfEoYdiJA7NZC4xEVsQMeZLGZp1WpcwJ7DInn0Z4ILsuFIeQGv
+nmaq+yFRgJcQT3zANlitqwdK4FWd0DN1PH17YG3oIGvxHCFEClZqeLPte1lB3T3w
+xyrw00OfSKBwZ9gAxemxQRu/8/9EnLOM4FlanK8/S4GIPgj05LvE52ZLotXqQU77
+PIXaGwufKkwOx0oHmy82wZpc4A18Po6UMcsnyfR+ykKHacuRKCHnJjCEszKR8t/i
+BqL4+sCPZfE/6FSjZi1tHGTybMGVTEL/HnJEHOZqvWbKwl6xLln38UWi7YG/FFs/
+ymcpxqEnMWQCs8ZmA1q6nPkzHkjYGmr5SyC6jaFaKJN1SgCsaP2sF80CAwEAAQ==
+-----END PUBLIC KEY-----`
+
+ s.privateKeyString = `-----BEGIN PRIVATE KEY-----
+MIIJKQIBAAKCAgEAmjPv7Bnb02rdcMqPIK6EMt7hIYzobgmIwoEtNVP6IaVzPdTo
+0l5VprxVH9J2oeJzZPmUjgsru+1db/RqOT4QYma7FGjoVi/AZvGGbiJENOK87K0d
+3byMCZ1bridvVOWKU92EvK+uTBfG9wtEpyS4pTC6mt6jKnKJrlRA7pKbHB7jmgBD
+U+6CCniYIkXpmHKQu1Q4Mpa5oYzgNMiAnxRmps8BwkVNwzMc7mwdhSn+M+qkVJJO
+4Q5ABgHoieR6y5+P2oieX4Z8HwnzxKS79FmTo0JhSVadPxYTRKWLAcVgM7lo/d+K
+Qy7ov+jFVYNiaeSdAPgfEoYdiJA7NZC4xEVsQMeZLGZp1WpcwJ7DInn0Z4ILsuFI
+eQGvnmaq+yFRgJcQT3zANlitqwdK4FWd0DN1PH17YG3oIGvxHCFEClZqeLPte1lB
+3T3wxyrw00OfSKBwZ9gAxemxQRu/8/9EnLOM4FlanK8/S4GIPgj05LvE52ZLotXq
+QU77PIXaGwufKkwOx0oHmy82wZpc4A18Po6UMcsnyfR+ykKHacuRKCHnJjCEszKR
+8t/iBqL4+sCPZfE/6FSjZi1tHGTybMGVTEL/HnJEHOZqvWbKwl6xLln38UWi7YG/
+FFs/ymcpxqEnMWQCs8ZmA1q6nPkzHkjYGmr5SyC6jaFaKJN1SgCsaP2sF80CAwEA
+AQKCAgEAmM9ZNd6WQleHZAvHdHqc1RCbhzTs7IaUOTPrygoTOR6NKjwAEOCc/mNp
+8+QL3fbbpbfSqESXrV7XFmfekCVZ9TmasOoZO7eMcjdsoV1hvArpb51KmH8NQ0Xm
+IZpAsJ/byaoerSFnl06ExDItcXlpZYH5mhmBFkJ1AAXMZt9vyJkvsWALWHRl99xz
+3prrl0AI/yrBmhhVkqtZT9VV6M89vpYrRwqIuiS/yeHoCxuHJomjGY/3jP0jIxDn
+EScTLRBNbSGv2DgcbmHdaQRaohXWwZW5dQTZRTgqFf/61eFzqS5WxiatDFDDI9KX
+I1vUvd1oXRqFKEUxpTBRDI8DGrU1RR7140FsevGo8Z4xctpPml8vxv0D/77p0PHF
+9wCW6NLQeR/v4E8tEzrH3tpx/RtS8VqpMMbTbzvAWqhcWuEAjix4W0X6poR9QEUl
+p0Xm7Ut726QMN8dxbp91C1kkLO5m0mll3dlZrqhJ0FcB3eyNNO7GOfteQcOiyYfM
+HayjA7yVPQTVy92lo77HQPXakydVCZVfUosN7zMf+7nKVh/SYzaTMF9OxcyLyNEE
++Dvjy3KbCCWFxxkqwICackjIIESFpFmmT5ZY7Hx7EImk+TEh7EpTv55/ZNlodDwh
+qE1zlfOaUnKzmsp5ET8WpXUUv4wRLQxtIrtTGvvNPHzRRQZWn0ECggEBAMkqQ8gR
+Pv3XvBNVqHtluDe5pmNg2K5/VCWDgndXSIyxM5Pr4Nr3z5mfnQ01Z/T1smlk+dVy
+oD8dsj1XB23e36IPSFtYlNjv58eo0Fcv2XlGYK9a/10IHZ/jNVRIgUdH/KTuZ154
+JRZ5LR/+gjmBqkWmG+bODlBXnp96BUw6rwaYMDecCnOKoKOhlkvGg3aJx/l8sptx
+TVhSwPCRVFff53rMsad60+Vjv9DohhWlotuujQGH1Cvw7aacFm20888IOBGGEIEZ
+u3pvr3/8L0LZDXdoR1MMcdopDek+fU8fS5M5STv2/rQauGMtCJPu615uup1IBOaV
+soYraIlqh+wT9t0CggEBAMQ8jCOJVZe11voX5F5fbYKr/fWP4P2QDXk/DlX0VQmX
+AgSljT5ePcDOFwz6Xaa7HvMq22IOssl7e3vTrS5HJd7k/e4TzhBz6TSMMuuvgN2l
+JkWRr1dsBVJXOJxQt65t34GCZEf8UikSQaWnyyRmats8H9iG0wQr1jXJxULUOd7h
+y/d1gfwujKjws1NRWFYkl1ZXeWUGpuEGR+96CgMMXoEoQr8fGEe19QBflBOQVEhf
+KbH6qmvoBARYAUpU5nszst2HeChv84a238gOMSuhHwi2oHM3zpkU0m36mDoVulc7
+heqb/v584AZWesJ5ShouVHc4HreRPHx8Xrxmv1ce/bECggEAJi5udQ/I6/dBjE3q
+z5kL8Q+8pAoitmQWfZRLdAlODN4pUv8nS4hTj+36qiIj3BuyREzVGo1KGxCw3vGg
+yFrQCXtrGWNjxRUr4fqJqLK9TUZtXXshEvBSZyGB4sBsQTJJoqhZWFXnfC99wB/X
+acDRp6ySiSk9EETBJ7XKQaC1zcOfCz8DwNBkEwq9cx53n00hdpoTcGt96bCzTDXZ
+U2B9GBK3+XjXtSdMpgMsR/mLQrULsGmufLSa9s+TdjktOXNu6OyQP2C589A0+E7O
+TZrS8oIJX5ryFR1LtaSVtinTd1sdKlOEHn0f2DsY8LMdW2wa4XVk8LsjClI84jAl
+IkrbxQKCAQANRIa5FFz8H+hECn9/PfZ6gkRuaObuXeH7U58VgqqJNnOFeuf80oRc
+V9LJJthUIIysJjak/5do9fdYXOx1l4vg8RyWDzK8fAnFasE6nCgbVEItK/dt8ri9
+Y3ZJY0+39GfLKtS65T1s13YmzBx4/o+0+PCyRBNaUdhu1JCIvy6Wei+/MGu0cDVE
+atnFBVfyoxC0Xr+va+62giU09MxefmSZWO6CW4jZuFyzRMMPO4/nQL/h76+8EfjL
+jmOv8eOPauRqA/HE0iTl89FXhlYevAsMHMTmZVyLjxPXKb1HGBb8NOMOBLQN4sWG
+yCwOoAK5mG5PjTTOdnxfck05cbz4F/lRAoIBAQCDG6yy1abapV+0Yfe58g3KVkUn
+4pNbb30CERQvwReEgF/sI0Kr3dQ4RvF4NQfRpODoakvAQfbrhg29juzT7O0Lk/kP
+tPd0xat/r7pnq1kn4rQmgzIWzPPC72BoAjDEkdyB9u9a1RSqBHcQ0st81sVSNeoZ
+OzTJqfuKN8R71VA/8ujMlnWEdfPF+SBc/01CChhLfuWiiATPhqG7wypBt5TBAMpa
+58rkFlrsTz0qWt+jyLQqJObPk/aVwXQT9QpihEp1IrDRUnU3gP0fjUTkXSVdNRvp
+CC3OVnreGq4pnTKFlElta1kgenXb+zbjwVwZntxmgP1z/Q9m/yToyLehDfk+
+-----END PRIVATE KEY-----`
+
+ // These rows need to be intantiated for each test as they are
+ // read in the db object, and their row position is not resettable
+ // between tests.
+ s.keysRow = sqlmock.NewRows([]string{
+ "public_key",
+ "private_key",
+ }).AddRow(
+ s.publicKeyString,
+ s.privateKeyString,
+ )
+}
+
+// In order for 'go test' to run this suite, we need to create
+// a normal test function and pass our suite to suite.Run
+func TestExampleTestSuite(t *testing.T) {
+ suite.Run(t, new(testsuite))
+}
+
+// +------------+
+// | Tests |
+// +------------+
+
+func (s *testsuite) TestLoadKeys() {
+ s.T().Skip("Skipping as it's not working")
+
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ // Required for clean test runs
+ currentKeys = KeysModel{}
+
+ // first query, no rows
+ s.mock.
+ ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "jwt_keys" WHERE "jwt_keys"."is_deleted" = $1`)).
+ WithArgs(0).
+ WillReturnError(gorm.ErrRecordNotFound)
+
+ // insert row
+ s.mock.ExpectBegin()
+ s.mock.ExpectQuery(regexp.QuoteMeta(`INSERT INTO "jwt_keys" ("created_at","updated_at","is_deleted","public_key","private_key") VALUES ($1,$2,$3,$4,$5) RETURNING "id"`)).
+ WithArgs(
+ sqlmock.AnyArg(),
+ sqlmock.AnyArg(),
+ 0,
+ sqlmock.AnyArg(),
+ sqlmock.AnyArg(),
+ ).
+ WillReturnRows(sqlmock.NewRows([]string{"id"}).AddRow("10"))
+ s.mock.ExpectCommit()
+
+ // last query, load existing row
+ s.mock.
+ ExpectQuery(regexp.QuoteMeta(`SELECT * FROM "jwt_keys" WHERE "jwt_keys"."is_deleted" = $1`)).
+ WithArgs(0).
+ WillReturnRows(s.keysRow)
+
+ // Load and create first
+ err := LoadKeys()
+ require.NoError(s.T(), err)
+
+ // Load something we just created
+ k := KeysModel{}
+ err = k.LoadLatest()
+ require.NoError(s.T(), err)
+
+ require.NoError(s.T(), s.mock.ExpectationsWereMet())
+}
+
+func (s *testsuite) TestGetPrivateKey() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ // Required for clean test runs
+ privateKey = nil
+ currentKeys = KeysModel{}
+
+ // First call, expect error as currentKeys isn't loaded
+ _, err := GetPrivateKey()
+ assert.Equal(s.T(), "Could not get Private Key from configuration", err.Error())
+
+ // Set currentKeys and try again
+ currentKeys = KeysModel{
+ Base: model.Base{
+ ID: 10,
+ },
+ PrivateKey: s.privateKeyString,
+ PublicKey: s.publicKeyString,
+ }
+
+ // Get after currentKeys is set
+ _, err = GetPrivateKey()
+ require.NoError(s.T(), err)
+ require.NoError(s.T(), s.mock.ExpectationsWereMet())
+}
+
+func (s *testsuite) TestGetPublicKey() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ // Required for clean test runs
+ publicKey = nil
+ currentKeys = KeysModel{}
+
+ // First call, expect error as currentKeys isn't loaded
+ _, err := GetPublicKey()
+ assert.Equal(s.T(), "Could not get Public Key from configuration", err.Error())
+
+ // Set currentKeys and try again
+ currentKeys = KeysModel{
+ Base: model.Base{
+ ID: 10,
+ },
+ PrivateKey: s.privateKeyString,
+ PublicKey: s.publicKeyString,
+ }
+
+ // Get after currentKeys is set
+ _, err = GetPublicKey()
+ require.NoError(s.T(), err)
+ require.NoError(s.T(), s.mock.ExpectationsWereMet())
+}
+
+func (s *testsuite) TestGenerate() {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(s.T(), goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ currentKeys = KeysModel{
+ Base: model.Base{
+ ID: 10,
+ },
+ PrivateKey: s.privateKeyString,
+ PublicKey: s.publicKeyString,
+ }
+
+ usr := user.Model{
+ Base: model.Base{
+ ID: 10,
+ },
+ }
+
+ // test 1, user key
+ r, err := Generate(&usr, false)
+ require.NoError(s.T(), err)
+ assert.Greater(s.T(), len(r.Token), 20)
+
+ // test 2, sse key
+ r, err = Generate(&usr, true)
+ require.NoError(s.T(), err)
+ assert.Greater(s.T(), len(r.Token), 20)
+
+ require.NoError(s.T(), s.mock.ExpectationsWereMet())
+}
diff --git a/backend/internal/logger/config.go b/backend/internal/logger/config.go
new file mode 100644
index 000000000..abbfbf7a2
--- /dev/null
+++ b/backend/internal/logger/config.go
@@ -0,0 +1,37 @@
+package logger
+
+// Level type
+type Level int
+
+// Log level definitions
+const (
+ // DebugLevel usually only enabled when debugging. Very verbose logging.
+ DebugLevel Level = 10
+ // InfoLevel general operational entries about what's going on inside the application.
+ InfoLevel Level = 20
+ // WarnLevel non-critical entries that deserve eyes.
+ WarnLevel Level = 30
+ // ErrorLevel used for errors that should definitely be noted.
+ ErrorLevel Level = 40
+)
+
+// Config options for the logger.
+type Config struct {
+ LogThreshold Level
+ Formatter string
+}
+
+// Interface for a logger
+type Interface interface {
+ GetLogLevel() Level
+ Debug(format string, args ...any)
+ Info(format string, args ...any)
+ Warn(format string, args ...any)
+ Error(errorClass string, err error, args ...any)
+ Errorf(errorClass, format string, err error, args ...any)
+}
+
+// ConfigurableLogger is an interface for a logger that can be configured
+type ConfigurableLogger interface {
+ Configure(c *Config) error
+}
diff --git a/backend/internal/logger/logger.go b/backend/internal/logger/logger.go
new file mode 100644
index 000000000..56be4b71c
--- /dev/null
+++ b/backend/internal/logger/logger.go
@@ -0,0 +1,223 @@
+package logger
+
+import (
+ "encoding/json"
+ "fmt"
+ stdlog "log"
+ "os"
+ "runtime/debug"
+ "sync"
+ "time"
+
+ "github.com/fatih/color"
+ "github.com/rotisserie/eris"
+)
+
+var colorReset, colorGray, colorYellow, colorBlue, colorRed, colorMagenta, colorBlack, colorWhite *color.Color
+
+// Log message structure.
+type Log struct {
+ Timestamp string `json:"timestamp"`
+ Level string `json:"level"`
+ Message string `json:"message"`
+ Pid int `json:"pid"`
+ Summary string `json:"summary,omitempty"`
+ Caller string `json:"caller,omitempty"`
+ StackTrace []string `json:"stack_trace,omitempty"`
+}
+
+// Logger instance
+type Logger struct {
+ Config
+ mux sync.Mutex
+}
+
+// global logging configuration.
+var logger = NewLogger()
+
+// NewLogger creates a new logger instance
+func NewLogger() *Logger {
+ color.NoColor = false
+ colorReset = color.New(color.Reset)
+ colorGray = color.New(color.FgWhite)
+ colorYellow = color.New(color.Bold, color.FgYellow)
+ colorBlue = color.New(color.Bold, color.FgBlue)
+ colorRed = color.New(color.Bold, color.FgRed)
+ colorMagenta = color.New(color.Bold, color.FgMagenta)
+ colorBlack = color.New(color.Bold, color.FgBlack)
+ colorWhite = color.New(color.Bold, color.FgWhite)
+
+ return &Logger{
+ Config: NewConfig(),
+ }
+}
+
+// NewConfig returns the default config
+func NewConfig() Config {
+ return Config{
+ LogThreshold: InfoLevel,
+ Formatter: "json",
+ }
+}
+
+// Configure logger and will return error if missing required fields.
+func Configure(c *Config) error {
+ return logger.Configure(c)
+}
+
+// GetLogLevel currently configured
+func GetLogLevel() Level {
+ return logger.GetLogLevel()
+}
+
+// Debug logs if the log level is set to DebugLevel or below. Arguments are handled in the manner of fmt.Printf.
+func Debug(format string, args ...any) {
+ logger.Debug(format, args...)
+}
+
+// Info logs if the log level is set to InfoLevel or below. Arguments are handled in the manner of fmt.Printf.
+func Info(format string, args ...any) {
+ logger.Info(format, args...)
+}
+
+// Warn logs if the log level is set to WarnLevel or below. Arguments are handled in the manner of fmt.Printf.
+func Warn(format string, args ...any) {
+ logger.Warn(format, args...)
+}
+
+// Error logs error given if the log level is set to ErrorLevel or below. Arguments are not logged.
+// Attempts to log to bugsang.
+func Error(errorClass string, err error) {
+ logger.Error(errorClass, err)
+}
+
+// Get returns the logger
+func Get() *Logger {
+ return logger
+}
+
+// Configure logger and will return error if missing required fields.
+func (l *Logger) Configure(c *Config) error {
+ // ensure updates to the config are atomic
+ l.mux.Lock()
+ defer l.mux.Unlock()
+
+ if c == nil {
+ return eris.Errorf("a non nil Config is mandatory")
+ }
+
+ if err := c.LogThreshold.validate(); err != nil {
+ return err
+ }
+
+ l.LogThreshold = c.LogThreshold
+ l.Formatter = c.Formatter
+
+ stdlog.SetFlags(0) // this removes timestamp prefixes from logs
+ return nil
+}
+
+// validate the log level is in the accepted list.
+func (l Level) validate() error {
+ switch l {
+ case DebugLevel, InfoLevel, WarnLevel, ErrorLevel:
+ return nil
+ default:
+ return eris.Errorf("invalid \"Level\" %d", l)
+ }
+}
+
+var logLevels = map[Level]string{
+ DebugLevel: "DEBUG",
+ InfoLevel: "INFO",
+ WarnLevel: "WARN",
+ ErrorLevel: "ERROR",
+}
+
+func (l *Logger) logLevel(logLevel Level, format string, args ...any) {
+ if logLevel < l.LogThreshold {
+ return
+ }
+
+ errorClass := ""
+ if logLevel == ErrorLevel {
+ // First arg is the errorClass
+ errorClass = args[0].(string)
+ if len(args) > 1 {
+ args = args[1:]
+ } else {
+ args = []any{}
+ }
+ }
+
+ stringMessage := fmt.Sprintf(format, args...)
+
+ if l.Formatter == "json" {
+ // JSON Log Format
+ jsonLog, _ := json.Marshal(
+ Log{
+ Timestamp: time.Now().Format(time.RFC3339Nano),
+ Level: logLevels[logLevel],
+ Message: stringMessage,
+ Pid: os.Getpid(),
+ },
+ )
+
+ stdlog.Println(string(jsonLog))
+ } else {
+ // Nice Log Format
+ var colorLevel *color.Color
+ switch logLevel {
+ case DebugLevel:
+ colorLevel = colorMagenta
+ case InfoLevel:
+ colorLevel = colorBlue
+ case WarnLevel:
+ colorLevel = colorYellow
+ case ErrorLevel:
+ colorLevel = colorRed
+ stringMessage = fmt.Sprintf("%s: %s", errorClass, stringMessage)
+ }
+
+ t := time.Now()
+ stdlog.Println(
+ colorBlack.Sprint("["),
+ colorWhite.Sprint(t.Format("2006-01-02 15:04:05")),
+ colorBlack.Sprint("] "),
+ colorLevel.Sprintf("%-8v", logLevels[logLevel]),
+ colorGray.Sprint(stringMessage),
+ colorReset.Sprint(""),
+ )
+
+ if logLevel == ErrorLevel && l.LogThreshold == DebugLevel {
+ // Print a stack trace too
+ debug.PrintStack()
+ }
+ }
+}
+
+// GetLogLevel currently configured
+func (l *Logger) GetLogLevel() Level {
+ return l.LogThreshold
+}
+
+// Debug logs if the log level is set to DebugLevel or below. Arguments are handled in the manner of fmt.Printf.
+func (l *Logger) Debug(format string, args ...any) {
+ l.logLevel(DebugLevel, format, args...)
+}
+
+// Info logs if the log level is set to InfoLevel or below. Arguments are handled in the manner of fmt.Printf.
+func (l *Logger) Info(format string, args ...any) {
+ l.logLevel(InfoLevel, format, args...)
+}
+
+// Warn logs if the log level is set to WarnLevel or below. Arguments are handled in the manner of fmt.Printf.
+func (l *Logger) Warn(format string, args ...any) {
+ l.logLevel(WarnLevel, format, args...)
+}
+
+// Error logs error given if the log level is set to ErrorLevel or below. Arguments are not logged.
+// Attempts to log to bugsang.
+func (l *Logger) Error(errorClass string, err error) {
+ l.logLevel(ErrorLevel, err.Error(), errorClass)
+}
diff --git a/backend/internal/logger/logger_test.go b/backend/internal/logger/logger_test.go
new file mode 100644
index 000000000..9fb765292
--- /dev/null
+++ b/backend/internal/logger/logger_test.go
@@ -0,0 +1,204 @@
+package logger
+
+import (
+ "bytes"
+ "io"
+ "log"
+ "os"
+ "testing"
+
+ "github.com/rotisserie/eris"
+ "github.com/stretchr/testify/assert"
+ "go.uber.org/goleak"
+)
+
+func TestGetLogLevel(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ assert.Equal(t, InfoLevel, GetLogLevel())
+}
+
+func TestThreshold(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ buf := new(bytes.Buffer)
+ log.SetOutput(buf)
+ defer func() {
+ log.SetOutput(os.Stderr)
+ }()
+
+ assert.NoError(t, Configure(&Config{
+ LogThreshold: InfoLevel,
+ }))
+
+ Debug("this should not display")
+ assert.Empty(t, buf.String())
+
+ Info("this should display")
+ assert.NotEmpty(t, buf.String())
+
+ Error("ErrorClass", eris.New("this should display"))
+ assert.NotEmpty(t, buf.String())
+}
+
+func TestDebug(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ buf := new(bytes.Buffer)
+ log.SetOutput(buf)
+ defer func() {
+ log.SetOutput(os.Stderr)
+ }()
+
+ assert.NoError(t, Configure(&Config{
+ LogThreshold: DebugLevel,
+ Formatter: "json",
+ }))
+
+ Debug("This is a %s message", "test")
+ assert.Contains(t, buf.String(), "DEBUG")
+ assert.Contains(t, buf.String(), "This is a test message")
+ Get()
+}
+
+func TestInfo(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ buf := new(bytes.Buffer)
+ log.SetOutput(buf)
+ defer func() {
+ log.SetOutput(os.Stderr)
+ }()
+
+ assert.NoError(t, Configure(&Config{
+ LogThreshold: InfoLevel,
+ }))
+
+ Info("This is a %s message", "test")
+ assert.Contains(t, buf.String(), "INFO")
+ assert.Contains(t, buf.String(), "This is a test message")
+}
+
+func TestWarn(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ buf := new(bytes.Buffer)
+ log.SetOutput(buf)
+ defer func() {
+ log.SetOutput(os.Stderr)
+ }()
+
+ assert.NoError(t, Configure(&Config{
+ LogThreshold: InfoLevel,
+ }))
+
+ Warn("This is a %s message", "test")
+ assert.Contains(t, buf.String(), "WARN")
+ assert.Contains(t, buf.String(), "This is a test message")
+}
+
+func TestError(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ buf := new(bytes.Buffer)
+ log.SetOutput(buf)
+ defer func() {
+ log.SetOutput(os.Stderr)
+ }()
+
+ assert.NoError(t, Configure(&Config{
+ LogThreshold: ErrorLevel,
+ }))
+
+ Error("TestErrorClass", eris.Errorf("this is a %s error", "test"))
+ assert.Contains(t, buf.String(), "ERROR")
+ assert.Contains(t, buf.String(), "this is a test error")
+}
+
+func TestConfigure(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ type args struct {
+ c *Config
+ }
+ tests := []struct {
+ name string
+ args args
+ wantErr bool
+ }{
+ {
+ name: "configure",
+ args: args{
+ &Config{
+ LogThreshold: InfoLevel,
+ },
+ },
+ wantErr: false,
+ },
+ {
+ name: "configure json",
+ args: args{
+ &Config{
+ LogThreshold: InfoLevel,
+ Formatter: "json",
+ },
+ },
+ wantErr: false,
+ },
+ {
+ name: "invalid log level",
+ args: args{
+ &Config{},
+ },
+ wantErr: true,
+ },
+ {
+ name: "invalid config struct",
+ args: args{
+ nil,
+ },
+ wantErr: true,
+ },
+ }
+ for _, tt := range tests {
+ tt := tt
+ t.Run(tt.name, func(t *testing.T) {
+ if err := Configure(tt.args.c); (err != nil) != tt.wantErr {
+ t.Errorf("Configure() error = %v, wantErr %v", err, tt.wantErr)
+ }
+ })
+ }
+}
+
+func BenchmarkLogLevelBelowThreshold(b *testing.B) {
+ l := NewLogger()
+
+ log.SetOutput(io.Discard)
+ defer func() {
+ log.SetOutput(os.Stderr)
+ }()
+
+ for i := 0; i < b.N; i++ {
+ l.logLevel(DebugLevel, "benchmark %d", i)
+ }
+}
+
+func BenchmarkLogLevelAboveThreshold(b *testing.B) {
+ l := NewLogger()
+
+ log.SetOutput(io.Discard)
+ defer func() {
+ log.SetOutput(os.Stderr)
+ }()
+
+ for i := 0; i < b.N; i++ {
+ l.logLevel(InfoLevel, "benchmark %d", i)
+ }
+}
diff --git a/backend/internal/model/cloudfrontranges.go b/backend/internal/model/cloudfrontranges.go
new file mode 100644
index 000000000..bdb087742
--- /dev/null
+++ b/backend/internal/model/cloudfrontranges.go
@@ -0,0 +1,17 @@
+package model
+
+// CloudfrontIPRangePrefix is used within config for cloudfront
+type CloudfrontIPRangeV4Prefix struct {
+ Value string `json:"ip_prefix"`
+}
+
+// CloudfrontIPRangeV6Prefix is used within config for cloudfront
+type CloudfrontIPRangeV6Prefix struct {
+ Value string `json:"ipv6_prefix"`
+}
+
+// CloudfrontIPRanges is the main config for cloudfront
+type CloudfrontIPRanges struct {
+ IPV4Prefixes []CloudfrontIPRangeV4Prefix `json:"prefixes"`
+ IPV6Prefixes []CloudfrontIPRangeV6Prefix `json:"ipv6_prefixes"`
+}
diff --git a/backend/internal/model/filter.go b/backend/internal/model/filter.go
new file mode 100644
index 000000000..2cdf560c6
--- /dev/null
+++ b/backend/internal/model/filter.go
@@ -0,0 +1,16 @@
+package model
+
+// Filter is the structure of a field/modifier/value item
+type Filter struct {
+ Field string `json:"field"`
+ Modifier string `json:"modifier"`
+ Value []string `json:"value"`
+}
+
+// FilterMapValue is the structure of a filter map value
+type FilterMapValue struct {
+ Type string
+ Field string
+ Schema string
+ Model string
+}
diff --git a/backend/internal/model/model_base.go b/backend/internal/model/model_base.go
new file mode 100644
index 000000000..8465f95e9
--- /dev/null
+++ b/backend/internal/model/model_base.go
@@ -0,0 +1,13 @@
+package model
+
+import (
+ "gorm.io/plugin/soft_delete"
+)
+
+// Base include common fields for db control
+type Base struct {
+ ID uint `json:"id" gorm:"column:id;primaryKey" filter:"id,integer"`
+ CreatedAt int64 `json:"created_at" gorm:"<-:create;autoCreateTime:milli;column:created_at" filter:"created_at,date"`
+ UpdatedAt int64 `json:"updated_at" gorm:"<-;autoUpdateTime:milli;column:updated_at" filter:"updated_at,date"`
+ DeletedAt soft_delete.DeletedAt `json:"-" gorm:"column:is_deleted;softDelete:flag"`
+}
diff --git a/backend/internal/model/pageinfo.go b/backend/internal/model/pageinfo.go
new file mode 100644
index 000000000..ad77a6812
--- /dev/null
+++ b/backend/internal/model/pageinfo.go
@@ -0,0 +1,24 @@
+package model
+
+// PageInfo is the model used by Api Handlers and passed on to other parts
+// of the application
+type PageInfo struct {
+ Sort []Sort `json:"sort"`
+ Offset int `json:"offset"`
+ Limit int `json:"limit"`
+ Expand []string `json:"expand"`
+}
+
+// Sort holds the sorting data
+type Sort struct {
+ Field string `json:"field"`
+ Direction string `json:"direction"`
+}
+
+// GetSort is the sort array
+func (p *PageInfo) GetSort(def Sort) []Sort {
+ if p.Sort == nil {
+ return []Sort{def}
+ }
+ return p.Sort
+}
diff --git a/backend/internal/model/pageinfo_test.go b/backend/internal/model/pageinfo_test.go
new file mode 100644
index 000000000..e3d20e83e
--- /dev/null
+++ b/backend/internal/model/pageinfo_test.go
@@ -0,0 +1,31 @@
+package model
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "go.uber.org/goleak"
+)
+
+func TestPageInfoGetSort(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("testing.tRunner.func1"))
+
+ t.Parallel()
+ pi := PageInfo{}
+ def := Sort{
+ Field: "name",
+ Direction: "asc",
+ }
+ defined := Sort{
+ Field: "email",
+ Direction: "desc",
+ }
+ // default
+ sort := pi.GetSort(def)
+ assert.Equal(t, sort, []Sort{def})
+ // defined
+ pi.Sort = []Sort{defined}
+ sort = pi.GetSort(def)
+ assert.Equal(t, sort, []Sort{defined})
+}
diff --git a/backend/internal/nginx.js b/backend/internal/nginx.js
deleted file mode 100644
index 52bdd66dc..000000000
--- a/backend/internal/nginx.js
+++ /dev/null
@@ -1,435 +0,0 @@
-const _ = require('lodash');
-const fs = require('fs');
-const logger = require('../logger').nginx;
-const utils = require('../lib/utils');
-const error = require('../lib/error');
-const { Liquid } = require('liquidjs');
-const debug_mode = process.env.NODE_ENV !== 'production' || !!process.env.DEBUG;
-
-const internalNginx = {
-
- /**
- * This will:
- * - test the nginx config first to make sure it's OK
- * - create / recreate the config for the host
- * - test again
- * - IF OK: update the meta with online status
- * - IF BAD: update the meta with offline status and remove the config entirely
- * - then reload nginx
- *
- * @param {Object|String} model
- * @param {String} host_type
- * @param {Object} host
- * @returns {Promise}
- */
- configure: (model, host_type, host) => {
- let combined_meta = {};
-
- return internalNginx.test()
- .then(() => {
- // Nginx is OK
- // We're deleting this config regardless.
- return internalNginx.deleteConfig(host_type, host); // Don't throw errors, as the file may not exist at all
- })
- .then(() => {
- return internalNginx.generateConfig(host_type, host);
- })
- .then(() => {
- // Test nginx again and update meta with result
- return internalNginx.test()
- .then(() => {
- // nginx is ok
- combined_meta = _.assign({}, host.meta, {
- nginx_online: true,
- nginx_err: null
- });
-
- return model
- .query()
- .where('id', host.id)
- .patch({
- meta: combined_meta
- });
- })
- .catch((err) => {
- // Remove the error_log line because it's a docker-ism false positive that doesn't need to be reported.
- // It will always look like this:
- // nginx: [alert] could not open error log file: open() "/var/log/nginx/error.log" failed (6: No such device or address)
-
- let valid_lines = [];
- let err_lines = err.message.split('\n');
- err_lines.map(function (line) {
- if (line.indexOf('/var/log/nginx/error.log') === -1) {
- valid_lines.push(line);
- }
- });
-
- if (debug_mode) {
- logger.error('Nginx test failed:', valid_lines.join('\n'));
- }
-
- // config is bad, update meta and delete config
- combined_meta = _.assign({}, host.meta, {
- nginx_online: false,
- nginx_err: valid_lines.join('\n')
- });
-
- return model
- .query()
- .where('id', host.id)
- .patch({
- meta: combined_meta
- })
- .then(() => {
- return internalNginx.deleteConfig(host_type, host, true);
- });
- });
- })
- .then(() => {
- return internalNginx.reload();
- })
- .then(() => {
- return combined_meta;
- });
- },
-
- /**
- * @returns {Promise}
- */
- test: () => {
- if (debug_mode) {
- logger.info('Testing Nginx configuration');
- }
-
- return utils.exec('/usr/sbin/nginx -t -g "error_log off;"');
- },
-
- /**
- * @returns {Promise}
- */
- reload: () => {
- return internalNginx.test()
- .then(() => {
- logger.info('Reloading Nginx');
- return utils.exec('/usr/sbin/nginx -s reload');
- });
- },
-
- /**
- * @param {String} host_type
- * @param {Integer} host_id
- * @returns {String}
- */
- getConfigName: (host_type, host_id) => {
- host_type = host_type.replace(new RegExp('-', 'g'), '_');
-
- if (host_type === 'default') {
- return '/data/nginx/default_host/site.conf';
- }
-
- return '/data/nginx/' + host_type + '/' + host_id + '.conf';
- },
-
- /**
- * Generates custom locations
- * @param {Object} host
- * @returns {Promise}
- */
- renderLocations: (host) => {
-
- //logger.info('host = ' + JSON.stringify(host, null, 2));
- return new Promise((resolve, reject) => {
- let template;
-
- try {
- template = fs.readFileSync(__dirname + '/../templates/_location.conf', {encoding: 'utf8'});
- } catch (err) {
- reject(new error.ConfigurationError(err.message));
- return;
- }
-
- let renderer = new Liquid({
- root: __dirname + '/../templates/'
- });
- let renderedLocations = '';
-
- const locationRendering = async () => {
- for (let i = 0; i < host.locations.length; i++) {
- let locationCopy = Object.assign({}, {access_list_id: host.access_list_id}, {certificate_id: host.certificate_id},
- {ssl_forced: host.ssl_forced}, {caching_enabled: host.caching_enabled}, {block_exploits: host.block_exploits},
- {allow_websocket_upgrade: host.allow_websocket_upgrade}, {http2_support: host.http2_support},
- {hsts_enabled: host.hsts_enabled}, {hsts_subdomains: host.hsts_subdomains}, {access_list: host.access_list},
- {certificate: host.certificate}, host.locations[i]);
-
- if (locationCopy.forward_host.indexOf('/') > -1) {
- const splitted = locationCopy.forward_host.split('/');
-
- locationCopy.forward_host = splitted.shift();
- locationCopy.forward_path = `/${splitted.join('/')}`;
- }
-
- //logger.info('locationCopy = ' + JSON.stringify(locationCopy, null, 2));
-
- // eslint-disable-next-line
- renderedLocations += await renderer.parseAndRender(template, locationCopy);
- }
-
- };
-
- locationRendering().then(() => resolve(renderedLocations));
-
- });
- },
-
- /**
- * @param {String} host_type
- * @param {Object} host
- * @returns {Promise}
- */
- generateConfig: (host_type, host) => {
- host_type = host_type.replace(new RegExp('-', 'g'), '_');
-
- if (debug_mode) {
- logger.info('Generating ' + host_type + ' Config:', host);
- }
-
- // logger.info('host = ' + JSON.stringify(host, null, 2));
-
- let renderEngine = new Liquid({
- root: __dirname + '/../templates/'
- });
-
- return new Promise((resolve, reject) => {
- let template = null;
- let filename = internalNginx.getConfigName(host_type, host.id);
-
- try {
- template = fs.readFileSync(__dirname + '/../templates/' + host_type + '.conf', {encoding: 'utf8'});
- } catch (err) {
- reject(new error.ConfigurationError(err.message));
- return;
- }
-
- let locationsPromise;
- let origLocations;
-
- // Manipulate the data a bit before sending it to the template
- if (host_type !== 'default') {
- host.use_default_location = true;
- if (typeof host.advanced_config !== 'undefined' && host.advanced_config) {
- host.use_default_location = !internalNginx.advancedConfigHasDefaultLocation(host.advanced_config);
- }
- }
-
- if (host.locations) {
- //logger.info ('host.locations = ' + JSON.stringify(host.locations, null, 2));
- origLocations = [].concat(host.locations);
- locationsPromise = internalNginx.renderLocations(host).then((renderedLocations) => {
- host.locations = renderedLocations;
- });
-
- // Allow someone who is using / custom location path to use it, and skip the default / location
- _.map(host.locations, (location) => {
- if (location.path === '/') {
- host.use_default_location = false;
- }
- });
-
- } else {
- locationsPromise = Promise.resolve();
- }
-
- // Set the IPv6 setting for the host
- host.ipv6 = internalNginx.ipv6Enabled();
-
- locationsPromise.then(() => {
- renderEngine
- .parseAndRender(template, host)
- .then((config_text) => {
- fs.writeFileSync(filename, config_text, {encoding: 'utf8'});
-
- if (debug_mode) {
- logger.success('Wrote config:', filename, config_text);
- }
-
- // Restore locations array
- host.locations = origLocations;
-
- resolve(true);
- })
- .catch((err) => {
- if (debug_mode) {
- logger.warn('Could not write ' + filename + ':', err.message);
- }
-
- reject(new error.ConfigurationError(err.message));
- });
- });
- });
- },
-
- /**
- * This generates a temporary nginx config listening on port 80 for the domain names listed
- * in the certificate setup. It allows the letsencrypt acme challenge to be requested by letsencrypt
- * when requesting a certificate without having a hostname set up already.
- *
- * @param {Object} certificate
- * @returns {Promise}
- */
- generateLetsEncryptRequestConfig: (certificate) => {
- if (debug_mode) {
- logger.info('Generating LetsEncrypt Request Config:', certificate);
- }
-
- let renderEngine = new Liquid({
- root: __dirname + '/../templates/'
- });
-
- return new Promise((resolve, reject) => {
- let template = null;
- let filename = '/data/nginx/temp/letsencrypt_' + certificate.id + '.conf';
-
- try {
- template = fs.readFileSync(__dirname + '/../templates/letsencrypt-request.conf', {encoding: 'utf8'});
- } catch (err) {
- reject(new error.ConfigurationError(err.message));
- return;
- }
-
- certificate.ipv6 = internalNginx.ipv6Enabled();
-
- renderEngine
- .parseAndRender(template, certificate)
- .then((config_text) => {
- fs.writeFileSync(filename, config_text, {encoding: 'utf8'});
-
- if (debug_mode) {
- logger.success('Wrote config:', filename, config_text);
- }
-
- resolve(true);
- })
- .catch((err) => {
- if (debug_mode) {
- logger.warn('Could not write ' + filename + ':', err.message);
- }
-
- reject(new error.ConfigurationError(err.message));
- });
- });
- },
-
- /**
- * This removes the temporary nginx config file generated by `generateLetsEncryptRequestConfig`
- *
- * @param {Object} certificate
- * @param {Boolean} [throw_errors]
- * @returns {Promise}
- */
- deleteLetsEncryptRequestConfig: (certificate, throw_errors) => {
- return new Promise((resolve, reject) => {
- try {
- let config_file = '/data/nginx/temp/letsencrypt_' + certificate.id + '.conf';
-
- if (debug_mode) {
- logger.warn('Deleting nginx config: ' + config_file);
- }
-
- fs.unlinkSync(config_file);
- } catch (err) {
- if (debug_mode) {
- logger.warn('Could not delete config:', err.message);
- }
-
- if (throw_errors) {
- reject(err);
- }
- }
-
- resolve();
- });
- },
-
- /**
- * @param {String} host_type
- * @param {Object} [host]
- * @param {Boolean} [throw_errors]
- * @returns {Promise}
- */
- deleteConfig: (host_type, host, throw_errors) => {
- host_type = host_type.replace(new RegExp('-', 'g'), '_');
-
- return new Promise((resolve, reject) => {
- try {
- let config_file = internalNginx.getConfigName(host_type, typeof host === 'undefined' ? 0 : host.id);
-
- if (debug_mode) {
- logger.warn('Deleting nginx config: ' + config_file);
- }
-
- fs.unlinkSync(config_file);
- } catch (err) {
- if (debug_mode) {
- logger.warn('Could not delete config:', err.message);
- }
-
- if (throw_errors) {
- reject(err);
- }
- }
-
- resolve();
- });
- },
-
- /**
- * @param {String} host_type
- * @param {Array} hosts
- * @returns {Promise}
- */
- bulkGenerateConfigs: (host_type, hosts) => {
- let promises = [];
- hosts.map(function (host) {
- promises.push(internalNginx.generateConfig(host_type, host));
- });
-
- return Promise.all(promises);
- },
-
- /**
- * @param {String} host_type
- * @param {Array} hosts
- * @param {Boolean} [throw_errors]
- * @returns {Promise}
- */
- bulkDeleteConfigs: (host_type, hosts, throw_errors) => {
- let promises = [];
- hosts.map(function (host) {
- promises.push(internalNginx.deleteConfig(host_type, host, throw_errors));
- });
-
- return Promise.all(promises);
- },
-
- /**
- * @param {string} config
- * @returns {boolean}
- */
- advancedConfigHasDefaultLocation: function (config) {
- return !!config.match(/^(?:.*;)?\s*?location\s*?\/\s*?{/im);
- },
-
- /**
- * @returns {boolean}
- */
- ipv6Enabled: function () {
- if (typeof process.env.DISABLE_IPV6 !== 'undefined') {
- const disabled = process.env.DISABLE_IPV6.toLowerCase();
- return !(disabled === 'on' || disabled === 'true' || disabled === '1' || disabled === 'yes');
- }
-
- return true;
- }
-};
-
-module.exports = internalNginx;
diff --git a/backend/internal/nginx/control.go b/backend/internal/nginx/control.go
new file mode 100644
index 000000000..91cc948fa
--- /dev/null
+++ b/backend/internal/nginx/control.go
@@ -0,0 +1,227 @@
+package nginx
+
+import (
+ "fmt"
+ "os"
+
+ "npm/internal/config"
+ "npm/internal/entity/certificate"
+ "npm/internal/entity/host"
+ "npm/internal/entity/upstream"
+ "npm/internal/logger"
+ "npm/internal/status"
+)
+
+const (
+ DeletedSuffix = ".deleted"
+ DisabledSuffix = ".disabled"
+ ErrorSuffix = ".error"
+)
+
+// ConfigureHost will attempt to write nginx conf and reload nginx
+// When a host is disabled or deleted, it will name the file with a suffix
+// that won't be used by nginx.
+func ConfigureHost(h host.Model) error {
+ // nolint: errcheck, gosec
+ h.Expand([]string{"certificate", "nginxtemplate", "upstream"})
+
+ var certificateTemplate certificate.Template
+ if h.Certificate != nil {
+ certificateTemplate = h.Certificate.GetTemplate()
+ }
+
+ var ups upstream.Model
+ if h.Upstream != nil {
+ ups = *h.Upstream
+ }
+
+ data := TemplateData{
+ Certificate: certificateTemplate,
+ ConfDir: fmt.Sprintf("%s/nginx/hosts", config.Configuration.DataFolder),
+ Config: Config{ // todo
+ Ipv4: !config.Configuration.DisableIPV4,
+ Ipv6: !config.Configuration.DisableIPV6,
+ },
+ DataDir: config.Configuration.DataFolder,
+ Host: h.GetTemplate(),
+ Upstream: ups,
+ }
+
+ removeHostFiles(h)
+ filename := getHostFilename(h, "")
+ // if h.IsDeleted {
+ // filename = getHostFilename(h, DeletedSuffix)
+ // } else if h.IsDisabled {
+ if h.IsDisabled {
+ filename = getHostFilename(h, DisabledSuffix)
+ }
+
+ // Write the config to disk
+ err := writeTemplate(filename, h.NginxTemplate.Template, data, "")
+ if err != nil {
+ // this configuration failed somehow
+ h.Status = status.StatusError
+ h.ErrorMessage = fmt.Sprintf("Template generation failed: %s", err.Error())
+ logger.Debug(h.ErrorMessage)
+ return h.Save(true)
+ }
+
+ // Reload Nginx and check for errors
+ if output, err := reloadNginx(); err != nil {
+ // reloading nginx failed, likely due to this host having a problem
+ h.Status = status.StatusError
+ h.ErrorMessage = fmt.Sprintf("Nginx configuation error: %s - %s", err.Error(), output)
+
+ // Write the .error file, if this isn't a deleted or disabled host
+ // as the reload will only fail because of this host, if it's enabled
+ if !h.IsDisabled {
+ filename = getHostFilename(h, ErrorSuffix)
+ // Clear existing file(s) again
+ removeHostFiles(h)
+ // Write the template again, but with an error message at the end of the file
+ // nolint: errcheck, gosec
+ writeTemplate(filename, h.NginxTemplate.Template, data, h.ErrorMessage)
+ }
+
+ logger.Debug(h.ErrorMessage)
+ } else {
+ // All good
+ h.Status = status.StatusOK
+ h.ErrorMessage = ""
+ logger.Debug("ConfigureHost OK: %+v", h)
+ }
+
+ return h.Save(true)
+}
+
+// ConfigureUpstream will attempt to write nginx conf and reload nginx
+func ConfigureUpstream(u upstream.Model) error {
+ logger.Debug("ConfigureUpstream: %+v)", u)
+
+ // nolint: errcheck, gosec
+ u.Expand([]string{"nginxtemplate"})
+
+ data := TemplateData{
+ ConfDir: fmt.Sprintf("%s/nginx/upstreams", config.Configuration.DataFolder),
+ DataDir: config.Configuration.DataFolder,
+ Upstream: u,
+ }
+
+ removeUpstreamFiles(u)
+ filename := getUpstreamFilename(u, "")
+ // if u.IsDeleted {
+ // filename = getUpstreamFilename(u, DeletedSuffix)
+ // }
+
+ // Write the config to disk
+ err := writeTemplate(filename, u.NginxTemplate.Template, data, "")
+ if err != nil {
+ // this configuration failed somehow
+ u.Status = status.StatusError
+ u.ErrorMessage = fmt.Sprintf("Template generation failed: %s", err.Error())
+ logger.Debug(u.ErrorMessage)
+ return u.Save(true)
+ }
+
+ // nolint: errcheck, gosec
+ if output, err := reloadNginx(); err != nil {
+ // reloading nginx failed, likely due to this host having a problem
+ u.Status = status.StatusError
+ u.ErrorMessage = fmt.Sprintf("Nginx configuation error: %s - %s", err.Error(), output)
+
+ // Write the .error file, if this isn't a deleted upstream
+ // as the reload will only fail because of this upstream
+ // if !u.IsDeleted {
+ filename = getUpstreamFilename(u, ErrorSuffix)
+ // Clear existing file(s) again
+ removeUpstreamFiles(u)
+ // Write the template again, but with an error message at the end of the file
+ // nolint: errcheck, gosec
+ writeTemplate(filename, u.NginxTemplate.Template, data, u.ErrorMessage)
+ // }
+
+ logger.Debug(u.ErrorMessage)
+ } else {
+ // All good
+ u.Status = status.StatusOK
+ u.ErrorMessage = ""
+ logger.Debug("ConfigureUpstream OK: %+v", u)
+ }
+
+ return u.Save(true)
+}
+
+func getHostFilename(h host.Model, appends string) string {
+ confDir := fmt.Sprintf("%s/nginx/hosts", config.Configuration.DataFolder)
+ return fmt.Sprintf("%s/host_%d.conf%s", confDir, h.ID, appends)
+}
+
+func getUpstreamFilename(u upstream.Model, appends string) string {
+ confDir := fmt.Sprintf("%s/nginx/upstreams", config.Configuration.DataFolder)
+ return fmt.Sprintf("%s/upstream_%d.conf%s", confDir, u.ID, appends)
+}
+
+func removeHostFiles(h host.Model) {
+ removeFiles([]string{
+ getHostFilename(h, ""),
+ getHostFilename(h, DeletedSuffix),
+ getHostFilename(h, DisabledSuffix),
+ getHostFilename(h, ErrorSuffix),
+ })
+}
+
+func removeUpstreamFiles(u upstream.Model) {
+ removeFiles([]string{
+ getUpstreamFilename(u, ""),
+ getUpstreamFilename(u, DeletedSuffix),
+ getUpstreamFilename(u, ErrorSuffix),
+ })
+}
+
+func removeFiles(files []string) {
+ for _, file := range files {
+ if _, err := os.Stat(file); err == nil {
+ // nolint: errcheck, gosec
+ os.Remove(file)
+ }
+ }
+}
+
+// GetHostConfigContent returns nginx config as it exists on disk
+func GetHostConfigContent(h host.Model) (string, error) {
+ filename := getHostFilename(h, "")
+ if h.ErrorMessage != "" {
+ filename = getHostFilename(h, ErrorSuffix)
+ }
+ if h.IsDisabled {
+ filename = getHostFilename(h, DisabledSuffix)
+ }
+ // if h.IsDeleted {
+ // filename = getHostFilename(h, DeletedSuffix)
+ // }
+
+ // nolint: gosec
+ cnt, err := os.ReadFile(filename)
+ if err != nil {
+ return "", err
+ }
+ return string(cnt), nil
+}
+
+// GetUpstreamConfigContent returns nginx config as it exists on disk
+func GetUpstreamConfigContent(u upstream.Model) (string, error) {
+ filename := getUpstreamFilename(u, "")
+ if u.ErrorMessage != "" {
+ filename = getUpstreamFilename(u, ErrorSuffix)
+ }
+ // if u.IsDeleted {
+ // filename = getUpstreamFilename(u, DeletedSuffix)
+ // }
+
+ // nolint: gosec
+ cnt, err := os.ReadFile(filename)
+ if err != nil {
+ return "", err
+ }
+ return string(cnt), nil
+}
diff --git a/backend/internal/nginx/control_test.go b/backend/internal/nginx/control_test.go
new file mode 100644
index 000000000..dfbd19c67
--- /dev/null
+++ b/backend/internal/nginx/control_test.go
@@ -0,0 +1,52 @@
+package nginx
+
+import (
+ "testing"
+
+ "npm/internal/entity/host"
+ "npm/internal/model"
+ "npm/internal/test"
+
+ "github.com/stretchr/testify/assert"
+ "go.uber.org/goleak"
+)
+
+func TestGetHostFilename(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ test.InitConfig(t)
+ tests := []struct {
+ name string
+ host host.Model
+ append string
+ want string
+ }{
+ {
+ "test1",
+ host.Model{
+ Base: model.Base{
+ ID: 10,
+ },
+ },
+ "",
+ "/data/nginx/hosts/host_10.conf",
+ },
+ {
+ "test2",
+ host.Model{
+ Base: model.Base{
+ ID: 10,
+ },
+ },
+ ".deleted",
+ "/data/nginx/hosts/host_10.conf.deleted",
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ filename := getHostFilename(tt.host, tt.append)
+ assert.Equal(t, tt.want, filename)
+ })
+ }
+}
diff --git a/backend/internal/nginx/exec.go b/backend/internal/nginx/exec.go
new file mode 100644
index 000000000..81d682096
--- /dev/null
+++ b/backend/internal/nginx/exec.go
@@ -0,0 +1,42 @@
+package nginx
+
+import (
+ "os/exec"
+
+ "npm/internal/logger"
+
+ "github.com/rotisserie/eris"
+)
+
+func reloadNginx() (string, error) {
+ return shExec([]string{"-s", "reload"})
+}
+
+func getNginxFilePath() (string, error) {
+ path, err := exec.LookPath("nginx")
+ if err != nil {
+ return path, eris.Wrapf(err, "Cannot find nginx execuatable script in PATH")
+ }
+ return path, nil
+}
+
+// shExec executes nginx with arguments
+func shExec(args []string) (string, error) {
+ ng, err := getNginxFilePath()
+ if err != nil {
+ logger.Error("NginxError", err)
+ return "", err
+ }
+
+ logger.Debug("CMD: %s %v", ng, args)
+ // nolint: gosec
+ c := exec.Command(ng, args...)
+
+ b, e := c.CombinedOutput()
+ if e != nil {
+ logger.Error("NginxError", eris.Wrapf(e, "Command error: %s -- %v\n%+v", ng, args, e))
+ logger.Warn(string(b))
+ }
+
+ return string(b), e
+}
diff --git a/backend/internal/nginx/template_test.go b/backend/internal/nginx/template_test.go
new file mode 100644
index 000000000..f61c6193e
--- /dev/null
+++ b/backend/internal/nginx/template_test.go
@@ -0,0 +1,125 @@
+package nginx
+
+import (
+ "testing"
+
+ "npm/internal/entity/certificate"
+ "npm/internal/entity/host"
+ "npm/internal/model"
+ "npm/internal/test"
+ "npm/internal/types"
+
+ "github.com/stretchr/testify/assert"
+ "go.uber.org/goleak"
+)
+
+func TestRenderTemplate(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ test.InitConfig(t)
+
+ template := `
+{{#if Host.IsDisabled}}
+ # Host is disabled
+{{else}}
+server {
+ {{#if Certificate.IsProvided}}
+ {{#if Certificate.IsAcme}}
+ include {{ConfDir}}/npm/conf.d/acme-challenge.conf;
+ include {{ConfDir}}/npm/conf.d/include/ssl-ciphers.conf;
+ {{/if}}
+ ssl_certificate {{Certificate.Folder}}/fullchain.pem;
+ ssl_certificate_key {{Certificate.Folder}}/privkey.pem;
+ {{/if}}
+}
+{{/if}}
+`
+
+ type want struct {
+ output string
+ err error
+ }
+
+ tests := []struct {
+ name string
+ data TemplateData
+ host host.Model
+ cert certificate.Model
+ want want
+ }{
+ {
+ name: "Basic Template enabled",
+ host: host.Model{
+ IsDisabled: false,
+ },
+ cert: certificate.Model{
+ Base: model.Base{
+ ID: 77,
+ },
+ Status: certificate.StatusProvided,
+ Type: certificate.TypeHTTP,
+ CertificateAuthorityID: types.NullableDBUint{Uint: 99},
+ },
+ want: want{
+ output: `
+server {
+ include /etc/nginx/conf.d/npm/conf.d/acme-challenge.conf;
+ include /etc/nginx/conf.d/npm/conf.d/include/ssl-ciphers.conf;
+ ssl_certificate /data/.acme.sh/certs/npm-77/fullchain.pem;
+ ssl_certificate_key /data/.acme.sh/certs/npm-77/privkey.pem;
+}
+`,
+ err: nil,
+ },
+ },
+ {
+ name: "Basic Template custom ssl",
+ host: host.Model{
+ IsDisabled: false,
+ },
+ cert: certificate.Model{
+ Base: model.Base{
+ ID: 66,
+ },
+ Status: certificate.StatusProvided,
+ Type: certificate.TypeCustom,
+ },
+ want: want{
+ output: `
+server {
+ ssl_certificate /data/custom_ssl/npm-66/fullchain.pem;
+ ssl_certificate_key /data/custom_ssl/npm-66/privkey.pem;
+}
+`,
+ err: nil,
+ },
+ },
+ {
+ name: "Basic Template disabled",
+ host: host.Model{
+ IsDisabled: true,
+ },
+ cert: certificate.Model{},
+ want: want{
+ output: "\n # Host is disabled\n",
+ err: nil,
+ },
+ },
+ }
+
+ for _, tst := range tests {
+ t.Run(tst.name, func(st *testing.T) {
+ templateData := TemplateData{
+ ConfDir: "/etc/nginx/conf.d",
+ DataDir: "/data",
+ Host: tst.host.GetTemplate(),
+ Certificate: tst.cert.GetTemplate(),
+ }
+
+ output, err := renderTemplate(template, templateData)
+ assert.Equal(st, tst.want.err, err)
+ assert.Equal(st, tst.want.output, output)
+ })
+ }
+}
diff --git a/backend/internal/nginx/templates.go b/backend/internal/nginx/templates.go
new file mode 100644
index 000000000..bd5276b10
--- /dev/null
+++ b/backend/internal/nginx/templates.go
@@ -0,0 +1,61 @@
+package nginx
+
+import (
+ "fmt"
+ "os"
+
+ "npm/internal/entity/certificate"
+ "npm/internal/entity/host"
+ "npm/internal/entity/upstream"
+ "npm/internal/logger"
+ "npm/internal/util"
+
+ "github.com/aymerick/raymond"
+)
+
+type Config struct {
+ Ipv4 bool
+ Ipv6 bool
+}
+
+// TemplateData is a struct
+type TemplateData struct {
+ ConfDir string
+ Config Config
+ DataDir string
+ Host host.Template
+ Certificate certificate.Template
+ Upstream upstream.Model
+}
+
+func renderTemplate(template string, data TemplateData) (string, error) {
+ return raymond.Render(template, data)
+}
+
+func writeTemplate(filename, template string, data TemplateData, errorInfo string) error {
+ output, err := renderTemplate(template, data)
+ if err != nil {
+ errorInfo = err.Error()
+ }
+
+ output = util.CleanupWhitespace(output)
+
+ // Write some given error information to the end
+ if errorInfo != "" {
+ output = fmt.Sprintf("%s\n\n# =========================\n# ERROR:\n# %s\n# ========================\n", output, errorInfo)
+ }
+
+ // Write it. This will also write an error comment if generation failed
+ // nolint: gosec
+ writeErr := writeConfigFile(filename, output)
+ if err != nil {
+ return err
+ }
+ return writeErr
+}
+
+func writeConfigFile(filename, content string) error {
+ logger.Debug("Writing %s with:\n%s", filename, content)
+ // nolint: gosec
+ return os.WriteFile(filename, []byte(content), 0644)
+}
diff --git a/backend/internal/proxy-host.js b/backend/internal/proxy-host.js
deleted file mode 100644
index 09b8bca51..000000000
--- a/backend/internal/proxy-host.js
+++ /dev/null
@@ -1,466 +0,0 @@
-const _ = require('lodash');
-const error = require('../lib/error');
-const proxyHostModel = require('../models/proxy_host');
-const internalHost = require('./host');
-const internalNginx = require('./nginx');
-const internalAuditLog = require('./audit-log');
-const internalCertificate = require('./certificate');
-
-function omissions () {
- return ['is_deleted'];
-}
-
-const internalProxyHost = {
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @returns {Promise}
- */
- create: (access, data) => {
- let create_certificate = data.certificate_id === 'new';
-
- if (create_certificate) {
- delete data.certificate_id;
- }
-
- return access.can('proxy_hosts:create', data)
- .then(() => {
- // Get a list of the domain names and check each of them against existing records
- let domain_name_check_promises = [];
-
- data.domain_names.map(function (domain_name) {
- domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name));
- });
-
- return Promise.all(domain_name_check_promises)
- .then((check_results) => {
- check_results.map(function (result) {
- if (result.is_taken) {
- throw new error.ValidationError(result.hostname + ' is already in use');
- }
- });
- });
- })
- .then(() => {
- // At this point the domains should have been checked
- data.owner_user_id = access.token.getUserId(1);
- data = internalHost.cleanSslHstsData(data);
-
- return proxyHostModel
- .query()
- .omit(omissions())
- .insertAndFetch(data);
- })
- .then((row) => {
- if (create_certificate) {
- return internalCertificate.createQuickCertificate(access, data)
- .then((cert) => {
- // update host with cert id
- return internalProxyHost.update(access, {
- id: row.id,
- certificate_id: cert.id
- });
- })
- .then(() => {
- return row;
- });
- } else {
- return row;
- }
- })
- .then((row) => {
- // re-fetch with cert
- return internalProxyHost.get(access, {
- id: row.id,
- expand: ['certificate', 'owner', 'access_list.[clients,items]']
- });
- })
- .then((row) => {
- // Configure nginx
- return internalNginx.configure(proxyHostModel, 'proxy_host', row)
- .then(() => {
- return row;
- });
- })
- .then((row) => {
- // Audit log
- data.meta = _.assign({}, data.meta || {}, row.meta);
-
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'created',
- object_type: 'proxy-host',
- object_id: row.id,
- meta: data
- })
- .then(() => {
- return row;
- });
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @return {Promise}
- */
- update: (access, data) => {
- let create_certificate = data.certificate_id === 'new';
-
- if (create_certificate) {
- delete data.certificate_id;
- }
-
- return access.can('proxy_hosts:update', data.id)
- .then((/*access_data*/) => {
- // Get a list of the domain names and check each of them against existing records
- let domain_name_check_promises = [];
-
- if (typeof data.domain_names !== 'undefined') {
- data.domain_names.map(function (domain_name) {
- domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name, 'proxy', data.id));
- });
-
- return Promise.all(domain_name_check_promises)
- .then((check_results) => {
- check_results.map(function (result) {
- if (result.is_taken) {
- throw new error.ValidationError(result.hostname + ' is already in use');
- }
- });
- });
- }
- })
- .then(() => {
- return internalProxyHost.get(access, {id: data.id});
- })
- .then((row) => {
- if (row.id !== data.id) {
- // Sanity check that something crazy hasn't happened
- throw new error.InternalValidationError('Proxy Host could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
- }
-
- if (create_certificate) {
- return internalCertificate.createQuickCertificate(access, {
- domain_names: data.domain_names || row.domain_names,
- meta: _.assign({}, row.meta, data.meta)
- })
- .then((cert) => {
- // update host with cert id
- data.certificate_id = cert.id;
- })
- .then(() => {
- return row;
- });
- } else {
- return row;
- }
- })
- .then((row) => {
- // Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
- data = _.assign({}, {
- domain_names: row.domain_names
- }, data);
-
- data = internalHost.cleanSslHstsData(data, row);
-
- return proxyHostModel
- .query()
- .where({id: data.id})
- .patch(data)
- .then((saved_row) => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'updated',
- object_type: 'proxy-host',
- object_id: row.id,
- meta: data
- })
- .then(() => {
- return _.omit(saved_row, omissions());
- });
- });
- })
- .then(() => {
- return internalProxyHost.get(access, {
- id: data.id,
- expand: ['owner', 'certificate', 'access_list.[clients,items]']
- })
- .then((row) => {
- if (!row.enabled) {
- // No need to add nginx config if host is disabled
- return row;
- }
- // Configure nginx
- return internalNginx.configure(proxyHostModel, 'proxy_host', row)
- .then((new_meta) => {
- row.meta = new_meta;
- row = internalHost.cleanRowCertificateMeta(row);
- return _.omit(row, omissions());
- });
- });
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {Array} [data.expand]
- * @param {Array} [data.omit]
- * @return {Promise}
- */
- get: (access, data) => {
- if (typeof data === 'undefined') {
- data = {};
- }
-
- return access.can('proxy_hosts:get', data.id)
- .then((access_data) => {
- let query = proxyHostModel
- .query()
- .where('is_deleted', 0)
- .andWhere('id', data.id)
- .allowEager('[owner,access_list,access_list.[clients,items],certificate]')
- .first();
-
- if (access_data.permission_visibility !== 'all') {
- query.andWhere('owner_user_id', access.token.getUserId(1));
- }
-
- // Custom omissions
- if (typeof data.omit !== 'undefined' && data.omit !== null) {
- query.omit(data.omit);
- }
-
- if (typeof data.expand !== 'undefined' && data.expand !== null) {
- query.eager('[' + data.expand.join(', ') + ']');
- }
-
- return query;
- })
- .then((row) => {
- if (row) {
- row = internalHost.cleanRowCertificateMeta(row);
- return _.omit(row, omissions());
- } else {
- throw new error.ItemNotFoundError(data.id);
- }
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {String} [data.reason]
- * @returns {Promise}
- */
- delete: (access, data) => {
- return access.can('proxy_hosts:delete', data.id)
- .then(() => {
- return internalProxyHost.get(access, {id: data.id});
- })
- .then((row) => {
- if (!row) {
- throw new error.ItemNotFoundError(data.id);
- }
-
- return proxyHostModel
- .query()
- .where('id', row.id)
- .patch({
- is_deleted: 1
- })
- .then(() => {
- // Delete Nginx Config
- return internalNginx.deleteConfig('proxy_host', row)
- .then(() => {
- return internalNginx.reload();
- });
- })
- .then(() => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'deleted',
- object_type: 'proxy-host',
- object_id: row.id,
- meta: _.omit(row, omissions())
- });
- });
- })
- .then(() => {
- return true;
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {String} [data.reason]
- * @returns {Promise}
- */
- enable: (access, data) => {
- return access.can('proxy_hosts:update', data.id)
- .then(() => {
- return internalProxyHost.get(access, {
- id: data.id,
- expand: ['certificate', 'owner', 'access_list']
- });
- })
- .then((row) => {
- if (!row) {
- throw new error.ItemNotFoundError(data.id);
- } else if (row.enabled) {
- throw new error.ValidationError('Host is already enabled');
- }
-
- row.enabled = 1;
-
- return proxyHostModel
- .query()
- .where('id', row.id)
- .patch({
- enabled: 1
- })
- .then(() => {
- // Configure nginx
- return internalNginx.configure(proxyHostModel, 'proxy_host', row);
- })
- .then(() => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'enabled',
- object_type: 'proxy-host',
- object_id: row.id,
- meta: _.omit(row, omissions())
- });
- });
- })
- .then(() => {
- return true;
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {String} [data.reason]
- * @returns {Promise}
- */
- disable: (access, data) => {
- return access.can('proxy_hosts:update', data.id)
- .then(() => {
- return internalProxyHost.get(access, {id: data.id});
- })
- .then((row) => {
- if (!row) {
- throw new error.ItemNotFoundError(data.id);
- } else if (!row.enabled) {
- throw new error.ValidationError('Host is already disabled');
- }
-
- row.enabled = 0;
-
- return proxyHostModel
- .query()
- .where('id', row.id)
- .patch({
- enabled: 0
- })
- .then(() => {
- // Delete Nginx Config
- return internalNginx.deleteConfig('proxy_host', row)
- .then(() => {
- return internalNginx.reload();
- });
- })
- .then(() => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'disabled',
- object_type: 'proxy-host',
- object_id: row.id,
- meta: _.omit(row, omissions())
- });
- });
- })
- .then(() => {
- return true;
- });
- },
-
- /**
- * All Hosts
- *
- * @param {Access} access
- * @param {Array} [expand]
- * @param {String} [search_query]
- * @returns {Promise}
- */
- getAll: (access, expand, search_query) => {
- return access.can('proxy_hosts:list')
- .then((access_data) => {
- let query = proxyHostModel
- .query()
- .where('is_deleted', 0)
- .groupBy('id')
- .omit(['is_deleted'])
- .allowEager('[owner,access_list,certificate]')
- .orderBy('domain_names', 'ASC');
-
- if (access_data.permission_visibility !== 'all') {
- query.andWhere('owner_user_id', access.token.getUserId(1));
- }
-
- // Query is used for searching
- if (typeof search_query === 'string') {
- query.where(function () {
- this.where('domain_names', 'like', '%' + search_query + '%');
- });
- }
-
- if (typeof expand !== 'undefined' && expand !== null) {
- query.eager('[' + expand.join(', ') + ']');
- }
-
- return query;
- })
- .then((rows) => {
- if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) {
- return internalHost.cleanAllRowsCertificateMeta(rows);
- }
-
- return rows;
- });
- },
-
- /**
- * Report use
- *
- * @param {Number} user_id
- * @param {String} visibility
- * @returns {Promise}
- */
- getCount: (user_id, visibility) => {
- let query = proxyHostModel
- .query()
- .count('id as count')
- .where('is_deleted', 0);
-
- if (visibility !== 'all') {
- query.andWhere('owner_user_id', user_id);
- }
-
- return query.first()
- .then((row) => {
- return parseInt(row.count, 10);
- });
- }
-};
-
-module.exports = internalProxyHost;
diff --git a/backend/internal/redirection-host.js b/backend/internal/redirection-host.js
deleted file mode 100644
index f22c36688..000000000
--- a/backend/internal/redirection-host.js
+++ /dev/null
@@ -1,461 +0,0 @@
-const _ = require('lodash');
-const error = require('../lib/error');
-const redirectionHostModel = require('../models/redirection_host');
-const internalHost = require('./host');
-const internalNginx = require('./nginx');
-const internalAuditLog = require('./audit-log');
-const internalCertificate = require('./certificate');
-
-function omissions () {
- return ['is_deleted'];
-}
-
-const internalRedirectionHost = {
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @returns {Promise}
- */
- create: (access, data) => {
- let create_certificate = data.certificate_id === 'new';
-
- if (create_certificate) {
- delete data.certificate_id;
- }
-
- return access.can('redirection_hosts:create', data)
- .then((/*access_data*/) => {
- // Get a list of the domain names and check each of them against existing records
- let domain_name_check_promises = [];
-
- data.domain_names.map(function (domain_name) {
- domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name));
- });
-
- return Promise.all(domain_name_check_promises)
- .then((check_results) => {
- check_results.map(function (result) {
- if (result.is_taken) {
- throw new error.ValidationError(result.hostname + ' is already in use');
- }
- });
- });
- })
- .then(() => {
- // At this point the domains should have been checked
- data.owner_user_id = access.token.getUserId(1);
- data = internalHost.cleanSslHstsData(data);
-
- return redirectionHostModel
- .query()
- .omit(omissions())
- .insertAndFetch(data);
- })
- .then((row) => {
- if (create_certificate) {
- return internalCertificate.createQuickCertificate(access, data)
- .then((cert) => {
- // update host with cert id
- return internalRedirectionHost.update(access, {
- id: row.id,
- certificate_id: cert.id
- });
- })
- .then(() => {
- return row;
- });
- } else {
- return row;
- }
- })
- .then((row) => {
- // re-fetch with cert
- return internalRedirectionHost.get(access, {
- id: row.id,
- expand: ['certificate', 'owner']
- });
- })
- .then((row) => {
- // Configure nginx
- return internalNginx.configure(redirectionHostModel, 'redirection_host', row)
- .then(() => {
- return row;
- });
- })
- .then((row) => {
- data.meta = _.assign({}, data.meta || {}, row.meta);
-
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'created',
- object_type: 'redirection-host',
- object_id: row.id,
- meta: data
- })
- .then(() => {
- return row;
- });
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @return {Promise}
- */
- update: (access, data) => {
- let create_certificate = data.certificate_id === 'new';
-
- if (create_certificate) {
- delete data.certificate_id;
- }
-
- return access.can('redirection_hosts:update', data.id)
- .then((/*access_data*/) => {
- // Get a list of the domain names and check each of them against existing records
- let domain_name_check_promises = [];
-
- if (typeof data.domain_names !== 'undefined') {
- data.domain_names.map(function (domain_name) {
- domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name, 'redirection', data.id));
- });
-
- return Promise.all(domain_name_check_promises)
- .then((check_results) => {
- check_results.map(function (result) {
- if (result.is_taken) {
- throw new error.ValidationError(result.hostname + ' is already in use');
- }
- });
- });
- }
- })
- .then(() => {
- return internalRedirectionHost.get(access, {id: data.id});
- })
- .then((row) => {
- if (row.id !== data.id) {
- // Sanity check that something crazy hasn't happened
- throw new error.InternalValidationError('Redirection Host could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
- }
-
- if (create_certificate) {
- return internalCertificate.createQuickCertificate(access, {
- domain_names: data.domain_names || row.domain_names,
- meta: _.assign({}, row.meta, data.meta)
- })
- .then((cert) => {
- // update host with cert id
- data.certificate_id = cert.id;
- })
- .then(() => {
- return row;
- });
- } else {
- return row;
- }
- })
- .then((row) => {
- // Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
- data = _.assign({}, {
- domain_names: row.domain_names
- }, data);
-
- data = internalHost.cleanSslHstsData(data, row);
-
- return redirectionHostModel
- .query()
- .where({id: data.id})
- .patch(data)
- .then((saved_row) => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'updated',
- object_type: 'redirection-host',
- object_id: row.id,
- meta: data
- })
- .then(() => {
- return _.omit(saved_row, omissions());
- });
- });
- })
- .then(() => {
- return internalRedirectionHost.get(access, {
- id: data.id,
- expand: ['owner', 'certificate']
- })
- .then((row) => {
- // Configure nginx
- return internalNginx.configure(redirectionHostModel, 'redirection_host', row)
- .then((new_meta) => {
- row.meta = new_meta;
- row = internalHost.cleanRowCertificateMeta(row);
- return _.omit(row, omissions());
- });
- });
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {Array} [data.expand]
- * @param {Array} [data.omit]
- * @return {Promise}
- */
- get: (access, data) => {
- if (typeof data === 'undefined') {
- data = {};
- }
-
- return access.can('redirection_hosts:get', data.id)
- .then((access_data) => {
- let query = redirectionHostModel
- .query()
- .where('is_deleted', 0)
- .andWhere('id', data.id)
- .allowEager('[owner,certificate]')
- .first();
-
- if (access_data.permission_visibility !== 'all') {
- query.andWhere('owner_user_id', access.token.getUserId(1));
- }
-
- // Custom omissions
- if (typeof data.omit !== 'undefined' && data.omit !== null) {
- query.omit(data.omit);
- }
-
- if (typeof data.expand !== 'undefined' && data.expand !== null) {
- query.eager('[' + data.expand.join(', ') + ']');
- }
-
- return query;
- })
- .then((row) => {
- if (row) {
- row = internalHost.cleanRowCertificateMeta(row);
- return _.omit(row, omissions());
- } else {
- throw new error.ItemNotFoundError(data.id);
- }
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {String} [data.reason]
- * @returns {Promise}
- */
- delete: (access, data) => {
- return access.can('redirection_hosts:delete', data.id)
- .then(() => {
- return internalRedirectionHost.get(access, {id: data.id});
- })
- .then((row) => {
- if (!row) {
- throw new error.ItemNotFoundError(data.id);
- }
-
- return redirectionHostModel
- .query()
- .where('id', row.id)
- .patch({
- is_deleted: 1
- })
- .then(() => {
- // Delete Nginx Config
- return internalNginx.deleteConfig('redirection_host', row)
- .then(() => {
- return internalNginx.reload();
- });
- })
- .then(() => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'deleted',
- object_type: 'redirection-host',
- object_id: row.id,
- meta: _.omit(row, omissions())
- });
- });
- })
- .then(() => {
- return true;
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {String} [data.reason]
- * @returns {Promise}
- */
- enable: (access, data) => {
- return access.can('redirection_hosts:update', data.id)
- .then(() => {
- return internalRedirectionHost.get(access, {
- id: data.id,
- expand: ['certificate', 'owner']
- });
- })
- .then((row) => {
- if (!row) {
- throw new error.ItemNotFoundError(data.id);
- } else if (row.enabled) {
- throw new error.ValidationError('Host is already enabled');
- }
-
- row.enabled = 1;
-
- return redirectionHostModel
- .query()
- .where('id', row.id)
- .patch({
- enabled: 1
- })
- .then(() => {
- // Configure nginx
- return internalNginx.configure(redirectionHostModel, 'redirection_host', row);
- })
- .then(() => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'enabled',
- object_type: 'redirection-host',
- object_id: row.id,
- meta: _.omit(row, omissions())
- });
- });
- })
- .then(() => {
- return true;
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {String} [data.reason]
- * @returns {Promise}
- */
- disable: (access, data) => {
- return access.can('redirection_hosts:update', data.id)
- .then(() => {
- return internalRedirectionHost.get(access, {id: data.id});
- })
- .then((row) => {
- if (!row) {
- throw new error.ItemNotFoundError(data.id);
- } else if (!row.enabled) {
- throw new error.ValidationError('Host is already disabled');
- }
-
- row.enabled = 0;
-
- return redirectionHostModel
- .query()
- .where('id', row.id)
- .patch({
- enabled: 0
- })
- .then(() => {
- // Delete Nginx Config
- return internalNginx.deleteConfig('redirection_host', row)
- .then(() => {
- return internalNginx.reload();
- });
- })
- .then(() => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'disabled',
- object_type: 'redirection-host',
- object_id: row.id,
- meta: _.omit(row, omissions())
- });
- });
- })
- .then(() => {
- return true;
- });
- },
-
- /**
- * All Hosts
- *
- * @param {Access} access
- * @param {Array} [expand]
- * @param {String} [search_query]
- * @returns {Promise}
- */
- getAll: (access, expand, search_query) => {
- return access.can('redirection_hosts:list')
- .then((access_data) => {
- let query = redirectionHostModel
- .query()
- .where('is_deleted', 0)
- .groupBy('id')
- .omit(['is_deleted'])
- .allowEager('[owner,certificate]')
- .orderBy('domain_names', 'ASC');
-
- if (access_data.permission_visibility !== 'all') {
- query.andWhere('owner_user_id', access.token.getUserId(1));
- }
-
- // Query is used for searching
- if (typeof search_query === 'string') {
- query.where(function () {
- this.where('domain_names', 'like', '%' + search_query + '%');
- });
- }
-
- if (typeof expand !== 'undefined' && expand !== null) {
- query.eager('[' + expand.join(', ') + ']');
- }
-
- return query;
- })
- .then((rows) => {
- if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) {
- return internalHost.cleanAllRowsCertificateMeta(rows);
- }
-
- return rows;
- });
- },
-
- /**
- * Report use
- *
- * @param {Number} user_id
- * @param {String} visibility
- * @returns {Promise}
- */
- getCount: (user_id, visibility) => {
- let query = redirectionHostModel
- .query()
- .count('id as count')
- .where('is_deleted', 0);
-
- if (visibility !== 'all') {
- query.andWhere('owner_user_id', user_id);
- }
-
- return query.first()
- .then((row) => {
- return parseInt(row.count, 10);
- });
- }
-};
-
-module.exports = internalRedirectionHost;
diff --git a/backend/internal/report.js b/backend/internal/report.js
deleted file mode 100644
index 4dde659bd..000000000
--- a/backend/internal/report.js
+++ /dev/null
@@ -1,38 +0,0 @@
-const internalProxyHost = require('./proxy-host');
-const internalRedirectionHost = require('./redirection-host');
-const internalDeadHost = require('./dead-host');
-const internalStream = require('./stream');
-
-const internalReport = {
-
- /**
- * @param {Access} access
- * @return {Promise}
- */
- getHostsReport: (access) => {
- return access.can('reports:hosts', 1)
- .then((access_data) => {
- let user_id = access.token.getUserId(1);
-
- let promises = [
- internalProxyHost.getCount(user_id, access_data.visibility),
- internalRedirectionHost.getCount(user_id, access_data.visibility),
- internalStream.getCount(user_id, access_data.visibility),
- internalDeadHost.getCount(user_id, access_data.visibility)
- ];
-
- return Promise.all(promises);
- })
- .then((counts) => {
- return {
- proxy: counts.shift(),
- redirection: counts.shift(),
- stream: counts.shift(),
- dead: counts.shift()
- };
- });
-
- }
-};
-
-module.exports = internalReport;
diff --git a/backend/internal/serverevents/sse.go b/backend/internal/serverevents/sse.go
new file mode 100644
index 000000000..636e91aae
--- /dev/null
+++ b/backend/internal/serverevents/sse.go
@@ -0,0 +1,75 @@
+package serverevents
+
+import (
+ "encoding/json"
+ "net/http"
+
+ "npm/internal/logger"
+
+ "github.com/jc21/go-sse"
+)
+
+var instance *sse.Server
+
+const defaultChannel = "changes"
+
+// Message is how we're going to send the data
+type Message struct {
+ Lang string `json:"lang,omitempty"`
+ LangParams map[string]string `json:"lang_params,omitempty"`
+ Type string `json:"type,omitempty"`
+ Affects string `json:"affects,omitempty"`
+}
+
+// Get will return a sse server
+func Get() *sse.Server {
+ if instance == nil {
+ instance = sse.NewServer(&sse.Options{
+ Logger: logger.Get(),
+ ChannelNameFunc: func(_ *http.Request) string {
+ return defaultChannel // This is the channel for all updates regardless of visibility
+ },
+ })
+ }
+ return instance
+}
+
+// Shutdown will shutdown the server
+func Shutdown() {
+ if instance != nil {
+ instance.Shutdown()
+ }
+}
+
+// SendChange will send a specific change
+func SendChange(affects string) {
+ Send(Message{Affects: affects}, "")
+}
+
+// SendMessage will construct a message for the UI
+func SendMessage(typ, lang string, langParams map[string]string) {
+ Send(Message{
+ Type: typ,
+ Lang: lang,
+ LangParams: langParams,
+ }, "")
+}
+
+// Send will send a message
+func Send(msg Message, channel string) {
+ if channel == "" {
+ channel = defaultChannel
+ }
+ logger.Debug("SSE Sending: %+v", msg)
+ if data, err := json.Marshal(msg); err != nil {
+ logger.Error("SSEError", err)
+ } else {
+ Get().SendMessage(channel, sse.SimpleMessage(string(data)))
+ }
+}
+
+// TODO: if we end up implementing user visibility,
+// then we'll have to subscribe people to their own
+// channels and publish to all or some depending on visibility.
+// This means using a specific ChannelNameFunc that revolves
+// around the user and their visibility.
diff --git a/backend/internal/serverevents/sse_test.go b/backend/internal/serverevents/sse_test.go
new file mode 100644
index 000000000..f1353f883
--- /dev/null
+++ b/backend/internal/serverevents/sse_test.go
@@ -0,0 +1,27 @@
+package serverevents
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "go.uber.org/goleak"
+)
+
+func TestGet(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("github.com/jc21/go-sse.(*Server).dispatch"))
+
+ s := Get()
+ assert.NotEqual(t, nil, s)
+}
+
+// This is just for code coverage more than anything
+func TestEverything(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t)
+
+ Get()
+ SendMessage("test", "test", map[string]string{"user_id": "10"})
+ SendChange("hosts")
+ Shutdown()
+}
diff --git a/backend/internal/setting.js b/backend/internal/setting.js
deleted file mode 100644
index d4ac67d8a..000000000
--- a/backend/internal/setting.js
+++ /dev/null
@@ -1,133 +0,0 @@
-const fs = require('fs');
-const error = require('../lib/error');
-const settingModel = require('../models/setting');
-const internalNginx = require('./nginx');
-
-const internalSetting = {
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {String} data.id
- * @return {Promise}
- */
- update: (access, data) => {
- return access.can('settings:update', data.id)
- .then((/*access_data*/) => {
- return internalSetting.get(access, {id: data.id});
- })
- .then((row) => {
- if (row.id !== data.id) {
- // Sanity check that something crazy hasn't happened
- throw new error.InternalValidationError('Setting could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
- }
-
- return settingModel
- .query()
- .where({id: data.id})
- .patch(data);
- })
- .then(() => {
- return internalSetting.get(access, {
- id: data.id
- });
- })
- .then((row) => {
- if (row.id === 'default-site') {
- // write the html if we need to
- if (row.value === 'html') {
- fs.writeFileSync('/data/nginx/default_www/index.html', row.meta.html, {encoding: 'utf8'});
- }
-
- // Configure nginx
- return internalNginx.deleteConfig('default')
- .then(() => {
- return internalNginx.generateConfig('default', row);
- })
- .then(() => {
- return internalNginx.test();
- })
- .then(() => {
- return internalNginx.reload();
- })
- .then(() => {
- return row;
- })
- .catch((/*err*/) => {
- internalNginx.deleteConfig('default')
- .then(() => {
- return internalNginx.test();
- })
- .then(() => {
- return internalNginx.reload();
- })
- .then(() => {
- // I'm being slack here I know..
- throw new error.ValidationError('Could not reconfigure Nginx. Please check logs.');
- });
- });
- } else {
- return row;
- }
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {String} data.id
- * @return {Promise}
- */
- get: (access, data) => {
- return access.can('settings:get', data.id)
- .then(() => {
- return settingModel
- .query()
- .where('id', data.id)
- .first();
- })
- .then((row) => {
- if (row) {
- return row;
- } else {
- throw new error.ItemNotFoundError(data.id);
- }
- });
- },
-
- /**
- * This will only count the settings
- *
- * @param {Access} access
- * @returns {*}
- */
- getCount: (access) => {
- return access.can('settings:list')
- .then(() => {
- return settingModel
- .query()
- .count('id as count')
- .first();
- })
- .then((row) => {
- return parseInt(row.count, 10);
- });
- },
-
- /**
- * All settings
- *
- * @param {Access} access
- * @returns {Promise}
- */
- getAll: (access) => {
- return access.can('settings:list')
- .then(() => {
- return settingModel
- .query()
- .orderBy('description', 'ASC');
- });
- }
-};
-
-module.exports = internalSetting;
diff --git a/backend/internal/status/status.go b/backend/internal/status/status.go
new file mode 100644
index 000000000..84f8dc6f4
--- /dev/null
+++ b/backend/internal/status/status.go
@@ -0,0 +1,10 @@
+package status
+
+const (
+ // StatusReady means a host is ready to configure
+ StatusReady = "ready"
+ // StatusOK means a host is configured within Nginx
+ StatusOK = "ok"
+ // StatusError is self explanatory
+ StatusError = "error"
+)
diff --git a/backend/internal/stream.js b/backend/internal/stream.js
deleted file mode 100644
index 9c458a10b..000000000
--- a/backend/internal/stream.js
+++ /dev/null
@@ -1,348 +0,0 @@
-const _ = require('lodash');
-const error = require('../lib/error');
-const streamModel = require('../models/stream');
-const internalNginx = require('./nginx');
-const internalAuditLog = require('./audit-log');
-
-function omissions () {
- return ['is_deleted'];
-}
-
-const internalStream = {
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @returns {Promise}
- */
- create: (access, data) => {
- return access.can('streams:create', data)
- .then((/*access_data*/) => {
- // TODO: At this point the existing ports should have been checked
- data.owner_user_id = access.token.getUserId(1);
-
- if (typeof data.meta === 'undefined') {
- data.meta = {};
- }
-
- return streamModel
- .query()
- .omit(omissions())
- .insertAndFetch(data);
- })
- .then((row) => {
- // Configure nginx
- return internalNginx.configure(streamModel, 'stream', row)
- .then(() => {
- return internalStream.get(access, {id: row.id, expand: ['owner']});
- });
- })
- .then((row) => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'created',
- object_type: 'stream',
- object_id: row.id,
- meta: data
- })
- .then(() => {
- return row;
- });
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @return {Promise}
- */
- update: (access, data) => {
- return access.can('streams:update', data.id)
- .then((/*access_data*/) => {
- // TODO: at this point the existing streams should have been checked
- return internalStream.get(access, {id: data.id});
- })
- .then((row) => {
- if (row.id !== data.id) {
- // Sanity check that something crazy hasn't happened
- throw new error.InternalValidationError('Stream could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
- }
-
- return streamModel
- .query()
- .omit(omissions())
- .patchAndFetchById(row.id, data)
- .then((saved_row) => {
- return internalNginx.configure(streamModel, 'stream', saved_row)
- .then(() => {
- return internalStream.get(access, {id: row.id, expand: ['owner']});
- });
- })
- .then((saved_row) => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'updated',
- object_type: 'stream',
- object_id: row.id,
- meta: data
- })
- .then(() => {
- return _.omit(saved_row, omissions());
- });
- });
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {Array} [data.expand]
- * @param {Array} [data.omit]
- * @return {Promise}
- */
- get: (access, data) => {
- if (typeof data === 'undefined') {
- data = {};
- }
-
- return access.can('streams:get', data.id)
- .then((access_data) => {
- let query = streamModel
- .query()
- .where('is_deleted', 0)
- .andWhere('id', data.id)
- .allowEager('[owner]')
- .first();
-
- if (access_data.permission_visibility !== 'all') {
- query.andWhere('owner_user_id', access.token.getUserId(1));
- }
-
- // Custom omissions
- if (typeof data.omit !== 'undefined' && data.omit !== null) {
- query.omit(data.omit);
- }
-
- if (typeof data.expand !== 'undefined' && data.expand !== null) {
- query.eager('[' + data.expand.join(', ') + ']');
- }
-
- return query;
- })
- .then((row) => {
- if (row) {
- return _.omit(row, omissions());
- } else {
- throw new error.ItemNotFoundError(data.id);
- }
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {String} [data.reason]
- * @returns {Promise}
- */
- delete: (access, data) => {
- return access.can('streams:delete', data.id)
- .then(() => {
- return internalStream.get(access, {id: data.id});
- })
- .then((row) => {
- if (!row) {
- throw new error.ItemNotFoundError(data.id);
- }
-
- return streamModel
- .query()
- .where('id', row.id)
- .patch({
- is_deleted: 1
- })
- .then(() => {
- // Delete Nginx Config
- return internalNginx.deleteConfig('stream', row)
- .then(() => {
- return internalNginx.reload();
- });
- })
- .then(() => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'deleted',
- object_type: 'stream',
- object_id: row.id,
- meta: _.omit(row, omissions())
- });
- });
- })
- .then(() => {
- return true;
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {String} [data.reason]
- * @returns {Promise}
- */
- enable: (access, data) => {
- return access.can('streams:update', data.id)
- .then(() => {
- return internalStream.get(access, {
- id: data.id,
- expand: ['owner']
- });
- })
- .then((row) => {
- if (!row) {
- throw new error.ItemNotFoundError(data.id);
- } else if (row.enabled) {
- throw new error.ValidationError('Host is already enabled');
- }
-
- row.enabled = 1;
-
- return streamModel
- .query()
- .where('id', row.id)
- .patch({
- enabled: 1
- })
- .then(() => {
- // Configure nginx
- return internalNginx.configure(streamModel, 'stream', row);
- })
- .then(() => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'enabled',
- object_type: 'stream',
- object_id: row.id,
- meta: _.omit(row, omissions())
- });
- });
- })
- .then(() => {
- return true;
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Number} data.id
- * @param {String} [data.reason]
- * @returns {Promise}
- */
- disable: (access, data) => {
- return access.can('streams:update', data.id)
- .then(() => {
- return internalStream.get(access, {id: data.id});
- })
- .then((row) => {
- if (!row) {
- throw new error.ItemNotFoundError(data.id);
- } else if (!row.enabled) {
- throw new error.ValidationError('Host is already disabled');
- }
-
- row.enabled = 0;
-
- return streamModel
- .query()
- .where('id', row.id)
- .patch({
- enabled: 0
- })
- .then(() => {
- // Delete Nginx Config
- return internalNginx.deleteConfig('stream', row)
- .then(() => {
- return internalNginx.reload();
- });
- })
- .then(() => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'disabled',
- object_type: 'stream-host',
- object_id: row.id,
- meta: _.omit(row, omissions())
- });
- });
- })
- .then(() => {
- return true;
- });
- },
-
- /**
- * All Streams
- *
- * @param {Access} access
- * @param {Array} [expand]
- * @param {String} [search_query]
- * @returns {Promise}
- */
- getAll: (access, expand, search_query) => {
- return access.can('streams:list')
- .then((access_data) => {
- let query = streamModel
- .query()
- .where('is_deleted', 0)
- .groupBy('id')
- .omit(['is_deleted'])
- .allowEager('[owner]')
- .orderBy('incoming_port', 'ASC');
-
- if (access_data.permission_visibility !== 'all') {
- query.andWhere('owner_user_id', access.token.getUserId(1));
- }
-
- // Query is used for searching
- if (typeof search_query === 'string') {
- query.where(function () {
- this.where('incoming_port', 'like', '%' + search_query + '%');
- });
- }
-
- if (typeof expand !== 'undefined' && expand !== null) {
- query.eager('[' + expand.join(', ') + ']');
- }
-
- return query;
- });
- },
-
- /**
- * Report use
- *
- * @param {Number} user_id
- * @param {String} visibility
- * @returns {Promise}
- */
- getCount: (user_id, visibility) => {
- let query = streamModel
- .query()
- .count('id as count')
- .where('is_deleted', 0);
-
- if (visibility !== 'all') {
- query.andWhere('owner_user_id', user_id);
- }
-
- return query.first()
- .then((row) => {
- return parseInt(row.count, 10);
- });
- }
-};
-
-module.exports = internalStream;
diff --git a/backend/internal/tags/filters.go b/backend/internal/tags/filters.go
new file mode 100644
index 000000000..bce6319e3
--- /dev/null
+++ b/backend/internal/tags/filters.go
@@ -0,0 +1,309 @@
+package tags
+
+import (
+ "fmt"
+ "reflect"
+ "regexp"
+ "strings"
+
+ "npm/internal/database"
+ "npm/internal/logger"
+ "npm/internal/model"
+ "npm/internal/util"
+
+ "github.com/rotisserie/eris"
+)
+
+// GetFilterMap ...
+func GetFilterMap(m any, globalTablePrefix string) map[string]model.FilterMapValue {
+ name := getName(m)
+ filterMap := make(map[string]model.FilterMapValue)
+
+ // TypeOf returns the reflection Type that represents the dynamic type of variable.
+ // If variable is a nil interface value, TypeOf returns nil.
+ t := reflect.TypeOf(m)
+
+ // Get the table name from the model function, if it exists
+ if globalTablePrefix == "" {
+ v := reflect.ValueOf(m)
+ tableNameFunc, ok := t.MethodByName("TableName")
+ if ok {
+ n := tableNameFunc.Func.Call([]reflect.Value{v})
+ if len(n) > 0 {
+ globalTablePrefix = fmt.Sprintf(
+ `%s.`,
+ database.QuoteTableName(n[0].String()),
+ )
+ }
+ }
+ }
+
+ // If this is an entity model (and it probably is)
+ // then include the base model as well
+ if strings.Contains(name, ".Model") && !strings.Contains(name, "Base") {
+ filterMap = GetFilterMap(model.Base{}, globalTablePrefix)
+ }
+
+ if t.Kind() != reflect.Struct {
+ logger.Error("GetFilterMapError", eris.Errorf("%v type can't have attributes inspected", t.Kind()))
+ return nil
+ }
+
+ // Iterate over all available fields and read the tag value
+ for i := 0; i < t.NumField(); i++ {
+ // Get the field, returns https://golang.org/pkg/reflect/#StructField
+ field := t.Field(i)
+
+ // Get the field tag value
+ filterTag := field.Tag.Get("filter")
+ dbTag := field.Tag.Get("gorm")
+
+ // Filter -> Schema mapping
+ if filterTag != "" && filterTag != "-" {
+ f := model.FilterMapValue{
+ Model: name,
+ }
+
+ f.Schema = getFilterTagSchema(filterTag)
+ parts := strings.Split(filterTag, ",")
+
+ // Filter -> DB Field mapping
+ if dbTag != "" && dbTag != "-" {
+ // Filter tag can be a 2 part thing: name,type
+ // ie: account_id,integer
+ // So we need to split and use the first part
+ tablePrefix := globalTablePrefix
+ if len(parts) > 1 {
+ f.Type = parts[1]
+ if len(parts) > 2 {
+ tablePrefix = fmt.Sprintf(`"%s".`, parts[2])
+ }
+ }
+
+ // db can have many parts, we need to pull out the "column:value" part
+ f.Field = database.QuoteTableName(field.Name)
+ r := regexp.MustCompile(`(?:^|;)column:([^;|$]+)(?:$|;)`)
+ if matches := r.FindStringSubmatch(dbTag); len(matches) > 1 {
+ f.Field = fmt.Sprintf("%s%s", tablePrefix, database.QuoteTableName(matches[1]))
+ }
+ }
+
+ filterMap[parts[0]] = f
+ }
+ }
+
+ return filterMap
+}
+
+func getFilterTagSchema(filterTag string) string {
+ // split out tag value "field,filtreType"
+ // with a default filter type of string
+ items := strings.Split(filterTag, ",")
+ if len(items) == 1 {
+ items = append(items, "string")
+ }
+
+ switch items[1] {
+ case "number":
+ fallthrough
+ case "int":
+ fallthrough
+ case "integer":
+ return intFieldSchema(items[0])
+ case "bool":
+ fallthrough
+ case "boolean":
+ return boolFieldSchema(items[0])
+ case "date":
+ return dateFieldSchema(items[0])
+ case "regex":
+ if len(items) < 3 {
+ items = append(items, ".*")
+ }
+ return regexFieldSchema(items[0], items[2])
+
+ default:
+ return stringFieldSchema(items[0])
+ }
+}
+
+// GetFilterSchema creates a jsonschema for validating filters, based on the model
+// object given and by reading the struct "filter" tags.
+func GetFilterSchema(m any) string {
+ filterMap := GetFilterMap(m, "")
+ schemas := make([]string, 0)
+
+ for _, f := range filterMap {
+ schemas = append(schemas, f.Schema)
+ }
+
+ str := fmt.Sprintf(baseFilterSchema, strings.Join(schemas, ", "))
+ return util.PrettyPrintJSON(str)
+}
+
+// boolFieldSchema returns the Field Schema for a Boolean accepted value field
+func boolFieldSchema(fieldName string) string {
+ return fmt.Sprintf(`{
+ "type": "object",
+ "properties": {
+ "field": {
+ "type": "string",
+ "pattern": "^%s$"
+ },
+ "modifier": %s,
+ "value": {
+ "oneOf": [
+ %s,
+ {
+ "type": "array",
+ "items": %s
+ }
+ ]
+ }
+ }
+ }`, fieldName, boolModifiers, filterBool, filterBool)
+}
+
+// intFieldSchema returns the Field Schema for a Integer accepted value field
+func intFieldSchema(fieldName string) string {
+ return fmt.Sprintf(`{
+ "type": "object",
+ "properties": {
+ "field": {
+ "type": "string",
+ "pattern": "^%s$"
+ },
+ "modifier": %s,
+ "value": {
+ "oneOf": [
+ {
+ "type": "string",
+ "pattern": "^[0-9]+$"
+ },
+ {
+ "type": "array",
+ "items": {
+ "type": "string",
+ "pattern": "^[0-9]+$"
+ }
+ }
+ ]
+ }
+ }
+ }`, fieldName, allModifiers)
+}
+
+// stringFieldSchema returns the Field Schema for a String accepted value field
+func stringFieldSchema(fieldName string) string {
+ return fmt.Sprintf(`{
+ "type": "object",
+ "properties": {
+ "field": {
+ "type": "string",
+ "pattern": "^%s$"
+ },
+ "modifier": %s,
+ "value": {
+ "oneOf": [
+ %s,
+ {
+ "type": "array",
+ "items": %s
+ }
+ ]
+ }
+ }
+ }`, fieldName, stringModifiers, filterString, filterString)
+}
+
+// regexFieldSchema returns the Field Schema for a String accepted value field matching a Regex
+func regexFieldSchema(fieldName string, regex string) string {
+ return fmt.Sprintf(`{
+ "type": "object",
+ "properties": {
+ "field": {
+ "type": "string",
+ "pattern": "^%s$"
+ },
+ "modifier": %s,
+ "value": {
+ "oneOf": [
+ {
+ "type": "string",
+ "pattern": "%s"
+ },
+ {
+ "type": "array",
+ "items": {
+ "type": "string",
+ "pattern": "%s"
+ }
+ }
+ ]
+ }
+ }
+ }`, fieldName, stringModifiers, regex, regex)
+}
+
+// dateFieldSchema returns the Field Schema for a String accepted value field matching a Date format
+func dateFieldSchema(fieldName string) string {
+ return fmt.Sprintf(`{
+ "type": "object",
+ "properties": {
+ "field": {
+ "type": "string",
+ "pattern": "^%s$"
+ },
+ "modifier": %s,
+ "value": {
+ "oneOf": [
+ {
+ "type": "string",
+ "pattern": "^([12]\\d{3}-(0[1-9]|1[0-2])-(0[1-9]|[12]\\d|3[01]))$"
+ },
+ {
+ "type": "array",
+ "items": {
+ "type": "string",
+ "pattern": "^([12]\\d{3}-(0[1-9]|1[0-2])-(0[1-9]|[12]\\d|3[01]))$"
+ }
+ }
+ ]
+ }
+ }
+ }`, fieldName, allModifiers)
+}
+
+const allModifiers = `{
+ "type": "string",
+ "pattern": "^(equals|not|contains|starts|ends|in|notin|min|max|greater|less)$"
+}`
+
+const boolModifiers = `{
+ "type": "string",
+ "pattern": "^(equals|not)$"
+}`
+
+const stringModifiers = `{
+ "type": "string",
+ "pattern": "^(equals|not|contains|starts|ends|in|notin)$"
+}`
+
+const filterBool = `{
+ "type": "string",
+ "pattern": "^(TRUE|true|t|yes|y|on|1|FALSE|f|false|n|no|off|0)$"
+}`
+
+const filterString = `{
+ "type": "string",
+ "minLength": 1
+}`
+
+const baseFilterSchema = `{
+ "type": "array",
+ "items": {
+ "oneOf": [
+ %s
+ ]
+ }
+}`
diff --git a/backend/internal/tags/filters_test.go b/backend/internal/tags/filters_test.go
new file mode 100644
index 000000000..b6685f1cb
--- /dev/null
+++ b/backend/internal/tags/filters_test.go
@@ -0,0 +1,108 @@
+package tags
+
+import (
+ "testing"
+ "time"
+
+ "npm/internal/util"
+
+ "github.com/stretchr/testify/assert"
+ "go.uber.org/goleak"
+)
+
+func TestGetFilterSchema(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ type testDemo struct {
+ ID uint `json:"id" gorm:"column:user_id" filter:"id,number"`
+ Created time.Time `json:"created" gorm:"column:user_created_date" filter:"created,date"`
+ Name string `json:"name" gorm:"column:user_name" filter:"name,string"`
+ IsDisabled string `json:"is_disabled" gorm:"column:user_is_disabled" filter:"is_disabled,bool"`
+ Permissions string `json:"permissions" gorm:"column:user_permissions" filter:"permissions,regex"`
+ History string `json:"history" gorm:"column:user_history" filter:"history,regex,(id|name)"`
+ }
+
+ m := testDemo{ID: 10, Name: "test"}
+
+ filterSchema := GetFilterSchema(m)
+ assert.Greater(t, len(filterSchema), 4000)
+ // Trigger again for code coverage of cached item
+ GetFilterSchema(m)
+}
+
+func TestGetFilterTagSchema(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ schema := util.PrettyPrintJSON(getFilterTagSchema("id,integer"))
+
+ expectedSchema := `{
+ "type": "object",
+ "properties": {
+ "field": {
+ "type": "string",
+ "pattern": "^id$"
+ },
+ "modifier": {
+ "type": "string",
+ "pattern": "^(equals|not|contains|starts|ends|in|notin|min|max|greater|less)$"
+ },
+ "value": {
+ "oneOf": [
+ {
+ "type": "string",
+ "pattern": "^[0-9]+$"
+ },
+ {
+ "type": "array",
+ "items": {
+ "type": "string",
+ "pattern": "^[0-9]+$"
+ }
+ }
+ ]
+ }
+ }
+}`
+
+ assert.Equal(t, expectedSchema, schema)
+}
+
+func TestBoolFieldSchema(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ schema := util.PrettyPrintJSON(boolFieldSchema("active"))
+
+ expectedSchema := `{
+ "type": "object",
+ "properties": {
+ "field": {
+ "type": "string",
+ "pattern": "^active$"
+ },
+ "modifier": {
+ "type": "string",
+ "pattern": "^(equals|not)$"
+ },
+ "value": {
+ "oneOf": [
+ {
+ "type": "string",
+ "pattern": "^(TRUE|true|t|yes|y|on|1|FALSE|f|false|n|no|off|0)$"
+ },
+ {
+ "type": "array",
+ "items": {
+ "type": "string",
+ "pattern": "^(TRUE|true|t|yes|y|on|1|FALSE|f|false|n|no|off|0)$"
+ }
+ }
+ ]
+ }
+ }
+}`
+
+ assert.Equal(t, expectedSchema, schema)
+}
diff --git a/backend/internal/tags/reflect.go b/backend/internal/tags/reflect.go
new file mode 100644
index 000000000..0dbadbee2
--- /dev/null
+++ b/backend/internal/tags/reflect.go
@@ -0,0 +1,33 @@
+package tags
+
+import (
+ "fmt"
+ "reflect"
+
+ "npm/internal/model"
+)
+
+var tagCache map[string]map[string]model.FilterMapValue
+
+// getName returns the name of the type given
+func getName(m any) string {
+ fc := reflect.TypeOf(m)
+ return fmt.Sprint(fc)
+}
+
+// getCache tries to find a cached item for this name
+func getCache(name string) (map[string]model.FilterMapValue, bool) {
+ if val, ok := tagCache[name]; ok {
+ return val, true
+ }
+ return nil, false
+}
+
+// setCache sets the name to this value
+func setCache(name string, val map[string]model.FilterMapValue) {
+ // Hack to initialise empty map
+ if len(tagCache) == 0 {
+ tagCache = make(map[string]map[string]model.FilterMapValue, 0)
+ }
+ tagCache[name] = val
+}
diff --git a/backend/internal/tags/reflect_test.go b/backend/internal/tags/reflect_test.go
new file mode 100644
index 000000000..e29c59855
--- /dev/null
+++ b/backend/internal/tags/reflect_test.go
@@ -0,0 +1,46 @@
+package tags
+
+import (
+ "testing"
+
+ "npm/internal/model"
+
+ "github.com/stretchr/testify/assert"
+ "go.uber.org/goleak"
+)
+
+func TestGetName(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ type testDemo struct {
+ UserID uint `json:"user_id" gorm:"column:user_id" filter:"user_id,integer"`
+ Type string `json:"type" gorm:"column:type" filter:"type,string"`
+ }
+
+ m := testDemo{UserID: 10, Type: "test"}
+ assert.Equal(t, "tags.testDemo", getName(m))
+}
+
+func TestCache(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ name := "testdemo"
+ // Should return error
+ _, exists := getCache(name)
+ assert.Equal(t, false, exists)
+
+ setCache(name, map[string]model.FilterMapValue{
+ "test": {
+ Field: "test",
+ Type: "test",
+ },
+ })
+
+ // Should return value
+ val, exists := getCache(name)
+ assert.Equal(t, true, exists)
+ assert.Equal(t, "test", val["test"].Field)
+ assert.Equal(t, "test", val["test"].Type)
+}
diff --git a/backend/internal/test/suite.go b/backend/internal/test/suite.go
new file mode 100644
index 000000000..8d9fec23d
--- /dev/null
+++ b/backend/internal/test/suite.go
@@ -0,0 +1,44 @@
+package test
+
+import (
+ "strings"
+ "testing"
+
+ "npm/internal/config"
+ "npm/internal/database"
+
+ "github.com/DATA-DOG/go-sqlmock"
+ "gorm.io/driver/postgres"
+ "gorm.io/gorm"
+)
+
+// Setup ...
+func Setup() (sqlmock.Sqlmock, error) {
+ db, mock, err := sqlmock.New()
+ if err != nil {
+ return nil, err
+ }
+ dialector := postgres.New(postgres.Config{
+ Conn: db,
+ DriverName: "postgres",
+ })
+ gormDB, err := gorm.Open(dialector, &gorm.Config{})
+ database.SetDB(gormDB)
+ return mock, err
+}
+
+// InitConfig ...
+func InitConfig(t *testing.T, envs ...string) {
+ if len(envs) > 0 {
+ for _, env := range envs {
+ parts := strings.Split(env, "=")
+ if len(parts) == 2 {
+ t.Setenv(parts[0], parts[1])
+ }
+ }
+ }
+
+ version := "999.999.999"
+ commit := "abcd123"
+ config.Init(&version, &commit)
+}
diff --git a/backend/internal/token.js b/backend/internal/token.js
deleted file mode 100644
index a64b90105..000000000
--- a/backend/internal/token.js
+++ /dev/null
@@ -1,162 +0,0 @@
-const _ = require('lodash');
-const error = require('../lib/error');
-const userModel = require('../models/user');
-const authModel = require('../models/auth');
-const helpers = require('../lib/helpers');
-const TokenModel = require('../models/token');
-
-module.exports = {
-
- /**
- * @param {Object} data
- * @param {String} data.identity
- * @param {String} data.secret
- * @param {String} [data.scope]
- * @param {String} [data.expiry]
- * @param {String} [issuer]
- * @returns {Promise}
- */
- getTokenFromEmail: (data, issuer) => {
- let Token = new TokenModel();
-
- data.scope = data.scope || 'user';
- data.expiry = data.expiry || '1d';
-
- return userModel
- .query()
- .where('email', data.identity)
- .andWhere('is_deleted', 0)
- .andWhere('is_disabled', 0)
- .first()
- .then((user) => {
- if (user) {
- // Get auth
- return authModel
- .query()
- .where('user_id', '=', user.id)
- .where('type', '=', 'password')
- .first()
- .then((auth) => {
- if (auth) {
- return auth.verifyPassword(data.secret)
- .then((valid) => {
- if (valid) {
-
- if (data.scope !== 'user' && _.indexOf(user.roles, data.scope) === -1) {
- // The scope requested doesn't exist as a role against the user,
- // you shall not pass.
- throw new error.AuthError('Invalid scope: ' + data.scope);
- }
-
- // Create a moment of the expiry expression
- let expiry = helpers.parseDatePeriod(data.expiry);
- if (expiry === null) {
- throw new error.AuthError('Invalid expiry time: ' + data.expiry);
- }
-
- return Token.create({
- iss: issuer || 'api',
- attrs: {
- id: user.id
- },
- scope: [data.scope],
- expiresIn: data.expiry
- })
- .then((signed) => {
- return {
- token: signed.token,
- expires: expiry.toISOString()
- };
- });
- } else {
- throw new error.AuthError('Invalid password');
- }
- });
- } else {
- throw new error.AuthError('No password auth for user');
- }
- });
- } else {
- throw new error.AuthError('No relevant user found');
- }
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} [data]
- * @param {String} [data.expiry]
- * @param {String} [data.scope] Only considered if existing token scope is admin
- * @returns {Promise}
- */
- getFreshToken: (access, data) => {
- let Token = new TokenModel();
-
- data = data || {};
- data.expiry = data.expiry || '1d';
-
- if (access && access.token.getUserId(0)) {
-
- // Create a moment of the expiry expression
- let expiry = helpers.parseDatePeriod(data.expiry);
- if (expiry === null) {
- throw new error.AuthError('Invalid expiry time: ' + data.expiry);
- }
-
- let token_attrs = {
- id: access.token.getUserId(0)
- };
-
- // Only admins can request otherwise scoped tokens
- let scope = access.token.get('scope');
- if (data.scope && access.token.hasScope('admin')) {
- scope = [data.scope];
-
- if (data.scope === 'job-board' || data.scope === 'worker') {
- token_attrs.id = 0;
- }
- }
-
- return Token.create({
- iss: 'api',
- scope: scope,
- attrs: token_attrs,
- expiresIn: data.expiry
- })
- .then((signed) => {
- return {
- token: signed.token,
- expires: expiry.toISOString()
- };
- });
- } else {
- throw new error.AssertionFailedError('Existing token contained invalid user data');
- }
- },
-
- /**
- * @param {Object} user
- * @returns {Promise}
- */
- getTokenFromUser: (user) => {
- const expire = '1d';
- const Token = new TokenModel();
- const expiry = helpers.parseDatePeriod(expire);
-
- return Token.create({
- iss: 'api',
- attrs: {
- id: user.id
- },
- scope: ['user'],
- expiresIn: expire
- })
- .then((signed) => {
- return {
- token: signed.token,
- expires: expiry.toISOString(),
- user: user
- };
- });
- }
-};
diff --git a/backend/internal/types/db_date.go b/backend/internal/types/db_date.go
new file mode 100644
index 000000000..27e212df5
--- /dev/null
+++ b/backend/internal/types/db_date.go
@@ -0,0 +1,39 @@
+package types
+
+import (
+ "database/sql/driver"
+ "encoding/json"
+ "time"
+)
+
+// DBDate is a date time
+// type DBDate time.Time
+type DBDate struct {
+ Time time.Time
+}
+
+// Value encodes the type ready for the database
+func (d DBDate) Value() (driver.Value, error) {
+ return driver.Value(d.Time.Unix()), nil
+}
+
+// Scan takes data from the database and modifies it for Go Types
+func (d *DBDate) Scan(src any) error {
+ d.Time = time.Unix(src.(int64), 0)
+ return nil
+}
+
+// UnmarshalJSON will unmarshal both database and post given values
+func (d *DBDate) UnmarshalJSON(data []byte) error {
+ var u int64
+ if err := json.Unmarshal(data, &u); err != nil {
+ return err
+ }
+ d.Time = time.Unix(u, 0)
+ return nil
+}
+
+// MarshalJSON will marshal for output in api responses
+func (d DBDate) MarshalJSON() ([]byte, error) {
+ return json.Marshal(d.Time.Unix())
+}
diff --git a/backend/internal/types/db_date_test.go b/backend/internal/types/db_date_test.go
new file mode 100644
index 000000000..1aeafec11
--- /dev/null
+++ b/backend/internal/types/db_date_test.go
@@ -0,0 +1,124 @@
+package types
+
+import (
+ "encoding/json"
+ "testing"
+ "time"
+
+ "go.uber.org/goleak"
+)
+
+func TestDBDate_Value(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ // Create a DBDate instance with a specific time
+ expectedTime := time.Date(2022, time.January, 1, 0, 0, 0, 0, time.UTC)
+ dbDate := DBDate{Time: expectedTime}
+
+ // Call the Value method
+ value, err := dbDate.Value()
+
+ // Assert the value and error
+ if err != nil {
+ t.Errorf("Unexpected error: %v", err)
+ }
+
+ // Convert the value to int64
+ unixTime := value.(int64)
+
+ // Convert the unix time back to time.Time
+ actualTime := time.Unix(unixTime, 0)
+
+ // Compare the actual time with the expected time
+ if !actualTime.Equal(expectedTime) {
+ t.Errorf("Expected time '%v', got '%v'", expectedTime, actualTime)
+ }
+}
+
+func TestDBDate_Scan(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ // Simulate a value from the database (unix timestamp)
+ unixTime := int64(1640995200)
+
+ // Create a DBDate instance
+ dbDate := DBDate{}
+
+ // Call the Scan method
+ err := dbDate.Scan(unixTime)
+
+ // Assert the error
+ if err != nil {
+ t.Errorf("Unexpected error: %v", err)
+ }
+
+ // Convert the DBDate's time to unix timestamp for comparison
+ actualUnixTime := dbDate.Time.Unix()
+
+ // Compare the actual unix time with the expected unix time
+ if actualUnixTime != unixTime {
+ t.Errorf("Expected unix time '%v', got '%v'", unixTime, actualUnixTime)
+ }
+}
+
+func TestDBDate_UnmarshalJSON(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ // Simulate a JSON input representing a unix timestamp
+ jsonData := []byte("1640995200")
+
+ // Create a DBDate instance
+ dbDate := DBDate{}
+
+ // Call the UnmarshalJSON method
+ err := dbDate.UnmarshalJSON(jsonData)
+
+ // Assert the error
+ if err != nil {
+ t.Errorf("Unexpected error: %v", err)
+ }
+
+ // Convert the DBDate's time to unix timestamp for comparison
+ actualUnixTime := dbDate.Time.Unix()
+
+ // Compare the actual unix time with the expected unix time
+ expectedUnixTime := int64(1640995200)
+ if actualUnixTime != expectedUnixTime {
+ t.Errorf("Expected unix time '%v', got '%v'", expectedUnixTime, actualUnixTime)
+ }
+}
+
+func TestDBDate_MarshalJSON(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ // Create a DBDate instance with a specific time
+ expectedTime := time.Date(2022, time.January, 1, 0, 0, 0, 0, time.UTC)
+ dbDate := DBDate{Time: expectedTime}
+
+ // Call the MarshalJSON method
+ jsonData, err := dbDate.MarshalJSON()
+
+ // Assert the value and error
+ if err != nil {
+ t.Errorf("Unexpected error: %v", err)
+ }
+
+ // Convert the JSON data to an integer
+ var actualUnixTime int64
+ err = json.Unmarshal(jsonData, &actualUnixTime)
+ if err != nil {
+ t.Errorf("Failed to unmarshal JSON data: %v", err)
+ }
+
+ // Convert the unix time back to time.Time
+ actualTime := time.Unix(actualUnixTime, 0)
+
+ // Compare the actual time with the expected time
+ if !actualTime.Equal(expectedTime) {
+ t.Errorf("Expected time '%v', got '%v'", expectedTime, actualTime)
+ }
+}
diff --git a/backend/internal/types/db_nullable_int.go b/backend/internal/types/db_nullable_int.go
new file mode 100644
index 000000000..0ece3f5d8
--- /dev/null
+++ b/backend/internal/types/db_nullable_int.go
@@ -0,0 +1,63 @@
+package types
+
+import (
+ "database/sql/driver"
+ "encoding/json"
+ "strconv"
+)
+
+// NullableDBInt works with database values that can be null or an integer value
+type NullableDBInt struct {
+ Int int
+}
+
+// Value encodes the type ready for the database
+func (d NullableDBInt) Value() (driver.Value, error) {
+ if d.Int == 0 {
+ return nil, nil
+ }
+ // According to current database/sql docs, the sql has four builtin functions that
+ // returns driver.Value, and the underlying types are `int64`, `float64`, `string` and `bool`
+ return driver.Value(int64(d.Int)), nil
+}
+
+// Scan takes data from the database and modifies it for Go Types
+func (d *NullableDBInt) Scan(src any) error {
+ var i int
+ switch v := src.(type) {
+ case int:
+ i = v
+ case int64:
+ i = int(v)
+ case float32:
+ i = int(v)
+ case float64:
+ i = int(v)
+ case string:
+ i, _ = strconv.Atoi(v)
+ }
+ d.Int = i
+ return nil
+}
+
+// UnmarshalJSON will unmarshal both database and post given values
+func (d *NullableDBInt) UnmarshalJSON(data []byte) error {
+ // total_deploy_time: 10,
+ // total_deploy_time: null,
+
+ var i int
+ if err := json.Unmarshal(data, &i); err != nil {
+ i = 0
+ return nil
+ }
+ d.Int = i
+ return nil
+}
+
+// MarshalJSON will marshal for output in api responses
+func (d NullableDBInt) MarshalJSON() ([]byte, error) {
+ if d.Int == 0 {
+ return json.Marshal(nil)
+ }
+ return json.Marshal(d.Int)
+}
diff --git a/backend/internal/types/db_nullable_int_test.go b/backend/internal/types/db_nullable_int_test.go
new file mode 100644
index 000000000..13f8558f7
--- /dev/null
+++ b/backend/internal/types/db_nullable_int_test.go
@@ -0,0 +1,105 @@
+package types
+
+import (
+ "testing"
+
+ "go.uber.org/goleak"
+)
+
+func TestNullableDBIntValue(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ var d NullableDBInt
+
+ // Test when Int is 0 (null)
+ d.Int = 0
+ value, err := d.Value()
+ if value != nil || err != nil {
+ t.Errorf("Expected Value() to return nil, nil but got %v, %v", value, err)
+ }
+
+ // Test when Int is not null
+ d.Int = 10
+ value, err = d.Value()
+ if value != int64(10) || err != nil {
+ t.Errorf("Expected Value() to return 10, nil but got %v, %v", value, err)
+ }
+}
+
+func TestNullableDBIntScan(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ var d NullableDBInt
+
+ // Test when src is an int
+ err := d.Scan(20)
+ if d.Int != 20 || err != nil {
+ t.Errorf("Expected Scan(20) to set d.Int to 20 and return nil but got d.Int = %d, err = %v", d.Int, err)
+ }
+
+ // Test when src is an int64
+ err = d.Scan(int64(30))
+ if d.Int != 30 || err != nil {
+ t.Errorf("Expected Scan(int64(30)) to set d.Int to 30 and return nil but got d.Int = %d, err = %v", d.Int, err)
+ }
+
+ // Test when src is a float32
+ err = d.Scan(float32(40))
+ if d.Int != 40 || err != nil {
+ t.Errorf("Expected Scan(float32(40)) to set d.Int to 40 and return nil but got d.Int = %d, err = %v", d.Int, err)
+ }
+
+ // Test when src is a float64
+ err = d.Scan(float64(50))
+ if d.Int != 50 || err != nil {
+ t.Errorf("Expected Scan(float64(50)) to set d.Int to 50 and return nil but got d.Int = %d, err = %v", d.Int, err)
+ }
+
+ // Test when src is a string
+ err = d.Scan("60")
+ if d.Int != 60 || err != nil {
+ t.Errorf("Expected Scan(\"60\") to set d.Int to 60 and return nil but got d.Int = %d, err = %v", d.Int, err)
+ }
+}
+
+func TestNullableDBIntUnmarshalJSON(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ var d NullableDBInt
+
+ // Test when data is an integer value
+ err := d.UnmarshalJSON([]byte("10"))
+ if d.Int != 10 || err != nil {
+ t.Errorf("Expected UnmarshalJSON([]byte(\"10\")) to set d.Int to 10 and return nil but got d.Int = %d, err = %v", d.Int, err)
+ }
+
+ // Test when data is null
+ err = d.UnmarshalJSON([]byte("null"))
+ if d.Int != 0 || err != nil {
+ t.Errorf("Expected UnmarshalJSON([]byte(\"null\")) to set d.Int to 0 and return nil but got d.Int = %d, err = %v", d.Int, err)
+ }
+}
+
+func TestNullableDBIntMarshalJSON(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ var d NullableDBInt
+
+ // Test when Int is 0 (null)
+ d.Int = 0
+ result, err := d.MarshalJSON()
+ if string(result) != "null" || err != nil {
+ t.Errorf("Expected MarshalJSON() to return \"null\", nil but got %s, %v", result, err)
+ }
+
+ // Test when Int is not null
+ d.Int = 10
+ result, err = d.MarshalJSON()
+ if string(result) != "10" || err != nil {
+ t.Errorf("Expected MarshalJSON() to return \"10\", nil but got %s, %v", result, err)
+ }
+}
diff --git a/backend/internal/types/db_nullable_uint.go b/backend/internal/types/db_nullable_uint.go
new file mode 100644
index 000000000..e268621d7
--- /dev/null
+++ b/backend/internal/types/db_nullable_uint.go
@@ -0,0 +1,68 @@
+package types
+
+import (
+ "database/sql/driver"
+ "encoding/json"
+ "strconv"
+)
+
+// NullableDBUint works with database values that can be null or an integer value
+type NullableDBUint struct {
+ Uint uint
+}
+
+// Value encodes the type ready for the database
+func (d NullableDBUint) Value() (driver.Value, error) {
+ if d.Uint == 0 {
+ return nil, nil
+ }
+ // According to current database/sql docs, the sql has four builtin functions that
+ // returns driver.Value, and the underlying types are `int64`, `float64`, `string` and `bool`
+ // nolint: gosec
+ return driver.Value(int64(d.Uint)), nil
+}
+
+// Scan takes data from the database and modifies it for Go Types
+func (d *NullableDBUint) Scan(src any) error {
+ var i uint
+ switch v := src.(type) {
+ case int:
+ // nolint: gosec
+ i = uint(v)
+ case int64:
+ // nolint: gosec
+ i = uint(v)
+ case float32:
+ i = uint(v)
+ case float64:
+ i = uint(v)
+ case string:
+ a, _ := strconv.Atoi(v)
+ // nolint: gosec
+ i = uint(a)
+ }
+ d.Uint = i
+ return nil
+}
+
+// UnmarshalJSON will unmarshal both database and post given values
+func (d *NullableDBUint) UnmarshalJSON(data []byte) error {
+ // total_deploy_time: 10,
+ // total_deploy_time: null,
+
+ var i uint
+ if err := json.Unmarshal(data, &i); err != nil {
+ i = 0
+ return nil
+ }
+ d.Uint = i
+ return nil
+}
+
+// MarshalJSON will marshal for output in api responses
+func (d NullableDBUint) MarshalJSON() ([]byte, error) {
+ if d.Uint == 0 {
+ return json.Marshal(nil)
+ }
+ return json.Marshal(d.Uint)
+}
diff --git a/backend/internal/types/db_nullable_uint_test.go b/backend/internal/types/db_nullable_uint_test.go
new file mode 100644
index 000000000..129f91a77
--- /dev/null
+++ b/backend/internal/types/db_nullable_uint_test.go
@@ -0,0 +1,160 @@
+package types
+
+import (
+ "database/sql/driver"
+ "testing"
+
+ "go.uber.org/goleak"
+)
+
+func TestNullableDBUint_Value(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ tests := []struct {
+ name string
+ input NullableDBUint
+ wantValue driver.Value
+ wantErr bool
+ }{
+ {
+ name: "Value should return nil when Uint is 0",
+ input: NullableDBUint{Uint: 0},
+ wantValue: nil,
+ wantErr: false,
+ },
+ {
+ name: "Value should return int64 value of Uint",
+ input: NullableDBUint{Uint: 10},
+ wantValue: int64(10),
+ wantErr: false,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ gotValue, gotErr := tt.input.Value()
+ if gotValue != tt.wantValue {
+ t.Errorf("Value() = %v, want %v", gotValue, tt.wantValue)
+ }
+ if (gotErr != nil) != tt.wantErr {
+ t.Errorf("Value() error = %v, wantErr %v", gotErr, tt.wantErr)
+ }
+ })
+ }
+}
+
+func TestNullableDBUint_Scan(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ tests := []struct {
+ name string
+ input any
+ wantUint uint
+ wantErr bool
+ }{
+ {
+ name: "Scan should convert int to uint",
+ input: int(10),
+ wantUint: uint(10),
+ wantErr: false,
+ },
+ {
+ name: "Scan should convert int64 to uint",
+ input: int64(10),
+ wantUint: uint(10),
+ wantErr: false,
+ },
+ // Add more tests for other supported types
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ var d NullableDBUint
+ err := d.Scan(tt.input)
+ if err != nil && !tt.wantErr {
+ t.Errorf("Scan() error = %v, wantErr %v", err, tt.wantErr)
+ }
+ if d.Uint != tt.wantUint {
+ t.Errorf("Scan() Uint = %v, want %v", d.Uint, tt.wantUint)
+ }
+ })
+ }
+}
+
+func TestNullableDBUint_UnmarshalJSON(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ tests := []struct {
+ name string
+ input []byte
+ wantUint uint
+ wantErr bool
+ }{
+ {
+ name: "UnmarshalJSON should unmarshal integer value",
+ input: []byte("10"),
+ wantUint: uint(10),
+ wantErr: false,
+ },
+ {
+ name: "UnmarshalJSON should return zero Uint when data is invalid",
+ input: []byte(`"invalid"`),
+ wantUint: uint(0),
+ wantErr: false,
+ },
+ // Add more tests for other scenarios
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ var d NullableDBUint
+ err := d.UnmarshalJSON(tt.input)
+ if err != nil && !tt.wantErr {
+ t.Errorf("UnmarshalJSON() error = %v, wantErr %v", err, tt.wantErr)
+ }
+ if d.Uint != tt.wantUint {
+ t.Errorf("UnmarshalJSON() Uint = %v, want %v", d.Uint, tt.wantUint)
+ }
+ })
+ }
+}
+
+func TestNullableDBUint_MarshalJSON(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ tests := []struct {
+ name string
+ input NullableDBUint
+ wantOutput []byte
+ wantErr bool
+ }{
+ {
+ name: "MarshalJSON should marshal nil when Uint is 0",
+ input: NullableDBUint{Uint: 0},
+ wantOutput: []byte("null"),
+ wantErr: false,
+ },
+ {
+ name: "MarshalJSON should marshal Uint as JSON value",
+ input: NullableDBUint{Uint: 10},
+ wantOutput: []byte("10"),
+ wantErr: false,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ gotOutput, gotErr := tt.input.MarshalJSON()
+ if (gotErr != nil) != tt.wantErr {
+ t.Errorf("MarshalJSON() error = %v, wantErr %v", gotErr, tt.wantErr)
+ }
+ if string(gotOutput) != string(tt.wantOutput) {
+ t.Errorf("MarshalJSON() output = %s, want %s", gotOutput, tt.wantOutput)
+ }
+ })
+ }
+}
diff --git a/backend/internal/types/jsonb.go b/backend/internal/types/jsonb.go
new file mode 100644
index 000000000..1cd8b3b7b
--- /dev/null
+++ b/backend/internal/types/jsonb.go
@@ -0,0 +1,72 @@
+package types
+
+import (
+ "database/sql/driver"
+ "encoding/json"
+
+ "github.com/rotisserie/eris"
+)
+
+// JSONB can be anything
+type JSONB struct {
+ Encoded string `json:"decoded"`
+ Decoded any `json:"encoded"`
+}
+
+// Value encodes the type ready for the database
+func (j JSONB) Value() (driver.Value, error) {
+ jsn, err := json.Marshal(j.Decoded)
+ return driver.Value(string(jsn)), err
+}
+
+// Scan takes data from the database and modifies it for Go Types
+func (j *JSONB) Scan(src any) error {
+ var jsonb JSONB
+ var srcString string
+ switch v := src.(type) {
+ case string:
+ srcString = src.(string)
+ case []uint8:
+ srcString = string(src.([]uint8))
+ default:
+ return eris.Errorf("Incompatible type for JSONB: %v", v)
+ }
+
+ jsonb.Encoded = srcString
+
+ if err := json.Unmarshal([]byte(srcString), &jsonb.Decoded); err != nil {
+ return err
+ }
+
+ *j = jsonb
+ return nil
+}
+
+// UnmarshalJSON will unmarshal both database and post given values
+func (j *JSONB) UnmarshalJSON(data []byte) error {
+ var jsonb JSONB
+ jsonb.Encoded = string(data)
+ if err := json.Unmarshal(data, &jsonb.Decoded); err != nil {
+ return err
+ }
+ *j = jsonb
+ return nil
+}
+
+// MarshalJSON will marshal for output in api responses
+func (j JSONB) MarshalJSON() ([]byte, error) {
+ return json.Marshal(j.Decoded)
+}
+
+// AsStringArray will attempt to return as []string
+func (j JSONB) AsStringArray() ([]string, error) {
+ var strs []string
+
+ // Encode then Decode onto this type
+ b, _ := j.MarshalJSON()
+ if err := json.Unmarshal(b, &strs); err != nil {
+ return strs, err
+ }
+
+ return strs, nil
+}
diff --git a/backend/internal/types/jsonb_test.go b/backend/internal/types/jsonb_test.go
new file mode 100644
index 000000000..0ec7e55c5
--- /dev/null
+++ b/backend/internal/types/jsonb_test.go
@@ -0,0 +1,133 @@
+package types
+
+import (
+ "encoding/json"
+ "testing"
+)
+
+// TestJSONBValue tests the Value method of the JSONB type
+func TestJSONBValue(t *testing.T) {
+ j := JSONB{
+ Decoded: map[string]any{
+ "name": "John",
+ "age": 30,
+ },
+ }
+
+ value, err := j.Value()
+ if err != nil {
+ t.Errorf("Unexpected error: %v", err)
+ }
+
+ // nolint: goconst
+ if value != `{"age":30,"name":"John"}` {
+ t.Errorf("Incorrect value. Expected: %s, Got: %s", `{"name":"John","age":30}`, value)
+ }
+}
+
+// TestJSONBScan tests the Scan method of the JSONB type
+func TestJSONBScan(t *testing.T) {
+ src := `{"name":"John","age":30}`
+ var j JSONB
+
+ err := j.Scan(src)
+ if err != nil {
+ t.Errorf("Unexpected error: %v", err)
+ }
+
+ expectedDecoded := map[string]any{
+ "name": "John",
+ "age": 30,
+ }
+
+ if !jsonEqual(j.Decoded, expectedDecoded) {
+ t.Errorf("Incorrect decoded value. Expected: %v, Got: %v", expectedDecoded, j.Decoded)
+ }
+
+ if j.Encoded != src {
+ t.Errorf("Incorrect encoded value. Expected: %s, Got: %s", src, j.Encoded)
+ }
+}
+
+// TestJSONBUnmarshalJSON tests the UnmarshalJSON method of the JSONB type
+func TestJSONBUnmarshalJSON(t *testing.T) {
+ data := []byte(`{"name":"John","age":30}`)
+ var j JSONB
+
+ err := j.UnmarshalJSON(data)
+ if err != nil {
+ t.Errorf("Unexpected error: %v", err)
+ }
+
+ expectedDecoded := map[string]any{
+ "name": "John",
+ "age": 30,
+ }
+
+ if !jsonEqual(j.Decoded, expectedDecoded) {
+ t.Errorf("Incorrect decoded value. Expected: %v, Got: %v", expectedDecoded, j.Decoded)
+ }
+
+ if j.Encoded != string(data) {
+ t.Errorf("Incorrect encoded value. Expected: %s, Got: %s", string(data), j.Encoded)
+ }
+}
+
+// TestJSONBMarshalJSON tests the MarshalJSON method of the JSONB type
+func TestJSONBMarshalJSON(t *testing.T) {
+ j := JSONB{
+ Decoded: map[string]any{
+ "name": "John",
+ "age": 30,
+ },
+ }
+
+ result, err := j.MarshalJSON()
+ if err != nil {
+ t.Errorf("Unexpected error: %v", err)
+ }
+
+ expectedResult := `{"age":30,"name":"John"}`
+
+ if string(result) != expectedResult {
+ t.Errorf("Incorrect result. Expected: %s, Got: %s", expectedResult, string(result))
+ }
+}
+
+// TestJSONBAsStringArray tests the AsStringArray method of the JSONB type
+func TestJSONBAsStringArray(t *testing.T) {
+ j := JSONB{
+ Decoded: []string{"apple", "banana", "orange"},
+ }
+
+ strs, err := j.AsStringArray()
+ if err != nil {
+ t.Errorf("Unexpected error: %v", err)
+ }
+
+ expectedStrs := []string{"apple", "banana", "orange"}
+
+ if !stringSliceEqual(strs, expectedStrs) {
+ t.Errorf("Incorrect result. Expected: %v, Got: %v", expectedStrs, strs)
+ }
+}
+
+// Helper function to compare JSON objects
+func jsonEqual(a, b any) bool {
+ aJSON, _ := json.Marshal(a)
+ bJSON, _ := json.Marshal(b)
+ return string(aJSON) == string(bJSON)
+}
+
+// Helper function to compare string slices
+func stringSliceEqual(a, b []string) bool {
+ if len(a) != len(b) {
+ return false
+ }
+ for i := range a {
+ if a[i] != b[i] {
+ return false
+ }
+ }
+ return true
+}
diff --git a/backend/internal/types/nullable_db_date.go b/backend/internal/types/nullable_db_date.go
new file mode 100644
index 000000000..f6532ee90
--- /dev/null
+++ b/backend/internal/types/nullable_db_date.go
@@ -0,0 +1,74 @@
+package types
+
+import (
+ "database/sql/driver"
+ "encoding/json"
+ "time"
+)
+
+// NullableDBDate is a date time that can be null in the db
+// type DBDate time.Time
+type NullableDBDate struct {
+ Time *time.Time
+}
+
+// Value encodes the type ready for the database
+func (d NullableDBDate) Value() (driver.Value, error) {
+ if d.Time == nil {
+ return nil, nil
+ }
+ // According to current database/sql docs, the sql has four builtin functions that
+ // returns driver.Value, and the underlying types are `int64`, `float64`, `string` and `bool`
+ return driver.Value(d.Time.Unix()), nil
+}
+
+// Scan takes data from the database and modifies it for Go Types
+func (d *NullableDBDate) Scan(src any) error {
+ var tme time.Time
+ if src != nil {
+ tme = time.Unix(src.(int64), 0)
+ }
+
+ d.Time = &tme
+ return nil
+}
+
+// UnmarshalJSON will unmarshal both database and post given values
+func (d *NullableDBDate) UnmarshalJSON(data []byte) error {
+ var t time.Time
+ var u int64
+ if err := json.Unmarshal(data, &u); err != nil {
+ d.Time = &t
+ return nil
+ }
+ t = time.Unix(u, 0)
+ d.Time = &t
+ return nil
+}
+
+// MarshalJSON will marshal for output in api responses
+func (d NullableDBDate) MarshalJSON() ([]byte, error) {
+ if d.Time == nil || d.Time.IsZero() {
+ return json.Marshal(nil)
+ }
+
+ return json.Marshal(d.Time.Unix())
+}
+
+// AsInt64 will attempt to return a unixtime
+func (d NullableDBDate) AsInt64() int64 {
+ if d.Time == nil || d.Time.IsZero() {
+ return 0
+ }
+
+ return d.Time.Unix()
+}
+
+// AsString returns date as a string
+func (d NullableDBDate) AsString() string {
+ if d.Time == nil || d.Time.IsZero() {
+ return ""
+ }
+
+ return d.Time.String()
+}
diff --git a/backend/internal/types/nullable_db_date_test.go b/backend/internal/types/nullable_db_date_test.go
new file mode 100644
index 000000000..613f3fa07
--- /dev/null
+++ b/backend/internal/types/nullable_db_date_test.go
@@ -0,0 +1,127 @@
+package types
+
+import (
+ "testing"
+ "time"
+
+ "go.uber.org/goleak"
+)
+
+// TestNullableDBDateValue tests the Value method of the NullableDBDate type
+func TestNullableDBDateValue(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ tme := time.Date(2022, time.January, 1, 0, 0, 0, 0, time.UTC)
+ d := NullableDBDate{
+ Time: &tme,
+ }
+
+ value, err := d.Value()
+ if err != nil {
+ t.Errorf("Unexpected error: %v", err)
+ }
+
+ expectedValue := tme.Unix()
+
+ if value != expectedValue {
+ t.Errorf("Incorrect value. Expected: %d, Got: %v", expectedValue, value)
+ }
+}
+
+// TestNullableDBDateScan tests the Scan method of the NullableDBDate type
+func TestNullableDBDateScan(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ var d NullableDBDate
+
+ err := d.Scan(int64(1640995200))
+ if err != nil {
+ t.Errorf("Unexpected error: %v", err)
+ }
+
+ expectedTime := time.Date(2022, time.January, 1, 0, 0, 0, 0, time.UTC)
+
+ if !expectedTime.Equal(*d.Time) {
+ t.Errorf("Incorrect time. Expected: %v, Got: %v", expectedTime, *d.Time)
+ }
+}
+
+// TestNullableDBDateUnmarshalJSON tests the UnmarshalJSON method of the NullableDBDate type
+func TestNullableDBDateUnmarshalJSON(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ data := []byte(`1640995200`)
+ var d NullableDBDate
+
+ err := d.UnmarshalJSON(data)
+ if err != nil {
+ t.Errorf("Unexpected error: %v", err)
+ }
+
+ expectedTime := time.Date(2022, time.January, 1, 0, 0, 0, 0, time.UTC)
+
+ if !expectedTime.Equal(*d.Time) {
+ t.Errorf("Incorrect time. Expected: %v, Got: %v", expectedTime, *d.Time)
+ }
+}
+
+// TestNullableDBDateMarshalJSON tests the MarshalJSON method of the NullableDBDate type
+func TestNullableDBDateMarshalJSON(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ tme := time.Date(2022, time.January, 1, 0, 0, 0, 0, time.UTC)
+ d := NullableDBDate{
+ Time: &tme,
+ }
+
+ result, err := d.MarshalJSON()
+ if err != nil {
+ t.Errorf("Unexpected error: %v", err)
+ }
+
+ expectedResult := []byte(`1640995200`)
+
+ if string(result) != string(expectedResult) {
+ t.Errorf("Incorrect result. Expected: %s, Got: %s", expectedResult, result)
+ }
+}
+
+// TestNullableDBDateAsInt64 tests the AsInt64 method of the NullableDBDate type
+func TestNullableDBDateAsInt64(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ tme := time.Date(2022, time.January, 1, 0, 0, 0, 0, time.UTC)
+ d := NullableDBDate{
+ Time: &tme,
+ }
+
+ unixtime := d.AsInt64()
+ expectedUnixtime := tme.Unix()
+
+ if unixtime != expectedUnixtime {
+ t.Errorf("Incorrect unixtime. Expected: %d, Got: %d", expectedUnixtime, unixtime)
+ }
+}
+
+// TestNullableDBDateAsString tests the AsString method of the NullableDBDate type
+func TestNullableDBDateAsString(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ tme := time.Date(2022, time.January, 1, 0, 0, 0, 0, time.UTC)
+ d := NullableDBDate{
+ Time: &tme,
+ }
+
+ str := d.AsString()
+ expectedStr := tme.String()
+
+ if str != expectedStr {
+ t.Errorf("Incorrect string. Expected: %s, Got: %s", expectedStr, str)
+ }
+}
diff --git a/backend/internal/user.js b/backend/internal/user.js
deleted file mode 100644
index 2e2d8abf6..000000000
--- a/backend/internal/user.js
+++ /dev/null
@@ -1,518 +0,0 @@
-const _ = require('lodash');
-const error = require('../lib/error');
-const userModel = require('../models/user');
-const userPermissionModel = require('../models/user_permission');
-const authModel = require('../models/auth');
-const gravatar = require('gravatar');
-const internalToken = require('./token');
-const internalAuditLog = require('./audit-log');
-
-function omissions () {
- return ['is_deleted'];
-}
-
-const internalUser = {
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @returns {Promise}
- */
- create: (access, data) => {
- let auth = data.auth || null;
- delete data.auth;
-
- data.avatar = data.avatar || '';
- data.roles = data.roles || [];
-
- if (typeof data.is_disabled !== 'undefined') {
- data.is_disabled = data.is_disabled ? 1 : 0;
- }
-
- return access.can('users:create', data)
- .then(() => {
- data.avatar = gravatar.url(data.email, {default: 'mm'});
-
- return userModel
- .query()
- .omit(omissions())
- .insertAndFetch(data);
- })
- .then((user) => {
- if (auth) {
- return authModel
- .query()
- .insert({
- user_id: user.id,
- type: auth.type,
- secret: auth.secret,
- meta: {}
- })
- .then(() => {
- return user;
- });
- } else {
- return user;
- }
- })
- .then((user) => {
- // Create permissions row as well
- let is_admin = data.roles.indexOf('admin') !== -1;
-
- return userPermissionModel
- .query()
- .insert({
- user_id: user.id,
- visibility: is_admin ? 'all' : 'user',
- proxy_hosts: 'manage',
- redirection_hosts: 'manage',
- dead_hosts: 'manage',
- streams: 'manage',
- access_lists: 'manage',
- certificates: 'manage'
- })
- .then(() => {
- return internalUser.get(access, {id: user.id, expand: ['permissions']});
- });
- })
- .then((user) => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'created',
- object_type: 'user',
- object_id: user.id,
- meta: user
- })
- .then(() => {
- return user;
- });
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Integer} data.id
- * @param {String} [data.email]
- * @param {String} [data.name]
- * @return {Promise}
- */
- update: (access, data) => {
- if (typeof data.is_disabled !== 'undefined') {
- data.is_disabled = data.is_disabled ? 1 : 0;
- }
-
- return access.can('users:update', data.id)
- .then(() => {
-
- // Make sure that the user being updated doesn't change their email to another user that is already using it
- // 1. get user we want to update
- return internalUser.get(access, {id: data.id})
- .then((user) => {
-
- // 2. if email is to be changed, find other users with that email
- if (typeof data.email !== 'undefined') {
- data.email = data.email.toLowerCase().trim();
-
- if (user.email !== data.email) {
- return internalUser.isEmailAvailable(data.email, data.id)
- .then((available) => {
- if (!available) {
- throw new error.ValidationError('Email address already in use - ' + data.email);
- }
-
- return user;
- });
- }
- }
-
- // No change to email:
- return user;
- });
- })
- .then((user) => {
- if (user.id !== data.id) {
- // Sanity check that something crazy hasn't happened
- throw new error.InternalValidationError('User could not be updated, IDs do not match: ' + user.id + ' !== ' + data.id);
- }
-
- data.avatar = gravatar.url(data.email || user.email, {default: 'mm'});
-
- return userModel
- .query()
- .omit(omissions())
- .patchAndFetchById(user.id, data)
- .then((saved_user) => {
- return _.omit(saved_user, omissions());
- });
- })
- .then(() => {
- return internalUser.get(access, {id: data.id});
- })
- .then((user) => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'updated',
- object_type: 'user',
- object_id: user.id,
- meta: data
- })
- .then(() => {
- return user;
- });
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} [data]
- * @param {Integer} [data.id] Defaults to the token user
- * @param {Array} [data.expand]
- * @param {Array} [data.omit]
- * @return {Promise}
- */
- get: (access, data) => {
- if (typeof data === 'undefined') {
- data = {};
- }
-
- if (typeof data.id === 'undefined' || !data.id) {
- data.id = access.token.getUserId(0);
- }
-
- return access.can('users:get', data.id)
- .then(() => {
- let query = userModel
- .query()
- .where('is_deleted', 0)
- .andWhere('id', data.id)
- .allowEager('[permissions]')
- .first();
-
- // Custom omissions
- if (typeof data.omit !== 'undefined' && data.omit !== null) {
- query.omit(data.omit);
- }
-
- if (typeof data.expand !== 'undefined' && data.expand !== null) {
- query.eager('[' + data.expand.join(', ') + ']');
- }
-
- return query;
- })
- .then((row) => {
- if (row) {
- return _.omit(row, omissions());
- } else {
- throw new error.ItemNotFoundError(data.id);
- }
- });
- },
-
- /**
- * Checks if an email address is available, but if a user_id is supplied, it will ignore checking
- * against that user.
- *
- * @param email
- * @param user_id
- */
- isEmailAvailable: (email, user_id) => {
- let query = userModel
- .query()
- .where('email', '=', email.toLowerCase().trim())
- .where('is_deleted', 0)
- .first();
-
- if (typeof user_id !== 'undefined') {
- query.where('id', '!=', user_id);
- }
-
- return query
- .then((user) => {
- return !user;
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Integer} data.id
- * @param {String} [data.reason]
- * @returns {Promise}
- */
- delete: (access, data) => {
- return access.can('users:delete', data.id)
- .then(() => {
- return internalUser.get(access, {id: data.id});
- })
- .then((user) => {
- if (!user) {
- throw new error.ItemNotFoundError(data.id);
- }
-
- // Make sure user can't delete themselves
- if (user.id === access.token.getUserId(0)) {
- throw new error.PermissionError('You cannot delete yourself.');
- }
-
- return userModel
- .query()
- .where('id', user.id)
- .patch({
- is_deleted: 1
- })
- .then(() => {
- // Add to audit log
- return internalAuditLog.add(access, {
- action: 'deleted',
- object_type: 'user',
- object_id: user.id,
- meta: _.omit(user, omissions())
- });
- });
- })
- .then(() => {
- return true;
- });
- },
-
- /**
- * This will only count the users
- *
- * @param {Access} access
- * @param {String} [search_query]
- * @returns {*}
- */
- getCount: (access, search_query) => {
- return access.can('users:list')
- .then(() => {
- let query = userModel
- .query()
- .count('id as count')
- .where('is_deleted', 0)
- .first();
-
- // Query is used for searching
- if (typeof search_query === 'string') {
- query.where(function () {
- this.where('user.name', 'like', '%' + search_query + '%')
- .orWhere('user.email', 'like', '%' + search_query + '%');
- });
- }
-
- return query;
- })
- .then((row) => {
- return parseInt(row.count, 10);
- });
- },
-
- /**
- * All users
- *
- * @param {Access} access
- * @param {Array} [expand]
- * @param {String} [search_query]
- * @returns {Promise}
- */
- getAll: (access, expand, search_query) => {
- return access.can('users:list')
- .then(() => {
- let query = userModel
- .query()
- .where('is_deleted', 0)
- .groupBy('id')
- .omit(['is_deleted'])
- .allowEager('[permissions]')
- .orderBy('name', 'ASC');
-
- // Query is used for searching
- if (typeof search_query === 'string') {
- query.where(function () {
- this.where('name', 'like', '%' + search_query + '%')
- .orWhere('email', 'like', '%' + search_query + '%');
- });
- }
-
- if (typeof expand !== 'undefined' && expand !== null) {
- query.eager('[' + expand.join(', ') + ']');
- }
-
- return query;
- });
- },
-
- /**
- * @param {Access} access
- * @param {Integer} [id_requested]
- * @returns {[String]}
- */
- getUserOmisionsByAccess: (access, id_requested) => {
- let response = []; // Admin response
-
- if (!access.token.hasScope('admin') && access.token.getUserId(0) !== id_requested) {
- response = ['roles', 'is_deleted']; // Restricted response
- }
-
- return response;
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Integer} data.id
- * @param {String} data.type
- * @param {String} data.secret
- * @return {Promise}
- */
- setPassword: (access, data) => {
- return access.can('users:password', data.id)
- .then(() => {
- return internalUser.get(access, {id: data.id});
- })
- .then((user) => {
- if (user.id !== data.id) {
- // Sanity check that something crazy hasn't happened
- throw new error.InternalValidationError('User could not be updated, IDs do not match: ' + user.id + ' !== ' + data.id);
- }
-
- if (user.id === access.token.getUserId(0)) {
- // they're setting their own password. Make sure their current password is correct
- if (typeof data.current === 'undefined' || !data.current) {
- throw new error.ValidationError('Current password was not supplied');
- }
-
- return internalToken.getTokenFromEmail({
- identity: user.email,
- secret: data.current
- })
- .then(() => {
- return user;
- });
- }
-
- return user;
- })
- .then((user) => {
- // Get auth, patch if it exists
- return authModel
- .query()
- .where('user_id', user.id)
- .andWhere('type', data.type)
- .first()
- .then((existing_auth) => {
- if (existing_auth) {
- // patch
- return authModel
- .query()
- .where('user_id', user.id)
- .andWhere('type', data.type)
- .patch({
- type: data.type, // This is required for the model to encrypt on save
- secret: data.secret
- });
- } else {
- // insert
- return authModel
- .query()
- .insert({
- user_id: user.id,
- type: data.type,
- secret: data.secret,
- meta: {}
- });
- }
- })
- .then(() => {
- // Add to Audit Log
- return internalAuditLog.add(access, {
- action: 'updated',
- object_type: 'user',
- object_id: user.id,
- meta: {
- name: user.name,
- password_changed: true,
- auth_type: data.type
- }
- });
- });
- })
- .then(() => {
- return true;
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @return {Promise}
- */
- setPermissions: (access, data) => {
- return access.can('users:permissions', data.id)
- .then(() => {
- return internalUser.get(access, {id: data.id});
- })
- .then((user) => {
- if (user.id !== data.id) {
- // Sanity check that something crazy hasn't happened
- throw new error.InternalValidationError('User could not be updated, IDs do not match: ' + user.id + ' !== ' + data.id);
- }
-
- return user;
- })
- .then((user) => {
- // Get perms row, patch if it exists
- return userPermissionModel
- .query()
- .where('user_id', user.id)
- .first()
- .then((existing_auth) => {
- if (existing_auth) {
- // patch
- return userPermissionModel
- .query()
- .where('user_id', user.id)
- .patchAndFetchById(existing_auth.id, _.assign({user_id: user.id}, data));
- } else {
- // insert
- return userPermissionModel
- .query()
- .insertAndFetch(_.assign({user_id: user.id}, data));
- }
- })
- .then((permissions) => {
- // Add to Audit Log
- return internalAuditLog.add(access, {
- action: 'updated',
- object_type: 'user',
- object_id: user.id,
- meta: {
- name: user.name,
- permissions: permissions
- }
- });
-
- });
- })
- .then(() => {
- return true;
- });
- },
-
- /**
- * @param {Access} access
- * @param {Object} data
- * @param {Integer} data.id
- */
- loginAs: (access, data) => {
- return access.can('users:loginas', data.id)
- .then(() => {
- return internalUser.get(access, data);
- })
- .then((user) => {
- return internalToken.getTokenFromUser(user);
- });
- }
-};
-
-module.exports = internalUser;
diff --git a/backend/internal/util/interfaces.go b/backend/internal/util/interfaces.go
new file mode 100644
index 000000000..d647dcb6e
--- /dev/null
+++ b/backend/internal/util/interfaces.go
@@ -0,0 +1,36 @@
+package util
+
+// FindItemInInterface Find key in interface (recursively) and return value as interface
+func FindItemInInterface(key string, obj any) (any, bool) {
+ // if the argument is not a map, ignore it
+ mobj, ok := obj.(map[string]any)
+ if !ok {
+ return nil, false
+ }
+
+ for k, v := range mobj {
+ // key match, return value
+ if k == key {
+ return v, true
+ }
+
+ // if the value is a map, search recursively
+ if m, ok := v.(map[string]any); ok {
+ if res, ok := FindItemInInterface(key, m); ok {
+ return res, true
+ }
+ }
+ // if the value is an array, search recursively
+ // from each element
+ if va, ok := v.([]any); ok {
+ for _, a := range va {
+ if res, ok := FindItemInInterface(key, a); ok {
+ return res, true
+ }
+ }
+ }
+ }
+
+ // element not found
+ return nil, false
+}
diff --git a/backend/internal/util/interfaces_test.go b/backend/internal/util/interfaces_test.go
new file mode 100644
index 000000000..c2b480235
--- /dev/null
+++ b/backend/internal/util/interfaces_test.go
@@ -0,0 +1,37 @@
+package util
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "go.uber.org/goleak"
+)
+
+func TestFindItemInInterface(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ obj := map[string]any{
+ "key1": "value1",
+ "key2": 10,
+ "key3": map[string]any{
+ "nestedKey": "nestedValue",
+ },
+ "key4": []any{"item1", "item2"},
+ }
+
+ // Test case 1: Key exists at the top level
+ result, found := FindItemInInterface("key1", obj)
+ assert.Equal(t, true, found)
+ assert.Equal(t, "value1", result)
+
+ // Test case 2: Key exists at a nested level
+ result, found = FindItemInInterface("nestedKey", obj)
+ assert.Equal(t, true, found)
+ assert.Equal(t, "nestedValue", result)
+
+ // Test case 3: Key does not exist
+ result, found = FindItemInInterface("nonExistentKey", obj)
+ assert.Equal(t, false, found)
+ assert.Equal(t, nil, result)
+}
diff --git a/backend/internal/util/maps.go b/backend/internal/util/maps.go
new file mode 100644
index 000000000..a2cd27883
--- /dev/null
+++ b/backend/internal/util/maps.go
@@ -0,0 +1,9 @@
+package util
+
+// MapContainsKey is fairly self explanatory
+func MapContainsKey(dict map[string]any, key string) bool {
+ if _, ok := dict[key]; ok {
+ return true
+ }
+ return false
+}
diff --git a/backend/internal/util/maps_test.go b/backend/internal/util/maps_test.go
new file mode 100644
index 000000000..0d1bc621a
--- /dev/null
+++ b/backend/internal/util/maps_test.go
@@ -0,0 +1,49 @@
+package util
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "go.uber.org/goleak"
+)
+
+type rect struct {
+ width int
+ height int
+}
+
+func TestMapContainsKey(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ var r rect
+ r.width = 5
+ r.height = 5
+ m := map[string]any{
+ "rect_width": r.width,
+ "rect_height": r.height,
+ }
+ tests := []struct {
+ name string
+ pass string
+ want bool
+ }{
+ {
+ name: "exists",
+ pass: "rect_width",
+ want: true,
+ },
+ {
+ name: "Does not exist",
+ pass: "rect_perimeter",
+ want: false,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ result := MapContainsKey(m, tt.pass)
+
+ assert.Equal(t, result, tt.want)
+ })
+ }
+}
diff --git a/backend/internal/util/slices.go b/backend/internal/util/slices.go
new file mode 100644
index 000000000..b02cd3463
--- /dev/null
+++ b/backend/internal/util/slices.go
@@ -0,0 +1,44 @@
+package util
+
+import (
+ "strconv"
+ "strings"
+)
+
+// SliceContainsItem returns whether the slice given contains the item given
+func SliceContainsItem(slice []string, item string) bool {
+ for _, a := range slice {
+ if a == item {
+ return true
+ }
+ }
+ return false
+}
+
+// SliceContainsInt returns whether the slice given contains the item given
+func SliceContainsInt(slice []int, item int) bool {
+ for _, a := range slice {
+ if a == item {
+ return true
+ }
+ }
+ return false
+}
+
+// ConvertIntSliceToString returns a comma separated string of all items in the slice
+func ConvertIntSliceToString(slice []int) string {
+ strs := []string{}
+ for _, item := range slice {
+ strs = append(strs, strconv.Itoa(item))
+ }
+ return strings.Join(strs, ",")
+}
+
+// ConvertStringSliceToInterface is required in some special cases
+func ConvertStringSliceToInterface(slice []string) []any {
+ res := make([]any, len(slice))
+ for i := range slice {
+ res[i] = slice[i]
+ }
+ return res
+}
diff --git a/backend/internal/util/slices_test.go b/backend/internal/util/slices_test.go
new file mode 100644
index 000000000..4afee77f4
--- /dev/null
+++ b/backend/internal/util/slices_test.go
@@ -0,0 +1,115 @@
+package util
+
+import (
+ "reflect"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "go.uber.org/goleak"
+)
+
+func TestSliceContainsItem(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ type want struct {
+ result bool
+ }
+ tests := []struct {
+ name string
+ inputString string
+ inputArray []string
+ want want
+ }{
+ {
+ name: "In array",
+ inputString: "test",
+ inputArray: []string{"no", "more", "tests", "test"},
+ want: want{
+ result: true,
+ },
+ },
+ {
+ name: "Not in array",
+ inputString: "test",
+ inputArray: []string{"no", "more", "tests"},
+ want: want{
+ result: false,
+ },
+ },
+ {
+ name: "Case sensitive",
+ inputString: "test",
+ inputArray: []string{"no", "TEST", "more"},
+ want: want{
+ result: false,
+ },
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ got := SliceContainsItem(tt.inputArray, tt.inputString)
+ assert.Equal(t, tt.want.result, got)
+ })
+ }
+}
+
+func TestSliceContainsInt(t *testing.T) {
+ type want struct {
+ result bool
+ }
+ tests := []struct {
+ name string
+ inputInt int
+ inputArray []int
+ want want
+ }{
+ {
+ name: "In array",
+ inputInt: 1,
+ inputArray: []int{1, 2, 3, 4},
+ want: want{
+ result: true,
+ },
+ },
+ {
+ name: "Not in array",
+ inputInt: 1,
+ inputArray: []int{10, 2, 3, 4},
+ want: want{
+ result: false,
+ },
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ got := SliceContainsInt(tt.inputArray, tt.inputInt)
+ assert.Equal(t, tt.want.result, got)
+ })
+ }
+}
+
+func TestConvertIntSliceToString(t *testing.T) {
+ items := []int{1, 2, 3, 4, 5, 6, 7}
+ expectedStr := "1,2,3,4,5,6,7"
+ str := ConvertIntSliceToString(items)
+ assert.Equal(t, expectedStr, str)
+}
+
+func TestConvertStringSliceToInterface(t *testing.T) {
+ testCases := []struct {
+ input []string
+ expected []any
+ }{
+ {[]string{"hello", "world"}, []any{"hello", "world"}},
+ {[]string{"apple", "banana", "cherry"}, []any{"apple", "banana", "cherry"}},
+ {[]string{}, []any{}}, // Empty slice should return an empty slice
+ }
+
+ for _, tc := range testCases {
+ result := ConvertStringSliceToInterface(tc.input)
+ if !reflect.DeepEqual(result, tc.expected) {
+ t.Errorf("Expected: %v, Got: %v", tc.expected, result)
+ }
+ }
+}
diff --git a/backend/internal/util/strings.go b/backend/internal/util/strings.go
new file mode 100644
index 000000000..c3e9b70a0
--- /dev/null
+++ b/backend/internal/util/strings.go
@@ -0,0 +1,41 @@
+package util
+
+import (
+ "bytes"
+ "encoding/json"
+ "regexp"
+ "strings"
+ "unicode"
+
+ "npm/internal/logger"
+)
+
+// CleanupWhitespace will trim up and remove extra lines and stuff
+func CleanupWhitespace(s string) string {
+ // Remove trailing whitespace from all lines
+ slices := strings.Split(s, "\n")
+ for idx := range slices {
+ slices[idx] = strings.TrimRightFunc(slices[idx], unicode.IsSpace)
+ }
+ // Output: [a b c]
+ result := strings.Join(slices, "\n")
+
+ // Remove empty lines
+ r1 := regexp.MustCompile("\n+")
+ result = r1.ReplaceAllString(result, "\n")
+
+ return result
+}
+
+// PrettyPrintJSON takes a string and as long as it's JSON,
+// it will return a pretty printed and formatted version
+func PrettyPrintJSON(s string) string {
+ byt := []byte(s)
+ var prettyJSON bytes.Buffer
+ if err := json.Indent(&prettyJSON, byt, "", " "); err != nil {
+ logger.Debug("Can't pretty print non-json string: %s", s)
+ return s
+ }
+
+ return prettyJSON.String()
+}
diff --git a/backend/internal/util/strings_test.go b/backend/internal/util/strings_test.go
new file mode 100644
index 000000000..cb289f0b1
--- /dev/null
+++ b/backend/internal/util/strings_test.go
@@ -0,0 +1,74 @@
+package util
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "go.uber.org/goleak"
+)
+
+func TestCleanupWhitespace(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ tests := []struct {
+ name string
+ input string
+ want string
+ }{
+ {
+ name: "test a",
+ input: `# ------------------------------------------------------------
+# Upstream 5: API servers 2
+# ------------------------------------------------------------
+
+upstream npm_upstream_5 {` + ` ` + /* this adds whitespace to the end without the ide trimming it */ `
+
+
+
+
+
+
+
+
+
+ server 192.168.0.10:80 weight=100 ;
+ server 192.168.0.11:80 weight=50 ;
+
+}`,
+ want: `# ------------------------------------------------------------
+# Upstream 5: API servers 2
+# ------------------------------------------------------------
+upstream npm_upstream_5 {
+ server 192.168.0.10:80 weight=100 ;
+ server 192.168.0.11:80 weight=50 ;
+}`,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ output := CleanupWhitespace(tt.input)
+ assert.Equal(t, tt.want, output)
+ })
+ }
+}
+
+func TestPrettyPrintJSON(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ testCases := []struct {
+ input string
+ expected string
+ }{
+ {`{"name":"John","age":30,"city":"New York"}`, "{\n \"name\": \"John\",\n \"age\": 30,\n \"city\": \"New York\"\n}"},
+ {`{"fruit":"apple","color":"red"}`, "{\n \"fruit\": \"apple\",\n \"color\": \"red\"\n}"},
+ {"invalid-json", "invalid-json"}, // non-JSON input should return the original string unchanged
+ }
+
+ for _, tc := range testCases {
+ result := PrettyPrintJSON(tc.input)
+ assert.Equal(t, tc.expected, result)
+ }
+}
diff --git a/backend/internal/util/time.go b/backend/internal/util/time.go
new file mode 100644
index 000000000..b1aed5887
--- /dev/null
+++ b/backend/internal/util/time.go
@@ -0,0 +1,9 @@
+package util
+
+import "time"
+
+// UnixMilliToNiceFormat converts a millisecond to nice string
+func UnixMilliToNiceFormat(milli int64) string {
+ t := time.Unix(0, milli*int64(time.Millisecond))
+ return t.Format("2006-01-02 15:04:05")
+}
diff --git a/backend/internal/util/time_test.go b/backend/internal/util/time_test.go
new file mode 100644
index 000000000..d15c7e499
--- /dev/null
+++ b/backend/internal/util/time_test.go
@@ -0,0 +1,29 @@
+package util
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "go.uber.org/goleak"
+)
+
+func TestUnixMilliToNiceFormat(t *testing.T) {
+ // goleak is used to detect goroutine leaks
+ defer goleak.VerifyNone(t, goleak.IgnoreAnyFunction("database/sql.(*DB).connectionOpener"))
+
+ tests := []struct {
+ input int64
+ expected string
+ }{
+ {0, "1970-01-01 10:00:00"}, // Unix epoch time
+ {1568000000000, "2019-09-09 13:33:20"}, // Arbitrary millisecond timestamp
+ {1636598400000, "2021-11-11 12:40:00"}, // Another arbitrary millisecond timestamp
+ {-1000000000000, "1938-04-25 08:13:20"}, // Negative millisecond timestamp
+ {9223372036854775807, "1970-01-01 09:59:59"}, // Maximum representable millisecond timestamp
+ }
+
+ for _, test := range tests {
+ output := UnixMilliToNiceFormat(test.input)
+ assert.Equal(t, test.expected, output)
+ }
+}
diff --git a/backend/internal/validator/hosts.go b/backend/internal/validator/hosts.go
new file mode 100644
index 000000000..685a18b43
--- /dev/null
+++ b/backend/internal/validator/hosts.go
@@ -0,0 +1,55 @@
+package validator
+
+import (
+ "npm/internal/entity/certificate"
+ "npm/internal/entity/host"
+ "npm/internal/entity/nginxtemplate"
+ "npm/internal/entity/upstream"
+
+ "github.com/rotisserie/eris"
+)
+
+var (
+ certificateGetByID = certificate.GetByID
+ upstreamGetByID = upstream.GetByID
+ nginxtemplateGetByID = nginxtemplate.GetByID
+)
+
+// ValidateHost will check if associated objects exist and other checks
+// will return a nil error if things are OK
+func ValidateHost(h host.Model) error {
+ if h.CertificateID.Uint > 0 {
+ // Check certificate exists and is valid
+ // This will not determine if the certificate is Ready to use,
+ // as this validation only cares that the row exists.
+ if _, cErr := certificateGetByID(h.CertificateID.Uint); cErr != nil {
+ return eris.Wrapf(cErr, "Certificate #%d does not exist", h.CertificateID.Uint)
+ }
+ }
+
+ if h.UpstreamID.Uint > 0 {
+ // Check upstream exists
+ if _, uErr := upstreamGetByID(h.UpstreamID.Uint); uErr != nil {
+ return eris.Wrapf(uErr, "Upstream #%d does not exist", h.UpstreamID.Uint)
+ }
+ }
+
+ // Ensure either UpstreamID is set or appropriate proxy host params are set
+ if h.UpstreamID.Uint > 0 && (h.ProxyHost != "" || h.ProxyPort > 0) {
+ return eris.Errorf("Proxy Host or Port cannot be set when using an Upstream")
+ }
+ if h.UpstreamID.Uint == 0 && (h.ProxyHost == "" || h.ProxyPort < 1) {
+ return eris.Errorf("Proxy Host and Port must be specified, unless using an Upstream")
+ }
+
+ // Check the nginx template exists and has the same type.
+ nginxTemplate, tErr := nginxtemplateGetByID(h.NginxTemplateID)
+ if tErr != nil {
+ return eris.Wrapf(tErr, "Host Template #%d does not exist", h.NginxTemplateID)
+ }
+ if nginxTemplate.Type != h.Type {
+ return eris.Errorf("Host Template #%d is not valid for this host type", h.NginxTemplateID)
+ }
+
+ return nil
+}
diff --git a/backend/internal/validator/hosts_test.go b/backend/internal/validator/hosts_test.go
new file mode 100644
index 000000000..946762925
--- /dev/null
+++ b/backend/internal/validator/hosts_test.go
@@ -0,0 +1,145 @@
+package validator
+
+import (
+ "testing"
+
+ "npm/internal/entity/certificate"
+ "npm/internal/entity/host"
+ "npm/internal/entity/nginxtemplate"
+ "npm/internal/entity/upstream"
+ "npm/internal/types"
+
+ "github.com/stretchr/testify/mock"
+ "github.com/stretchr/testify/require"
+ "gorm.io/gorm"
+)
+
+// Mocking the dependencies
+type MockCertificate struct {
+ mock.Mock
+}
+
+func (m *MockCertificate) GetByID(id uint) (certificate.Model, error) {
+ args := m.Called(id)
+ return args.Get(0).(certificate.Model), args.Error(1)
+}
+
+type MockUpstream struct {
+ mock.Mock
+}
+
+func (m *MockUpstream) GetByID(id uint) (upstream.Model, error) {
+ args := m.Called(id)
+ return args.Get(0).(upstream.Model), args.Error(1)
+}
+
+type MockNginxTemplate struct {
+ mock.Mock
+}
+
+func (m *MockNginxTemplate) GetByID(id uint) (nginxtemplate.Model, error) {
+ args := m.Called(id)
+ return args.Get(0).(nginxtemplate.Model), args.Error(1)
+}
+
+func TestValidateHost(t *testing.T) {
+ tests := []struct {
+ name string
+ host host.Model
+ wantErr string
+ }{
+ {
+ name: "valid host with certificate and upstream",
+ host: host.Model{
+ CertificateID: types.NullableDBUint{Uint: 1},
+ UpstreamID: types.NullableDBUint{Uint: 1},
+ NginxTemplateID: 1,
+ Type: "some-type",
+ },
+ wantErr: "",
+ },
+ {
+ name: "certificate does not exist",
+ host: host.Model{
+ CertificateID: types.NullableDBUint{Uint: 9},
+ },
+ wantErr: "Certificate #9 does not exist: record not found",
+ },
+ {
+ name: "upstream does not exist",
+ host: host.Model{
+ UpstreamID: types.NullableDBUint{Uint: 9},
+ },
+ wantErr: "Upstream #9 does not exist: record not found",
+ },
+ {
+ name: "proxy host and port set with upstream",
+ host: host.Model{
+ UpstreamID: types.NullableDBUint{Uint: 1},
+ ProxyHost: "proxy",
+ ProxyPort: 8080,
+ },
+ wantErr: "Proxy Host or Port cannot be set when using an Upstream",
+ },
+ {
+ name: "proxy host and port not set without upstream",
+ host: host.Model{
+ ProxyHost: "",
+ ProxyPort: 0,
+ },
+ wantErr: "Proxy Host and Port must be specified, unless using an Upstream",
+ },
+ {
+ name: "nginx template does not exist",
+ host: host.Model{
+ ProxyHost: "proxy",
+ ProxyPort: 8080,
+ NginxTemplateID: 9,
+ },
+ wantErr: "Host Template #9 does not exist: record not found",
+ },
+ {
+ name: "nginx template type mismatch",
+ host: host.Model{
+ ProxyHost: "proxy",
+ ProxyPort: 8080,
+ NginxTemplateID: 8,
+ Type: "some-type",
+ },
+ wantErr: "Host Template #8 is not valid for this host type",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ mockCert := new(MockCertificate)
+ mockUpstream := new(MockUpstream)
+ mockNginxTemplate := new(MockNginxTemplate)
+
+ certificateGetByID = mockCert.GetByID
+ upstreamGetByID = mockUpstream.GetByID
+ nginxtemplateGetByID = mockNginxTemplate.GetByID
+
+ // id 1 is valid
+ mockCert.On("GetByID", uint(1)).Return(certificate.Model{}, nil)
+ mockUpstream.On("GetByID", uint(1)).Return(upstream.Model{}, nil)
+ mockNginxTemplate.On("GetByID", uint(1)).Return(nginxtemplate.Model{Type: "some-type"}, nil)
+
+ // id 9 is errors
+ mockCert.On("GetByID", uint(9)).Return(certificate.Model{}, gorm.ErrRecordNotFound)
+ mockUpstream.On("GetByID", uint(9)).Return(upstream.Model{}, gorm.ErrRecordNotFound)
+ mockNginxTemplate.On("GetByID", uint(9)).Return(nginxtemplate.Model{}, gorm.ErrRecordNotFound)
+
+ // 8 is special
+ mockNginxTemplate.On("GetByID", uint(8)).Return(nginxtemplate.Model{Type: "different-type"}, nil)
+
+ err := ValidateHost(tt.host)
+ if tt.wantErr != "" {
+ require.NotNil(t, err)
+ require.Equal(t, tt.wantErr, err.Error())
+ } else {
+ require.Nil(t, err)
+ }
+ })
+ }
+}
diff --git a/backend/internal/validator/upstreams.go b/backend/internal/validator/upstreams.go
new file mode 100644
index 000000000..5d099f417
--- /dev/null
+++ b/backend/internal/validator/upstreams.go
@@ -0,0 +1,37 @@
+package validator
+
+import (
+ "npm/internal/entity/upstream"
+
+ "github.com/rotisserie/eris"
+)
+
+// ValidateUpstream will check if associated objects exist and other checks
+// will return a nil error if things are OK
+func ValidateUpstream(u upstream.Model) error {
+ // Needs to have more than 1 server
+ if len(u.Servers) < 2 {
+ return eris.New("Upstreams require at least 2 servers")
+ }
+
+ // Backup servers aren't permitted with hash balancing
+ if u.IPHash {
+ // check all servers for a backup param
+ for _, server := range u.Servers {
+ if server.Backup {
+ return eris.New("Backup servers cannot be used with hash balancing")
+ }
+ }
+ }
+
+ // Check the nginx template exists and has the same type.
+ nginxTemplate, err := nginxtemplateGetByID(u.NginxTemplateID)
+ if err != nil {
+ return eris.Errorf("Nginx Template #%d does not exist", u.NginxTemplateID)
+ }
+ if nginxTemplate.Type != "upstream" {
+ return eris.Errorf("Host Template #%d is not valid for this upstream", u.NginxTemplateID)
+ }
+
+ return nil
+}
diff --git a/backend/internal/validator/upstreams_test.go b/backend/internal/validator/upstreams_test.go
new file mode 100644
index 000000000..e9b29a939
--- /dev/null
+++ b/backend/internal/validator/upstreams_test.go
@@ -0,0 +1,94 @@
+package validator
+
+import (
+ "testing"
+
+ "npm/internal/entity/nginxtemplate"
+ "npm/internal/entity/upstream"
+ "npm/internal/entity/upstreamserver"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+ "gorm.io/gorm"
+)
+
+func TestValidateUpstream(t *testing.T) {
+ tests := []struct {
+ name string
+ upstreamModel upstream.Model
+ expectedError string
+ }{
+ {
+ name: "less than 2 servers",
+ upstreamModel: upstream.Model{
+ Servers: []upstreamserver.Model{
+ {Server: "192.168.1.1"},
+ },
+ },
+ expectedError: "Upstreams require at least 2 servers",
+ },
+ {
+ name: "backup server with IP hash",
+ upstreamModel: upstream.Model{
+ Servers: []upstreamserver.Model{
+ {Server: "192.168.1.1", Backup: true},
+ {Server: "192.168.1.2"},
+ },
+ IPHash: true,
+ },
+ expectedError: "Backup servers cannot be used with hash balancing",
+ },
+ {
+ name: "nginx template does not exist",
+ upstreamModel: upstream.Model{
+ Servers: []upstreamserver.Model{
+ {Server: "192.168.1.1"},
+ {Server: "192.168.1.2"},
+ },
+ NginxTemplateID: 999,
+ },
+ expectedError: "Nginx Template #999 does not exist",
+ },
+ {
+ name: "nginx template type mismatch",
+ upstreamModel: upstream.Model{
+ Servers: []upstreamserver.Model{
+ {Server: "192.168.1.1"},
+ {Server: "192.168.1.2"},
+ },
+ NginxTemplateID: 2,
+ },
+ expectedError: "Host Template #2 is not valid for this upstream",
+ },
+ {
+ name: "valid upstream",
+ upstreamModel: upstream.Model{
+ Servers: []upstreamserver.Model{
+ {Server: "192.168.1.1"},
+ {Server: "192.168.1.2"},
+ },
+ NginxTemplateID: 1,
+ },
+ expectedError: "",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ mockNginxTemplate := new(MockNginxTemplate)
+ nginxtemplateGetByID = mockNginxTemplate.GetByID
+
+ mockNginxTemplate.On("GetByID", uint(1)).Return(nginxtemplate.Model{Type: "upstream"}, nil)
+ mockNginxTemplate.On("GetByID", uint(2)).Return(nginxtemplate.Model{Type: "redirect"}, nil)
+ mockNginxTemplate.On("GetByID", uint(999)).Return(nginxtemplate.Model{}, gorm.ErrRecordNotFound)
+
+ err := ValidateUpstream(tt.upstreamModel)
+ if tt.expectedError != "" {
+ require.NotNil(t, err)
+ assert.Equal(t, tt.expectedError, err.Error())
+ } else {
+ assert.NoError(t, err)
+ }
+ })
+ }
+}
diff --git a/backend/knexfile.js b/backend/knexfile.js
deleted file mode 100644
index 391ca0050..000000000
--- a/backend/knexfile.js
+++ /dev/null
@@ -1,19 +0,0 @@
-module.exports = {
- development: {
- client: 'mysql',
- migrations: {
- tableName: 'migrations',
- stub: 'lib/migrate_template.js',
- directory: 'migrations'
- }
- },
-
- production: {
- client: 'mysql',
- migrations: {
- tableName: 'migrations',
- stub: 'lib/migrate_template.js',
- directory: 'migrations'
- }
- }
-};
diff --git a/backend/lib/access.js b/backend/lib/access.js
deleted file mode 100644
index 9d7329d94..000000000
--- a/backend/lib/access.js
+++ /dev/null
@@ -1,314 +0,0 @@
-/**
- * Some Notes: This is a friggin complicated piece of code.
- *
- * "scope" in this file means "where did this token come from and what is using it", so 99% of the time
- * the "scope" is going to be "user" because it would be a user token. This is not to be confused with
- * the "role" which could be "user" or "admin". The scope in fact, could be "worker" or anything else.
- *
- *
- */
-
-const _ = require('lodash');
-const logger = require('../logger').access;
-const validator = require('ajv');
-const error = require('./error');
-const userModel = require('../models/user');
-const proxyHostModel = require('../models/proxy_host');
-const TokenModel = require('../models/token');
-const roleSchema = require('./access/roles.json');
-const permsSchema = require('./access/permissions.json');
-
-module.exports = function (token_string) {
- let Token = new TokenModel();
- let token_data = null;
- let initialised = false;
- let object_cache = {};
- let allow_internal_access = false;
- let user_roles = [];
- let permissions = {};
-
- /**
- * Loads the Token object from the token string
- *
- * @returns {Promise}
- */
- this.init = () => {
- return new Promise((resolve, reject) => {
- if (initialised) {
- resolve();
- } else if (!token_string) {
- reject(new error.PermissionError('Permission Denied'));
- } else {
- resolve(Token.load(token_string)
- .then((data) => {
- token_data = data;
-
- // At this point we need to load the user from the DB and make sure they:
- // - exist (and not soft deleted)
- // - still have the appropriate scopes for this token
- // This is only required when the User ID is supplied or if the token scope has `user`
-
- if (token_data.attrs.id || (typeof token_data.scope !== 'undefined' && _.indexOf(token_data.scope, 'user') !== -1)) {
- // Has token user id or token user scope
- return userModel
- .query()
- .where('id', token_data.attrs.id)
- .andWhere('is_deleted', 0)
- .andWhere('is_disabled', 0)
- .allowEager('[permissions]')
- .eager('[permissions]')
- .first()
- .then((user) => {
- if (user) {
- // make sure user has all scopes of the token
- // The `user` role is not added against the user row, so we have to just add it here to get past this check.
- user.roles.push('user');
-
- let is_ok = true;
- _.forEach(token_data.scope, (scope_item) => {
- if (_.indexOf(user.roles, scope_item) === -1) {
- is_ok = false;
- }
- });
-
- if (!is_ok) {
- throw new error.AuthError('Invalid token scope for User');
- } else {
- initialised = true;
- user_roles = user.roles;
- permissions = user.permissions;
- }
-
- } else {
- throw new error.AuthError('User cannot be loaded for Token');
- }
- });
- } else {
- initialised = true;
- }
- }));
- }
- });
- };
-
- /**
- * Fetches the object ids from the database, only once per object type, for this token.
- * This only applies to USER token scopes, as all other tokens are not really bound
- * by object scopes
- *
- * @param {String} object_type
- * @returns {Promise}
- */
- this.loadObjects = (object_type) => {
- return new Promise((resolve, reject) => {
- if (Token.hasScope('user')) {
- if (typeof token_data.attrs.id === 'undefined' || !token_data.attrs.id) {
- reject(new error.AuthError('User Token supplied without a User ID'));
- } else {
- let token_user_id = token_data.attrs.id ? token_data.attrs.id : 0;
- let query;
-
- if (typeof object_cache[object_type] === 'undefined') {
- switch (object_type) {
-
- // USERS - should only return yourself
- case 'users':
- resolve(token_user_id ? [token_user_id] : []);
- break;
-
- // Proxy Hosts
- case 'proxy_hosts':
- query = proxyHostModel
- .query()
- .select('id')
- .andWhere('is_deleted', 0);
-
- if (permissions.visibility === 'user') {
- query.andWhere('owner_user_id', token_user_id);
- }
-
- resolve(query
- .then((rows) => {
- let result = [];
- _.forEach(rows, (rule_row) => {
- result.push(rule_row.id);
- });
-
- // enum should not have less than 1 item
- if (!result.length) {
- result.push(0);
- }
-
- return result;
- })
- );
- break;
-
- // DEFAULT: null
- default:
- resolve(null);
- break;
- }
- } else {
- resolve(object_cache[object_type]);
- }
- }
- } else {
- resolve(null);
- }
- })
- .then((objects) => {
- object_cache[object_type] = objects;
- return objects;
- });
- };
-
- /**
- * Creates a schema object on the fly with the IDs and other values required to be checked against the permissionSchema
- *
- * @param {String} permission_label
- * @returns {Object}
- */
- this.getObjectSchema = (permission_label) => {
- let base_object_type = permission_label.split(':').shift();
-
- let schema = {
- $id: 'objects',
- $schema: 'http://json-schema.org/draft-07/schema#',
- description: 'Actor Properties',
- type: 'object',
- additionalProperties: false,
- properties: {
- user_id: {
- anyOf: [
- {
- type: 'number',
- enum: [Token.get('attrs').id]
- }
- ]
- },
- scope: {
- type: 'string',
- pattern: '^' + Token.get('scope') + '$'
- }
- }
- };
-
- return this.loadObjects(base_object_type)
- .then((object_result) => {
- if (typeof object_result === 'object' && object_result !== null) {
- schema.properties[base_object_type] = {
- type: 'number',
- enum: object_result,
- minimum: 1
- };
- } else {
- schema.properties[base_object_type] = {
- type: 'number',
- minimum: 1
- };
- }
-
- return schema;
- });
- };
-
- return {
-
- token: Token,
-
- /**
- *
- * @param {Boolean} [allow_internal]
- * @returns {Promise}
- */
- load: (allow_internal) => {
- return new Promise(function (resolve/*, reject*/) {
- if (token_string) {
- resolve(Token.load(token_string));
- } else {
- allow_internal_access = allow_internal;
- resolve(allow_internal_access || null);
- }
- });
- },
-
- reloadObjects: this.loadObjects,
-
- /**
- *
- * @param {String} permission
- * @param {*} [data]
- * @returns {Promise}
- */
- can: (permission, data) => {
- if (allow_internal_access === true) {
- return Promise.resolve(true);
- //return true;
- } else {
- return this.init()
- .then(() => {
- // Initialised, token decoded ok
- return this.getObjectSchema(permission)
- .then((objectSchema) => {
- let data_schema = {
- [permission]: {
- data: data,
- scope: Token.get('scope'),
- roles: user_roles,
- permission_visibility: permissions.visibility,
- permission_proxy_hosts: permissions.proxy_hosts,
- permission_redirection_hosts: permissions.redirection_hosts,
- permission_dead_hosts: permissions.dead_hosts,
- permission_streams: permissions.streams,
- permission_access_lists: permissions.access_lists,
- permission_certificates: permissions.certificates
- }
- };
-
- let permissionSchema = {
- $schema: 'http://json-schema.org/draft-07/schema#',
- $async: true,
- $id: 'permissions',
- additionalProperties: false,
- properties: {}
- };
-
- permissionSchema.properties[permission] = require('./access/' + permission.replace(/:/gim, '-') + '.json');
-
- // logger.info('objectSchema', JSON.stringify(objectSchema, null, 2));
- // logger.info('permissionSchema', JSON.stringify(permissionSchema, null, 2));
- // logger.info('data_schema', JSON.stringify(data_schema, null, 2));
-
- let ajv = validator({
- verbose: true,
- allErrors: true,
- format: 'full',
- missingRefs: 'fail',
- breakOnError: true,
- coerceTypes: true,
- schemas: [
- roleSchema,
- permsSchema,
- objectSchema,
- permissionSchema
- ]
- });
-
- return ajv.validate('permissions', data_schema)
- .then(() => {
- return data_schema[permission];
- });
- });
- })
- .catch((err) => {
- err.permission = permission;
- err.permission_data = data;
- logger.error(permission, data, err.message);
-
- throw new error.PermissionError('Permission Denied', err);
- });
- }
- }
- };
-};
diff --git a/backend/lib/access/access_lists-create.json b/backend/lib/access/access_lists-create.json
deleted file mode 100644
index 5a16a8642..000000000
--- a/backend/lib/access/access_lists-create.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_access_lists", "roles"],
- "properties": {
- "permission_access_lists": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/access_lists-delete.json b/backend/lib/access/access_lists-delete.json
deleted file mode 100644
index 5a16a8642..000000000
--- a/backend/lib/access/access_lists-delete.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_access_lists", "roles"],
- "properties": {
- "permission_access_lists": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/access_lists-get.json b/backend/lib/access/access_lists-get.json
deleted file mode 100644
index 8f6dd8cc6..000000000
--- a/backend/lib/access/access_lists-get.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_access_lists", "roles"],
- "properties": {
- "permission_access_lists": {
- "$ref": "perms#/definitions/view"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/access_lists-list.json b/backend/lib/access/access_lists-list.json
deleted file mode 100644
index 8f6dd8cc6..000000000
--- a/backend/lib/access/access_lists-list.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_access_lists", "roles"],
- "properties": {
- "permission_access_lists": {
- "$ref": "perms#/definitions/view"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/access_lists-update.json b/backend/lib/access/access_lists-update.json
deleted file mode 100644
index 5a16a8642..000000000
--- a/backend/lib/access/access_lists-update.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_access_lists", "roles"],
- "properties": {
- "permission_access_lists": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/auditlog-list.json b/backend/lib/access/auditlog-list.json
deleted file mode 100644
index aeadc94ba..000000000
--- a/backend/lib/access/auditlog-list.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- }
- ]
-}
diff --git a/backend/lib/access/certificates-create.json b/backend/lib/access/certificates-create.json
deleted file mode 100644
index bcdf66742..000000000
--- a/backend/lib/access/certificates-create.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_certificates", "roles"],
- "properties": {
- "permission_certificates": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/certificates-delete.json b/backend/lib/access/certificates-delete.json
deleted file mode 100644
index bcdf66742..000000000
--- a/backend/lib/access/certificates-delete.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_certificates", "roles"],
- "properties": {
- "permission_certificates": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/certificates-get.json b/backend/lib/access/certificates-get.json
deleted file mode 100644
index 9ccfa4f15..000000000
--- a/backend/lib/access/certificates-get.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_certificates", "roles"],
- "properties": {
- "permission_certificates": {
- "$ref": "perms#/definitions/view"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/certificates-list.json b/backend/lib/access/certificates-list.json
deleted file mode 100644
index 9ccfa4f15..000000000
--- a/backend/lib/access/certificates-list.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_certificates", "roles"],
- "properties": {
- "permission_certificates": {
- "$ref": "perms#/definitions/view"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/certificates-update.json b/backend/lib/access/certificates-update.json
deleted file mode 100644
index bcdf66742..000000000
--- a/backend/lib/access/certificates-update.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_certificates", "roles"],
- "properties": {
- "permission_certificates": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/dead_hosts-create.json b/backend/lib/access/dead_hosts-create.json
deleted file mode 100644
index a276c681d..000000000
--- a/backend/lib/access/dead_hosts-create.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_dead_hosts", "roles"],
- "properties": {
- "permission_dead_hosts": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/dead_hosts-delete.json b/backend/lib/access/dead_hosts-delete.json
deleted file mode 100644
index a276c681d..000000000
--- a/backend/lib/access/dead_hosts-delete.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_dead_hosts", "roles"],
- "properties": {
- "permission_dead_hosts": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/dead_hosts-get.json b/backend/lib/access/dead_hosts-get.json
deleted file mode 100644
index 87aa12e7d..000000000
--- a/backend/lib/access/dead_hosts-get.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_dead_hosts", "roles"],
- "properties": {
- "permission_dead_hosts": {
- "$ref": "perms#/definitions/view"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/dead_hosts-list.json b/backend/lib/access/dead_hosts-list.json
deleted file mode 100644
index 87aa12e7d..000000000
--- a/backend/lib/access/dead_hosts-list.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_dead_hosts", "roles"],
- "properties": {
- "permission_dead_hosts": {
- "$ref": "perms#/definitions/view"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/dead_hosts-update.json b/backend/lib/access/dead_hosts-update.json
deleted file mode 100644
index a276c681d..000000000
--- a/backend/lib/access/dead_hosts-update.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_dead_hosts", "roles"],
- "properties": {
- "permission_dead_hosts": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/permissions.json b/backend/lib/access/permissions.json
deleted file mode 100644
index 8480f9a1c..000000000
--- a/backend/lib/access/permissions.json
+++ /dev/null
@@ -1,14 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-07/schema#",
- "$id": "perms",
- "definitions": {
- "view": {
- "type": "string",
- "pattern": "^(view|manage)$"
- },
- "manage": {
- "type": "string",
- "pattern": "^(manage)$"
- }
- }
-}
diff --git a/backend/lib/access/proxy_hosts-create.json b/backend/lib/access/proxy_hosts-create.json
deleted file mode 100644
index 166527a39..000000000
--- a/backend/lib/access/proxy_hosts-create.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_proxy_hosts", "roles"],
- "properties": {
- "permission_proxy_hosts": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/proxy_hosts-delete.json b/backend/lib/access/proxy_hosts-delete.json
deleted file mode 100644
index 166527a39..000000000
--- a/backend/lib/access/proxy_hosts-delete.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_proxy_hosts", "roles"],
- "properties": {
- "permission_proxy_hosts": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/proxy_hosts-get.json b/backend/lib/access/proxy_hosts-get.json
deleted file mode 100644
index d88e4cfff..000000000
--- a/backend/lib/access/proxy_hosts-get.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_proxy_hosts", "roles"],
- "properties": {
- "permission_proxy_hosts": {
- "$ref": "perms#/definitions/view"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/proxy_hosts-list.json b/backend/lib/access/proxy_hosts-list.json
deleted file mode 100644
index d88e4cfff..000000000
--- a/backend/lib/access/proxy_hosts-list.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_proxy_hosts", "roles"],
- "properties": {
- "permission_proxy_hosts": {
- "$ref": "perms#/definitions/view"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/proxy_hosts-update.json b/backend/lib/access/proxy_hosts-update.json
deleted file mode 100644
index 166527a39..000000000
--- a/backend/lib/access/proxy_hosts-update.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_proxy_hosts", "roles"],
- "properties": {
- "permission_proxy_hosts": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/redirection_hosts-create.json b/backend/lib/access/redirection_hosts-create.json
deleted file mode 100644
index 342babc88..000000000
--- a/backend/lib/access/redirection_hosts-create.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_redirection_hosts", "roles"],
- "properties": {
- "permission_redirection_hosts": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/redirection_hosts-delete.json b/backend/lib/access/redirection_hosts-delete.json
deleted file mode 100644
index 342babc88..000000000
--- a/backend/lib/access/redirection_hosts-delete.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_redirection_hosts", "roles"],
- "properties": {
- "permission_redirection_hosts": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/redirection_hosts-get.json b/backend/lib/access/redirection_hosts-get.json
deleted file mode 100644
index ba2292064..000000000
--- a/backend/lib/access/redirection_hosts-get.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_redirection_hosts", "roles"],
- "properties": {
- "permission_redirection_hosts": {
- "$ref": "perms#/definitions/view"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/redirection_hosts-list.json b/backend/lib/access/redirection_hosts-list.json
deleted file mode 100644
index ba2292064..000000000
--- a/backend/lib/access/redirection_hosts-list.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_redirection_hosts", "roles"],
- "properties": {
- "permission_redirection_hosts": {
- "$ref": "perms#/definitions/view"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/redirection_hosts-update.json b/backend/lib/access/redirection_hosts-update.json
deleted file mode 100644
index 342babc88..000000000
--- a/backend/lib/access/redirection_hosts-update.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_redirection_hosts", "roles"],
- "properties": {
- "permission_redirection_hosts": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/reports-hosts.json b/backend/lib/access/reports-hosts.json
deleted file mode 100644
index dbc9e0c0f..000000000
--- a/backend/lib/access/reports-hosts.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/user"
- }
- ]
-}
diff --git a/backend/lib/access/roles.json b/backend/lib/access/roles.json
deleted file mode 100644
index 16b33b55b..000000000
--- a/backend/lib/access/roles.json
+++ /dev/null
@@ -1,39 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-07/schema#",
- "$id": "roles",
- "definitions": {
- "admin": {
- "type": "object",
- "required": ["scope", "roles"],
- "properties": {
- "scope": {
- "type": "array",
- "contains": {
- "type": "string",
- "pattern": "^user$"
- }
- },
- "roles": {
- "type": "array",
- "contains": {
- "type": "string",
- "pattern": "^admin$"
- }
- }
- }
- },
- "user": {
- "type": "object",
- "required": ["scope"],
- "properties": {
- "scope": {
- "type": "array",
- "contains": {
- "type": "string",
- "pattern": "^user$"
- }
- }
- }
- }
- }
-}
diff --git a/backend/lib/access/settings-get.json b/backend/lib/access/settings-get.json
deleted file mode 100644
index aeadc94ba..000000000
--- a/backend/lib/access/settings-get.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- }
- ]
-}
diff --git a/backend/lib/access/settings-list.json b/backend/lib/access/settings-list.json
deleted file mode 100644
index aeadc94ba..000000000
--- a/backend/lib/access/settings-list.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- }
- ]
-}
diff --git a/backend/lib/access/settings-update.json b/backend/lib/access/settings-update.json
deleted file mode 100644
index aeadc94ba..000000000
--- a/backend/lib/access/settings-update.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- }
- ]
-}
diff --git a/backend/lib/access/streams-create.json b/backend/lib/access/streams-create.json
deleted file mode 100644
index fbeb1cc91..000000000
--- a/backend/lib/access/streams-create.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_streams", "roles"],
- "properties": {
- "permission_streams": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/streams-delete.json b/backend/lib/access/streams-delete.json
deleted file mode 100644
index fbeb1cc91..000000000
--- a/backend/lib/access/streams-delete.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_streams", "roles"],
- "properties": {
- "permission_streams": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/streams-get.json b/backend/lib/access/streams-get.json
deleted file mode 100644
index 7e9962874..000000000
--- a/backend/lib/access/streams-get.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_streams", "roles"],
- "properties": {
- "permission_streams": {
- "$ref": "perms#/definitions/view"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/streams-list.json b/backend/lib/access/streams-list.json
deleted file mode 100644
index 7e9962874..000000000
--- a/backend/lib/access/streams-list.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_streams", "roles"],
- "properties": {
- "permission_streams": {
- "$ref": "perms#/definitions/view"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/streams-update.json b/backend/lib/access/streams-update.json
deleted file mode 100644
index fbeb1cc91..000000000
--- a/backend/lib/access/streams-update.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["permission_streams", "roles"],
- "properties": {
- "permission_streams": {
- "$ref": "perms#/definitions/manage"
- },
- "roles": {
- "type": "array",
- "items": {
- "type": "string",
- "enum": ["user"]
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/users-create.json b/backend/lib/access/users-create.json
deleted file mode 100644
index aeadc94ba..000000000
--- a/backend/lib/access/users-create.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- }
- ]
-}
diff --git a/backend/lib/access/users-delete.json b/backend/lib/access/users-delete.json
deleted file mode 100644
index aeadc94ba..000000000
--- a/backend/lib/access/users-delete.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- }
- ]
-}
diff --git a/backend/lib/access/users-get.json b/backend/lib/access/users-get.json
deleted file mode 100644
index 2a2f0423a..000000000
--- a/backend/lib/access/users-get.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["data", "scope"],
- "properties": {
- "data": {
- "$ref": "objects#/properties/users"
- },
- "scope": {
- "type": "array",
- "contains": {
- "type": "string",
- "pattern": "^user$"
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/users-list.json b/backend/lib/access/users-list.json
deleted file mode 100644
index aeadc94ba..000000000
--- a/backend/lib/access/users-list.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- }
- ]
-}
diff --git a/backend/lib/access/users-loginas.json b/backend/lib/access/users-loginas.json
deleted file mode 100644
index aeadc94ba..000000000
--- a/backend/lib/access/users-loginas.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- }
- ]
-}
diff --git a/backend/lib/access/users-password.json b/backend/lib/access/users-password.json
deleted file mode 100644
index 2a2f0423a..000000000
--- a/backend/lib/access/users-password.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["data", "scope"],
- "properties": {
- "data": {
- "$ref": "objects#/properties/users"
- },
- "scope": {
- "type": "array",
- "contains": {
- "type": "string",
- "pattern": "^user$"
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/access/users-permissions.json b/backend/lib/access/users-permissions.json
deleted file mode 100644
index aeadc94ba..000000000
--- a/backend/lib/access/users-permissions.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- }
- ]
-}
diff --git a/backend/lib/access/users-update.json b/backend/lib/access/users-update.json
deleted file mode 100644
index 2a2f0423a..000000000
--- a/backend/lib/access/users-update.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "anyOf": [
- {
- "$ref": "roles#/definitions/admin"
- },
- {
- "type": "object",
- "required": ["data", "scope"],
- "properties": {
- "data": {
- "$ref": "objects#/properties/users"
- },
- "scope": {
- "type": "array",
- "contains": {
- "type": "string",
- "pattern": "^user$"
- }
- }
- }
- }
- ]
-}
diff --git a/backend/lib/error.js b/backend/lib/error.js
deleted file mode 100644
index 9e456f051..000000000
--- a/backend/lib/error.js
+++ /dev/null
@@ -1,90 +0,0 @@
-const _ = require('lodash');
-const util = require('util');
-
-module.exports = {
-
- PermissionError: function (message, previous) {
- Error.captureStackTrace(this, this.constructor);
- this.name = this.constructor.name;
- this.previous = previous;
- this.message = 'Permission Denied';
- this.public = true;
- this.status = 403;
- },
-
- ItemNotFoundError: function (id, previous) {
- Error.captureStackTrace(this, this.constructor);
- this.name = this.constructor.name;
- this.previous = previous;
- this.message = 'Item Not Found - ' + id;
- this.public = true;
- this.status = 404;
- },
-
- AuthError: function (message, previous) {
- Error.captureStackTrace(this, this.constructor);
- this.name = this.constructor.name;
- this.previous = previous;
- this.message = message;
- this.public = true;
- this.status = 401;
- },
-
- InternalError: function (message, previous) {
- Error.captureStackTrace(this, this.constructor);
- this.name = this.constructor.name;
- this.previous = previous;
- this.message = message;
- this.status = 500;
- this.public = false;
- },
-
- InternalValidationError: function (message, previous) {
- Error.captureStackTrace(this, this.constructor);
- this.name = this.constructor.name;
- this.previous = previous;
- this.message = message;
- this.status = 400;
- this.public = false;
- },
-
- ConfigurationError: function (message, previous) {
- Error.captureStackTrace(this, this.constructor);
- this.name = this.constructor.name;
- this.previous = previous;
- this.message = message;
- this.status = 400;
- this.public = true;
- },
-
- CacheError: function (message, previous) {
- Error.captureStackTrace(this, this.constructor);
- this.name = this.constructor.name;
- this.message = message;
- this.previous = previous;
- this.status = 500;
- this.public = false;
- },
-
- ValidationError: function (message, previous) {
- Error.captureStackTrace(this, this.constructor);
- this.name = this.constructor.name;
- this.previous = previous;
- this.message = message;
- this.public = true;
- this.status = 400;
- },
-
- AssertionFailedError: function (message, previous) {
- Error.captureStackTrace(this, this.constructor);
- this.name = this.constructor.name;
- this.previous = previous;
- this.message = message;
- this.public = false;
- this.status = 400;
- }
-};
-
-_.forEach(module.exports, function (error) {
- util.inherits(error, Error);
-});
diff --git a/backend/lib/express/cors.js b/backend/lib/express/cors.js
deleted file mode 100644
index c9befeec8..000000000
--- a/backend/lib/express/cors.js
+++ /dev/null
@@ -1,40 +0,0 @@
-const validator = require('../validator');
-
-module.exports = function (req, res, next) {
-
- if (req.headers.origin) {
-
- const originSchema = {
- oneOf: [
- {
- type: 'string',
- pattern: '^[a-z\\-]+:\\/\\/(?:[\\w\\-\\.]+(:[0-9]+)?/?)?$'
- },
- {
- type: 'string',
- pattern: '^[a-z\\-]+:\\/\\/(?:\\[([a-z0-9]{0,4}\\:?)+\\])?/?(:[0-9]+)?$'
- }
- ]
- };
-
- // very relaxed validation....
- validator(originSchema, req.headers.origin)
- .then(function () {
- res.set({
- 'Access-Control-Allow-Origin': req.headers.origin,
- 'Access-Control-Allow-Credentials': true,
- 'Access-Control-Allow-Methods': 'OPTIONS, GET, POST',
- 'Access-Control-Allow-Headers': 'Content-Type, Cache-Control, Pragma, Expires, Authorization, X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit',
- 'Access-Control-Max-Age': 5 * 60,
- 'Access-Control-Expose-Headers': 'X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit'
- });
- next();
- })
- .catch(next);
-
- } else {
- // No origin
- next();
- }
-
-};
diff --git a/backend/lib/express/jwt-decode.js b/backend/lib/express/jwt-decode.js
deleted file mode 100644
index 17edccec0..000000000
--- a/backend/lib/express/jwt-decode.js
+++ /dev/null
@@ -1,15 +0,0 @@
-const Access = require('../access');
-
-module.exports = () => {
- return function (req, res, next) {
- res.locals.access = null;
- let access = new Access(res.locals.token || null);
- access.load()
- .then(() => {
- res.locals.access = access;
- next();
- })
- .catch(next);
- };
-};
-
diff --git a/backend/lib/express/jwt.js b/backend/lib/express/jwt.js
deleted file mode 100644
index 44aa36934..000000000
--- a/backend/lib/express/jwt.js
+++ /dev/null
@@ -1,13 +0,0 @@
-module.exports = function () {
- return function (req, res, next) {
- if (req.headers.authorization) {
- let parts = req.headers.authorization.split(' ');
-
- if (parts && parts[0] === 'Bearer' && parts[1]) {
- res.locals.token = parts[1];
- }
- }
-
- next();
- };
-};
diff --git a/backend/lib/express/pagination.js b/backend/lib/express/pagination.js
deleted file mode 100644
index 24ffa58d0..000000000
--- a/backend/lib/express/pagination.js
+++ /dev/null
@@ -1,55 +0,0 @@
-let _ = require('lodash');
-
-module.exports = function (default_sort, default_offset, default_limit, max_limit) {
-
- /**
- * This will setup the req query params with filtered data and defaults
- *
- * sort will be an array of fields and their direction
- * offset will be an int, defaulting to zero if no other default supplied
- * limit will be an int, defaulting to 50 if no other default supplied, and limited to the max if that was supplied
- *
- */
-
- return function (req, res, next) {
-
- req.query.offset = typeof req.query.limit === 'undefined' ? default_offset || 0 : parseInt(req.query.offset, 10);
- req.query.limit = typeof req.query.limit === 'undefined' ? default_limit || 50 : parseInt(req.query.limit, 10);
-
- if (max_limit && req.query.limit > max_limit) {
- req.query.limit = max_limit;
- }
-
- // Sorting
- let sort = typeof req.query.sort === 'undefined' ? default_sort : req.query.sort;
- let myRegexp = /.*\.(asc|desc)$/ig;
- let sort_array = [];
-
- sort = sort.split(',');
- _.map(sort, function (val) {
- let matches = myRegexp.exec(val);
-
- if (matches !== null) {
- let dir = matches[1];
- sort_array.push({
- field: val.substr(0, val.length - (dir.length + 1)),
- dir: dir.toLowerCase()
- });
- } else {
- sort_array.push({
- field: val,
- dir: 'asc'
- });
- }
- });
-
- // Sort will now be in this format:
- // [
- // { field: 'field1', dir: 'asc' },
- // { field: 'field2', dir: 'desc' }
- // ]
-
- req.query.sort = sort_array;
- next();
- };
-};
diff --git a/backend/lib/express/user-id-from-me.js b/backend/lib/express/user-id-from-me.js
deleted file mode 100644
index 4a37a4069..000000000
--- a/backend/lib/express/user-id-from-me.js
+++ /dev/null
@@ -1,9 +0,0 @@
-module.exports = (req, res, next) => {
- if (req.params.user_id === 'me' && res.locals.access) {
- req.params.user_id = res.locals.access.token.get('attrs').id;
- } else {
- req.params.user_id = parseInt(req.params.user_id, 10);
- }
-
- next();
-};
diff --git a/backend/lib/helpers.js b/backend/lib/helpers.js
deleted file mode 100644
index e38be991e..000000000
--- a/backend/lib/helpers.js
+++ /dev/null
@@ -1,32 +0,0 @@
-const moment = require('moment');
-
-module.exports = {
-
- /**
- * Takes an expression such as 30d and returns a moment object of that date in future
- *
- * Key Shorthand
- * ==================
- * years y
- * quarters Q
- * months M
- * weeks w
- * days d
- * hours h
- * minutes m
- * seconds s
- * milliseconds ms
- *
- * @param {String} expression
- * @returns {Object}
- */
- parseDatePeriod: function (expression) {
- let matches = expression.match(/^([0-9]+)(y|Q|M|w|d|h|m|s|ms)$/m);
- if (matches) {
- return moment().add(matches[1], matches[2]);
- }
-
- return null;
- }
-
-};
diff --git a/backend/lib/migrate_template.js b/backend/lib/migrate_template.js
deleted file mode 100644
index f75f77ef4..000000000
--- a/backend/lib/migrate_template.js
+++ /dev/null
@@ -1,55 +0,0 @@
-const migrate_name = 'identifier_for_migrate';
-const logger = require('../logger').migrate;
-
-/**
- * Migrate
- *
- * @see http://knexjs.org/#Schema
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.up = function (knex, Promise) {
-
- logger.info('[' + migrate_name + '] Migrating Up...');
-
- // Create Table example:
-
- /*return knex.schema.createTable('notification', (table) => {
- table.increments().primary();
- table.string('name').notNull();
- table.string('type').notNull();
- table.integer('created_on').notNull();
- table.integer('modified_on').notNull();
- })
- .then(function () {
- logger.info('[' + migrate_name + '] Notification Table created');
- });*/
-
- logger.info('[' + migrate_name + '] Migrating Up Complete');
-
- return Promise.resolve(true);
-};
-
-/**
- * Undo Migrate
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.down = function (knex, Promise) {
- logger.info('[' + migrate_name + '] Migrating Down...');
-
- // Drop table example:
-
- /*return knex.schema.dropTable('notification')
- .then(() => {
- logger.info('[' + migrate_name + '] Notification Table dropped');
- });*/
-
- logger.info('[' + migrate_name + '] Migrating Down Complete');
-
- return Promise.resolve(true);
-};
diff --git a/backend/lib/utils.js b/backend/lib/utils.js
deleted file mode 100644
index 4c8b62a84..000000000
--- a/backend/lib/utils.js
+++ /dev/null
@@ -1,20 +0,0 @@
-const exec = require('child_process').exec;
-
-module.exports = {
-
- /**
- * @param {String} cmd
- * @returns {Promise}
- */
- exec: function (cmd) {
- return new Promise((resolve, reject) => {
- exec(cmd, function (err, stdout, /*stderr*/) {
- if (err && typeof err === 'object') {
- reject(err);
- } else {
- resolve(stdout.trim());
- }
- });
- });
- }
-};
diff --git a/backend/lib/validator/api.js b/backend/lib/validator/api.js
deleted file mode 100644
index 3f51b5969..000000000
--- a/backend/lib/validator/api.js
+++ /dev/null
@@ -1,45 +0,0 @@
-const error = require('../error');
-const path = require('path');
-const parser = require('json-schema-ref-parser');
-
-const ajv = require('ajv')({
- verbose: true,
- validateSchema: true,
- allErrors: false,
- format: 'full',
- coerceTypes: true
-});
-
-/**
- * @param {Object} schema
- * @param {Object} payload
- * @returns {Promise}
- */
-function apiValidator (schema, payload/*, description*/) {
- return new Promise(function Promise_apiValidator (resolve, reject) {
- if (typeof payload === 'undefined') {
- reject(new error.ValidationError('Payload is undefined'));
- }
-
- let validate = ajv.compile(schema);
- let valid = validate(payload);
-
- if (valid && !validate.errors) {
- resolve(payload);
- } else {
- let message = ajv.errorsText(validate.errors);
- let err = new error.ValidationError(message);
- err.debug = [validate.errors, payload];
- reject(err);
- }
- });
-}
-
-apiValidator.loadSchemas = parser
- .dereference(path.resolve('schema/index.json'))
- .then((schema) => {
- ajv.addSchema(schema);
- return schema;
- });
-
-module.exports = apiValidator;
diff --git a/backend/lib/validator/index.js b/backend/lib/validator/index.js
deleted file mode 100644
index fca6f4bf2..000000000
--- a/backend/lib/validator/index.js
+++ /dev/null
@@ -1,49 +0,0 @@
-const _ = require('lodash');
-const error = require('../error');
-const definitions = require('../../schema/definitions.json');
-
-RegExp.prototype.toJSON = RegExp.prototype.toString;
-
-const ajv = require('ajv')({
- verbose: true, //process.env.NODE_ENV === 'development',
- allErrors: true,
- format: 'full', // strict regexes for format checks
- coerceTypes: true,
- schemas: [
- definitions
- ]
-});
-
-/**
- *
- * @param {Object} schema
- * @param {Object} payload
- * @returns {Promise}
- */
-function validator (schema, payload) {
- return new Promise(function (resolve, reject) {
- if (!payload) {
- reject(new error.InternalValidationError('Payload is falsy'));
- } else {
- try {
- let validate = ajv.compile(schema);
-
- let valid = validate(payload);
- if (valid && !validate.errors) {
- resolve(_.cloneDeep(payload));
- } else {
- let message = ajv.errorsText(validate.errors);
- reject(new error.InternalValidationError(message));
- }
-
- } catch (err) {
- reject(err);
- }
-
- }
-
- });
-
-}
-
-module.exports = validator;
diff --git a/backend/logger.js b/backend/logger.js
deleted file mode 100644
index 680af6d51..000000000
--- a/backend/logger.js
+++ /dev/null
@@ -1,13 +0,0 @@
-const {Signale} = require('signale');
-
-module.exports = {
- global: new Signale({scope: 'Global '}),
- migrate: new Signale({scope: 'Migrate '}),
- express: new Signale({scope: 'Express '}),
- access: new Signale({scope: 'Access '}),
- nginx: new Signale({scope: 'Nginx '}),
- ssl: new Signale({scope: 'SSL '}),
- import: new Signale({scope: 'Importer '}),
- setup: new Signale({scope: 'Setup '}),
- ip_ranges: new Signale({scope: 'IP Ranges'})
-};
diff --git a/backend/migrate.js b/backend/migrate.js
deleted file mode 100644
index 263c87020..000000000
--- a/backend/migrate.js
+++ /dev/null
@@ -1,15 +0,0 @@
-const db = require('./db');
-const logger = require('./logger').migrate;
-
-module.exports = {
- latest: function () {
- return db.migrate.currentVersion()
- .then((version) => {
- logger.info('Current database version:', version);
- return db.migrate.latest({
- tableName: 'migrations',
- directory: 'migrations'
- });
- });
- }
-};
diff --git a/backend/migrations/20180618015850_initial.js b/backend/migrations/20180618015850_initial.js
deleted file mode 100644
index a112e8261..000000000
--- a/backend/migrations/20180618015850_initial.js
+++ /dev/null
@@ -1,205 +0,0 @@
-const migrate_name = 'initial-schema';
-const logger = require('../logger').migrate;
-
-/**
- * Migrate
- *
- * @see http://knexjs.org/#Schema
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.up = function (knex/*, Promise*/) {
- logger.info('[' + migrate_name + '] Migrating Up...');
-
- return knex.schema.createTable('auth', (table) => {
- table.increments().primary();
- table.dateTime('created_on').notNull();
- table.dateTime('modified_on').notNull();
- table.integer('user_id').notNull().unsigned();
- table.string('type', 30).notNull();
- table.string('secret').notNull();
- table.json('meta').notNull();
- table.integer('is_deleted').notNull().unsigned().defaultTo(0);
- })
- .then(() => {
- logger.info('[' + migrate_name + '] auth Table created');
-
- return knex.schema.createTable('user', (table) => {
- table.increments().primary();
- table.dateTime('created_on').notNull();
- table.dateTime('modified_on').notNull();
- table.integer('is_deleted').notNull().unsigned().defaultTo(0);
- table.integer('is_disabled').notNull().unsigned().defaultTo(0);
- table.string('email').notNull();
- table.string('name').notNull();
- table.string('nickname').notNull();
- table.string('avatar').notNull();
- table.json('roles').notNull();
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] user Table created');
-
- return knex.schema.createTable('user_permission', (table) => {
- table.increments().primary();
- table.dateTime('created_on').notNull();
- table.dateTime('modified_on').notNull();
- table.integer('user_id').notNull().unsigned();
- table.string('visibility').notNull();
- table.string('proxy_hosts').notNull();
- table.string('redirection_hosts').notNull();
- table.string('dead_hosts').notNull();
- table.string('streams').notNull();
- table.string('access_lists').notNull();
- table.string('certificates').notNull();
- table.unique('user_id');
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] user_permission Table created');
-
- return knex.schema.createTable('proxy_host', (table) => {
- table.increments().primary();
- table.dateTime('created_on').notNull();
- table.dateTime('modified_on').notNull();
- table.integer('owner_user_id').notNull().unsigned();
- table.integer('is_deleted').notNull().unsigned().defaultTo(0);
- table.json('domain_names').notNull();
- table.string('forward_ip').notNull();
- table.integer('forward_port').notNull().unsigned();
- table.integer('access_list_id').notNull().unsigned().defaultTo(0);
- table.integer('certificate_id').notNull().unsigned().defaultTo(0);
- table.integer('ssl_forced').notNull().unsigned().defaultTo(0);
- table.integer('caching_enabled').notNull().unsigned().defaultTo(0);
- table.integer('block_exploits').notNull().unsigned().defaultTo(0);
- table.text('advanced_config').notNull().defaultTo('');
- table.json('meta').notNull();
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] proxy_host Table created');
-
- return knex.schema.createTable('redirection_host', (table) => {
- table.increments().primary();
- table.dateTime('created_on').notNull();
- table.dateTime('modified_on').notNull();
- table.integer('owner_user_id').notNull().unsigned();
- table.integer('is_deleted').notNull().unsigned().defaultTo(0);
- table.json('domain_names').notNull();
- table.string('forward_domain_name').notNull();
- table.integer('preserve_path').notNull().unsigned().defaultTo(0);
- table.integer('certificate_id').notNull().unsigned().defaultTo(0);
- table.integer('ssl_forced').notNull().unsigned().defaultTo(0);
- table.integer('block_exploits').notNull().unsigned().defaultTo(0);
- table.text('advanced_config').notNull().defaultTo('');
- table.json('meta').notNull();
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] redirection_host Table created');
-
- return knex.schema.createTable('dead_host', (table) => {
- table.increments().primary();
- table.dateTime('created_on').notNull();
- table.dateTime('modified_on').notNull();
- table.integer('owner_user_id').notNull().unsigned();
- table.integer('is_deleted').notNull().unsigned().defaultTo(0);
- table.json('domain_names').notNull();
- table.integer('certificate_id').notNull().unsigned().defaultTo(0);
- table.integer('ssl_forced').notNull().unsigned().defaultTo(0);
- table.text('advanced_config').notNull().defaultTo('');
- table.json('meta').notNull();
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] dead_host Table created');
-
- return knex.schema.createTable('stream', (table) => {
- table.increments().primary();
- table.dateTime('created_on').notNull();
- table.dateTime('modified_on').notNull();
- table.integer('owner_user_id').notNull().unsigned();
- table.integer('is_deleted').notNull().unsigned().defaultTo(0);
- table.integer('incoming_port').notNull().unsigned();
- table.string('forward_ip').notNull();
- table.integer('forwarding_port').notNull().unsigned();
- table.integer('tcp_forwarding').notNull().unsigned().defaultTo(0);
- table.integer('udp_forwarding').notNull().unsigned().defaultTo(0);
- table.json('meta').notNull();
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] stream Table created');
-
- return knex.schema.createTable('access_list', (table) => {
- table.increments().primary();
- table.dateTime('created_on').notNull();
- table.dateTime('modified_on').notNull();
- table.integer('owner_user_id').notNull().unsigned();
- table.integer('is_deleted').notNull().unsigned().defaultTo(0);
- table.string('name').notNull();
- table.json('meta').notNull();
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] access_list Table created');
-
- return knex.schema.createTable('certificate', (table) => {
- table.increments().primary();
- table.dateTime('created_on').notNull();
- table.dateTime('modified_on').notNull();
- table.integer('owner_user_id').notNull().unsigned();
- table.integer('is_deleted').notNull().unsigned().defaultTo(0);
- table.string('provider').notNull();
- table.string('nice_name').notNull().defaultTo('');
- table.json('domain_names').notNull();
- table.dateTime('expires_on').notNull();
- table.json('meta').notNull();
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] certificate Table created');
-
- return knex.schema.createTable('access_list_auth', (table) => {
- table.increments().primary();
- table.dateTime('created_on').notNull();
- table.dateTime('modified_on').notNull();
- table.integer('access_list_id').notNull().unsigned();
- table.string('username').notNull();
- table.string('password').notNull();
- table.json('meta').notNull();
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] access_list_auth Table created');
-
- return knex.schema.createTable('audit_log', (table) => {
- table.increments().primary();
- table.dateTime('created_on').notNull();
- table.dateTime('modified_on').notNull();
- table.integer('user_id').notNull().unsigned();
- table.string('object_type').notNull().defaultTo('');
- table.integer('object_id').notNull().unsigned().defaultTo(0);
- table.string('action').notNull();
- table.json('meta').notNull();
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] audit_log Table created');
- });
-
-};
-
-/**
- * Undo Migrate
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.down = function (knex, Promise) {
- logger.warn('[' + migrate_name + '] You can\'t migrate down the initial data.');
- return Promise.resolve(true);
-};
diff --git a/backend/migrations/20180929054513_websockets.js b/backend/migrations/20180929054513_websockets.js
deleted file mode 100644
index 060548502..000000000
--- a/backend/migrations/20180929054513_websockets.js
+++ /dev/null
@@ -1,35 +0,0 @@
-const migrate_name = 'websockets';
-const logger = require('../logger').migrate;
-
-/**
- * Migrate
- *
- * @see http://knexjs.org/#Schema
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.up = function (knex/*, Promise*/) {
- logger.info('[' + migrate_name + '] Migrating Up...');
-
- return knex.schema.table('proxy_host', function (proxy_host) {
- proxy_host.integer('allow_websocket_upgrade').notNull().unsigned().defaultTo(0);
- })
- .then(() => {
- logger.info('[' + migrate_name + '] proxy_host Table altered');
- });
-
-};
-
-/**
- * Undo Migrate
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.down = function (knex, Promise) {
- logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
- return Promise.resolve(true);
-};
\ No newline at end of file
diff --git a/backend/migrations/20181019052346_forward_host.js b/backend/migrations/20181019052346_forward_host.js
deleted file mode 100644
index 05c277396..000000000
--- a/backend/migrations/20181019052346_forward_host.js
+++ /dev/null
@@ -1,34 +0,0 @@
-const migrate_name = 'forward_host';
-const logger = require('../logger').migrate;
-
-/**
- * Migrate
- *
- * @see http://knexjs.org/#Schema
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.up = function (knex/*, Promise*/) {
- logger.info('[' + migrate_name + '] Migrating Up...');
-
- return knex.schema.table('proxy_host', function (proxy_host) {
- proxy_host.renameColumn('forward_ip', 'forward_host');
- })
- .then(() => {
- logger.info('[' + migrate_name + '] proxy_host Table altered');
- });
-};
-
-/**
- * Undo Migrate
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.down = function (knex, Promise) {
- logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
- return Promise.resolve(true);
-};
\ No newline at end of file
diff --git a/backend/migrations/20181113041458_http2_support.js b/backend/migrations/20181113041458_http2_support.js
deleted file mode 100644
index 9f6b43367..000000000
--- a/backend/migrations/20181113041458_http2_support.js
+++ /dev/null
@@ -1,49 +0,0 @@
-const migrate_name = 'http2_support';
-const logger = require('../logger').migrate;
-
-/**
- * Migrate
- *
- * @see http://knexjs.org/#Schema
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.up = function (knex/*, Promise*/) {
- logger.info('[' + migrate_name + '] Migrating Up...');
-
- return knex.schema.table('proxy_host', function (proxy_host) {
- proxy_host.integer('http2_support').notNull().unsigned().defaultTo(0);
- })
- .then(() => {
- logger.info('[' + migrate_name + '] proxy_host Table altered');
-
- return knex.schema.table('redirection_host', function (redirection_host) {
- redirection_host.integer('http2_support').notNull().unsigned().defaultTo(0);
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] redirection_host Table altered');
-
- return knex.schema.table('dead_host', function (dead_host) {
- dead_host.integer('http2_support').notNull().unsigned().defaultTo(0);
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] dead_host Table altered');
- });
-};
-
-/**
- * Undo Migrate
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.down = function (knex, Promise) {
- logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
- return Promise.resolve(true);
-};
-
diff --git a/backend/migrations/20181213013211_forward_scheme.js b/backend/migrations/20181213013211_forward_scheme.js
deleted file mode 100644
index 22ae619ed..000000000
--- a/backend/migrations/20181213013211_forward_scheme.js
+++ /dev/null
@@ -1,34 +0,0 @@
-const migrate_name = 'forward_scheme';
-const logger = require('../logger').migrate;
-
-/**
- * Migrate
- *
- * @see http://knexjs.org/#Schema
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.up = function (knex/*, Promise*/) {
- logger.info('[' + migrate_name + '] Migrating Up...');
-
- return knex.schema.table('proxy_host', function (proxy_host) {
- proxy_host.string('forward_scheme').notNull().defaultTo('http');
- })
- .then(() => {
- logger.info('[' + migrate_name + '] proxy_host Table altered');
- });
-};
-
-/**
- * Undo Migrate
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.down = function (knex, Promise) {
- logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
- return Promise.resolve(true);
-};
diff --git a/backend/migrations/20190104035154_disabled.js b/backend/migrations/20190104035154_disabled.js
deleted file mode 100644
index 2780c4df2..000000000
--- a/backend/migrations/20190104035154_disabled.js
+++ /dev/null
@@ -1,55 +0,0 @@
-const migrate_name = 'disabled';
-const logger = require('../logger').migrate;
-
-/**
- * Migrate
- *
- * @see http://knexjs.org/#Schema
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.up = function (knex/*, Promise*/) {
- logger.info('[' + migrate_name + '] Migrating Up...');
-
- return knex.schema.table('proxy_host', function (proxy_host) {
- proxy_host.integer('enabled').notNull().unsigned().defaultTo(1);
- })
- .then(() => {
- logger.info('[' + migrate_name + '] proxy_host Table altered');
-
- return knex.schema.table('redirection_host', function (redirection_host) {
- redirection_host.integer('enabled').notNull().unsigned().defaultTo(1);
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] redirection_host Table altered');
-
- return knex.schema.table('dead_host', function (dead_host) {
- dead_host.integer('enabled').notNull().unsigned().defaultTo(1);
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] dead_host Table altered');
-
- return knex.schema.table('stream', function (stream) {
- stream.integer('enabled').notNull().unsigned().defaultTo(1);
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] stream Table altered');
- });
-};
-
-/**
- * Undo Migrate
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.down = function (knex, Promise) {
- logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
- return Promise.resolve(true);
-};
diff --git a/backend/migrations/20190215115310_customlocations.js b/backend/migrations/20190215115310_customlocations.js
deleted file mode 100644
index 4bcfd51ad..000000000
--- a/backend/migrations/20190215115310_customlocations.js
+++ /dev/null
@@ -1,35 +0,0 @@
-const migrate_name = 'custom_locations';
-const logger = require('../logger').migrate;
-
-/**
- * Migrate
- * Extends proxy_host table with locations field
- *
- * @see http://knexjs.org/#Schema
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.up = function (knex/*, Promise*/) {
- logger.info('[' + migrate_name + '] Migrating Up...');
-
- return knex.schema.table('proxy_host', function (proxy_host) {
- proxy_host.json('locations');
- })
- .then(() => {
- logger.info('[' + migrate_name + '] proxy_host Table altered');
- });
-};
-
-/**
- * Undo Migrate
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.down = function (knex, Promise) {
- logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
- return Promise.resolve(true);
-};
diff --git a/backend/migrations/20190218060101_hsts.js b/backend/migrations/20190218060101_hsts.js
deleted file mode 100644
index 648b162a0..000000000
--- a/backend/migrations/20190218060101_hsts.js
+++ /dev/null
@@ -1,51 +0,0 @@
-const migrate_name = 'hsts';
-const logger = require('../logger').migrate;
-
-/**
- * Migrate
- *
- * @see http://knexjs.org/#Schema
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.up = function (knex/*, Promise*/) {
- logger.info('[' + migrate_name + '] Migrating Up...');
-
- return knex.schema.table('proxy_host', function (proxy_host) {
- proxy_host.integer('hsts_enabled').notNull().unsigned().defaultTo(0);
- proxy_host.integer('hsts_subdomains').notNull().unsigned().defaultTo(0);
- })
- .then(() => {
- logger.info('[' + migrate_name + '] proxy_host Table altered');
-
- return knex.schema.table('redirection_host', function (redirection_host) {
- redirection_host.integer('hsts_enabled').notNull().unsigned().defaultTo(0);
- redirection_host.integer('hsts_subdomains').notNull().unsigned().defaultTo(0);
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] redirection_host Table altered');
-
- return knex.schema.table('dead_host', function (dead_host) {
- dead_host.integer('hsts_enabled').notNull().unsigned().defaultTo(0);
- dead_host.integer('hsts_subdomains').notNull().unsigned().defaultTo(0);
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] dead_host Table altered');
- });
-};
-
-/**
- * Undo Migrate
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.down = function (knex, Promise) {
- logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
- return Promise.resolve(true);
-};
diff --git a/backend/migrations/20190227065017_settings.js b/backend/migrations/20190227065017_settings.js
deleted file mode 100644
index 7dc9c1928..000000000
--- a/backend/migrations/20190227065017_settings.js
+++ /dev/null
@@ -1,38 +0,0 @@
-const migrate_name = 'settings';
-const logger = require('../logger').migrate;
-
-/**
- * Migrate
- *
- * @see http://knexjs.org/#Schema
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.up = function (knex/*, Promise*/) {
- logger.info('[' + migrate_name + '] Migrating Up...');
-
- return knex.schema.createTable('setting', (table) => {
- table.string('id').notNull().primary();
- table.string('name', 100).notNull();
- table.string('description', 255).notNull();
- table.string('value', 255).notNull();
- table.json('meta').notNull();
- })
- .then(() => {
- logger.info('[' + migrate_name + '] setting Table created');
- });
-};
-
-/**
- * Undo Migrate
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.down = function (knex, Promise) {
- logger.warn('[' + migrate_name + '] You can\'t migrate down the initial data.');
- return Promise.resolve(true);
-};
diff --git a/backend/migrations/20200410143839_access_list_client.js b/backend/migrations/20200410143839_access_list_client.js
deleted file mode 100644
index 3511e35b3..000000000
--- a/backend/migrations/20200410143839_access_list_client.js
+++ /dev/null
@@ -1,53 +0,0 @@
-const migrate_name = 'access_list_client';
-const logger = require('../logger').migrate;
-
-/**
- * Migrate
- *
- * @see http://knexjs.org/#Schema
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.up = function (knex/*, Promise*/) {
-
- logger.info('[' + migrate_name + '] Migrating Up...');
-
- return knex.schema.createTable('access_list_client', (table) => {
- table.increments().primary();
- table.dateTime('created_on').notNull();
- table.dateTime('modified_on').notNull();
- table.integer('access_list_id').notNull().unsigned();
- table.string('address').notNull();
- table.string('directive').notNull();
- table.json('meta').notNull();
-
- })
- .then(function () {
- logger.info('[' + migrate_name + '] access_list_client Table created');
-
- return knex.schema.table('access_list', function (access_list) {
- access_list.integer('satify_any').notNull().defaultTo(0);
- });
- })
- .then(() => {
- logger.info('[' + migrate_name + '] access_list Table altered');
- });
-};
-
-/**
- * Undo Migrate
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.down = function (knex/*, Promise*/) {
- logger.info('[' + migrate_name + '] Migrating Down...');
-
- return knex.schema.dropTable('access_list_client')
- .then(() => {
- logger.info('[' + migrate_name + '] access_list_client Table dropped');
- });
-};
diff --git a/backend/migrations/20200410143840_access_list_client_fix.js b/backend/migrations/20200410143840_access_list_client_fix.js
deleted file mode 100644
index ee0f0906f..000000000
--- a/backend/migrations/20200410143840_access_list_client_fix.js
+++ /dev/null
@@ -1,34 +0,0 @@
-const migrate_name = 'access_list_client_fix';
-const logger = require('../logger').migrate;
-
-/**
- * Migrate
- *
- * @see http://knexjs.org/#Schema
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.up = function (knex/*, Promise*/) {
- logger.info('[' + migrate_name + '] Migrating Up...');
-
- return knex.schema.table('access_list', function (access_list) {
- access_list.renameColumn('satify_any', 'satisfy_any');
- })
- .then(() => {
- logger.info('[' + migrate_name + '] access_list Table altered');
- });
-};
-
-/**
- * Undo Migrate
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.down = function (knex, Promise) {
- logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
- return Promise.resolve(true);
-};
diff --git a/backend/migrations/20201014143841_pass_auth.js b/backend/migrations/20201014143841_pass_auth.js
deleted file mode 100644
index a7767eb19..000000000
--- a/backend/migrations/20201014143841_pass_auth.js
+++ /dev/null
@@ -1,41 +0,0 @@
-const migrate_name = 'pass_auth';
-const logger = require('../logger').migrate;
-
-/**
- * Migrate
- *
- * @see http://knexjs.org/#Schema
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.up = function (knex/*, Promise*/) {
-
- logger.info('[' + migrate_name + '] Migrating Up...');
-
- return knex.schema.table('access_list', function (access_list) {
- access_list.integer('pass_auth').notNull().defaultTo(1);
- })
- .then(() => {
- logger.info('[' + migrate_name + '] access_list Table altered');
- });
-};
-
-/**
- * Undo Migrate
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.down = function (knex/*, Promise*/) {
- logger.info('[' + migrate_name + '] Migrating Down...');
-
- return knex.schema.table('access_list', function (access_list) {
- access_list.dropColumn('pass_auth');
- })
- .then(() => {
- logger.info('[' + migrate_name + '] access_list pass_auth Column dropped');
- });
-};
diff --git a/backend/migrations/20210210154702_redirection_scheme.js b/backend/migrations/20210210154702_redirection_scheme.js
deleted file mode 100644
index 0dad48768..000000000
--- a/backend/migrations/20210210154702_redirection_scheme.js
+++ /dev/null
@@ -1,41 +0,0 @@
-const migrate_name = 'redirection_scheme';
-const logger = require('../logger').migrate;
-
-/**
- * Migrate
- *
- * @see http://knexjs.org/#Schema
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.up = function (knex/*, Promise*/) {
-
- logger.info('[' + migrate_name + '] Migrating Up...');
-
- return knex.schema.table('redirection_host', (table) => {
- table.string('forward_scheme').notNull().defaultTo('$scheme');
- })
- .then(function () {
- logger.info('[' + migrate_name + '] redirection_host Table altered');
- });
-};
-
-/**
- * Undo Migrate
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.down = function (knex/*, Promise*/) {
- logger.info('[' + migrate_name + '] Migrating Down...');
-
- return knex.schema.table('redirection_host', (table) => {
- table.dropColumn('forward_scheme');
- })
- .then(function () {
- logger.info('[' + migrate_name + '] redirection_host Table altered');
- });
-};
diff --git a/backend/migrations/20210210154703_redirection_status_code.js b/backend/migrations/20210210154703_redirection_status_code.js
deleted file mode 100644
index b9bea0b92..000000000
--- a/backend/migrations/20210210154703_redirection_status_code.js
+++ /dev/null
@@ -1,41 +0,0 @@
-const migrate_name = 'redirection_status_code';
-const logger = require('../logger').migrate;
-
-/**
- * Migrate
- *
- * @see http://knexjs.org/#Schema
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.up = function (knex/*, Promise*/) {
-
- logger.info('[' + migrate_name + '] Migrating Up...');
-
- return knex.schema.table('redirection_host', (table) => {
- table.integer('forward_http_code').notNull().unsigned().defaultTo(302);
- })
- .then(function () {
- logger.info('[' + migrate_name + '] redirection_host Table altered');
- });
-};
-
-/**
- * Undo Migrate
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.down = function (knex/*, Promise*/) {
- logger.info('[' + migrate_name + '] Migrating Down...');
-
- return knex.schema.table('redirection_host', (table) => {
- table.dropColumn('forward_http_code');
- })
- .then(function () {
- logger.info('[' + migrate_name + '] redirection_host Table altered');
- });
-};
diff --git a/backend/migrations/20210423103500_stream_domain.js b/backend/migrations/20210423103500_stream_domain.js
deleted file mode 100644
index a894ca5e6..000000000
--- a/backend/migrations/20210423103500_stream_domain.js
+++ /dev/null
@@ -1,40 +0,0 @@
-const migrate_name = 'stream_domain';
-const logger = require('../logger').migrate;
-
-/**
- * Migrate
- *
- * @see http://knexjs.org/#Schema
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.up = function (knex/*, Promise*/) {
- logger.info('[' + migrate_name + '] Migrating Up...');
-
- return knex.schema.table('stream', (table) => {
- table.renameColumn('forward_ip', 'forwarding_host');
- })
- .then(function () {
- logger.info('[' + migrate_name + '] stream Table altered');
- });
-};
-
-/**
- * Undo Migrate
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.down = function (knex/*, Promise*/) {
- logger.info('[' + migrate_name + '] Migrating Down...');
-
- return knex.schema.table('stream', (table) => {
- table.renameColumn('forwarding_host', 'forward_ip');
- })
- .then(function () {
- logger.info('[' + migrate_name + '] stream Table altered');
- });
-};
diff --git a/backend/migrations/20211108145214_regenerate_default_host.js b/backend/migrations/20211108145214_regenerate_default_host.js
deleted file mode 100644
index 4c50941ff..000000000
--- a/backend/migrations/20211108145214_regenerate_default_host.js
+++ /dev/null
@@ -1,50 +0,0 @@
-const migrate_name = 'stream_domain';
-const logger = require('../logger').migrate;
-const internalNginx = require('../internal/nginx');
-
-async function regenerateDefaultHost(knex) {
- const row = await knex('setting').select('*').where('id', 'default-site').first();
-
- if (!row) {
- return Promise.resolve();
- }
-
- return internalNginx.deleteConfig('default')
- .then(() => {
- return internalNginx.generateConfig('default', row);
- })
- .then(() => {
- return internalNginx.test();
- })
- .then(() => {
- return internalNginx.reload();
- });
-}
-
-/**
- * Migrate
- *
- * @see http://knexjs.org/#Schema
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.up = function (knex) {
- logger.info('[' + migrate_name + '] Migrating Up...');
-
- return regenerateDefaultHost(knex);
-};
-
-/**
- * Undo Migrate
- *
- * @param {Object} knex
- * @param {Promise} Promise
- * @returns {Promise}
- */
-exports.down = function (knex) {
- logger.info('[' + migrate_name + '] Migrating Down...');
-
- return regenerateDefaultHost(knex);
-};
\ No newline at end of file
diff --git a/backend/models/access_list.js b/backend/models/access_list.js
deleted file mode 100644
index 01974e86e..000000000
--- a/backend/models/access_list.js
+++ /dev/null
@@ -1,102 +0,0 @@
-// Objection Docs:
-// http://vincit.github.io/objection.js/
-
-const db = require('../db');
-const Model = require('objection').Model;
-const User = require('./user');
-const AccessListAuth = require('./access_list_auth');
-const AccessListClient = require('./access_list_client');
-const now = require('./now_helper');
-
-Model.knex(db);
-
-class AccessList extends Model {
- $beforeInsert () {
- this.created_on = now();
- this.modified_on = now();
-
- // Default for meta
- if (typeof this.meta === 'undefined') {
- this.meta = {};
- }
- }
-
- $beforeUpdate () {
- this.modified_on = now();
- }
-
- static get name () {
- return 'AccessList';
- }
-
- static get tableName () {
- return 'access_list';
- }
-
- static get jsonAttributes () {
- return ['meta'];
- }
-
- static get relationMappings () {
- const ProxyHost = require('./proxy_host');
-
- return {
- owner: {
- relation: Model.HasOneRelation,
- modelClass: User,
- join: {
- from: 'access_list.owner_user_id',
- to: 'user.id'
- },
- modify: function (qb) {
- qb.where('user.is_deleted', 0);
- qb.omit(['id', 'created_on', 'modified_on', 'is_deleted', 'email', 'roles']);
- }
- },
- items: {
- relation: Model.HasManyRelation,
- modelClass: AccessListAuth,
- join: {
- from: 'access_list.id',
- to: 'access_list_auth.access_list_id'
- },
- modify: function (qb) {
- qb.omit(['id', 'created_on', 'modified_on', 'access_list_id', 'meta']);
- }
- },
- clients: {
- relation: Model.HasManyRelation,
- modelClass: AccessListClient,
- join: {
- from: 'access_list.id',
- to: 'access_list_client.access_list_id'
- },
- modify: function (qb) {
- qb.omit(['id', 'created_on', 'modified_on', 'access_list_id', 'meta']);
- }
- },
- proxy_hosts: {
- relation: Model.HasManyRelation,
- modelClass: ProxyHost,
- join: {
- from: 'access_list.id',
- to: 'proxy_host.access_list_id'
- },
- modify: function (qb) {
- qb.where('proxy_host.is_deleted', 0);
- qb.omit(['is_deleted', 'meta']);
- }
- }
- };
- }
-
- get satisfy() {
- return this.satisfy_any ? 'satisfy any' : 'satisfy all';
- }
-
- get passauth() {
- return this.pass_auth ? '' : 'proxy_set_header Authorization "";';
- }
-}
-
-module.exports = AccessList;
diff --git a/backend/models/access_list_auth.js b/backend/models/access_list_auth.js
deleted file mode 100644
index 932371f3e..000000000
--- a/backend/models/access_list_auth.js
+++ /dev/null
@@ -1,55 +0,0 @@
-// Objection Docs:
-// http://vincit.github.io/objection.js/
-
-const db = require('../db');
-const Model = require('objection').Model;
-const now = require('./now_helper');
-
-Model.knex(db);
-
-class AccessListAuth extends Model {
- $beforeInsert () {
- this.created_on = now();
- this.modified_on = now();
-
- // Default for meta
- if (typeof this.meta === 'undefined') {
- this.meta = {};
- }
- }
-
- $beforeUpdate () {
- this.modified_on = now();
- }
-
- static get name () {
- return 'AccessListAuth';
- }
-
- static get tableName () {
- return 'access_list_auth';
- }
-
- static get jsonAttributes () {
- return ['meta'];
- }
-
- static get relationMappings () {
- return {
- access_list: {
- relation: Model.HasOneRelation,
- modelClass: require('./access_list'),
- join: {
- from: 'access_list_auth.access_list_id',
- to: 'access_list.id'
- },
- modify: function (qb) {
- qb.where('access_list.is_deleted', 0);
- qb.omit(['created_on', 'modified_on', 'is_deleted', 'access_list_id']);
- }
- }
- };
- }
-}
-
-module.exports = AccessListAuth;
diff --git a/backend/models/access_list_client.js b/backend/models/access_list_client.js
deleted file mode 100644
index e257213a6..000000000
--- a/backend/models/access_list_client.js
+++ /dev/null
@@ -1,59 +0,0 @@
-// Objection Docs:
-// http://vincit.github.io/objection.js/
-
-const db = require('../db');
-const Model = require('objection').Model;
-const now = require('./now_helper');
-
-Model.knex(db);
-
-class AccessListClient extends Model {
- $beforeInsert () {
- this.created_on = now();
- this.modified_on = now();
-
- // Default for meta
- if (typeof this.meta === 'undefined') {
- this.meta = {};
- }
- }
-
- $beforeUpdate () {
- this.modified_on = now();
- }
-
- static get name () {
- return 'AccessListClient';
- }
-
- static get tableName () {
- return 'access_list_client';
- }
-
- static get jsonAttributes () {
- return ['meta'];
- }
-
- static get relationMappings () {
- return {
- access_list: {
- relation: Model.HasOneRelation,
- modelClass: require('./access_list'),
- join: {
- from: 'access_list_client.access_list_id',
- to: 'access_list.id'
- },
- modify: function (qb) {
- qb.where('access_list.is_deleted', 0);
- qb.omit(['created_on', 'modified_on', 'is_deleted', 'access_list_id']);
- }
- }
- };
- }
-
- get rule() {
- return `${this.directive} ${this.address}`;
- }
-}
-
-module.exports = AccessListClient;
diff --git a/backend/models/audit-log.js b/backend/models/audit-log.js
deleted file mode 100644
index a3a318c8f..000000000
--- a/backend/models/audit-log.js
+++ /dev/null
@@ -1,55 +0,0 @@
-// Objection Docs:
-// http://vincit.github.io/objection.js/
-
-const db = require('../db');
-const Model = require('objection').Model;
-const User = require('./user');
-const now = require('./now_helper');
-
-Model.knex(db);
-
-class AuditLog extends Model {
- $beforeInsert () {
- this.created_on = now();
- this.modified_on = now();
-
- // Default for meta
- if (typeof this.meta === 'undefined') {
- this.meta = {};
- }
- }
-
- $beforeUpdate () {
- this.modified_on = now();
- }
-
- static get name () {
- return 'AuditLog';
- }
-
- static get tableName () {
- return 'audit_log';
- }
-
- static get jsonAttributes () {
- return ['meta'];
- }
-
- static get relationMappings () {
- return {
- user: {
- relation: Model.HasOneRelation,
- modelClass: User,
- join: {
- from: 'audit_log.user_id',
- to: 'user.id'
- },
- modify: function (qb) {
- qb.omit(['id', 'created_on', 'modified_on', 'roles']);
- }
- }
- };
- }
-}
-
-module.exports = AuditLog;
diff --git a/backend/models/auth.js b/backend/models/auth.js
deleted file mode 100644
index 5ba5f3804..000000000
--- a/backend/models/auth.js
+++ /dev/null
@@ -1,86 +0,0 @@
-// Objection Docs:
-// http://vincit.github.io/objection.js/
-
-const bcrypt = require('bcrypt');
-const db = require('../db');
-const Model = require('objection').Model;
-const User = require('./user');
-const now = require('./now_helper');
-
-Model.knex(db);
-
-function encryptPassword () {
- /* jshint -W040 */
- let _this = this;
-
- if (_this.type === 'password' && _this.secret) {
- return bcrypt.hash(_this.secret, 13)
- .then(function (hash) {
- _this.secret = hash;
- });
- }
-
- return null;
-}
-
-class Auth extends Model {
- $beforeInsert (queryContext) {
- this.created_on = now();
- this.modified_on = now();
-
- // Default for meta
- if (typeof this.meta === 'undefined') {
- this.meta = {};
- }
-
- return encryptPassword.apply(this, queryContext);
- }
-
- $beforeUpdate (queryContext) {
- this.modified_on = now();
- return encryptPassword.apply(this, queryContext);
- }
-
- /**
- * Verify a plain password against the encrypted password
- *
- * @param {String} password
- * @returns {Promise}
- */
- verifyPassword (password) {
- return bcrypt.compare(password, this.secret);
- }
-
- static get name () {
- return 'Auth';
- }
-
- static get tableName () {
- return 'auth';
- }
-
- static get jsonAttributes () {
- return ['meta'];
- }
-
- static get relationMappings () {
- return {
- user: {
- relation: Model.HasOneRelation,
- modelClass: User,
- join: {
- from: 'auth.user_id',
- to: 'user.id'
- },
- filter: {
- is_deleted: 0
- },
- modify: function (qb) {
- qb.omit(['is_deleted']);
- }
- }
- };
- }
-}
-
-module.exports = Auth;
diff --git a/backend/models/certificate.js b/backend/models/certificate.js
deleted file mode 100644
index 6084a9953..000000000
--- a/backend/models/certificate.js
+++ /dev/null
@@ -1,73 +0,0 @@
-// Objection Docs:
-// http://vincit.github.io/objection.js/
-
-const db = require('../db');
-const Model = require('objection').Model;
-const User = require('./user');
-const now = require('./now_helper');
-
-Model.knex(db);
-
-class Certificate extends Model {
- $beforeInsert () {
- this.created_on = now();
- this.modified_on = now();
-
- // Default for expires_on
- if (typeof this.expires_on === 'undefined') {
- this.expires_on = now();
- }
-
- // Default for domain_names
- if (typeof this.domain_names === 'undefined') {
- this.domain_names = [];
- }
-
- // Default for meta
- if (typeof this.meta === 'undefined') {
- this.meta = {};
- }
-
- this.domain_names.sort();
- }
-
- $beforeUpdate () {
- this.modified_on = now();
-
- // Sort domain_names
- if (typeof this.domain_names !== 'undefined') {
- this.domain_names.sort();
- }
- }
-
- static get name () {
- return 'Certificate';
- }
-
- static get tableName () {
- return 'certificate';
- }
-
- static get jsonAttributes () {
- return ['domain_names', 'meta'];
- }
-
- static get relationMappings () {
- return {
- owner: {
- relation: Model.HasOneRelation,
- modelClass: User,
- join: {
- from: 'certificate.owner_user_id',
- to: 'user.id'
- },
- modify: function (qb) {
- qb.where('user.is_deleted', 0);
- qb.omit(['id', 'created_on', 'modified_on', 'is_deleted', 'email', 'roles']);
- }
- }
- };
- }
-}
-
-module.exports = Certificate;
diff --git a/backend/models/dead_host.js b/backend/models/dead_host.js
deleted file mode 100644
index 6de42a337..000000000
--- a/backend/models/dead_host.js
+++ /dev/null
@@ -1,81 +0,0 @@
-// Objection Docs:
-// http://vincit.github.io/objection.js/
-
-const db = require('../db');
-const Model = require('objection').Model;
-const User = require('./user');
-const Certificate = require('./certificate');
-const now = require('./now_helper');
-
-Model.knex(db);
-
-class DeadHost extends Model {
- $beforeInsert () {
- this.created_on = now();
- this.modified_on = now();
-
- // Default for domain_names
- if (typeof this.domain_names === 'undefined') {
- this.domain_names = [];
- }
-
- // Default for meta
- if (typeof this.meta === 'undefined') {
- this.meta = {};
- }
-
- this.domain_names.sort();
- }
-
- $beforeUpdate () {
- this.modified_on = now();
-
- // Sort domain_names
- if (typeof this.domain_names !== 'undefined') {
- this.domain_names.sort();
- }
- }
-
- static get name () {
- return 'DeadHost';
- }
-
- static get tableName () {
- return 'dead_host';
- }
-
- static get jsonAttributes () {
- return ['domain_names', 'meta'];
- }
-
- static get relationMappings () {
- return {
- owner: {
- relation: Model.HasOneRelation,
- modelClass: User,
- join: {
- from: 'dead_host.owner_user_id',
- to: 'user.id'
- },
- modify: function (qb) {
- qb.where('user.is_deleted', 0);
- qb.omit(['id', 'created_on', 'modified_on', 'is_deleted', 'email', 'roles']);
- }
- },
- certificate: {
- relation: Model.HasOneRelation,
- modelClass: Certificate,
- join: {
- from: 'dead_host.certificate_id',
- to: 'certificate.id'
- },
- modify: function (qb) {
- qb.where('certificate.is_deleted', 0);
- qb.omit(['id', 'created_on', 'modified_on', 'is_deleted']);
- }
- }
- };
- }
-}
-
-module.exports = DeadHost;
diff --git a/backend/models/now_helper.js b/backend/models/now_helper.js
deleted file mode 100644
index def16d083..000000000
--- a/backend/models/now_helper.js
+++ /dev/null
@@ -1,13 +0,0 @@
-const db = require('../db');
-const config = require('config');
-const Model = require('objection').Model;
-
-Model.knex(db);
-
-module.exports = function () {
- if (config.database.knex && config.database.knex.client === 'sqlite3') {
- return Model.raw('datetime(\'now\',\'localtime\')');
- } else {
- return Model.raw('NOW()');
- }
-};
diff --git a/backend/models/proxy_host.js b/backend/models/proxy_host.js
deleted file mode 100644
index a75830886..000000000
--- a/backend/models/proxy_host.js
+++ /dev/null
@@ -1,94 +0,0 @@
-// Objection Docs:
-// http://vincit.github.io/objection.js/
-
-const db = require('../db');
-const Model = require('objection').Model;
-const User = require('./user');
-const AccessList = require('./access_list');
-const Certificate = require('./certificate');
-const now = require('./now_helper');
-
-Model.knex(db);
-
-class ProxyHost extends Model {
- $beforeInsert () {
- this.created_on = now();
- this.modified_on = now();
-
- // Default for domain_names
- if (typeof this.domain_names === 'undefined') {
- this.domain_names = [];
- }
-
- // Default for meta
- if (typeof this.meta === 'undefined') {
- this.meta = {};
- }
-
- this.domain_names.sort();
- }
-
- $beforeUpdate () {
- this.modified_on = now();
-
- // Sort domain_names
- if (typeof this.domain_names !== 'undefined') {
- this.domain_names.sort();
- }
- }
-
- static get name () {
- return 'ProxyHost';
- }
-
- static get tableName () {
- return 'proxy_host';
- }
-
- static get jsonAttributes () {
- return ['domain_names', 'meta', 'locations'];
- }
-
- static get relationMappings () {
- return {
- owner: {
- relation: Model.HasOneRelation,
- modelClass: User,
- join: {
- from: 'proxy_host.owner_user_id',
- to: 'user.id'
- },
- modify: function (qb) {
- qb.where('user.is_deleted', 0);
- qb.omit(['id', 'created_on', 'modified_on', 'is_deleted', 'email', 'roles']);
- }
- },
- access_list: {
- relation: Model.HasOneRelation,
- modelClass: AccessList,
- join: {
- from: 'proxy_host.access_list_id',
- to: 'access_list.id'
- },
- modify: function (qb) {
- qb.where('access_list.is_deleted', 0);
- qb.omit(['id', 'created_on', 'modified_on', 'is_deleted']);
- }
- },
- certificate: {
- relation: Model.HasOneRelation,
- modelClass: Certificate,
- join: {
- from: 'proxy_host.certificate_id',
- to: 'certificate.id'
- },
- modify: function (qb) {
- qb.where('certificate.is_deleted', 0);
- qb.omit(['id', 'created_on', 'modified_on', 'is_deleted']);
- }
- }
- };
- }
-}
-
-module.exports = ProxyHost;
diff --git a/backend/models/redirection_host.js b/backend/models/redirection_host.js
deleted file mode 100644
index dd149b769..000000000
--- a/backend/models/redirection_host.js
+++ /dev/null
@@ -1,81 +0,0 @@
-// Objection Docs:
-// http://vincit.github.io/objection.js/
-
-const db = require('../db');
-const Model = require('objection').Model;
-const User = require('./user');
-const Certificate = require('./certificate');
-const now = require('./now_helper');
-
-Model.knex(db);
-
-class RedirectionHost extends Model {
- $beforeInsert () {
- this.created_on = now();
- this.modified_on = now();
-
- // Default for domain_names
- if (typeof this.domain_names === 'undefined') {
- this.domain_names = [];
- }
-
- // Default for meta
- if (typeof this.meta === 'undefined') {
- this.meta = {};
- }
-
- this.domain_names.sort();
- }
-
- $beforeUpdate () {
- this.modified_on = now();
-
- // Sort domain_names
- if (typeof this.domain_names !== 'undefined') {
- this.domain_names.sort();
- }
- }
-
- static get name () {
- return 'RedirectionHost';
- }
-
- static get tableName () {
- return 'redirection_host';
- }
-
- static get jsonAttributes () {
- return ['domain_names', 'meta'];
- }
-
- static get relationMappings () {
- return {
- owner: {
- relation: Model.HasOneRelation,
- modelClass: User,
- join: {
- from: 'redirection_host.owner_user_id',
- to: 'user.id'
- },
- modify: function (qb) {
- qb.where('user.is_deleted', 0);
- qb.omit(['id', 'created_on', 'modified_on', 'is_deleted', 'email', 'roles']);
- }
- },
- certificate: {
- relation: Model.HasOneRelation,
- modelClass: Certificate,
- join: {
- from: 'redirection_host.certificate_id',
- to: 'certificate.id'
- },
- modify: function (qb) {
- qb.where('certificate.is_deleted', 0);
- qb.omit(['id', 'created_on', 'modified_on', 'is_deleted']);
- }
- }
- };
- }
-}
-
-module.exports = RedirectionHost;
diff --git a/backend/models/setting.js b/backend/models/setting.js
deleted file mode 100644
index 75aa90076..000000000
--- a/backend/models/setting.js
+++ /dev/null
@@ -1,30 +0,0 @@
-// Objection Docs:
-// http://vincit.github.io/objection.js/
-
-const db = require('../db');
-const Model = require('objection').Model;
-
-Model.knex(db);
-
-class Setting extends Model {
- $beforeInsert () {
- // Default for meta
- if (typeof this.meta === 'undefined') {
- this.meta = {};
- }
- }
-
- static get name () {
- return 'Setting';
- }
-
- static get tableName () {
- return 'setting';
- }
-
- static get jsonAttributes () {
- return ['meta'];
- }
-}
-
-module.exports = Setting;
diff --git a/backend/models/stream.js b/backend/models/stream.js
deleted file mode 100644
index ed65de0fc..000000000
--- a/backend/models/stream.js
+++ /dev/null
@@ -1,56 +0,0 @@
-// Objection Docs:
-// http://vincit.github.io/objection.js/
-
-const db = require('../db');
-const Model = require('objection').Model;
-const User = require('./user');
-const now = require('./now_helper');
-
-Model.knex(db);
-
-class Stream extends Model {
- $beforeInsert () {
- this.created_on = now();
- this.modified_on = now();
-
- // Default for meta
- if (typeof this.meta === 'undefined') {
- this.meta = {};
- }
- }
-
- $beforeUpdate () {
- this.modified_on = now();
- }
-
- static get name () {
- return 'Stream';
- }
-
- static get tableName () {
- return 'stream';
- }
-
- static get jsonAttributes () {
- return ['meta'];
- }
-
- static get relationMappings () {
- return {
- owner: {
- relation: Model.HasOneRelation,
- modelClass: User,
- join: {
- from: 'stream.owner_user_id',
- to: 'user.id'
- },
- modify: function (qb) {
- qb.where('user.is_deleted', 0);
- qb.omit(['id', 'created_on', 'modified_on', 'is_deleted', 'email', 'roles']);
- }
- }
- };
- }
-}
-
-module.exports = Stream;
diff --git a/backend/models/token.js b/backend/models/token.js
deleted file mode 100644
index 4e1b1826e..000000000
--- a/backend/models/token.js
+++ /dev/null
@@ -1,147 +0,0 @@
-/**
- NOTE: This is not a database table, this is a model of a Token object that can be created/loaded
- and then has abilities after that.
- */
-
-const _ = require('lodash');
-const jwt = require('jsonwebtoken');
-const crypto = require('crypto');
-const error = require('../lib/error');
-const ALGO = 'RS256';
-
-let public_key = null;
-let private_key = null;
-
-function checkJWTKeyPair() {
- if (!public_key || !private_key) {
- let config = require('config');
- public_key = config.get('jwt.pub');
- private_key = config.get('jwt.key');
- }
-}
-
-module.exports = function () {
-
- let token_data = {};
-
- let self = {
- /**
- * @param {Object} payload
- * @returns {Promise}
- */
- create: (payload) => {
- // sign with RSA SHA256
- let options = {
- algorithm: ALGO,
- expiresIn: payload.expiresIn || '1d'
- };
-
- payload.jti = crypto.randomBytes(12)
- .toString('base64')
- .substr(-8);
-
- checkJWTKeyPair();
-
- return new Promise((resolve, reject) => {
- jwt.sign(payload, private_key, options, (err, token) => {
- if (err) {
- reject(err);
- } else {
- token_data = payload;
- resolve({
- token: token,
- payload: payload
- });
- }
- });
- });
- },
-
- /**
- * @param {String} token
- * @returns {Promise}
- */
- load: function (token) {
- return new Promise((resolve, reject) => {
- checkJWTKeyPair();
- try {
- if (!token || token === null || token === 'null') {
- reject(new error.AuthError('Empty token'));
- } else {
- jwt.verify(token, public_key, {ignoreExpiration: false, algorithms: [ALGO]}, (err, result) => {
- if (err) {
-
- if (err.name === 'TokenExpiredError') {
- reject(new error.AuthError('Token has expired', err));
- } else {
- reject(err);
- }
-
- } else {
- token_data = result;
-
- // Hack: some tokens out in the wild have a scope of 'all' instead of 'user'.
- // For 30 days at least, we need to replace 'all' with user.
- if ((typeof token_data.scope !== 'undefined' && _.indexOf(token_data.scope, 'all') !== -1)) {
- //console.log('Warning! Replacing "all" scope with "user"');
-
- token_data.scope = ['user'];
- }
-
- resolve(token_data);
- }
- });
- }
- } catch (err) {
- reject(err);
- }
- });
-
- },
-
- /**
- * Does the token have the specified scope?
- *
- * @param {String} scope
- * @returns {Boolean}
- */
- hasScope: function (scope) {
- return typeof token_data.scope !== 'undefined' && _.indexOf(token_data.scope, scope) !== -1;
- },
-
- /**
- * @param {String} key
- * @return {*}
- */
- get: function (key) {
- if (typeof token_data[key] !== 'undefined') {
- return token_data[key];
- }
-
- return null;
- },
-
- /**
- * @param {String} key
- * @param {*} value
- */
- set: function (key, value) {
- token_data[key] = value;
- },
-
- /**
- * @param [default_value]
- * @returns {Integer}
- */
- getUserId: (default_value) => {
- let attrs = self.get('attrs');
- if (attrs && typeof attrs.id !== 'undefined' && attrs.id) {
- return attrs.id;
- }
-
- return default_value || 0;
- }
- };
-
- return self;
-};
diff --git a/backend/models/user.js b/backend/models/user.js
deleted file mode 100644
index c76f7dbf5..000000000
--- a/backend/models/user.js
+++ /dev/null
@@ -1,56 +0,0 @@
-// Objection Docs:
-// http://vincit.github.io/objection.js/
-
-const db = require('../db');
-const Model = require('objection').Model;
-const UserPermission = require('./user_permission');
-const now = require('./now_helper');
-
-Model.knex(db);
-
-class User extends Model {
- $beforeInsert () {
- this.created_on = now();
- this.modified_on = now();
-
- // Default for roles
- if (typeof this.roles === 'undefined') {
- this.roles = [];
- }
- }
-
- $beforeUpdate () {
- this.modified_on = now();
- }
-
- static get name () {
- return 'User';
- }
-
- static get tableName () {
- return 'user';
- }
-
- static get jsonAttributes () {
- return ['roles'];
- }
-
- static get relationMappings () {
- return {
- permissions: {
- relation: Model.HasOneRelation,
- modelClass: UserPermission,
- join: {
- from: 'user.id',
- to: 'user_permission.user_id'
- },
- modify: function (qb) {
- qb.omit(['id', 'created_on', 'modified_on', 'user_id']);
- }
- }
- };
- }
-
-}
-
-module.exports = User;
diff --git a/backend/models/user_permission.js b/backend/models/user_permission.js
deleted file mode 100644
index bb87d5dc4..000000000
--- a/backend/models/user_permission.js
+++ /dev/null
@@ -1,29 +0,0 @@
-// Objection Docs:
-// http://vincit.github.io/objection.js/
-
-const db = require('../db');
-const Model = require('objection').Model;
-const now = require('./now_helper');
-
-Model.knex(db);
-
-class UserPermission extends Model {
- $beforeInsert () {
- this.created_on = now();
- this.modified_on = now();
- }
-
- $beforeUpdate () {
- this.modified_on = now();
- }
-
- static get name () {
- return 'UserPermission';
- }
-
- static get tableName () {
- return 'user_permission';
- }
-}
-
-module.exports = UserPermission;
diff --git a/backend/nodemon.json b/backend/nodemon.json
deleted file mode 100644
index 3d6d13420..000000000
--- a/backend/nodemon.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "verbose": false,
- "ignore": [
- "data"
- ],
- "ext": "js json ejs"
-}
diff --git a/backend/package.json b/backend/package.json
deleted file mode 100644
index 28b6f178b..000000000
--- a/backend/package.json
+++ /dev/null
@@ -1,43 +0,0 @@
-{
- "name": "nginx-proxy-manager",
- "version": "0.0.0",
- "description": "A beautiful interface for creating Nginx endpoints",
- "main": "js/index.js",
- "dependencies": {
- "ajv": "^6.12.0",
- "archiver": "^5.3.0",
- "batchflow": "^0.4.0",
- "bcrypt": "^5.0.0",
- "body-parser": "^1.19.0",
- "compression": "^1.7.4",
- "config": "^3.3.1",
- "express": "^4.17.1",
- "express-fileupload": "^1.1.9",
- "gravatar": "^1.8.0",
- "json-schema-ref-parser": "^8.0.0",
- "jsonwebtoken": "^8.5.1",
- "knex": "^0.20.13",
- "liquidjs": "^9.11.10",
- "lodash": "^4.17.21",
- "moment": "^2.24.0",
- "mysql": "^2.18.1",
- "node-rsa": "^1.0.8",
- "nodemon": "^2.0.2",
- "objection": "^2.2.16",
- "path": "^0.12.7",
- "signale": "^1.4.0",
- "sqlite3": "^4.1.1",
- "temp-write": "^4.0.0"
- },
- "signale": {
- "displayDate": true,
- "displayTimestamp": true
- },
- "author": "Jamie Curnow ",
- "license": "MIT",
- "devDependencies": {
- "eslint": "^6.8.0",
- "eslint-plugin-align-assignments": "^1.1.2",
- "prettier": "^2.0.4"
- }
-}
diff --git a/backend/routes/api/audit-log.js b/backend/routes/api/audit-log.js
deleted file mode 100644
index 8a2490c3f..000000000
--- a/backend/routes/api/audit-log.js
+++ /dev/null
@@ -1,52 +0,0 @@
-const express = require('express');
-const validator = require('../../lib/validator');
-const jwtdecode = require('../../lib/express/jwt-decode');
-const internalAuditLog = require('../../internal/audit-log');
-
-let router = express.Router({
- caseSensitive: true,
- strict: true,
- mergeParams: true
-});
-
-/**
- * /api/audit-log
- */
-router
- .route('/')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * GET /api/audit-log
- *
- * Retrieve all logs
- */
- .get((req, res, next) => {
- validator({
- additionalProperties: false,
- properties: {
- expand: {
- $ref: 'definitions#/definitions/expand'
- },
- query: {
- $ref: 'definitions#/definitions/query'
- }
- }
- }, {
- expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
- query: (typeof req.query.query === 'string' ? req.query.query : null)
- })
- .then((data) => {
- return internalAuditLog.getAll(res.locals.access, data.expand, data.query);
- })
- .then((rows) => {
- res.status(200)
- .send(rows);
- })
- .catch(next);
- });
-
-module.exports = router;
diff --git a/backend/routes/api/main.js b/backend/routes/api/main.js
deleted file mode 100644
index 33cbbc21f..000000000
--- a/backend/routes/api/main.js
+++ /dev/null
@@ -1,51 +0,0 @@
-const express = require('express');
-const pjson = require('../../package.json');
-const error = require('../../lib/error');
-
-let router = express.Router({
- caseSensitive: true,
- strict: true,
- mergeParams: true
-});
-
-/**
- * Health Check
- * GET /api
- */
-router.get('/', (req, res/*, next*/) => {
- let version = pjson.version.split('-').shift().split('.');
-
- res.status(200).send({
- status: 'OK',
- version: {
- major: parseInt(version.shift(), 10),
- minor: parseInt(version.shift(), 10),
- revision: parseInt(version.shift(), 10)
- }
- });
-});
-
-router.use('/schema', require('./schema'));
-router.use('/tokens', require('./tokens'));
-router.use('/users', require('./users'));
-router.use('/audit-log', require('./audit-log'));
-router.use('/reports', require('./reports'));
-router.use('/settings', require('./settings'));
-router.use('/nginx/proxy-hosts', require('./nginx/proxy_hosts'));
-router.use('/nginx/redirection-hosts', require('./nginx/redirection_hosts'));
-router.use('/nginx/dead-hosts', require('./nginx/dead_hosts'));
-router.use('/nginx/streams', require('./nginx/streams'));
-router.use('/nginx/access-lists', require('./nginx/access_lists'));
-router.use('/nginx/certificates', require('./nginx/certificates'));
-
-/**
- * API 404 for all other routes
- *
- * ALL /api/*
- */
-router.all(/(.+)/, function (req, res, next) {
- req.params.page = req.params['0'];
- next(new error.ItemNotFoundError(req.params.page));
-});
-
-module.exports = router;
diff --git a/backend/routes/api/nginx/access_lists.js b/backend/routes/api/nginx/access_lists.js
deleted file mode 100644
index d55c3ae12..000000000
--- a/backend/routes/api/nginx/access_lists.js
+++ /dev/null
@@ -1,148 +0,0 @@
-const express = require('express');
-const validator = require('../../../lib/validator');
-const jwtdecode = require('../../../lib/express/jwt-decode');
-const internalAccessList = require('../../../internal/access-list');
-const apiValidator = require('../../../lib/validator/api');
-
-let router = express.Router({
- caseSensitive: true,
- strict: true,
- mergeParams: true
-});
-
-/**
- * /api/nginx/access-lists
- */
-router
- .route('/')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * GET /api/nginx/access-lists
- *
- * Retrieve all access-lists
- */
- .get((req, res, next) => {
- validator({
- additionalProperties: false,
- properties: {
- expand: {
- $ref: 'definitions#/definitions/expand'
- },
- query: {
- $ref: 'definitions#/definitions/query'
- }
- }
- }, {
- expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
- query: (typeof req.query.query === 'string' ? req.query.query : null)
- })
- .then((data) => {
- return internalAccessList.getAll(res.locals.access, data.expand, data.query);
- })
- .then((rows) => {
- res.status(200)
- .send(rows);
- })
- .catch(next);
- })
-
- /**
- * POST /api/nginx/access-lists
- *
- * Create a new access-list
- */
- .post((req, res, next) => {
- apiValidator({$ref: 'endpoints/access-lists#/links/1/schema'}, req.body)
- .then((payload) => {
- return internalAccessList.create(res.locals.access, payload);
- })
- .then((result) => {
- res.status(201)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Specific access-list
- *
- * /api/nginx/access-lists/123
- */
-router
- .route('/:list_id')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * GET /api/nginx/access-lists/123
- *
- * Retrieve a specific access-list
- */
- .get((req, res, next) => {
- validator({
- required: ['list_id'],
- additionalProperties: false,
- properties: {
- list_id: {
- $ref: 'definitions#/definitions/id'
- },
- expand: {
- $ref: 'definitions#/definitions/expand'
- }
- }
- }, {
- list_id: req.params.list_id,
- expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
- })
- .then((data) => {
- return internalAccessList.get(res.locals.access, {
- id: parseInt(data.list_id, 10),
- expand: data.expand
- });
- })
- .then((row) => {
- res.status(200)
- .send(row);
- })
- .catch(next);
- })
-
- /**
- * PUT /api/nginx/access-lists/123
- *
- * Update and existing access-list
- */
- .put((req, res, next) => {
- apiValidator({$ref: 'endpoints/access-lists#/links/2/schema'}, req.body)
- .then((payload) => {
- payload.id = parseInt(req.params.list_id, 10);
- return internalAccessList.update(res.locals.access, payload);
- })
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- })
-
- /**
- * DELETE /api/nginx/access-lists/123
- *
- * Delete and existing access-list
- */
- .delete((req, res, next) => {
- internalAccessList.delete(res.locals.access, {id: parseInt(req.params.list_id, 10)})
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-module.exports = router;
diff --git a/backend/routes/api/nginx/certificates.js b/backend/routes/api/nginx/certificates.js
deleted file mode 100644
index ffdfb515d..000000000
--- a/backend/routes/api/nginx/certificates.js
+++ /dev/null
@@ -1,299 +0,0 @@
-const express = require('express');
-const validator = require('../../../lib/validator');
-const jwtdecode = require('../../../lib/express/jwt-decode');
-const internalCertificate = require('../../../internal/certificate');
-const apiValidator = require('../../../lib/validator/api');
-
-let router = express.Router({
- caseSensitive: true,
- strict: true,
- mergeParams: true
-});
-
-/**
- * /api/nginx/certificates
- */
-router
- .route('/')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * GET /api/nginx/certificates
- *
- * Retrieve all certificates
- */
- .get((req, res, next) => {
- validator({
- additionalProperties: false,
- properties: {
- expand: {
- $ref: 'definitions#/definitions/expand'
- },
- query: {
- $ref: 'definitions#/definitions/query'
- }
- }
- }, {
- expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
- query: (typeof req.query.query === 'string' ? req.query.query : null)
- })
- .then((data) => {
- return internalCertificate.getAll(res.locals.access, data.expand, data.query);
- })
- .then((rows) => {
- res.status(200)
- .send(rows);
- })
- .catch(next);
- })
-
- /**
- * POST /api/nginx/certificates
- *
- * Create a new certificate
- */
- .post((req, res, next) => {
- apiValidator({$ref: 'endpoints/certificates#/links/1/schema'}, req.body)
- .then((payload) => {
- req.setTimeout(900000); // 15 minutes timeout
- return internalCertificate.create(res.locals.access, payload);
- })
- .then((result) => {
- res.status(201)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Test HTTP challenge for domains
- *
- * /api/nginx/certificates/test-http
- */
-router
- .route('/test-http')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
-/**
- * GET /api/nginx/certificates/test-http
- *
- * Test HTTP challenge for domains
- */
- .get((req, res, next) => {
- internalCertificate.testHttpsChallenge(res.locals.access, JSON.parse(req.query.domains))
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Specific certificate
- *
- * /api/nginx/certificates/123
- */
-router
- .route('/:certificate_id')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * GET /api/nginx/certificates/123
- *
- * Retrieve a specific certificate
- */
- .get((req, res, next) => {
- validator({
- required: ['certificate_id'],
- additionalProperties: false,
- properties: {
- certificate_id: {
- $ref: 'definitions#/definitions/id'
- },
- expand: {
- $ref: 'definitions#/definitions/expand'
- }
- }
- }, {
- certificate_id: req.params.certificate_id,
- expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
- })
- .then((data) => {
- return internalCertificate.get(res.locals.access, {
- id: parseInt(data.certificate_id, 10),
- expand: data.expand
- });
- })
- .then((row) => {
- res.status(200)
- .send(row);
- })
- .catch(next);
- })
-
- /**
- * PUT /api/nginx/certificates/123
- *
- * Update and existing certificate
- */
- .put((req, res, next) => {
- apiValidator({$ref: 'endpoints/certificates#/links/2/schema'}, req.body)
- .then((payload) => {
- payload.id = parseInt(req.params.certificate_id, 10);
- return internalCertificate.update(res.locals.access, payload);
- })
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- })
-
- /**
- * DELETE /api/nginx/certificates/123
- *
- * Update and existing certificate
- */
- .delete((req, res, next) => {
- internalCertificate.delete(res.locals.access, {id: parseInt(req.params.certificate_id, 10)})
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Upload Certs
- *
- * /api/nginx/certificates/123/upload
- */
-router
- .route('/:certificate_id/upload')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * POST /api/nginx/certificates/123/upload
- *
- * Upload certificates
- */
- .post((req, res, next) => {
- if (!req.files) {
- res.status(400)
- .send({error: 'No files were uploaded'});
- } else {
- internalCertificate.upload(res.locals.access, {
- id: parseInt(req.params.certificate_id, 10),
- files: req.files
- })
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- }
- });
-
-/**
- * Renew LE Certs
- *
- * /api/nginx/certificates/123/renew
- */
-router
- .route('/:certificate_id/renew')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * POST /api/nginx/certificates/123/renew
- *
- * Renew certificate
- */
- .post((req, res, next) => {
- req.setTimeout(900000); // 15 minutes timeout
- internalCertificate.renew(res.locals.access, {
- id: parseInt(req.params.certificate_id, 10)
- })
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Download LE Certs
- *
- * /api/nginx/certificates/123/download
- */
-router
- .route('/:certificate_id/download')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * GET /api/nginx/certificates/123/download
- *
- * Renew certificate
- */
- .get((req, res, next) => {
- internalCertificate.download(res.locals.access, {
- id: parseInt(req.params.certificate_id, 10)
- })
- .then((result) => {
- res.status(200)
- .download(result.fileName);
- })
- .catch(next);
- });
-
-/**
- * Validate Certs before saving
- *
- * /api/nginx/certificates/validate
- */
-router
- .route('/validate')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * POST /api/nginx/certificates/validate
- *
- * Validate certificates
- */
- .post((req, res, next) => {
- if (!req.files) {
- res.status(400)
- .send({error: 'No files were uploaded'});
- } else {
- internalCertificate.validate({
- files: req.files
- })
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- }
- });
-
-module.exports = router;
diff --git a/backend/routes/api/nginx/dead_hosts.js b/backend/routes/api/nginx/dead_hosts.js
deleted file mode 100644
index 08b58f2de..000000000
--- a/backend/routes/api/nginx/dead_hosts.js
+++ /dev/null
@@ -1,196 +0,0 @@
-const express = require('express');
-const validator = require('../../../lib/validator');
-const jwtdecode = require('../../../lib/express/jwt-decode');
-const internalDeadHost = require('../../../internal/dead-host');
-const apiValidator = require('../../../lib/validator/api');
-
-let router = express.Router({
- caseSensitive: true,
- strict: true,
- mergeParams: true
-});
-
-/**
- * /api/nginx/dead-hosts
- */
-router
- .route('/')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * GET /api/nginx/dead-hosts
- *
- * Retrieve all dead-hosts
- */
- .get((req, res, next) => {
- validator({
- additionalProperties: false,
- properties: {
- expand: {
- $ref: 'definitions#/definitions/expand'
- },
- query: {
- $ref: 'definitions#/definitions/query'
- }
- }
- }, {
- expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
- query: (typeof req.query.query === 'string' ? req.query.query : null)
- })
- .then((data) => {
- return internalDeadHost.getAll(res.locals.access, data.expand, data.query);
- })
- .then((rows) => {
- res.status(200)
- .send(rows);
- })
- .catch(next);
- })
-
- /**
- * POST /api/nginx/dead-hosts
- *
- * Create a new dead-host
- */
- .post((req, res, next) => {
- apiValidator({$ref: 'endpoints/dead-hosts#/links/1/schema'}, req.body)
- .then((payload) => {
- return internalDeadHost.create(res.locals.access, payload);
- })
- .then((result) => {
- res.status(201)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Specific dead-host
- *
- * /api/nginx/dead-hosts/123
- */
-router
- .route('/:host_id')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * GET /api/nginx/dead-hosts/123
- *
- * Retrieve a specific dead-host
- */
- .get((req, res, next) => {
- validator({
- required: ['host_id'],
- additionalProperties: false,
- properties: {
- host_id: {
- $ref: 'definitions#/definitions/id'
- },
- expand: {
- $ref: 'definitions#/definitions/expand'
- }
- }
- }, {
- host_id: req.params.host_id,
- expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
- })
- .then((data) => {
- return internalDeadHost.get(res.locals.access, {
- id: parseInt(data.host_id, 10),
- expand: data.expand
- });
- })
- .then((row) => {
- res.status(200)
- .send(row);
- })
- .catch(next);
- })
-
- /**
- * PUT /api/nginx/dead-hosts/123
- *
- * Update and existing dead-host
- */
- .put((req, res, next) => {
- apiValidator({$ref: 'endpoints/dead-hosts#/links/2/schema'}, req.body)
- .then((payload) => {
- payload.id = parseInt(req.params.host_id, 10);
- return internalDeadHost.update(res.locals.access, payload);
- })
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- })
-
- /**
- * DELETE /api/nginx/dead-hosts/123
- *
- * Update and existing dead-host
- */
- .delete((req, res, next) => {
- internalDeadHost.delete(res.locals.access, {id: parseInt(req.params.host_id, 10)})
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Enable dead-host
- *
- * /api/nginx/dead-hosts/123/enable
- */
-router
- .route('/:host_id/enable')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * POST /api/nginx/dead-hosts/123/enable
- */
- .post((req, res, next) => {
- internalDeadHost.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Disable dead-host
- *
- * /api/nginx/dead-hosts/123/disable
- */
-router
- .route('/:host_id/disable')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * POST /api/nginx/dead-hosts/123/disable
- */
- .post((req, res, next) => {
- internalDeadHost.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-module.exports = router;
diff --git a/backend/routes/api/nginx/proxy_hosts.js b/backend/routes/api/nginx/proxy_hosts.js
deleted file mode 100644
index 6f933c3d3..000000000
--- a/backend/routes/api/nginx/proxy_hosts.js
+++ /dev/null
@@ -1,196 +0,0 @@
-const express = require('express');
-const validator = require('../../../lib/validator');
-const jwtdecode = require('../../../lib/express/jwt-decode');
-const internalProxyHost = require('../../../internal/proxy-host');
-const apiValidator = require('../../../lib/validator/api');
-
-let router = express.Router({
- caseSensitive: true,
- strict: true,
- mergeParams: true
-});
-
-/**
- * /api/nginx/proxy-hosts
- */
-router
- .route('/')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * GET /api/nginx/proxy-hosts
- *
- * Retrieve all proxy-hosts
- */
- .get((req, res, next) => {
- validator({
- additionalProperties: false,
- properties: {
- expand: {
- $ref: 'definitions#/definitions/expand'
- },
- query: {
- $ref: 'definitions#/definitions/query'
- }
- }
- }, {
- expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
- query: (typeof req.query.query === 'string' ? req.query.query : null)
- })
- .then((data) => {
- return internalProxyHost.getAll(res.locals.access, data.expand, data.query);
- })
- .then((rows) => {
- res.status(200)
- .send(rows);
- })
- .catch(next);
- })
-
- /**
- * POST /api/nginx/proxy-hosts
- *
- * Create a new proxy-host
- */
- .post((req, res, next) => {
- apiValidator({$ref: 'endpoints/proxy-hosts#/links/1/schema'}, req.body)
- .then((payload) => {
- return internalProxyHost.create(res.locals.access, payload);
- })
- .then((result) => {
- res.status(201)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Specific proxy-host
- *
- * /api/nginx/proxy-hosts/123
- */
-router
- .route('/:host_id')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * GET /api/nginx/proxy-hosts/123
- *
- * Retrieve a specific proxy-host
- */
- .get((req, res, next) => {
- validator({
- required: ['host_id'],
- additionalProperties: false,
- properties: {
- host_id: {
- $ref: 'definitions#/definitions/id'
- },
- expand: {
- $ref: 'definitions#/definitions/expand'
- }
- }
- }, {
- host_id: req.params.host_id,
- expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
- })
- .then((data) => {
- return internalProxyHost.get(res.locals.access, {
- id: parseInt(data.host_id, 10),
- expand: data.expand
- });
- })
- .then((row) => {
- res.status(200)
- .send(row);
- })
- .catch(next);
- })
-
- /**
- * PUT /api/nginx/proxy-hosts/123
- *
- * Update and existing proxy-host
- */
- .put((req, res, next) => {
- apiValidator({$ref: 'endpoints/proxy-hosts#/links/2/schema'}, req.body)
- .then((payload) => {
- payload.id = parseInt(req.params.host_id, 10);
- return internalProxyHost.update(res.locals.access, payload);
- })
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- })
-
- /**
- * DELETE /api/nginx/proxy-hosts/123
- *
- * Update and existing proxy-host
- */
- .delete((req, res, next) => {
- internalProxyHost.delete(res.locals.access, {id: parseInt(req.params.host_id, 10)})
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Enable proxy-host
- *
- * /api/nginx/proxy-hosts/123/enable
- */
-router
- .route('/:host_id/enable')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * POST /api/nginx/proxy-hosts/123/enable
- */
- .post((req, res, next) => {
- internalProxyHost.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Disable proxy-host
- *
- * /api/nginx/proxy-hosts/123/disable
- */
-router
- .route('/:host_id/disable')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * POST /api/nginx/proxy-hosts/123/disable
- */
- .post((req, res, next) => {
- internalProxyHost.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-module.exports = router;
diff --git a/backend/routes/api/nginx/redirection_hosts.js b/backend/routes/api/nginx/redirection_hosts.js
deleted file mode 100644
index 4d44c1126..000000000
--- a/backend/routes/api/nginx/redirection_hosts.js
+++ /dev/null
@@ -1,196 +0,0 @@
-const express = require('express');
-const validator = require('../../../lib/validator');
-const jwtdecode = require('../../../lib/express/jwt-decode');
-const internalRedirectionHost = require('../../../internal/redirection-host');
-const apiValidator = require('../../../lib/validator/api');
-
-let router = express.Router({
- caseSensitive: true,
- strict: true,
- mergeParams: true
-});
-
-/**
- * /api/nginx/redirection-hosts
- */
-router
- .route('/')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * GET /api/nginx/redirection-hosts
- *
- * Retrieve all redirection-hosts
- */
- .get((req, res, next) => {
- validator({
- additionalProperties: false,
- properties: {
- expand: {
- $ref: 'definitions#/definitions/expand'
- },
- query: {
- $ref: 'definitions#/definitions/query'
- }
- }
- }, {
- expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
- query: (typeof req.query.query === 'string' ? req.query.query : null)
- })
- .then((data) => {
- return internalRedirectionHost.getAll(res.locals.access, data.expand, data.query);
- })
- .then((rows) => {
- res.status(200)
- .send(rows);
- })
- .catch(next);
- })
-
- /**
- * POST /api/nginx/redirection-hosts
- *
- * Create a new redirection-host
- */
- .post((req, res, next) => {
- apiValidator({$ref: 'endpoints/redirection-hosts#/links/1/schema'}, req.body)
- .then((payload) => {
- return internalRedirectionHost.create(res.locals.access, payload);
- })
- .then((result) => {
- res.status(201)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Specific redirection-host
- *
- * /api/nginx/redirection-hosts/123
- */
-router
- .route('/:host_id')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * GET /api/nginx/redirection-hosts/123
- *
- * Retrieve a specific redirection-host
- */
- .get((req, res, next) => {
- validator({
- required: ['host_id'],
- additionalProperties: false,
- properties: {
- host_id: {
- $ref: 'definitions#/definitions/id'
- },
- expand: {
- $ref: 'definitions#/definitions/expand'
- }
- }
- }, {
- host_id: req.params.host_id,
- expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
- })
- .then((data) => {
- return internalRedirectionHost.get(res.locals.access, {
- id: parseInt(data.host_id, 10),
- expand: data.expand
- });
- })
- .then((row) => {
- res.status(200)
- .send(row);
- })
- .catch(next);
- })
-
- /**
- * PUT /api/nginx/redirection-hosts/123
- *
- * Update and existing redirection-host
- */
- .put((req, res, next) => {
- apiValidator({$ref: 'endpoints/redirection-hosts#/links/2/schema'}, req.body)
- .then((payload) => {
- payload.id = parseInt(req.params.host_id, 10);
- return internalRedirectionHost.update(res.locals.access, payload);
- })
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- })
-
- /**
- * DELETE /api/nginx/redirection-hosts/123
- *
- * Update and existing redirection-host
- */
- .delete((req, res, next) => {
- internalRedirectionHost.delete(res.locals.access, {id: parseInt(req.params.host_id, 10)})
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Enable redirection-host
- *
- * /api/nginx/redirection-hosts/123/enable
- */
-router
- .route('/:host_id/enable')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * POST /api/nginx/redirection-hosts/123/enable
- */
- .post((req, res, next) => {
- internalRedirectionHost.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Disable redirection-host
- *
- * /api/nginx/redirection-hosts/123/disable
- */
-router
- .route('/:host_id/disable')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * POST /api/nginx/redirection-hosts/123/disable
- */
- .post((req, res, next) => {
- internalRedirectionHost.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-module.exports = router;
diff --git a/backend/routes/api/nginx/streams.js b/backend/routes/api/nginx/streams.js
deleted file mode 100644
index 5e3fc28fe..000000000
--- a/backend/routes/api/nginx/streams.js
+++ /dev/null
@@ -1,196 +0,0 @@
-const express = require('express');
-const validator = require('../../../lib/validator');
-const jwtdecode = require('../../../lib/express/jwt-decode');
-const internalStream = require('../../../internal/stream');
-const apiValidator = require('../../../lib/validator/api');
-
-let router = express.Router({
- caseSensitive: true,
- strict: true,
- mergeParams: true
-});
-
-/**
- * /api/nginx/streams
- */
-router
- .route('/')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode()) // preferred so it doesn't apply to nonexistent routes
-
- /**
- * GET /api/nginx/streams
- *
- * Retrieve all streams
- */
- .get((req, res, next) => {
- validator({
- additionalProperties: false,
- properties: {
- expand: {
- $ref: 'definitions#/definitions/expand'
- },
- query: {
- $ref: 'definitions#/definitions/query'
- }
- }
- }, {
- expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
- query: (typeof req.query.query === 'string' ? req.query.query : null)
- })
- .then((data) => {
- return internalStream.getAll(res.locals.access, data.expand, data.query);
- })
- .then((rows) => {
- res.status(200)
- .send(rows);
- })
- .catch(next);
- })
-
- /**
- * POST /api/nginx/streams
- *
- * Create a new stream
- */
- .post((req, res, next) => {
- apiValidator({$ref: 'endpoints/streams#/links/1/schema'}, req.body)
- .then((payload) => {
- return internalStream.create(res.locals.access, payload);
- })
- .then((result) => {
- res.status(201)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Specific stream
- *
- * /api/nginx/streams/123
- */
-router
- .route('/:stream_id')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode()) // preferred so it doesn't apply to nonexistent routes
-
- /**
- * GET /api/nginx/streams/123
- *
- * Retrieve a specific stream
- */
- .get((req, res, next) => {
- validator({
- required: ['stream_id'],
- additionalProperties: false,
- properties: {
- stream_id: {
- $ref: 'definitions#/definitions/id'
- },
- expand: {
- $ref: 'definitions#/definitions/expand'
- }
- }
- }, {
- stream_id: req.params.stream_id,
- expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
- })
- .then((data) => {
- return internalStream.get(res.locals.access, {
- id: parseInt(data.stream_id, 10),
- expand: data.expand
- });
- })
- .then((row) => {
- res.status(200)
- .send(row);
- })
- .catch(next);
- })
-
- /**
- * PUT /api/nginx/streams/123
- *
- * Update and existing stream
- */
- .put((req, res, next) => {
- apiValidator({$ref: 'endpoints/streams#/links/2/schema'}, req.body)
- .then((payload) => {
- payload.id = parseInt(req.params.stream_id, 10);
- return internalStream.update(res.locals.access, payload);
- })
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- })
-
- /**
- * DELETE /api/nginx/streams/123
- *
- * Update and existing stream
- */
- .delete((req, res, next) => {
- internalStream.delete(res.locals.access, {id: parseInt(req.params.stream_id, 10)})
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Enable stream
- *
- * /api/nginx/streams/123/enable
- */
-router
- .route('/:host_id/enable')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * POST /api/nginx/streams/123/enable
- */
- .post((req, res, next) => {
- internalStream.enable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Disable stream
- *
- * /api/nginx/streams/123/disable
- */
-router
- .route('/:host_id/disable')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * POST /api/nginx/streams/123/disable
- */
- .post((req, res, next) => {
- internalStream.disable(res.locals.access, {id: parseInt(req.params.host_id, 10)})
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-module.exports = router;
diff --git a/backend/routes/api/reports.js b/backend/routes/api/reports.js
deleted file mode 100644
index 9e2c98c89..000000000
--- a/backend/routes/api/reports.js
+++ /dev/null
@@ -1,29 +0,0 @@
-const express = require('express');
-const jwtdecode = require('../../lib/express/jwt-decode');
-const internalReport = require('../../internal/report');
-
-let router = express.Router({
- caseSensitive: true,
- strict: true,
- mergeParams: true
-});
-
-router
- .route('/hosts')
- .options((req, res) => {
- res.sendStatus(204);
- })
-
- /**
- * GET /reports/hosts
- */
- .get(jwtdecode(), (req, res, next) => {
- internalReport.getHostsReport(res.locals.access)
- .then((data) => {
- res.status(200)
- .send(data);
- })
- .catch(next);
- });
-
-module.exports = router;
diff --git a/backend/routes/api/schema.js b/backend/routes/api/schema.js
deleted file mode 100644
index fc6bd5bdf..000000000
--- a/backend/routes/api/schema.js
+++ /dev/null
@@ -1,36 +0,0 @@
-const express = require('express');
-const swaggerJSON = require('../../doc/api.swagger.json');
-const PACKAGE = require('../../package.json');
-
-let router = express.Router({
- caseSensitive: true,
- strict: true,
- mergeParams: true
-});
-
-router
- .route('/')
- .options((req, res) => {
- res.sendStatus(204);
- })
-
- /**
- * GET /schema
- */
- .get((req, res/*, next*/) => {
- let proto = req.protocol;
- if (typeof req.headers['x-forwarded-proto'] !== 'undefined' && req.headers['x-forwarded-proto']) {
- proto = req.headers['x-forwarded-proto'];
- }
-
- let origin = proto + '://' + req.hostname;
- if (typeof req.headers.origin !== 'undefined' && req.headers.origin) {
- origin = req.headers.origin;
- }
-
- swaggerJSON.info.version = PACKAGE.version;
- swaggerJSON.servers[0].url = origin + '/api';
- res.status(200).send(swaggerJSON);
- });
-
-module.exports = router;
diff --git a/backend/routes/api/settings.js b/backend/routes/api/settings.js
deleted file mode 100644
index d08b2bf5c..000000000
--- a/backend/routes/api/settings.js
+++ /dev/null
@@ -1,96 +0,0 @@
-const express = require('express');
-const validator = require('../../lib/validator');
-const jwtdecode = require('../../lib/express/jwt-decode');
-const internalSetting = require('../../internal/setting');
-const apiValidator = require('../../lib/validator/api');
-
-let router = express.Router({
- caseSensitive: true,
- strict: true,
- mergeParams: true
-});
-
-/**
- * /api/settings
- */
-router
- .route('/')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * GET /api/settings
- *
- * Retrieve all settings
- */
- .get((req, res, next) => {
- internalSetting.getAll(res.locals.access)
- .then((rows) => {
- res.status(200)
- .send(rows);
- })
- .catch(next);
- });
-
-/**
- * Specific setting
- *
- * /api/settings/something
- */
-router
- .route('/:setting_id')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * GET /settings/something
- *
- * Retrieve a specific setting
- */
- .get((req, res, next) => {
- validator({
- required: ['setting_id'],
- additionalProperties: false,
- properties: {
- setting_id: {
- $ref: 'definitions#/definitions/setting_id'
- }
- }
- }, {
- setting_id: req.params.setting_id
- })
- .then((data) => {
- return internalSetting.get(res.locals.access, {
- id: data.setting_id
- });
- })
- .then((row) => {
- res.status(200)
- .send(row);
- })
- .catch(next);
- })
-
- /**
- * PUT /api/settings/something
- *
- * Update and existing setting
- */
- .put((req, res, next) => {
- apiValidator({$ref: 'endpoints/settings#/links/1/schema'}, req.body)
- .then((payload) => {
- payload.id = req.params.setting_id;
- return internalSetting.update(res.locals.access, payload);
- })
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-module.exports = router;
diff --git a/backend/routes/api/tokens.js b/backend/routes/api/tokens.js
deleted file mode 100644
index a21f998ae..000000000
--- a/backend/routes/api/tokens.js
+++ /dev/null
@@ -1,54 +0,0 @@
-const express = require('express');
-const jwtdecode = require('../../lib/express/jwt-decode');
-const internalToken = require('../../internal/token');
-const apiValidator = require('../../lib/validator/api');
-
-let router = express.Router({
- caseSensitive: true,
- strict: true,
- mergeParams: true
-});
-
-router
- .route('/')
- .options((req, res) => {
- res.sendStatus(204);
- })
-
- /**
- * GET /tokens
- *
- * Get a new Token, given they already have a token they want to refresh
- * We also piggy back on to this method, allowing admins to get tokens
- * for services like Job board and Worker.
- */
- .get(jwtdecode(), (req, res, next) => {
- internalToken.getFreshToken(res.locals.access, {
- expiry: (typeof req.query.expiry !== 'undefined' ? req.query.expiry : null),
- scope: (typeof req.query.scope !== 'undefined' ? req.query.scope : null)
- })
- .then((data) => {
- res.status(200)
- .send(data);
- })
- .catch(next);
- })
-
- /**
- * POST /tokens
- *
- * Create a new Token
- */
- .post((req, res, next) => {
- apiValidator({$ref: 'endpoints/tokens#/links/0/schema'}, req.body)
- .then((payload) => {
- return internalToken.getTokenFromEmail(payload);
- })
- .then((data) => {
- res.status(200)
- .send(data);
- })
- .catch(next);
- });
-
-module.exports = router;
diff --git a/backend/routes/api/users.js b/backend/routes/api/users.js
deleted file mode 100644
index 1c6bd0ad2..000000000
--- a/backend/routes/api/users.js
+++ /dev/null
@@ -1,239 +0,0 @@
-const express = require('express');
-const validator = require('../../lib/validator');
-const jwtdecode = require('../../lib/express/jwt-decode');
-const userIdFromMe = require('../../lib/express/user-id-from-me');
-const internalUser = require('../../internal/user');
-const apiValidator = require('../../lib/validator/api');
-
-let router = express.Router({
- caseSensitive: true,
- strict: true,
- mergeParams: true
-});
-
-/**
- * /api/users
- */
-router
- .route('/')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * GET /api/users
- *
- * Retrieve all users
- */
- .get((req, res, next) => {
- validator({
- additionalProperties: false,
- properties: {
- expand: {
- $ref: 'definitions#/definitions/expand'
- },
- query: {
- $ref: 'definitions#/definitions/query'
- }
- }
- }, {
- expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null),
- query: (typeof req.query.query === 'string' ? req.query.query : null)
- })
- .then((data) => {
- return internalUser.getAll(res.locals.access, data.expand, data.query);
- })
- .then((users) => {
- res.status(200)
- .send(users);
- })
- .catch(next);
- })
-
- /**
- * POST /api/users
- *
- * Create a new User
- */
- .post((req, res, next) => {
- apiValidator({$ref: 'endpoints/users#/links/1/schema'}, req.body)
- .then((payload) => {
- return internalUser.create(res.locals.access, payload);
- })
- .then((result) => {
- res.status(201)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Specific user
- *
- * /api/users/123
- */
-router
- .route('/:user_id')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
- .all(userIdFromMe)
-
- /**
- * GET /users/123 or /users/me
- *
- * Retrieve a specific user
- */
- .get((req, res, next) => {
- validator({
- required: ['user_id'],
- additionalProperties: false,
- properties: {
- user_id: {
- $ref: 'definitions#/definitions/id'
- },
- expand: {
- $ref: 'definitions#/definitions/expand'
- }
- }
- }, {
- user_id: req.params.user_id,
- expand: (typeof req.query.expand === 'string' ? req.query.expand.split(',') : null)
- })
- .then((data) => {
- return internalUser.get(res.locals.access, {
- id: data.user_id,
- expand: data.expand,
- omit: internalUser.getUserOmisionsByAccess(res.locals.access, data.user_id)
- });
- })
- .then((user) => {
- res.status(200)
- .send(user);
- })
- .catch(next);
- })
-
- /**
- * PUT /api/users/123
- *
- * Update and existing user
- */
- .put((req, res, next) => {
- apiValidator({$ref: 'endpoints/users#/links/2/schema'}, req.body)
- .then((payload) => {
- payload.id = req.params.user_id;
- return internalUser.update(res.locals.access, payload);
- })
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- })
-
- /**
- * DELETE /api/users/123
- *
- * Update and existing user
- */
- .delete((req, res, next) => {
- internalUser.delete(res.locals.access, {id: req.params.user_id})
- .then((result) => {
- res.status(200)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Specific user auth
- *
- * /api/users/123/auth
- */
-router
- .route('/:user_id/auth')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
- .all(userIdFromMe)
-
- /**
- * PUT /api/users/123/auth
- *
- * Update password for a user
- */
- .put((req, res, next) => {
- apiValidator({$ref: 'endpoints/users#/links/4/schema'}, req.body)
- .then((payload) => {
- payload.id = req.params.user_id;
- return internalUser.setPassword(res.locals.access, payload);
- })
- .then((result) => {
- res.status(201)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Specific user permissions
- *
- * /api/users/123/permissions
- */
-router
- .route('/:user_id/permissions')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
- .all(userIdFromMe)
-
- /**
- * PUT /api/users/123/permissions
- *
- * Set some or all permissions for a user
- */
- .put((req, res, next) => {
- apiValidator({$ref: 'endpoints/users#/links/5/schema'}, req.body)
- .then((payload) => {
- payload.id = req.params.user_id;
- return internalUser.setPermissions(res.locals.access, payload);
- })
- .then((result) => {
- res.status(201)
- .send(result);
- })
- .catch(next);
- });
-
-/**
- * Specific user login as
- *
- * /api/users/123/login
- */
-router
- .route('/:user_id/login')
- .options((req, res) => {
- res.sendStatus(204);
- })
- .all(jwtdecode())
-
- /**
- * POST /api/users/123/login
- *
- * Log in as a user
- */
- .post((req, res, next) => {
- internalUser.loginAs(res.locals.access, {id: parseInt(req.params.user_id, 10)})
- .then((result) => {
- res.status(201)
- .send(result);
- })
- .catch(next);
- });
-
-module.exports = router;
diff --git a/backend/schema/definitions.json b/backend/schema/definitions.json
deleted file mode 100644
index 4b4f3405c..000000000
--- a/backend/schema/definitions.json
+++ /dev/null
@@ -1,240 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-07/schema#",
- "$id": "definitions",
- "definitions": {
- "id": {
- "description": "Unique identifier",
- "example": 123456,
- "readOnly": true,
- "type": "integer",
- "minimum": 1
- },
- "setting_id": {
- "description": "Unique identifier for a Setting",
- "example": "default-site",
- "readOnly": true,
- "type": "string",
- "minLength": 2
- },
- "token": {
- "type": "string",
- "minLength": 10
- },
- "expand": {
- "anyOf": [
- {
- "type": "null"
- },
- {
- "type": "array",
- "minItems": 1,
- "items": {
- "type": "string"
- }
- }
- ]
- },
- "sort": {
- "type": "array",
- "minItems": 1,
- "items": {
- "type": "object",
- "required": [
- "field",
- "dir"
- ],
- "additionalProperties": false,
- "properties": {
- "field": {
- "type": "string"
- },
- "dir": {
- "type": "string",
- "pattern": "^(asc|desc)$"
- }
- }
- }
- },
- "query": {
- "anyOf": [
- {
- "type": "null"
- },
- {
- "type": "string",
- "minLength": 1,
- "maxLength": 255
- }
- ]
- },
- "criteria": {
- "anyOf": [
- {
- "type": "null"
- },
- {
- "type": "object"
- }
- ]
- },
- "fields": {
- "anyOf": [
- {
- "type": "null"
- },
- {
- "type": "array",
- "minItems": 1,
- "items": {
- "type": "string"
- }
- }
- ]
- },
- "omit": {
- "anyOf": [
- {
- "type": "null"
- },
- {
- "type": "array",
- "minItems": 1,
- "items": {
- "type": "string"
- }
- }
- ]
- },
- "created_on": {
- "description": "Date and time of creation",
- "format": "date-time",
- "readOnly": true,
- "type": "string"
- },
- "modified_on": {
- "description": "Date and time of last update",
- "format": "date-time",
- "readOnly": true,
- "type": "string"
- },
- "user_id": {
- "description": "User ID",
- "example": 1234,
- "type": "integer",
- "minimum": 1
- },
- "certificate_id": {
- "description": "Certificate ID",
- "example": 1234,
- "anyOf": [
- {
- "type": "integer",
- "minimum": 0
- },
- {
- "type": "string",
- "pattern": "^new$"
- }
- ]
- },
- "access_list_id": {
- "description": "Access List ID",
- "example": 1234,
- "type": "integer",
- "minimum": 0
- },
- "name": {
- "type": "string",
- "minLength": 1,
- "maxLength": 255
- },
- "email": {
- "description": "Email Address",
- "example": "john@example.com",
- "format": "email",
- "type": "string",
- "minLength": 6,
- "maxLength": 100
- },
- "password": {
- "description": "Password",
- "type": "string",
- "minLength": 8,
- "maxLength": 255
- },
- "domain_name": {
- "description": "Domain Name",
- "example": "jc21.com",
- "type": "string",
- "pattern": "^(?:[^.*]+\\.?)+[^.]$"
- },
- "domain_names": {
- "description": "Domain Names separated by a comma",
- "example": "*.jc21.com,blog.jc21.com",
- "type": "array",
- "maxItems": 15,
- "uniqueItems": true,
- "items": {
- "type": "string",
- "pattern": "^(?:\\*\\.)?(?:[^.*]+\\.?)+[^.]$"
- }
- },
- "http_code": {
- "description": "Redirect HTTP Status Code",
- "example": 302,
- "type": "integer",
- "minimum": 300,
- "maximum": 308
- },
- "scheme": {
- "description": "RFC Protocol",
- "example": "HTTPS or $scheme",
- "type": "string",
- "minLength": 4
- },
- "enabled": {
- "description": "Is Enabled",
- "example": true,
- "type": "boolean"
- },
- "ssl_enabled": {
- "description": "Is SSL Enabled",
- "example": true,
- "type": "boolean"
- },
- "ssl_forced": {
- "description": "Is SSL Forced",
- "example": false,
- "type": "boolean"
- },
- "hsts_enabled": {
- "description": "Is HSTS Enabled",
- "example": false,
- "type": "boolean"
- },
- "hsts_subdomains": {
- "description": "Is HSTS applicable to all subdomains",
- "example": false,
- "type": "boolean"
- },
- "ssl_provider": {
- "type": "string",
- "pattern": "^(letsencrypt|other)$"
- },
- "http2_support": {
- "description": "HTTP2 Protocol Support",
- "example": false,
- "type": "boolean"
- },
- "block_exploits": {
- "description": "Should we block common exploits",
- "example": true,
- "type": "boolean"
- },
- "caching_enabled": {
- "description": "Should we cache assets",
- "example": true,
- "type": "boolean"
- }
- }
-}
diff --git a/backend/schema/endpoints/access-lists.json b/backend/schema/endpoints/access-lists.json
deleted file mode 100644
index 404e32376..000000000
--- a/backend/schema/endpoints/access-lists.json
+++ /dev/null
@@ -1,236 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-07/schema#",
- "$id": "endpoints/access-lists",
- "title": "Access Lists",
- "description": "Endpoints relating to Access Lists",
- "stability": "stable",
- "type": "object",
- "definitions": {
- "id": {
- "$ref": "../definitions.json#/definitions/id"
- },
- "created_on": {
- "$ref": "../definitions.json#/definitions/created_on"
- },
- "modified_on": {
- "$ref": "../definitions.json#/definitions/modified_on"
- },
- "name": {
- "type": "string",
- "description": "Name of the Access List"
- },
- "directive": {
- "type": "string",
- "enum": ["allow", "deny"]
- },
- "address": {
- "oneOf": [
- {
- "type": "string",
- "pattern": "^([0-9]{1,3}\\.){3}[0-9]{1,3}(/([0-9]|[1-2][0-9]|3[0-2]))?$"
- },
- {
- "type": "string",
- "pattern": "^s*((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3}))|:)))(%.+)?s*(/([0-9]|[1-9][0-9]|1[0-1][0-9]|12[0-8]))?$"
- },
- {
- "type": "string",
- "pattern": "^all$"
- }
- ]
- },
- "satisfy_any": {
- "type": "boolean"
- },
- "pass_auth": {
- "type": "boolean"
- },
- "meta": {
- "type": "object"
- }
- },
- "properties": {
- "id": {
- "$ref": "#/definitions/id"
- },
- "created_on": {
- "$ref": "#/definitions/created_on"
- },
- "modified_on": {
- "$ref": "#/definitions/modified_on"
- },
- "name": {
- "$ref": "#/definitions/name"
- },
- "meta": {
- "$ref": "#/definitions/meta"
- }
- },
- "links": [
- {
- "title": "List",
- "description": "Returns a list of Access Lists",
- "href": "/nginx/access-lists",
- "access": "private",
- "method": "GET",
- "rel": "self",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "array",
- "items": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Create",
- "description": "Creates a new Access List",
- "href": "/nginx/access-list",
- "access": "private",
- "method": "POST",
- "rel": "create",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "schema": {
- "type": "object",
- "additionalProperties": false,
- "required": ["name"],
- "properties": {
- "name": {
- "$ref": "#/definitions/name"
- },
- "satisfy_any": {
- "$ref": "#/definitions/satisfy_any"
- },
- "pass_auth": {
- "$ref": "#/definitions/pass_auth"
- },
- "items": {
- "type": "array",
- "minItems": 0,
- "items": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "username": {
- "type": "string",
- "minLength": 1
- },
- "password": {
- "type": "string",
- "minLength": 1
- }
- }
- }
- },
- "clients": {
- "type": "array",
- "minItems": 0,
- "items": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "address": {
- "$ref": "#/definitions/address"
- },
- "directive": {
- "$ref": "#/definitions/directive"
- }
- }
- }
- },
- "meta": {
- "$ref": "#/definitions/meta"
- }
- }
- },
- "targetSchema": {
- "properties": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Update",
- "description": "Updates a existing Access List",
- "href": "/nginx/access-list/{definitions.identity.example}",
- "access": "private",
- "method": "PUT",
- "rel": "update",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "schema": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "name": {
- "$ref": "#/definitions/name"
- },
- "satisfy_any": {
- "$ref": "#/definitions/satisfy_any"
- },
- "pass_auth": {
- "$ref": "#/definitions/pass_auth"
- },
- "items": {
- "type": "array",
- "minItems": 0,
- "items": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "username": {
- "type": "string",
- "minLength": 1
- },
- "password": {
- "type": "string",
- "minLength": 0
- }
- }
- }
- },
- "clients": {
- "type": "array",
- "minItems": 0,
- "items": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "address": {
- "$ref": "#/definitions/address"
- },
- "directive": {
- "$ref": "#/definitions/directive"
- }
- }
- }
- }
- }
- },
- "targetSchema": {
- "properties": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Delete",
- "description": "Deletes a existing Access List",
- "href": "/nginx/access-list/{definitions.identity.example}",
- "access": "private",
- "method": "DELETE",
- "rel": "delete",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "boolean"
- }
- }
- ]
-}
diff --git a/backend/schema/endpoints/certificates.json b/backend/schema/endpoints/certificates.json
deleted file mode 100644
index 955ca75c9..000000000
--- a/backend/schema/endpoints/certificates.json
+++ /dev/null
@@ -1,173 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-07/schema#",
- "$id": "endpoints/certificates",
- "title": "Certificates",
- "description": "Endpoints relating to Certificates",
- "stability": "stable",
- "type": "object",
- "definitions": {
- "id": {
- "$ref": "../definitions.json#/definitions/id"
- },
- "created_on": {
- "$ref": "../definitions.json#/definitions/created_on"
- },
- "modified_on": {
- "$ref": "../definitions.json#/definitions/modified_on"
- },
- "provider": {
- "$ref": "../definitions.json#/definitions/ssl_provider"
- },
- "nice_name": {
- "type": "string",
- "description": "Nice Name for the custom certificate"
- },
- "domain_names": {
- "$ref": "../definitions.json#/definitions/domain_names"
- },
- "expires_on": {
- "description": "Date and time of expiration",
- "format": "date-time",
- "readOnly": true,
- "type": "string"
- },
- "meta": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "letsencrypt_email": {
- "type": "string",
- "format": "email"
- },
- "letsencrypt_agree": {
- "type": "boolean"
- },
- "dns_challenge": {
- "type": "boolean"
- },
- "dns_provider": {
- "type": "string"
- },
- "dns_provider_credentials": {
- "type": "string"
- },
- "propagation_seconds": {
- "anyOf": [
- {
- "type": "integer",
- "minimum": 0
- }
- ]
-
- }
- }
- }
- },
- "properties": {
- "id": {
- "$ref": "#/definitions/id"
- },
- "created_on": {
- "$ref": "#/definitions/created_on"
- },
- "modified_on": {
- "$ref": "#/definitions/modified_on"
- },
- "provider": {
- "$ref": "#/definitions/provider"
- },
- "nice_name": {
- "$ref": "#/definitions/nice_name"
- },
- "domain_names": {
- "$ref": "#/definitions/domain_names"
- },
- "expires_on": {
- "$ref": "#/definitions/expires_on"
- },
- "meta": {
- "$ref": "#/definitions/meta"
- }
- },
- "links": [
- {
- "title": "List",
- "description": "Returns a list of Certificates",
- "href": "/nginx/certificates",
- "access": "private",
- "method": "GET",
- "rel": "self",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "array",
- "items": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Create",
- "description": "Creates a new Certificate",
- "href": "/nginx/certificates",
- "access": "private",
- "method": "POST",
- "rel": "create",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "schema": {
- "type": "object",
- "additionalProperties": false,
- "required": [
- "provider"
- ],
- "properties": {
- "provider": {
- "$ref": "#/definitions/provider"
- },
- "nice_name": {
- "$ref": "#/definitions/nice_name"
- },
- "domain_names": {
- "$ref": "#/definitions/domain_names"
- },
- "meta": {
- "$ref": "#/definitions/meta"
- }
- }
- },
- "targetSchema": {
- "properties": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Delete",
- "description": "Deletes a existing Certificate",
- "href": "/nginx/certificates/{definitions.identity.example}",
- "access": "private",
- "method": "DELETE",
- "rel": "delete",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "boolean"
- }
- },
- {
- "title": "Test HTTP Challenge",
- "description": "Tests whether the HTTP challenge should work",
- "href": "/nginx/certificates/{definitions.identity.example}/test-http",
- "access": "private",
- "method": "GET",
- "rel": "info",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- }
- }
- ]
-}
diff --git a/backend/schema/endpoints/dead-hosts.json b/backend/schema/endpoints/dead-hosts.json
deleted file mode 100644
index 0c73c3be1..000000000
--- a/backend/schema/endpoints/dead-hosts.json
+++ /dev/null
@@ -1,240 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-07/schema#",
- "$id": "endpoints/dead-hosts",
- "title": "404 Hosts",
- "description": "Endpoints relating to 404 Hosts",
- "stability": "stable",
- "type": "object",
- "definitions": {
- "id": {
- "$ref": "../definitions.json#/definitions/id"
- },
- "created_on": {
- "$ref": "../definitions.json#/definitions/created_on"
- },
- "modified_on": {
- "$ref": "../definitions.json#/definitions/modified_on"
- },
- "domain_names": {
- "$ref": "../definitions.json#/definitions/domain_names"
- },
- "certificate_id": {
- "$ref": "../definitions.json#/definitions/certificate_id"
- },
- "ssl_forced": {
- "$ref": "../definitions.json#/definitions/ssl_forced"
- },
- "hsts_enabled": {
- "$ref": "../definitions.json#/definitions/hsts_enabled"
- },
- "hsts_subdomains": {
- "$ref": "../definitions.json#/definitions/hsts_subdomains"
- },
- "http2_support": {
- "$ref": "../definitions.json#/definitions/http2_support"
- },
- "advanced_config": {
- "type": "string"
- },
- "enabled": {
- "$ref": "../definitions.json#/definitions/enabled"
- },
- "meta": {
- "type": "object"
- }
- },
- "properties": {
- "id": {
- "$ref": "#/definitions/id"
- },
- "created_on": {
- "$ref": "#/definitions/created_on"
- },
- "modified_on": {
- "$ref": "#/definitions/modified_on"
- },
- "domain_names": {
- "$ref": "#/definitions/domain_names"
- },
- "certificate_id": {
- "$ref": "#/definitions/certificate_id"
- },
- "ssl_forced": {
- "$ref": "#/definitions/ssl_forced"
- },
- "hsts_enabled": {
- "$ref": "#/definitions/hsts_enabled"
- },
- "hsts_subdomains": {
- "$ref": "#/definitions/hsts_subdomains"
- },
- "http2_support": {
- "$ref": "#/definitions/http2_support"
- },
- "advanced_config": {
- "$ref": "#/definitions/advanced_config"
- },
- "enabled": {
- "$ref": "#/definitions/enabled"
- },
- "meta": {
- "$ref": "#/definitions/meta"
- }
- },
- "links": [
- {
- "title": "List",
- "description": "Returns a list of 404 Hosts",
- "href": "/nginx/dead-hosts",
- "access": "private",
- "method": "GET",
- "rel": "self",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "array",
- "items": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Create",
- "description": "Creates a new 404 Host",
- "href": "/nginx/dead-hosts",
- "access": "private",
- "method": "POST",
- "rel": "create",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "schema": {
- "type": "object",
- "additionalProperties": false,
- "required": [
- "domain_names"
- ],
- "properties": {
- "domain_names": {
- "$ref": "#/definitions/domain_names"
- },
- "certificate_id": {
- "$ref": "#/definitions/certificate_id"
- },
- "ssl_forced": {
- "$ref": "#/definitions/ssl_forced"
- },
- "hsts_enabled": {
- "$ref": "#/definitions/hsts_enabled"
- },
- "hsts_subdomains": {
- "$ref": "#/definitions/hsts_enabled"
- },
- "http2_support": {
- "$ref": "#/definitions/http2_support"
- },
- "advanced_config": {
- "$ref": "#/definitions/advanced_config"
- },
- "meta": {
- "$ref": "#/definitions/meta"
- }
- }
- },
- "targetSchema": {
- "properties": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Update",
- "description": "Updates a existing 404 Host",
- "href": "/nginx/dead-hosts/{definitions.identity.example}",
- "access": "private",
- "method": "PUT",
- "rel": "update",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "schema": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "domain_names": {
- "$ref": "#/definitions/domain_names"
- },
- "certificate_id": {
- "$ref": "#/definitions/certificate_id"
- },
- "ssl_forced": {
- "$ref": "#/definitions/ssl_forced"
- },
- "hsts_enabled": {
- "$ref": "#/definitions/hsts_enabled"
- },
- "hsts_subdomains": {
- "$ref": "#/definitions/hsts_enabled"
- },
- "http2_support": {
- "$ref": "#/definitions/http2_support"
- },
- "advanced_config": {
- "$ref": "#/definitions/advanced_config"
- },
- "meta": {
- "$ref": "#/definitions/meta"
- }
- }
- },
- "targetSchema": {
- "properties": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Delete",
- "description": "Deletes a existing 404 Host",
- "href": "/nginx/dead-hosts/{definitions.identity.example}",
- "access": "private",
- "method": "DELETE",
- "rel": "delete",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "boolean"
- }
- },
- {
- "title": "Enable",
- "description": "Enables a existing 404 Host",
- "href": "/nginx/dead-hosts/{definitions.identity.example}/enable",
- "access": "private",
- "method": "POST",
- "rel": "update",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "boolean"
- }
- },
- {
- "title": "Disable",
- "description": "Disables a existing 404 Host",
- "href": "/nginx/dead-hosts/{definitions.identity.example}/disable",
- "access": "private",
- "method": "POST",
- "rel": "update",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "boolean"
- }
- }
- ]
-}
diff --git a/backend/schema/endpoints/proxy-hosts.json b/backend/schema/endpoints/proxy-hosts.json
deleted file mode 100644
index 9a3fff2fc..000000000
--- a/backend/schema/endpoints/proxy-hosts.json
+++ /dev/null
@@ -1,387 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-07/schema#",
- "$id": "endpoints/proxy-hosts",
- "title": "Proxy Hosts",
- "description": "Endpoints relating to Proxy Hosts",
- "stability": "stable",
- "type": "object",
- "definitions": {
- "id": {
- "$ref": "../definitions.json#/definitions/id"
- },
- "created_on": {
- "$ref": "../definitions.json#/definitions/created_on"
- },
- "modified_on": {
- "$ref": "../definitions.json#/definitions/modified_on"
- },
- "domain_names": {
- "$ref": "../definitions.json#/definitions/domain_names"
- },
- "forward_scheme": {
- "type": "string",
- "enum": ["http", "https"]
- },
- "forward_host": {
- "type": "string",
- "minLength": 1,
- "maxLength": 255
- },
- "forward_port": {
- "type": "integer",
- "minimum": 1,
- "maximum": 65535
- },
- "certificate_id": {
- "$ref": "../definitions.json#/definitions/certificate_id"
- },
- "ssl_forced": {
- "$ref": "../definitions.json#/definitions/ssl_forced"
- },
- "hsts_enabled": {
- "$ref": "../definitions.json#/definitions/hsts_enabled"
- },
- "hsts_subdomains": {
- "$ref": "../definitions.json#/definitions/hsts_subdomains"
- },
- "http2_support": {
- "$ref": "../definitions.json#/definitions/http2_support"
- },
- "block_exploits": {
- "$ref": "../definitions.json#/definitions/block_exploits"
- },
- "caching_enabled": {
- "$ref": "../definitions.json#/definitions/caching_enabled"
- },
- "allow_websocket_upgrade": {
- "description": "Allow Websocket Upgrade for all paths",
- "example": true,
- "type": "boolean"
- },
- "access_list_id": {
- "$ref": "../definitions.json#/definitions/access_list_id"
- },
- "advanced_config": {
- "type": "string"
- },
- "enabled": {
- "$ref": "../definitions.json#/definitions/enabled"
- },
- "meta": {
- "type": "object"
- },
- "locations": {
- "type": "array",
- "minItems": 0,
- "items": {
- "type": "object",
- "required": [
- "forward_scheme",
- "forward_host",
- "forward_port",
- "path"
- ],
- "additionalProperties": false,
- "properties": {
- "id": {
- "type": ["integer", "null"]
- },
- "path": {
- "type": "string",
- "minLength": 1
- },
- "forward_scheme": {
- "$ref": "#/definitions/forward_scheme"
- },
- "forward_host": {
- "$ref": "#/definitions/forward_host"
- },
- "forward_port": {
- "$ref": "#/definitions/forward_port"
- },
- "forward_path": {
- "type": "string"
- },
- "advanced_config": {
- "type": "string"
- }
- }
- }
- }
- },
- "properties": {
- "id": {
- "$ref": "#/definitions/id"
- },
- "created_on": {
- "$ref": "#/definitions/created_on"
- },
- "modified_on": {
- "$ref": "#/definitions/modified_on"
- },
- "domain_names": {
- "$ref": "#/definitions/domain_names"
- },
- "forward_scheme": {
- "$ref": "#/definitions/forward_scheme"
- },
- "forward_host": {
- "$ref": "#/definitions/forward_host"
- },
- "forward_port": {
- "$ref": "#/definitions/forward_port"
- },
- "certificate_id": {
- "$ref": "#/definitions/certificate_id"
- },
- "ssl_forced": {
- "$ref": "#/definitions/ssl_forced"
- },
- "hsts_enabled": {
- "$ref": "#/definitions/hsts_enabled"
- },
- "hsts_subdomains": {
- "$ref": "#/definitions/hsts_subdomains"
- },
- "http2_support": {
- "$ref": "#/definitions/http2_support"
- },
- "block_exploits": {
- "$ref": "#/definitions/block_exploits"
- },
- "caching_enabled": {
- "$ref": "#/definitions/caching_enabled"
- },
- "allow_websocket_upgrade": {
- "$ref": "#/definitions/allow_websocket_upgrade"
- },
- "access_list_id": {
- "$ref": "#/definitions/access_list_id"
- },
- "advanced_config": {
- "$ref": "#/definitions/advanced_config"
- },
- "enabled": {
- "$ref": "#/definitions/enabled"
- },
- "meta": {
- "$ref": "#/definitions/meta"
- },
- "locations": {
- "$ref": "#/definitions/locations"
- }
- },
- "links": [
- {
- "title": "List",
- "description": "Returns a list of Proxy Hosts",
- "href": "/nginx/proxy-hosts",
- "access": "private",
- "method": "GET",
- "rel": "self",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "array",
- "items": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Create",
- "description": "Creates a new Proxy Host",
- "href": "/nginx/proxy-hosts",
- "access": "private",
- "method": "POST",
- "rel": "create",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "schema": {
- "type": "object",
- "additionalProperties": false,
- "required": [
- "domain_names",
- "forward_scheme",
- "forward_host",
- "forward_port"
- ],
- "properties": {
- "domain_names": {
- "$ref": "#/definitions/domain_names"
- },
- "forward_scheme": {
- "$ref": "#/definitions/forward_scheme"
- },
- "forward_host": {
- "$ref": "#/definitions/forward_host"
- },
- "forward_port": {
- "$ref": "#/definitions/forward_port"
- },
- "certificate_id": {
- "$ref": "#/definitions/certificate_id"
- },
- "ssl_forced": {
- "$ref": "#/definitions/ssl_forced"
- },
- "hsts_enabled": {
- "$ref": "#/definitions/hsts_enabled"
- },
- "hsts_subdomains": {
- "$ref": "#/definitions/hsts_enabled"
- },
- "http2_support": {
- "$ref": "#/definitions/http2_support"
- },
- "block_exploits": {
- "$ref": "#/definitions/block_exploits"
- },
- "caching_enabled": {
- "$ref": "#/definitions/caching_enabled"
- },
- "allow_websocket_upgrade": {
- "$ref": "#/definitions/allow_websocket_upgrade"
- },
- "access_list_id": {
- "$ref": "#/definitions/access_list_id"
- },
- "advanced_config": {
- "$ref": "#/definitions/advanced_config"
- },
- "enabled": {
- "$ref": "#/definitions/enabled"
- },
- "meta": {
- "$ref": "#/definitions/meta"
- },
- "locations": {
- "$ref": "#/definitions/locations"
- }
- }
- },
- "targetSchema": {
- "properties": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Update",
- "description": "Updates a existing Proxy Host",
- "href": "/nginx/proxy-hosts/{definitions.identity.example}",
- "access": "private",
- "method": "PUT",
- "rel": "update",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "schema": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "domain_names": {
- "$ref": "#/definitions/domain_names"
- },
- "forward_scheme": {
- "$ref": "#/definitions/forward_scheme"
- },
- "forward_host": {
- "$ref": "#/definitions/forward_host"
- },
- "forward_port": {
- "$ref": "#/definitions/forward_port"
- },
- "certificate_id": {
- "$ref": "#/definitions/certificate_id"
- },
- "ssl_forced": {
- "$ref": "#/definitions/ssl_forced"
- },
- "hsts_enabled": {
- "$ref": "#/definitions/hsts_enabled"
- },
- "hsts_subdomains": {
- "$ref": "#/definitions/hsts_enabled"
- },
- "http2_support": {
- "$ref": "#/definitions/http2_support"
- },
- "block_exploits": {
- "$ref": "#/definitions/block_exploits"
- },
- "caching_enabled": {
- "$ref": "#/definitions/caching_enabled"
- },
- "allow_websocket_upgrade": {
- "$ref": "#/definitions/allow_websocket_upgrade"
- },
- "access_list_id": {
- "$ref": "#/definitions/access_list_id"
- },
- "advanced_config": {
- "$ref": "#/definitions/advanced_config"
- },
- "enabled": {
- "$ref": "#/definitions/enabled"
- },
- "meta": {
- "$ref": "#/definitions/meta"
- },
- "locations": {
- "$ref": "#/definitions/locations"
- }
- }
- },
- "targetSchema": {
- "properties": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Delete",
- "description": "Deletes a existing Proxy Host",
- "href": "/nginx/proxy-hosts/{definitions.identity.example}",
- "access": "private",
- "method": "DELETE",
- "rel": "delete",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "boolean"
- }
- },
- {
- "title": "Enable",
- "description": "Enables a existing Proxy Host",
- "href": "/nginx/proxy-hosts/{definitions.identity.example}/enable",
- "access": "private",
- "method": "POST",
- "rel": "update",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "boolean"
- }
- },
- {
- "title": "Disable",
- "description": "Disables a existing Proxy Host",
- "href": "/nginx/proxy-hosts/{definitions.identity.example}/disable",
- "access": "private",
- "method": "POST",
- "rel": "update",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "boolean"
- }
- }
- ]
-}
diff --git a/backend/schema/endpoints/redirection-hosts.json b/backend/schema/endpoints/redirection-hosts.json
deleted file mode 100644
index 14a469985..000000000
--- a/backend/schema/endpoints/redirection-hosts.json
+++ /dev/null
@@ -1,305 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-07/schema#",
- "$id": "endpoints/redirection-hosts",
- "title": "Redirection Hosts",
- "description": "Endpoints relating to Redirection Hosts",
- "stability": "stable",
- "type": "object",
- "definitions": {
- "id": {
- "$ref": "../definitions.json#/definitions/id"
- },
- "created_on": {
- "$ref": "../definitions.json#/definitions/created_on"
- },
- "modified_on": {
- "$ref": "../definitions.json#/definitions/modified_on"
- },
- "domain_names": {
- "$ref": "../definitions.json#/definitions/domain_names"
- },
- "forward_http_code": {
- "$ref": "../definitions.json#/definitions/http_code"
- },
- "forward_scheme": {
- "$ref": "../definitions.json#/definitions/scheme"
- },
- "forward_domain_name": {
- "$ref": "../definitions.json#/definitions/domain_name"
- },
- "preserve_path": {
- "description": "Should the path be preserved",
- "example": true,
- "type": "boolean"
- },
- "certificate_id": {
- "$ref": "../definitions.json#/definitions/certificate_id"
- },
- "ssl_forced": {
- "$ref": "../definitions.json#/definitions/ssl_forced"
- },
- "hsts_enabled": {
- "$ref": "../definitions.json#/definitions/hsts_enabled"
- },
- "hsts_subdomains": {
- "$ref": "../definitions.json#/definitions/hsts_subdomains"
- },
- "http2_support": {
- "$ref": "../definitions.json#/definitions/http2_support"
- },
- "block_exploits": {
- "$ref": "../definitions.json#/definitions/block_exploits"
- },
- "advanced_config": {
- "type": "string"
- },
- "enabled": {
- "$ref": "../definitions.json#/definitions/enabled"
- },
- "meta": {
- "type": "object"
- }
- },
- "properties": {
- "id": {
- "$ref": "#/definitions/id"
- },
- "created_on": {
- "$ref": "#/definitions/created_on"
- },
- "modified_on": {
- "$ref": "#/definitions/modified_on"
- },
- "domain_names": {
- "$ref": "#/definitions/domain_names"
- },
- "forward_http_code": {
- "$ref": "#/definitions/forward_http_code"
- },
- "forward_scheme": {
- "$ref": "#/definitions/forward_scheme"
- },
- "forward_domain_name": {
- "$ref": "#/definitions/forward_domain_name"
- },
- "preserve_path": {
- "$ref": "#/definitions/preserve_path"
- },
- "certificate_id": {
- "$ref": "#/definitions/certificate_id"
- },
- "ssl_forced": {
- "$ref": "#/definitions/ssl_forced"
- },
- "hsts_enabled": {
- "$ref": "#/definitions/hsts_enabled"
- },
- "hsts_subdomains": {
- "$ref": "#/definitions/hsts_subdomains"
- },
- "http2_support": {
- "$ref": "#/definitions/http2_support"
- },
- "block_exploits": {
- "$ref": "#/definitions/block_exploits"
- },
- "advanced_config": {
- "$ref": "#/definitions/advanced_config"
- },
- "enabled": {
- "$ref": "#/definitions/enabled"
- },
- "meta": {
- "$ref": "#/definitions/meta"
- }
- },
- "links": [
- {
- "title": "List",
- "description": "Returns a list of Redirection Hosts",
- "href": "/nginx/redirection-hosts",
- "access": "private",
- "method": "GET",
- "rel": "self",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "array",
- "items": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Create",
- "description": "Creates a new Redirection Host",
- "href": "/nginx/redirection-hosts",
- "access": "private",
- "method": "POST",
- "rel": "create",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "schema": {
- "type": "object",
- "additionalProperties": false,
- "required": [
- "domain_names",
- "forward_scheme",
- "forward_http_code",
- "forward_domain_name"
- ],
- "properties": {
- "domain_names": {
- "$ref": "#/definitions/domain_names"
- },
- "forward_http_code": {
- "$ref": "#/definitions/forward_http_code"
- },
- "forward_scheme": {
- "$ref": "#/definitions/forward_scheme"
- },
- "forward_domain_name": {
- "$ref": "#/definitions/forward_domain_name"
- },
- "preserve_path": {
- "$ref": "#/definitions/preserve_path"
- },
- "certificate_id": {
- "$ref": "#/definitions/certificate_id"
- },
- "ssl_forced": {
- "$ref": "#/definitions/ssl_forced"
- },
- "hsts_enabled": {
- "$ref": "#/definitions/hsts_enabled"
- },
- "hsts_subdomains": {
- "$ref": "#/definitions/hsts_enabled"
- },
- "http2_support": {
- "$ref": "#/definitions/http2_support"
- },
- "block_exploits": {
- "$ref": "#/definitions/block_exploits"
- },
- "advanced_config": {
- "$ref": "#/definitions/advanced_config"
- },
- "meta": {
- "$ref": "#/definitions/meta"
- }
- }
- },
- "targetSchema": {
- "properties": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Update",
- "description": "Updates a existing Redirection Host",
- "href": "/nginx/redirection-hosts/{definitions.identity.example}",
- "access": "private",
- "method": "PUT",
- "rel": "update",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "schema": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "domain_names": {
- "$ref": "#/definitions/domain_names"
- },
- "forward_http_code": {
- "$ref": "#/definitions/forward_http_code"
- },
- "forward_scheme": {
- "$ref": "#/definitions/forward_scheme"
- },
- "forward_domain_name": {
- "$ref": "#/definitions/forward_domain_name"
- },
- "preserve_path": {
- "$ref": "#/definitions/preserve_path"
- },
- "certificate_id": {
- "$ref": "#/definitions/certificate_id"
- },
- "ssl_forced": {
- "$ref": "#/definitions/ssl_forced"
- },
- "hsts_enabled": {
- "$ref": "#/definitions/hsts_enabled"
- },
- "hsts_subdomains": {
- "$ref": "#/definitions/hsts_enabled"
- },
- "http2_support": {
- "$ref": "#/definitions/http2_support"
- },
- "block_exploits": {
- "$ref": "#/definitions/block_exploits"
- },
- "advanced_config": {
- "$ref": "#/definitions/advanced_config"
- },
- "meta": {
- "$ref": "#/definitions/meta"
- }
- }
- },
- "targetSchema": {
- "properties": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Delete",
- "description": "Deletes a existing Redirection Host",
- "href": "/nginx/redirection-hosts/{definitions.identity.example}",
- "access": "private",
- "method": "DELETE",
- "rel": "delete",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "boolean"
- }
- },
- {
- "title": "Enable",
- "description": "Enables a existing Redirection Host",
- "href": "/nginx/redirection-hosts/{definitions.identity.example}/enable",
- "access": "private",
- "method": "POST",
- "rel": "update",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "boolean"
- }
- },
- {
- "title": "Disable",
- "description": "Disables a existing Redirection Host",
- "href": "/nginx/redirection-hosts/{definitions.identity.example}/disable",
- "access": "private",
- "method": "POST",
- "rel": "update",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "boolean"
- }
- }
- ]
-}
diff --git a/backend/schema/endpoints/settings.json b/backend/schema/endpoints/settings.json
deleted file mode 100644
index 29e2865ae..000000000
--- a/backend/schema/endpoints/settings.json
+++ /dev/null
@@ -1,99 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-07/schema#",
- "$id": "endpoints/settings",
- "title": "Settings",
- "description": "Endpoints relating to Settings",
- "stability": "stable",
- "type": "object",
- "definitions": {
- "id": {
- "$ref": "../definitions.json#/definitions/setting_id"
- },
- "name": {
- "description": "Name",
- "example": "Default Site",
- "type": "string",
- "minLength": 2,
- "maxLength": 100
- },
- "description": {
- "description": "Description",
- "example": "Default Site",
- "type": "string",
- "minLength": 2,
- "maxLength": 255
- },
- "value": {
- "description": "Value",
- "example": "404",
- "type": "string",
- "maxLength": 255
- },
- "meta": {
- "type": "object"
- }
- },
- "links": [
- {
- "title": "List",
- "description": "Returns a list of Settings",
- "href": "/settings",
- "access": "private",
- "method": "GET",
- "rel": "self",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "array",
- "items": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Update",
- "description": "Updates a existing Setting",
- "href": "/settings/{definitions.identity.example}",
- "access": "private",
- "method": "PUT",
- "rel": "update",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "schema": {
- "type": "object",
- "properties": {
- "value": {
- "$ref": "#/definitions/value"
- },
- "meta": {
- "$ref": "#/definitions/meta"
- }
- }
- },
- "targetSchema": {
- "properties": {
- "$ref": "#/properties"
- }
- }
- }
- ],
- "properties": {
- "id": {
- "$ref": "#/definitions/id"
- },
- "name": {
- "$ref": "#/definitions/description"
- },
- "description": {
- "$ref": "#/definitions/description"
- },
- "value": {
- "$ref": "#/definitions/value"
- },
- "meta": {
- "$ref": "#/definitions/meta"
- }
- }
-}
diff --git a/backend/schema/endpoints/streams.json b/backend/schema/endpoints/streams.json
deleted file mode 100644
index 159c8036e..000000000
--- a/backend/schema/endpoints/streams.json
+++ /dev/null
@@ -1,234 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-07/schema#",
- "$id": "endpoints/streams",
- "title": "Streams",
- "description": "Endpoints relating to Streams",
- "stability": "stable",
- "type": "object",
- "definitions": {
- "id": {
- "$ref": "../definitions.json#/definitions/id"
- },
- "created_on": {
- "$ref": "../definitions.json#/definitions/created_on"
- },
- "modified_on": {
- "$ref": "../definitions.json#/definitions/modified_on"
- },
- "incoming_port": {
- "type": "integer",
- "minimum": 1,
- "maximum": 65535
- },
- "forwarding_host": {
- "anyOf": [
- {
- "$ref": "../definitions.json#/definitions/domain_name"
- },
- {
- "type": "string",
- "format": "ipv4"
- },
- {
- "type": "string",
- "format": "ipv6"
- }
- ]
- },
- "forwarding_port": {
- "type": "integer",
- "minimum": 1,
- "maximum": 65535
- },
- "tcp_forwarding": {
- "type": "boolean"
- },
- "udp_forwarding": {
- "type": "boolean"
- },
- "enabled": {
- "$ref": "../definitions.json#/definitions/enabled"
- },
- "meta": {
- "type": "object"
- }
- },
- "properties": {
- "id": {
- "$ref": "#/definitions/id"
- },
- "created_on": {
- "$ref": "#/definitions/created_on"
- },
- "modified_on": {
- "$ref": "#/definitions/modified_on"
- },
- "incoming_port": {
- "$ref": "#/definitions/incoming_port"
- },
- "forwarding_host": {
- "$ref": "#/definitions/forwarding_host"
- },
- "forwarding_port": {
- "$ref": "#/definitions/forwarding_port"
- },
- "tcp_forwarding": {
- "$ref": "#/definitions/tcp_forwarding"
- },
- "udp_forwarding": {
- "$ref": "#/definitions/udp_forwarding"
- },
- "enabled": {
- "$ref": "#/definitions/enabled"
- },
- "meta": {
- "$ref": "#/definitions/meta"
- }
- },
- "links": [
- {
- "title": "List",
- "description": "Returns a list of Steams",
- "href": "/nginx/streams",
- "access": "private",
- "method": "GET",
- "rel": "self",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "array",
- "items": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Create",
- "description": "Creates a new Stream",
- "href": "/nginx/streams",
- "access": "private",
- "method": "POST",
- "rel": "create",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "schema": {
- "type": "object",
- "additionalProperties": false,
- "required": [
- "incoming_port",
- "forwarding_host",
- "forwarding_port"
- ],
- "properties": {
- "incoming_port": {
- "$ref": "#/definitions/incoming_port"
- },
- "forwarding_host": {
- "$ref": "#/definitions/forwarding_host"
- },
- "forwarding_port": {
- "$ref": "#/definitions/forwarding_port"
- },
- "tcp_forwarding": {
- "$ref": "#/definitions/tcp_forwarding"
- },
- "udp_forwarding": {
- "$ref": "#/definitions/udp_forwarding"
- },
- "meta": {
- "$ref": "#/definitions/meta"
- }
- }
- },
- "targetSchema": {
- "properties": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Update",
- "description": "Updates a existing Stream",
- "href": "/nginx/streams/{definitions.identity.example}",
- "access": "private",
- "method": "PUT",
- "rel": "update",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "schema": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "incoming_port": {
- "$ref": "#/definitions/incoming_port"
- },
- "forwarding_host": {
- "$ref": "#/definitions/forwarding_host"
- },
- "forwarding_port": {
- "$ref": "#/definitions/forwarding_port"
- },
- "tcp_forwarding": {
- "$ref": "#/definitions/tcp_forwarding"
- },
- "udp_forwarding": {
- "$ref": "#/definitions/udp_forwarding"
- },
- "meta": {
- "$ref": "#/definitions/meta"
- }
- }
- },
- "targetSchema": {
- "properties": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Delete",
- "description": "Deletes a existing Stream",
- "href": "/nginx/streams/{definitions.identity.example}",
- "access": "private",
- "method": "DELETE",
- "rel": "delete",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "boolean"
- }
- },
- {
- "title": "Enable",
- "description": "Enables a existing Stream",
- "href": "/nginx/streams/{definitions.identity.example}/enable",
- "access": "private",
- "method": "POST",
- "rel": "update",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "boolean"
- }
- },
- {
- "title": "Disable",
- "description": "Disables a existing Stream",
- "href": "/nginx/streams/{definitions.identity.example}/disable",
- "access": "private",
- "method": "POST",
- "rel": "update",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "boolean"
- }
- }
- ]
-}
diff --git a/backend/schema/endpoints/tokens.json b/backend/schema/endpoints/tokens.json
deleted file mode 100644
index 920af63f4..000000000
--- a/backend/schema/endpoints/tokens.json
+++ /dev/null
@@ -1,100 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-07/schema#",
- "$id": "endpoints/tokens",
- "title": "Token",
- "description": "Tokens are required to authenticate against the API",
- "stability": "stable",
- "type": "object",
- "definitions": {
- "identity": {
- "description": "Email Address or other 3rd party providers identifier",
- "example": "john@example.com",
- "type": "string"
- },
- "secret": {
- "description": "A password or key",
- "example": "correct horse battery staple",
- "type": "string"
- },
- "token": {
- "description": "JWT",
- "example": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.e30.O_frfYM8RzmRsUNigHtu0_jZ_utSejyr1axMGa8rlsk",
- "type": "string"
- },
- "expires": {
- "description": "Token expiry time",
- "format": "date-time",
- "type": "string"
- },
- "scope": {
- "description": "Scope of the Token, defaults to 'user'",
- "example": "user",
- "type": "string"
- }
- },
- "links": [
- {
- "title": "Create",
- "description": "Creates a new token.",
- "href": "/tokens",
- "access": "public",
- "method": "POST",
- "rel": "create",
- "schema": {
- "type": "object",
- "required": [
- "identity",
- "secret"
- ],
- "properties": {
- "identity": {
- "$ref": "#/definitions/identity"
- },
- "secret": {
- "$ref": "#/definitions/secret"
- },
- "scope": {
- "$ref": "#/definitions/scope"
- }
- }
- },
- "targetSchema": {
- "type": "object",
- "properties": {
- "token": {
- "$ref": "#/definitions/token"
- },
- "expires": {
- "$ref": "#/definitions/expires"
- }
- }
- }
- },
- {
- "title": "Refresh",
- "description": "Returns a new token.",
- "href": "/tokens",
- "access": "private",
- "method": "GET",
- "rel": "self",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "schema": {},
- "targetSchema": {
- "type": "object",
- "properties": {
- "token": {
- "$ref": "#/definitions/token"
- },
- "expires": {
- "$ref": "#/definitions/expires"
- },
- "scope": {
- "$ref": "#/definitions/scope"
- }
- }
- }
- }
- ]
-}
diff --git a/backend/schema/endpoints/users.json b/backend/schema/endpoints/users.json
deleted file mode 100644
index 42f44eac7..000000000
--- a/backend/schema/endpoints/users.json
+++ /dev/null
@@ -1,287 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-07/schema#",
- "$id": "endpoints/users",
- "title": "Users",
- "description": "Endpoints relating to Users",
- "stability": "stable",
- "type": "object",
- "definitions": {
- "id": {
- "$ref": "../definitions.json#/definitions/id"
- },
- "created_on": {
- "$ref": "../definitions.json#/definitions/created_on"
- },
- "modified_on": {
- "$ref": "../definitions.json#/definitions/modified_on"
- },
- "name": {
- "description": "Name",
- "example": "Jamie Curnow",
- "type": "string",
- "minLength": 2,
- "maxLength": 100
- },
- "nickname": {
- "description": "Nickname",
- "example": "Jamie",
- "type": "string",
- "minLength": 2,
- "maxLength": 50
- },
- "email": {
- "$ref": "../definitions.json#/definitions/email"
- },
- "avatar": {
- "description": "Avatar",
- "example": "http://somewhere.jpg",
- "type": "string",
- "minLength": 2,
- "maxLength": 150,
- "readOnly": true
- },
- "roles": {
- "description": "Roles",
- "example": [
- "admin"
- ],
- "type": "array"
- },
- "is_disabled": {
- "description": "Is Disabled",
- "example": false,
- "type": "boolean"
- }
- },
- "links": [
- {
- "title": "List",
- "description": "Returns a list of Users",
- "href": "/users",
- "access": "private",
- "method": "GET",
- "rel": "self",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "array",
- "items": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Create",
- "description": "Creates a new User",
- "href": "/users",
- "access": "private",
- "method": "POST",
- "rel": "create",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "schema": {
- "type": "object",
- "required": [
- "name",
- "nickname",
- "email"
- ],
- "properties": {
- "name": {
- "$ref": "#/definitions/name"
- },
- "nickname": {
- "$ref": "#/definitions/nickname"
- },
- "email": {
- "$ref": "#/definitions/email"
- },
- "roles": {
- "$ref": "#/definitions/roles"
- },
- "is_disabled": {
- "$ref": "#/definitions/is_disabled"
- },
- "auth": {
- "type": "object",
- "description": "Auth Credentials",
- "example": {
- "type": "password",
- "secret": "bigredhorsebanana"
- }
- }
- }
- },
- "targetSchema": {
- "properties": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Update",
- "description": "Updates a existing User",
- "href": "/users/{definitions.identity.example}",
- "access": "private",
- "method": "PUT",
- "rel": "update",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "schema": {
- "type": "object",
- "properties": {
- "name": {
- "$ref": "#/definitions/name"
- },
- "nickname": {
- "$ref": "#/definitions/nickname"
- },
- "email": {
- "$ref": "#/definitions/email"
- },
- "roles": {
- "$ref": "#/definitions/roles"
- },
- "is_disabled": {
- "$ref": "#/definitions/is_disabled"
- }
- }
- },
- "targetSchema": {
- "properties": {
- "$ref": "#/properties"
- }
- }
- },
- {
- "title": "Delete",
- "description": "Deletes a existing User",
- "href": "/users/{definitions.identity.example}",
- "access": "private",
- "method": "DELETE",
- "rel": "delete",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "targetSchema": {
- "type": "boolean"
- }
- },
- {
- "title": "Set Password",
- "description": "Sets a password for an existing User",
- "href": "/users/{definitions.identity.example}/auth",
- "access": "private",
- "method": "PUT",
- "rel": "update",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "schema": {
- "type": "object",
- "required": [
- "type",
- "secret"
- ],
- "properties": {
- "type": {
- "type": "string",
- "pattern": "^password$"
- },
- "current": {
- "type": "string",
- "minLength": 1,
- "maxLength": 64
- },
- "secret": {
- "type": "string",
- "minLength": 8,
- "maxLength": 64
- }
- }
- },
- "targetSchema": {
- "type": "boolean"
- }
- },
- {
- "title": "Set Permissions",
- "description": "Sets Permissions for a User",
- "href": "/users/{definitions.identity.example}/permissions",
- "access": "private",
- "method": "PUT",
- "rel": "update",
- "http_header": {
- "$ref": "../examples.json#/definitions/auth_header"
- },
- "schema": {
- "type": "object",
- "properties": {
- "visibility": {
- "type": "string",
- "pattern": "^(all|user)$"
- },
- "access_lists": {
- "type": "string",
- "pattern": "^(hidden|view|manage)$"
- },
- "dead_hosts": {
- "type": "string",
- "pattern": "^(hidden|view|manage)$"
- },
- "proxy_hosts": {
- "type": "string",
- "pattern": "^(hidden|view|manage)$"
- },
- "redirection_hosts": {
- "type": "string",
- "pattern": "^(hidden|view|manage)$"
- },
- "streams": {
- "type": "string",
- "pattern": "^(hidden|view|manage)$"
- },
- "certificates": {
- "type": "string",
- "pattern": "^(hidden|view|manage)$"
- }
- }
- },
- "targetSchema": {
- "type": "boolean"
- }
- }
- ],
- "properties": {
- "id": {
- "$ref": "#/definitions/id"
- },
- "created_on": {
- "$ref": "#/definitions/created_on"
- },
- "modified_on": {
- "$ref": "#/definitions/modified_on"
- },
- "name": {
- "$ref": "#/definitions/name"
- },
- "nickname": {
- "$ref": "#/definitions/nickname"
- },
- "email": {
- "$ref": "#/definitions/email"
- },
- "avatar": {
- "$ref": "#/definitions/avatar"
- },
- "roles": {
- "$ref": "#/definitions/roles"
- },
- "is_disabled": {
- "$ref": "#/definitions/is_disabled"
- }
- }
-}
diff --git a/backend/schema/examples.json b/backend/schema/examples.json
deleted file mode 100644
index 37bc6c4d3..000000000
--- a/backend/schema/examples.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-07/schema#",
- "$id": "examples",
- "type": "object",
- "definitions": {
- "name": {
- "description": "Name",
- "example": "John Smith",
- "type": "string",
- "minLength": 1,
- "maxLength": 255
- },
- "auth_header": {
- "Authorization": "Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.e30.O_frfYM8RzmRsUNigHtu0_jZ_utSejyr1axMGa8rlsk",
- "X-API-Version": "next"
- },
- "token": {
- "type": "string",
- "description": "JWT",
- "example": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.e30.O_frfYM8RzmRsUNigHtu0_jZ_utSejyr1axMGa8rlsk"
- }
- }
-}
diff --git a/backend/schema/index.json b/backend/schema/index.json
deleted file mode 100644
index 6e7d1c8af..000000000
--- a/backend/schema/index.json
+++ /dev/null
@@ -1,42 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-07/schema#",
- "$id": "root",
- "title": "Nginx Proxy Manager REST API",
- "description": "This is the Nginx Proxy Manager REST API",
- "version": "2.0.0",
- "links": [
- {
- "href": "http://npm.example.com/api",
- "rel": "self"
- }
- ],
- "properties": {
- "tokens": {
- "$ref": "endpoints/tokens.json"
- },
- "users": {
- "$ref": "endpoints/users.json"
- },
- "proxy-hosts": {
- "$ref": "endpoints/proxy-hosts.json"
- },
- "redirection-hosts": {
- "$ref": "endpoints/redirection-hosts.json"
- },
- "dead-hosts": {
- "$ref": "endpoints/dead-hosts.json"
- },
- "streams": {
- "$ref": "endpoints/streams.json"
- },
- "certificates": {
- "$ref": "endpoints/certificates.json"
- },
- "access-lists": {
- "$ref": "endpoints/access-lists.json"
- },
- "settings": {
- "$ref": "endpoints/settings.json"
- }
- }
-}
diff --git a/backend/scripts/lint.sh b/backend/scripts/lint.sh
new file mode 100755
index 000000000..44961b054
--- /dev/null
+++ b/backend/scripts/lint.sh
@@ -0,0 +1,22 @@
+#!/bin/bash
+
+BLUE='\E[1;34m'
+YELLOW='\E[1;33m'
+RESET='\E[0m'
+RESULT=0
+
+# go files: incomplete comment check
+INCOMPLETE_COMMENTS=$(find . -iname "*.go*" | grep -v " " | xargs grep --colour -H -n -E "^\s*\/\/\s*[A-Z]\w+ \.{3}" 2>/dev/null)
+if [[ -n "$INCOMPLETE_COMMENTS" ]]; then
+ echo -e "${BLUE}❯ ${YELLOW}WARN: Please fix incomplete exported comments:${RESET}"
+ echo -e "${RED}${INCOMPLETE_COMMENTS}${RESET}"
+ echo
+ # RESULT=1
+fi
+
+echo -e "${YELLOW}golangci-lint ...${RESET}"
+if ! golangci-lint run -E goimports ./...; then
+ exit 1
+fi
+
+exit "$RESULT"
diff --git a/backend/scripts/test.sh b/backend/scripts/test.sh
new file mode 100755
index 000000000..45ee9483e
--- /dev/null
+++ b/backend/scripts/test.sh
@@ -0,0 +1,23 @@
+#!/bin/bash
+set -eu
+
+DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
+
+if ! command -v go-test-coverage &>/dev/null; then
+ go install github.com/vladopajic/go-test-coverage/v2@latest
+fi
+if ! command -v tparse &>/dev/null; then
+ go install github.com/mfridman/tparse@latest
+fi
+
+rm -f "$DIR/coverage.html"
+
+trap cleanup EXIT
+cleanup() {
+ rm -f "$DIR/coverage.out"
+}
+
+echo "Running go test suite ..."
+go test -json -cover ./... -coverprofile="$DIR/coverage.out" | tparse
+go tool cover -html="$DIR/coverage.out" -o "$DIR/coverage.html"
+go-test-coverage -c "$DIR/.testcoverage.yml"
diff --git a/backend/setup.js b/backend/setup.js
deleted file mode 100644
index 47fd1e7b0..000000000
--- a/backend/setup.js
+++ /dev/null
@@ -1,233 +0,0 @@
-const fs = require('fs');
-const NodeRSA = require('node-rsa');
-const config = require('config');
-const logger = require('./logger').setup;
-const certificateModel = require('./models/certificate');
-const userModel = require('./models/user');
-const userPermissionModel = require('./models/user_permission');
-const utils = require('./lib/utils');
-const authModel = require('./models/auth');
-const settingModel = require('./models/setting');
-const dns_plugins = require('./global/certbot-dns-plugins');
-const debug_mode = process.env.NODE_ENV !== 'production' || !!process.env.DEBUG;
-
-/**
- * Creates a new JWT RSA Keypair if not alread set on the config
- *
- * @returns {Promise}
- */
-const setupJwt = () => {
- return new Promise((resolve, reject) => {
- // Now go and check if the jwt gpg keys have been created and if not, create them
- if (!config.has('jwt') || !config.has('jwt.key') || !config.has('jwt.pub')) {
- logger.info('Creating a new JWT key pair...');
-
- // jwt keys are not configured properly
- const filename = config.util.getEnv('NODE_CONFIG_DIR') + '/' + (config.util.getEnv('NODE_ENV') || 'default') + '.json';
- let config_data = {};
-
- try {
- config_data = require(filename);
- } catch (err) {
- // do nothing
- if (debug_mode) {
- logger.debug(filename + ' config file could not be required');
- }
- }
-
- // Now create the keys and save them in the config.
- let key = new NodeRSA({ b: 2048 });
- key.generateKeyPair();
-
- config_data.jwt = {
- key: key.exportKey('private').toString(),
- pub: key.exportKey('public').toString(),
- };
-
- // Write config
- fs.writeFile(filename, JSON.stringify(config_data, null, 2), (err) => {
- if (err) {
- logger.error('Could not write JWT key pair to config file: ' + filename);
- reject(err);
- } else {
- logger.info('Wrote JWT key pair to config file: ' + filename);
- delete require.cache[require.resolve('config')];
- resolve();
- }
- });
- } else {
- // JWT key pair exists
- if (debug_mode) {
- logger.debug('JWT Keypair already exists');
- }
-
- resolve();
- }
- });
-};
-
-/**
- * Creates a default admin users if one doesn't already exist in the database
- *
- * @returns {Promise}
- */
-const setupDefaultUser = () => {
- return userModel
- .query()
- .select(userModel.raw('COUNT(`id`) as `count`'))
- .where('is_deleted', 0)
- .first()
- .then((row) => {
- if (!row.count) {
- // Create a new user and set password
- logger.info('Creating a new user: admin@example.com with password: changeme');
-
- let data = {
- is_deleted: 0,
- email: 'admin@example.com',
- name: 'Administrator',
- nickname: 'Admin',
- avatar: '',
- roles: ['admin'],
- };
-
- return userModel
- .query()
- .insertAndFetch(data)
- .then((user) => {
- return authModel
- .query()
- .insert({
- user_id: user.id,
- type: 'password',
- secret: 'changeme',
- meta: {},
- })
- .then(() => {
- return userPermissionModel.query().insert({
- user_id: user.id,
- visibility: 'all',
- proxy_hosts: 'manage',
- redirection_hosts: 'manage',
- dead_hosts: 'manage',
- streams: 'manage',
- access_lists: 'manage',
- certificates: 'manage',
- });
- });
- })
- .then(() => {
- logger.info('Initial admin setup completed');
- });
- } else if (debug_mode) {
- logger.debug('Admin user setup not required');
- }
- });
-};
-
-/**
- * Creates default settings if they don't already exist in the database
- *
- * @returns {Promise}
- */
-const setupDefaultSettings = () => {
- return settingModel
- .query()
- .select(settingModel.raw('COUNT(`id`) as `count`'))
- .where({id: 'default-site'})
- .first()
- .then((row) => {
- if (!row.count) {
- settingModel
- .query()
- .insert({
- id: 'default-site',
- name: 'Default Site',
- description: 'What to show when Nginx is hit with an unknown Host',
- value: 'congratulations',
- meta: {},
- })
- .then(() => {
- logger.info('Default settings added');
- });
- }
- if (debug_mode) {
- logger.debug('Default setting setup not required');
- }
- });
-};
-
-/**
- * Installs all Certbot plugins which are required for an installed certificate
- *
- * @returns {Promise}
- */
-const setupCertbotPlugins = () => {
- return certificateModel
- .query()
- .where('is_deleted', 0)
- .andWhere('provider', 'letsencrypt')
- .then((certificates) => {
- if (certificates && certificates.length) {
- let plugins = [];
- let promises = [];
-
- certificates.map(function (certificate) {
- if (certificate.meta && certificate.meta.dns_challenge === true) {
- const dns_plugin = dns_plugins[certificate.meta.dns_provider];
- const packages_to_install = `${dns_plugin.package_name}${dns_plugin.version_requirement || ''} ${dns_plugin.dependencies}`;
-
- if (plugins.indexOf(packages_to_install) === -1) plugins.push(packages_to_install);
-
- // Make sure credentials file exists
- const credentials_loc = '/etc/letsencrypt/credentials/credentials-' + certificate.id;
- // Escape single quotes and backslashes
- const escapedCredentials = certificate.meta.dns_provider_credentials.replaceAll('\'', '\\\'').replaceAll('\\', '\\\\');
- const credentials_cmd = '[ -f \'' + credentials_loc + '\' ] || { mkdir -p /etc/letsencrypt/credentials 2> /dev/null; echo \'' + escapedCredentials + '\' > \'' + credentials_loc + '\' && chmod 600 \'' + credentials_loc + '\'; }';
- promises.push(utils.exec(credentials_cmd));
- }
- });
-
- if (plugins.length) {
- const install_cmd = 'pip install ' + plugins.join(' ');
- promises.push(utils.exec(install_cmd));
- }
-
- if (promises.length) {
- return Promise.all(promises)
- .then(() => {
- logger.info('Added Certbot plugins ' + plugins.join(', '));
- });
- }
- }
- });
-};
-
-
-/**
- * Starts a timer to call run the logrotation binary every two days
- * @returns {Promise}
- */
-const setupLogrotation = () => {
- const intervalTimeout = 1000 * 60 * 60 * 24 * 2; // 2 days
-
- const runLogrotate = async () => {
- try {
- await utils.exec('logrotate /etc/logrotate.d/nginx-proxy-manager');
- logger.info('Logrotate completed.');
- } catch (e) { logger.warn(e); }
- };
-
- logger.info('Logrotate Timer initialized');
- setInterval(runLogrotate, intervalTimeout);
- // And do this now as well
- return runLogrotate();
-};
-
-module.exports = function () {
- return setupJwt()
- .then(setupDefaultUser)
- .then(setupDefaultSettings)
- .then(setupCertbotPlugins)
- .then(setupLogrotation);
-};
diff --git a/backend/templates/_assets.conf b/backend/templates/_assets.conf
deleted file mode 100644
index dcb183c55..000000000
--- a/backend/templates/_assets.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-{% if caching_enabled == 1 or caching_enabled == true -%}
- # Asset Caching
- include conf.d/include/assets.conf;
-{% endif %}
\ No newline at end of file
diff --git a/backend/templates/_certificates.conf b/backend/templates/_certificates.conf
deleted file mode 100644
index 06ca7bb87..000000000
--- a/backend/templates/_certificates.conf
+++ /dev/null
@@ -1,14 +0,0 @@
-{% if certificate and certificate_id > 0 -%}
-{% if certificate.provider == "letsencrypt" %}
- # Let's Encrypt SSL
- include conf.d/include/letsencrypt-acme-challenge.conf;
- include conf.d/include/ssl-ciphers.conf;
- ssl_certificate /etc/letsencrypt/live/npm-{{ certificate_id }}/fullchain.pem;
- ssl_certificate_key /etc/letsencrypt/live/npm-{{ certificate_id }}/privkey.pem;
-{% else %}
- # Custom SSL
- ssl_certificate /data/custom_ssl/npm-{{ certificate_id }}/fullchain.pem;
- ssl_certificate_key /data/custom_ssl/npm-{{ certificate_id }}/privkey.pem;
-{% endif %}
-{% endif %}
-
diff --git a/backend/templates/_exploits.conf b/backend/templates/_exploits.conf
deleted file mode 100644
index 002970d59..000000000
--- a/backend/templates/_exploits.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-{% if block_exploits == 1 or block_exploits == true %}
- # Block Exploits
- include conf.d/include/block-exploits.conf;
-{% endif %}
\ No newline at end of file
diff --git a/backend/templates/_forced_ssl.conf b/backend/templates/_forced_ssl.conf
deleted file mode 100644
index 7fade20ca..000000000
--- a/backend/templates/_forced_ssl.conf
+++ /dev/null
@@ -1,6 +0,0 @@
-{% if certificate and certificate_id > 0 -%}
-{% if ssl_forced == 1 or ssl_forced == true %}
- # Force SSL
- include conf.d/include/force-ssl.conf;
-{% endif %}
-{% endif %}
\ No newline at end of file
diff --git a/backend/templates/_header_comment.conf b/backend/templates/_header_comment.conf
deleted file mode 100644
index 8f996d34f..000000000
--- a/backend/templates/_header_comment.conf
+++ /dev/null
@@ -1,3 +0,0 @@
-# ------------------------------------------------------------
-# {{ domain_names | join: ", " }}
-# ------------------------------------------------------------
\ No newline at end of file
diff --git a/backend/templates/_hsts.conf b/backend/templates/_hsts.conf
deleted file mode 100644
index 11aecf24c..000000000
--- a/backend/templates/_hsts.conf
+++ /dev/null
@@ -1,8 +0,0 @@
-{% if certificate and certificate_id > 0 -%}
-{% if ssl_forced == 1 or ssl_forced == true %}
-{% if hsts_enabled == 1 or hsts_enabled == true %}
- # HSTS (ngx_http_headers_module is required) (63072000 seconds = 2 years)
- add_header Strict-Transport-Security "max-age=63072000;{% if hsts_subdomains == 1 or hsts_subdomains == true -%} includeSubDomains;{% endif %} preload" always;
-{% endif %}
-{% endif %}
-{% endif %}
diff --git a/backend/templates/_listen.conf b/backend/templates/_listen.conf
deleted file mode 100644
index 730f3a7c4..000000000
--- a/backend/templates/_listen.conf
+++ /dev/null
@@ -1,15 +0,0 @@
- listen 80;
-{% if ipv6 -%}
- listen [::]:80;
-{% else -%}
- #listen [::]:80;
-{% endif %}
-{% if certificate -%}
- listen 443 ssl{% if http2_support %} http2{% endif %};
-{% if ipv6 -%}
- listen [::]:443 ssl{% if http2_support %} http2{% endif %};
-{% else -%}
- #listen [::]:443;
-{% endif %}
-{% endif %}
- server_name {{ domain_names | join: " " }};
diff --git a/backend/templates/_location.conf b/backend/templates/_location.conf
deleted file mode 100644
index 5a7a6abeb..000000000
--- a/backend/templates/_location.conf
+++ /dev/null
@@ -1,45 +0,0 @@
- location {{ path }} {
- proxy_set_header Host $host;
- proxy_set_header X-Forwarded-Scheme $scheme;
- proxy_set_header X-Forwarded-Proto $scheme;
- proxy_set_header X-Forwarded-For $remote_addr;
- proxy_set_header X-Real-IP $remote_addr;
- proxy_pass {{ forward_scheme }}://{{ forward_host }}:{{ forward_port }}{{ forward_path }};
-
- {% if access_list_id > 0 %}
- {% if access_list.items.length > 0 %}
- # Authorization
- auth_basic "Authorization required";
- auth_basic_user_file /data/access/{{ access_list_id }};
-
- {{ access_list.passauth }}
- {% endif %}
-
- # Access Rules
- {% for client in access_list.clients %}
- {{- client.rule -}};
- {% endfor %}deny all;
-
- # Access checks must...
- {% if access_list.satisfy %}
- {{ access_list.satisfy }};
- {% endif %}
-
- {% endif %}
-
- {% include "_assets.conf" %}
- {% include "_exploits.conf" %}
-
- {% include "_forced_ssl.conf" %}
- {% include "_hsts.conf" %}
-
- {% if allow_websocket_upgrade == 1 or allow_websocket_upgrade == true %}
- proxy_set_header Upgrade $http_upgrade;
- proxy_set_header Connection $http_connection;
- proxy_http_version 1.1;
- {% endif %}
-
-
- {{ advanced_config }}
- }
-
diff --git a/backend/templates/dead_host.conf b/backend/templates/dead_host.conf
deleted file mode 100644
index d94dff57a..000000000
--- a/backend/templates/dead_host.conf
+++ /dev/null
@@ -1,23 +0,0 @@
-{% include "_header_comment.conf" %}
-
-{% if enabled %}
-server {
-{% include "_listen.conf" %}
-{% include "_certificates.conf" %}
-{% include "_hsts.conf" %}
-{% include "_forced_ssl.conf" %}
-
- access_log /data/logs/dead-host-{{ id }}_access.log standard;
- error_log /data/logs/dead-host-{{ id }}_error.log warn;
-
-{{ advanced_config }}
-
-{% if use_default_location %}
- location / {
-{% include "_hsts.conf" %}
- return 404;
- }
-{% endif %}
-
-}
-{% endif %}
diff --git a/backend/templates/default.conf b/backend/templates/default.conf
deleted file mode 100644
index ec68530ca..000000000
--- a/backend/templates/default.conf
+++ /dev/null
@@ -1,40 +0,0 @@
-# ------------------------------------------------------------
-# Default Site
-# ------------------------------------------------------------
-{% if value == "congratulations" %}
-# Skipping output, congratulations page configration is baked in.
-{%- else %}
-server {
- listen 80 default;
-{% if ipv6 -%}
- listen [::]:80 default;
-{% else -%}
- #listen [::]:80 default;
-{% endif %}
- server_name default-host.localhost;
- access_log /data/logs/default-host_access.log combined;
- error_log /data/logs/default-host_error.log warn;
-{% include "_exploits.conf" %}
-
- include conf.d/include/letsencrypt-acme-challenge.conf;
-
-{%- if value == "404" %}
- location / {
- return 404;
- }
-{% endif %}
-
-{%- if value == "redirect" %}
- location / {
- return 301 {{ meta.redirect }};
- }
-{%- endif %}
-
-{%- if value == "html" %}
- root /data/nginx/default_www;
- location / {
- try_files $uri /index.html;
- }
-{%- endif %}
-}
-{% endif %}
diff --git a/backend/templates/ip_ranges.conf b/backend/templates/ip_ranges.conf
deleted file mode 100644
index 8ede2bd99..000000000
--- a/backend/templates/ip_ranges.conf
+++ /dev/null
@@ -1,3 +0,0 @@
-{% for range in ip_ranges %}
-set_real_ip_from {{ range }};
-{% endfor %}
\ No newline at end of file
diff --git a/backend/templates/letsencrypt-request.conf b/backend/templates/letsencrypt-request.conf
deleted file mode 100644
index 676c8a60f..000000000
--- a/backend/templates/letsencrypt-request.conf
+++ /dev/null
@@ -1,19 +0,0 @@
-{% include "_header_comment.conf" %}
-
-server {
- listen 80;
-{% if ipv6 -%}
- listen [::]:80;
-{% endif %}
-
- server_name {{ domain_names | join: " " }};
-
- access_log /data/logs/letsencrypt-requests_access.log standard;
- error_log /data/logs/letsencrypt-requests_error.log warn;
-
- include conf.d/include/letsencrypt-acme-challenge.conf;
-
- location / {
- return 404;
- }
-}
diff --git a/backend/templates/proxy_host.conf b/backend/templates/proxy_host.conf
deleted file mode 100644
index ec30cca0d..000000000
--- a/backend/templates/proxy_host.conf
+++ /dev/null
@@ -1,70 +0,0 @@
-{% include "_header_comment.conf" %}
-
-{% if enabled %}
-server {
- set $forward_scheme {{ forward_scheme }};
- set $server "{{ forward_host }}";
- set $port {{ forward_port }};
-
-{% include "_listen.conf" %}
-{% include "_certificates.conf" %}
-{% include "_assets.conf" %}
-{% include "_exploits.conf" %}
-{% include "_hsts.conf" %}
-{% include "_forced_ssl.conf" %}
-
-{% if allow_websocket_upgrade == 1 or allow_websocket_upgrade == true %}
-proxy_set_header Upgrade $http_upgrade;
-proxy_set_header Connection $http_connection;
-proxy_http_version 1.1;
-{% endif %}
-
- access_log /data/logs/proxy-host-{{ id }}_access.log proxy;
- error_log /data/logs/proxy-host-{{ id }}_error.log warn;
-
-{{ advanced_config }}
-
-{{ locations }}
-
-{% if use_default_location %}
-
- location / {
-
- {% if access_list_id > 0 %}
- {% if access_list.items.length > 0 %}
- # Authorization
- auth_basic "Authorization required";
- auth_basic_user_file /data/access/{{ access_list_id }};
-
- {{ access_list.passauth }}
- {% endif %}
-
- # Access Rules
- {% for client in access_list.clients %}
- {{- client.rule -}};
- {% endfor %}deny all;
-
- # Access checks must...
- {% if access_list.satisfy %}
- {{ access_list.satisfy }};
- {% endif %}
-
- {% endif %}
-
-{% include "_hsts.conf" %}
-
- {% if allow_websocket_upgrade == 1 or allow_websocket_upgrade == true %}
- proxy_set_header Upgrade $http_upgrade;
- proxy_set_header Connection $http_connection;
- proxy_http_version 1.1;
- {% endif %}
-
- # Proxy!
- include conf.d/include/proxy.conf;
- }
-{% endif %}
-
- # Custom
- include /data/nginx/custom/server_proxy[.]conf;
-}
-{% endif %}
diff --git a/backend/templates/redirection_host.conf b/backend/templates/redirection_host.conf
deleted file mode 100644
index 339fe72ee..000000000
--- a/backend/templates/redirection_host.conf
+++ /dev/null
@@ -1,32 +0,0 @@
-{% include "_header_comment.conf" %}
-
-{% if enabled %}
-server {
-{% include "_listen.conf" %}
-{% include "_certificates.conf" %}
-{% include "_assets.conf" %}
-{% include "_exploits.conf" %}
-{% include "_hsts.conf" %}
-{% include "_forced_ssl.conf" %}
-
- access_log /data/logs/redirection-host-{{ id }}_access.log standard;
- error_log /data/logs/redirection-host-{{ id }}_error.log warn;
-
-{{ advanced_config }}
-
-{% if use_default_location %}
- location / {
-{% include "_hsts.conf" %}
-
- {% if preserve_path == 1 or preserve_path == true %}
- return {{ forward_http_code }} {{ forward_scheme }}://{{ forward_domain_name }}$request_uri;
- {% else %}
- return {{ forward_http_code }} {{ forward_scheme }}://{{ forward_domain_name }};
- {% endif %}
- }
-{% endif %}
-
- # Custom
- include /data/nginx/custom/server_redirect[.]conf;
-}
-{% endif %}
diff --git a/backend/templates/stream.conf b/backend/templates/stream.conf
deleted file mode 100644
index 76159a646..000000000
--- a/backend/templates/stream.conf
+++ /dev/null
@@ -1,37 +0,0 @@
-# ------------------------------------------------------------
-# {{ incoming_port }} TCP: {{ tcp_forwarding }} UDP: {{ udp_forwarding }}
-# ------------------------------------------------------------
-
-{% if enabled %}
-{% if tcp_forwarding == 1 or tcp_forwarding == true -%}
-server {
- listen {{ incoming_port }};
-{% if ipv6 -%}
- listen [::]:{{ incoming_port }};
-{% else -%}
- #listen [::]:{{ incoming_port }};
-{% endif %}
-
- proxy_pass {{ forwarding_host }}:{{ forwarding_port }};
-
- # Custom
- include /data/nginx/custom/server_stream[.]conf;
- include /data/nginx/custom/server_stream_tcp[.]conf;
-}
-{% endif %}
-{% if udp_forwarding == 1 or udp_forwarding == true %}
-server {
- listen {{ incoming_port }} udp;
-{% if ipv6 -%}
- listen [::]:{{ incoming_port }} udp;
-{% else -%}
- #listen [::]:{{ incoming_port }} udp;
-{% endif %}
- proxy_pass {{ forwarding_host }}:{{ forwarding_port }};
-
- # Custom
- include /data/nginx/custom/server_stream[.]conf;
- include /data/nginx/custom/server_stream_udp[.]conf;
-}
-{% endif %}
-{% endif %}
\ No newline at end of file
diff --git a/backend/yarn.lock b/backend/yarn.lock
deleted file mode 100644
index 968831827..000000000
--- a/backend/yarn.lock
+++ /dev/null
@@ -1,3754 +0,0 @@
-# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
-# yarn lockfile v1
-
-
-"@apidevtools/json-schema-ref-parser@8.0.0":
- version "8.0.0"
- resolved "https://registry.yarnpkg.com/@apidevtools/json-schema-ref-parser/-/json-schema-ref-parser-8.0.0.tgz#9eb749499b3f8d919e90bb141e4b6f67aee4692d"
- integrity sha512-n4YBtwQhdpLto1BaUCyAeflizmIbaloGShsPyRtFf5qdFJxfssj+GgLavczgKJFa3Bq+3St2CKcpRJdjtB4EBw==
- dependencies:
- "@jsdevtools/ono" "^7.1.0"
- call-me-maybe "^1.0.1"
- js-yaml "^3.13.1"
-
-"@babel/code-frame@^7.0.0":
- version "7.10.4"
- resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.10.4.tgz#168da1a36e90da68ae8d49c0f1b48c7c6249213a"
- integrity sha512-vG6SvB6oYEhvgisZNFRmRCUkLz11c7rp+tbNTynGqc6mS1d5ATd/sGyV6W0KZZnXRKMTzZDRgQT3Ou9jhpAfUg==
- dependencies:
- "@babel/highlight" "^7.10.4"
-
-"@babel/helper-validator-identifier@^7.10.4":
- version "7.10.4"
- resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.10.4.tgz#a78c7a7251e01f616512d31b10adcf52ada5e0d2"
- integrity sha512-3U9y+43hz7ZM+rzG24Qe2mufW5KhvFg/NhnNph+i9mgCtdTCtMJuI1TMkrIUiK7Ix4PYlRF9I5dhqaLYA/ADXw==
-
-"@babel/highlight@^7.10.4":
- version "7.10.4"
- resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.10.4.tgz#7d1bdfd65753538fabe6c38596cdb76d9ac60143"
- integrity sha512-i6rgnR/YgPEQzZZnbTHHuZdlE8qyoBNalD6F+q4vAFlcMEcqmkoG+mPqJYJCo63qPf74+Y1UZsl3l6f7/RIkmA==
- dependencies:
- "@babel/helper-validator-identifier" "^7.10.4"
- chalk "^2.0.0"
- js-tokens "^4.0.0"
-
-"@jsdevtools/ono@^7.1.0":
- version "7.1.3"
- resolved "https://registry.yarnpkg.com/@jsdevtools/ono/-/ono-7.1.3.tgz#9df03bbd7c696a5c58885c34aa06da41c8543796"
- integrity sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg==
-
-"@sindresorhus/is@^0.14.0":
- version "0.14.0"
- resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-0.14.0.tgz#9fb3a3cf3132328151f353de4632e01e52102bea"
- integrity sha512-9NET910DNaIPngYnLLPeg+Ogzqsi9uM4mSboU5y6p8S5DzMTVEsJZrawi+BoDNUVBa2DhJqQYUFvMDfgU062LQ==
-
-"@szmarczak/http-timer@^1.1.2":
- version "1.1.2"
- resolved "https://registry.yarnpkg.com/@szmarczak/http-timer/-/http-timer-1.1.2.tgz#b1665e2c461a2cd92f4c1bbf50d5454de0d4b421"
- integrity sha512-XIB2XbzHTN6ieIjfIMV9hlVcfPU26s2vafYWQcZHWXHOxiaRZYEDKEwdl129Zyg50+foYV2jCgtrqSA6qNuNSA==
- dependencies:
- defer-to-connect "^1.0.1"
-
-"@types/color-name@^1.1.1":
- version "1.1.1"
- resolved "https://registry.yarnpkg.com/@types/color-name/-/color-name-1.1.1.tgz#1c1261bbeaa10a8055bbc5d8ab84b7b2afc846a0"
- integrity sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ==
-
-abbrev@1:
- version "1.1.1"
- resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8"
- integrity sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==
-
-accepts@~1.3.5, accepts@~1.3.7:
- version "1.3.7"
- resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.7.tgz#531bc726517a3b2b41f850021c6cc15eaab507cd"
- integrity sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA==
- dependencies:
- mime-types "~2.1.24"
- negotiator "0.6.2"
-
-acorn-jsx@^5.2.0:
- version "5.2.0"
- resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.2.0.tgz#4c66069173d6fdd68ed85239fc256226182b2ebe"
- integrity sha512-HiUX/+K2YpkpJ+SzBffkM/AQ2YE03S0U1kjTLVpoJdhZMOWy8qvXVN9JdLqv2QsaQ6MPYQIuNmwD8zOiYUofLQ==
-
-acorn@^7.1.1:
- version "7.4.0"
- resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.0.tgz#e1ad486e6c54501634c6c397c5c121daa383607c"
- integrity sha512-+G7P8jJmCHr+S+cLfQxygbWhXy+8YTVGzAkpEbcLo2mLoL7tij/VG41QSHACSf5QgYRhMZYHuNc6drJaO0Da+w==
-
-ajv@^6.10.0, ajv@^6.10.2, ajv@^6.12.0, ajv@^6.12.6:
- version "6.12.6"
- resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4"
- integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==
- dependencies:
- fast-deep-equal "^3.1.1"
- fast-json-stable-stringify "^2.0.0"
- json-schema-traverse "^0.4.1"
- uri-js "^4.2.2"
-
-ansi-align@^3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/ansi-align/-/ansi-align-3.0.0.tgz#b536b371cf687caaef236c18d3e21fe3797467cb"
- integrity sha512-ZpClVKqXN3RGBmKibdfWzqCY4lnjEuoNzU5T0oEFpfd/z5qJHVarukridD4juLO2FXMiwUQxr9WqQtaYa8XRYw==
- dependencies:
- string-width "^3.0.0"
-
-ansi-escapes@^4.2.1:
- version "4.3.1"
- resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.1.tgz#a5c47cc43181f1f38ffd7076837700d395522a61"
- integrity sha512-JWF7ocqNrp8u9oqpgV+wH5ftbt+cfvv+PTjOvKLT3AdYly/LmORARfEVT1iyjwN+4MqE5UmVKoAdIBqeoCHgLA==
- dependencies:
- type-fest "^0.11.0"
-
-ansi-regex@^2.0.0:
- version "2.1.1"
- resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df"
- integrity sha1-w7M6te42DYbg5ijwRorn7yfWVN8=
-
-ansi-regex@^3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998"
- integrity sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=
-
-ansi-regex@^4.1.0:
- version "4.1.0"
- resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.1.0.tgz#8b9f8f08cf1acb843756a839ca8c7e3168c51997"
- integrity sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==
-
-ansi-regex@^5.0.0:
- version "5.0.0"
- resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.0.tgz#388539f55179bf39339c81af30a654d69f87cb75"
- integrity sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==
-
-ansi-styles@^3.2.0, ansi-styles@^3.2.1:
- version "3.2.1"
- resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d"
- integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==
- dependencies:
- color-convert "^1.9.0"
-
-ansi-styles@^4.0.0, ansi-styles@^4.1.0:
- version "4.2.1"
- resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.2.1.tgz#90ae75c424d008d2624c5bf29ead3177ebfcf359"
- integrity sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==
- dependencies:
- "@types/color-name" "^1.1.1"
- color-convert "^2.0.1"
-
-anymatch@~3.1.1:
- version "3.1.1"
- resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.1.tgz#c55ecf02185e2469259399310c173ce31233b142"
- integrity sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg==
- dependencies:
- normalize-path "^3.0.0"
- picomatch "^2.0.4"
-
-aproba@^1.0.3:
- version "1.2.0"
- resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a"
- integrity sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==
-
-archiver-utils@^2.1.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/archiver-utils/-/archiver-utils-2.1.0.tgz#e8a460e94b693c3e3da182a098ca6285ba9249e2"
- integrity sha512-bEL/yUb/fNNiNTuUz979Z0Yg5L+LzLxGJz8x79lYmR54fmTIb6ob/hNQgkQnIUDWIFjZVQwl9Xs356I6BAMHfw==
- dependencies:
- glob "^7.1.4"
- graceful-fs "^4.2.0"
- lazystream "^1.0.0"
- lodash.defaults "^4.2.0"
- lodash.difference "^4.5.0"
- lodash.flatten "^4.4.0"
- lodash.isplainobject "^4.0.6"
- lodash.union "^4.6.0"
- normalize-path "^3.0.0"
- readable-stream "^2.0.0"
-
-archiver@^5.3.0:
- version "5.3.0"
- resolved "https://registry.yarnpkg.com/archiver/-/archiver-5.3.0.tgz#dd3e097624481741df626267564f7dd8640a45ba"
- integrity sha512-iUw+oDwK0fgNpvveEsdQ0Ase6IIKztBJU2U0E9MzszMfmVVUyv1QJhS2ITW9ZCqx8dktAxVAjWWkKehuZE8OPg==
- dependencies:
- archiver-utils "^2.1.0"
- async "^3.2.0"
- buffer-crc32 "^0.2.1"
- readable-stream "^3.6.0"
- readdir-glob "^1.0.0"
- tar-stream "^2.2.0"
- zip-stream "^4.1.0"
-
-are-we-there-yet@~1.1.2:
- version "1.1.5"
- resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz#4b35c2944f062a8bfcda66410760350fe9ddfc21"
- integrity sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w==
- dependencies:
- delegates "^1.0.0"
- readable-stream "^2.0.6"
-
-argparse@^1.0.7:
- version "1.0.10"
- resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911"
- integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==
- dependencies:
- sprintf-js "~1.0.2"
-
-arr-diff@^4.0.0:
- version "4.0.0"
- resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520"
- integrity sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA=
-
-arr-flatten@^1.1.0:
- version "1.1.0"
- resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1"
- integrity sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg==
-
-arr-union@^3.1.0:
- version "3.1.0"
- resolved "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4"
- integrity sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ=
-
-array-each@^1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/array-each/-/array-each-1.0.1.tgz#a794af0c05ab1752846ee753a1f211a05ba0c44f"
- integrity sha1-p5SvDAWrF1KEbudTofIRoFugxE8=
-
-array-flatten@1.1.1:
- version "1.1.1"
- resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2"
- integrity sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=
-
-array-slice@^1.0.0:
- version "1.1.0"
- resolved "https://registry.yarnpkg.com/array-slice/-/array-slice-1.1.0.tgz#e368ea15f89bc7069f7ffb89aec3a6c7d4ac22d4"
- integrity sha512-B1qMD3RBP7O8o0H2KbrXDyB0IccejMF15+87Lvlor12ONPRHP6gTjXMNkt/d3ZuOGbAe66hFmaCfECI24Ufp6w==
-
-array-unique@^0.3.2:
- version "0.3.2"
- resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428"
- integrity sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg=
-
-asn1@^0.2.4:
- version "0.2.4"
- resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.4.tgz#8d2475dfab553bb33e77b54e59e880bb8ce23136"
- integrity sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==
- dependencies:
- safer-buffer "~2.1.0"
-
-assign-symbols@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367"
- integrity sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c=
-
-astral-regex@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-1.0.0.tgz#6c8c3fb827dd43ee3918f27b82782ab7658a6fd9"
- integrity sha512-+Ryf6g3BKoRc7jfp7ad8tM4TtMiaWvbF/1/sQcZPkkS7ag3D5nMBCe2UfOTONtAkaG0tO0ij3C5Lwmf1EiyjHg==
-
-async@^3.2.0:
- version "3.2.1"
- resolved "https://registry.yarnpkg.com/async/-/async-3.2.1.tgz#d3274ec66d107a47476a4c49136aacdb00665fc8"
- integrity sha512-XdD5lRO/87udXCMC9meWdYiR+Nq6ZjUfXidViUZGu2F1MO4T3XwZ1et0hb2++BgLfhyJwy44BGB/yx80ABx8hg==
-
-atob@^2.1.2:
- version "2.1.2"
- resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9"
- integrity sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg==
-
-balanced-match@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767"
- integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c=
-
-base64-js@^1.3.1:
- version "1.5.1"
- resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a"
- integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==
-
-base@^0.11.1:
- version "0.11.2"
- resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f"
- integrity sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg==
- dependencies:
- cache-base "^1.0.1"
- class-utils "^0.3.5"
- component-emitter "^1.2.1"
- define-property "^1.0.0"
- isobject "^3.0.1"
- mixin-deep "^1.2.0"
- pascalcase "^0.1.1"
-
-batchflow@^0.4.0:
- version "0.4.0"
- resolved "https://registry.yarnpkg.com/batchflow/-/batchflow-0.4.0.tgz#7d419df79b6b7587b06f9ea34f96ccef6f74e5b5"
- integrity sha1-fUGd95trdYewb56jT5bM72905bU=
-
-bcrypt@^5.0.0:
- version "5.0.0"
- resolved "https://registry.yarnpkg.com/bcrypt/-/bcrypt-5.0.0.tgz#051407c7cd5ffbfb773d541ca3760ea0754e37e2"
- integrity sha512-jB0yCBl4W/kVHM2whjfyqnxTmOHkCX4kHEa5nYKSoGeYe8YrjTYTc87/6bwt1g8cmV0QrbhKriETg9jWtcREhg==
- dependencies:
- node-addon-api "^3.0.0"
- node-pre-gyp "0.15.0"
-
-bignumber.js@9.0.0:
- version "9.0.0"
- resolved "https://registry.yarnpkg.com/bignumber.js/-/bignumber.js-9.0.0.tgz#805880f84a329b5eac6e7cb6f8274b6d82bdf075"
- integrity sha512-t/OYhhJ2SD+YGBQcjY8GzzDHEk9f3nerxjtfa6tlMXfe7frs/WozhvCNoGvpM0P3bNf3Gq5ZRMlGr5f3r4/N8A==
-
-binary-extensions@^2.0.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.1.0.tgz#30fa40c9e7fe07dbc895678cd287024dea241dd9"
- integrity sha512-1Yj8h9Q+QDF5FzhMs/c9+6UntbD5MkRfRwac8DoEm9ZfUBZ7tZ55YcGVAzEe4bXsdQHEk+s9S5wsOKVdZrw0tQ==
-
-bl@^4.0.3:
- version "4.1.0"
- resolved "https://registry.yarnpkg.com/bl/-/bl-4.1.0.tgz#451535264182bec2fbbc83a62ab98cf11d9f7b3a"
- integrity sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==
- dependencies:
- buffer "^5.5.0"
- inherits "^2.0.4"
- readable-stream "^3.4.0"
-
-blueimp-md5@^2.16.0:
- version "2.17.0"
- resolved "https://registry.yarnpkg.com/blueimp-md5/-/blueimp-md5-2.17.0.tgz#f4fcac088b115f7b4045f19f5da59e9d01b1bb96"
- integrity sha512-x5PKJHY5rHQYaADj6NwPUR2QRCUVSggPzrUKkeENpj871o9l9IefJbO2jkT5UvYykeOK9dx0VmkIo6dZ+vThYw==
-
-body-parser@1.19.0, body-parser@^1.19.0:
- version "1.19.0"
- resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.19.0.tgz#96b2709e57c9c4e09a6fd66a8fd979844f69f08a"
- integrity sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw==
- dependencies:
- bytes "3.1.0"
- content-type "~1.0.4"
- debug "2.6.9"
- depd "~1.1.2"
- http-errors "1.7.2"
- iconv-lite "0.4.24"
- on-finished "~2.3.0"
- qs "6.7.0"
- raw-body "2.4.0"
- type-is "~1.6.17"
-
-boxen@^4.2.0:
- version "4.2.0"
- resolved "https://registry.yarnpkg.com/boxen/-/boxen-4.2.0.tgz#e411b62357d6d6d36587c8ac3d5d974daa070e64"
- integrity sha512-eB4uT9RGzg2odpER62bBwSLvUeGC+WbRjjyyFhGsKnc8wp/m0+hQsMUvUe3H2V0D5vw0nBdO1hCJoZo5mKeuIQ==
- dependencies:
- ansi-align "^3.0.0"
- camelcase "^5.3.1"
- chalk "^3.0.0"
- cli-boxes "^2.2.0"
- string-width "^4.1.0"
- term-size "^2.1.0"
- type-fest "^0.8.1"
- widest-line "^3.1.0"
-
-brace-expansion@^1.1.7:
- version "1.1.11"
- resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd"
- integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==
- dependencies:
- balanced-match "^1.0.0"
- concat-map "0.0.1"
-
-braces@^2.3.1:
- version "2.3.2"
- resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729"
- integrity sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==
- dependencies:
- arr-flatten "^1.1.0"
- array-unique "^0.3.2"
- extend-shallow "^2.0.1"
- fill-range "^4.0.0"
- isobject "^3.0.1"
- repeat-element "^1.1.2"
- snapdragon "^0.8.1"
- snapdragon-node "^2.0.1"
- split-string "^3.0.2"
- to-regex "^3.0.1"
-
-braces@~3.0.2:
- version "3.0.2"
- resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107"
- integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==
- dependencies:
- fill-range "^7.0.1"
-
-buffer-crc32@^0.2.1, buffer-crc32@^0.2.13:
- version "0.2.13"
- resolved "https://registry.yarnpkg.com/buffer-crc32/-/buffer-crc32-0.2.13.tgz#0d333e3f00eac50aa1454abd30ef8c2a5d9a7242"
- integrity sha1-DTM+PwDqxQqhRUq9MO+MKl2ackI=
-
-buffer-equal-constant-time@1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz#f8e71132f7ffe6e01a5c9697a4c6f3e48d5cc819"
- integrity sha1-+OcRMvf/5uAaXJaXpMbz5I1cyBk=
-
-buffer@^5.5.0:
- version "5.7.1"
- resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0"
- integrity sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==
- dependencies:
- base64-js "^1.3.1"
- ieee754 "^1.1.13"
-
-busboy@^0.3.1:
- version "0.3.1"
- resolved "https://registry.yarnpkg.com/busboy/-/busboy-0.3.1.tgz#170899274c5bf38aae27d5c62b71268cd585fd1b"
- integrity sha512-y7tTxhGKXcyBxRKAni+awqx8uqaJKrSFSNFSeRG5CsWNdmy2BIK+6VGWEW7TZnIO/533mtMEA4rOevQV815YJw==
- dependencies:
- dicer "0.3.0"
-
-bytes@3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048"
- integrity sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg=
-
-bytes@3.1.0:
- version "3.1.0"
- resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.0.tgz#f6cf7933a360e0588fa9fde85651cdc7f805d1f6"
- integrity sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==
-
-cache-base@^1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2"
- integrity sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ==
- dependencies:
- collection-visit "^1.0.0"
- component-emitter "^1.2.1"
- get-value "^2.0.6"
- has-value "^1.0.0"
- isobject "^3.0.1"
- set-value "^2.0.0"
- to-object-path "^0.3.0"
- union-value "^1.0.0"
- unset-value "^1.0.0"
-
-cacheable-request@^6.0.0:
- version "6.1.0"
- resolved "https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-6.1.0.tgz#20ffb8bd162ba4be11e9567d823db651052ca912"
- integrity sha512-Oj3cAGPCqOZX7Rz64Uny2GYAZNliQSqfbePrgAQ1wKAihYmCUnraBtJtKcGR4xz7wF+LoJC+ssFZvv5BgF9Igg==
- dependencies:
- clone-response "^1.0.2"
- get-stream "^5.1.0"
- http-cache-semantics "^4.0.0"
- keyv "^3.0.0"
- lowercase-keys "^2.0.0"
- normalize-url "^4.1.0"
- responselike "^1.0.2"
-
-call-me-maybe@^1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/call-me-maybe/-/call-me-maybe-1.0.1.tgz#26d208ea89e37b5cbde60250a15f031c16a4d66b"
- integrity sha1-JtII6onje1y95gJQoV8DHBak1ms=
-
-callsites@^3.0.0:
- version "3.1.0"
- resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73"
- integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==
-
-camelcase@^5.0.0, camelcase@^5.3.1:
- version "5.3.1"
- resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320"
- integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==
-
-chalk@^2.0.0, chalk@^2.1.0, chalk@^2.3.2:
- version "2.4.2"
- resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424"
- integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==
- dependencies:
- ansi-styles "^3.2.1"
- escape-string-regexp "^1.0.5"
- supports-color "^5.3.0"
-
-chalk@^3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/chalk/-/chalk-3.0.0.tgz#3f73c2bf526591f574cc492c51e2456349f844e4"
- integrity sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==
- dependencies:
- ansi-styles "^4.1.0"
- supports-color "^7.1.0"
-
-chalk@^4.1.0:
- version "4.1.0"
- resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.0.tgz#4e14870a618d9e2edd97dd8345fd9d9dc315646a"
- integrity sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==
- dependencies:
- ansi-styles "^4.1.0"
- supports-color "^7.1.0"
-
-chardet@^0.7.0:
- version "0.7.0"
- resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.7.0.tgz#90094849f0937f2eedc2425d0d28a9e5f0cbad9e"
- integrity sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==
-
-chokidar@^3.2.2:
- version "3.4.1"
- resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.4.1.tgz#e905bdecf10eaa0a0b1db0c664481cc4cbc22ba1"
- integrity sha512-TQTJyr2stihpC4Sya9hs2Xh+O2wf+igjL36Y75xx2WdHuiICcn/XJza46Jwt0eT5hVpQOzo3FpY3cj3RVYLX0g==
- dependencies:
- anymatch "~3.1.1"
- braces "~3.0.2"
- glob-parent "~5.1.0"
- is-binary-path "~2.1.0"
- is-glob "~4.0.1"
- normalize-path "~3.0.0"
- readdirp "~3.4.0"
- optionalDependencies:
- fsevents "~2.1.2"
-
-chownr@^1.1.4:
- version "1.1.4"
- resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b"
- integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==
-
-ci-info@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-2.0.0.tgz#67a9e964be31a51e15e5010d58e6f12834002f46"
- integrity sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==
-
-class-utils@^0.3.5:
- version "0.3.6"
- resolved "https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463"
- integrity sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg==
- dependencies:
- arr-union "^3.1.0"
- define-property "^0.2.5"
- isobject "^3.0.0"
- static-extend "^0.1.1"
-
-cli-boxes@^2.2.0:
- version "2.2.0"
- resolved "https://registry.yarnpkg.com/cli-boxes/-/cli-boxes-2.2.0.tgz#538ecae8f9c6ca508e3c3c95b453fe93cb4c168d"
- integrity sha512-gpaBrMAizVEANOpfZp/EEUixTXDyGt7DFzdK5hU+UbWt/J0lB0w20ncZj59Z9a93xHb9u12zF5BS6i9RKbtg4w==
-
-cli-cursor@^3.1.0:
- version "3.1.0"
- resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-3.1.0.tgz#264305a7ae490d1d03bf0c9ba7c925d1753af307"
- integrity sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==
- dependencies:
- restore-cursor "^3.1.0"
-
-cli-width@^3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-3.0.0.tgz#a2f48437a2caa9a22436e794bf071ec9e61cedf6"
- integrity sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw==
-
-cliui@^6.0.0:
- version "6.0.0"
- resolved "https://registry.yarnpkg.com/cliui/-/cliui-6.0.0.tgz#511d702c0c4e41ca156d7d0e96021f23e13225b1"
- integrity sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==
- dependencies:
- string-width "^4.2.0"
- strip-ansi "^6.0.0"
- wrap-ansi "^6.2.0"
-
-clone-response@^1.0.2:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/clone-response/-/clone-response-1.0.2.tgz#d1dc973920314df67fbeb94223b4ee350239e96b"
- integrity sha1-0dyXOSAxTfZ/vrlCI7TuNQI56Ws=
- dependencies:
- mimic-response "^1.0.0"
-
-code-point-at@^1.0.0:
- version "1.1.0"
- resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77"
- integrity sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=
-
-collection-visit@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0"
- integrity sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA=
- dependencies:
- map-visit "^1.0.0"
- object-visit "^1.0.0"
-
-color-convert@^1.9.0:
- version "1.9.3"
- resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8"
- integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==
- dependencies:
- color-name "1.1.3"
-
-color-convert@^2.0.1:
- version "2.0.1"
- resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3"
- integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==
- dependencies:
- color-name "~1.1.4"
-
-color-name@1.1.3:
- version "1.1.3"
- resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25"
- integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=
-
-color-name@~1.1.4:
- version "1.1.4"
- resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2"
- integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==
-
-colorette@1.1.0:
- version "1.1.0"
- resolved "https://registry.yarnpkg.com/colorette/-/colorette-1.1.0.tgz#1f943e5a357fac10b4e0f5aaef3b14cdc1af6ec7"
- integrity sha512-6S062WDQUXi6hOfkO/sBPVwE5ASXY4G2+b4atvhJfSsuUUhIaUKlkjLe9692Ipyt5/a+IPF5aVTu3V5gvXq5cg==
-
-commander@^4.1.1:
- version "4.1.1"
- resolved "https://registry.yarnpkg.com/commander/-/commander-4.1.1.tgz#9fd602bd936294e9e9ef46a3f4d6964044b18068"
- integrity sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==
-
-component-emitter@^1.2.1:
- version "1.3.0"
- resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0"
- integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==
-
-compress-commons@^4.1.0:
- version "4.1.1"
- resolved "https://registry.yarnpkg.com/compress-commons/-/compress-commons-4.1.1.tgz#df2a09a7ed17447642bad10a85cc9a19e5c42a7d"
- integrity sha512-QLdDLCKNV2dtoTorqgxngQCMA+gWXkM/Nwu7FpeBhk/RdkzimqC3jueb/FDmaZeXh+uby1jkBqE3xArsLBE5wQ==
- dependencies:
- buffer-crc32 "^0.2.13"
- crc32-stream "^4.0.2"
- normalize-path "^3.0.0"
- readable-stream "^3.6.0"
-
-compressible@~2.0.16:
- version "2.0.18"
- resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.18.tgz#af53cca6b070d4c3c0750fbd77286a6d7cc46fba"
- integrity sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==
- dependencies:
- mime-db ">= 1.43.0 < 2"
-
-compression@^1.7.4:
- version "1.7.4"
- resolved "https://registry.yarnpkg.com/compression/-/compression-1.7.4.tgz#95523eff170ca57c29a0ca41e6fe131f41e5bb8f"
- integrity sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ==
- dependencies:
- accepts "~1.3.5"
- bytes "3.0.0"
- compressible "~2.0.16"
- debug "2.6.9"
- on-headers "~1.0.2"
- safe-buffer "5.1.2"
- vary "~1.1.2"
-
-concat-map@0.0.1:
- version "0.0.1"
- resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b"
- integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=
-
-config@^3.3.1:
- version "3.3.1"
- resolved "https://registry.yarnpkg.com/config/-/config-3.3.1.tgz#b6a70e2908a43b98ed20be7e367edf0cc8ed5a19"
- integrity sha512-+2/KaaaAzdwUBE3jgZON11L1ggLLhpf2FsGrfqYFHZW22ySGv/HqYIXrBwKKvn+XZh1UBUjHwAcrfsSkSygT+Q==
- dependencies:
- json5 "^2.1.1"
-
-configstore@^5.0.1:
- version "5.0.1"
- resolved "https://registry.yarnpkg.com/configstore/-/configstore-5.0.1.tgz#d365021b5df4b98cdd187d6a3b0e3f6a7cc5ed96"
- integrity sha512-aMKprgk5YhBNyH25hj8wGt2+D52Sw1DRRIzqBwLp2Ya9mFmY8KPvvtvmna8SxVR9JMZ4kzMD68N22vlaRpkeFA==
- dependencies:
- dot-prop "^5.2.0"
- graceful-fs "^4.1.2"
- make-dir "^3.0.0"
- unique-string "^2.0.0"
- write-file-atomic "^3.0.0"
- xdg-basedir "^4.0.0"
-
-console-control-strings@^1.0.0, console-control-strings@~1.1.0:
- version "1.1.0"
- resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e"
- integrity sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4=
-
-content-disposition@0.5.3:
- version "0.5.3"
- resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.3.tgz#e130caf7e7279087c5616c2007d0485698984fbd"
- integrity sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g==
- dependencies:
- safe-buffer "5.1.2"
-
-content-type@~1.0.4:
- version "1.0.4"
- resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b"
- integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==
-
-cookie-signature@1.0.6:
- version "1.0.6"
- resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c"
- integrity sha1-4wOogrNCzD7oylE6eZmXNNqzriw=
-
-cookie@0.4.0:
- version "0.4.0"
- resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.4.0.tgz#beb437e7022b3b6d49019d088665303ebe9c14ba"
- integrity sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg==
-
-copy-descriptor@^0.1.0:
- version "0.1.1"
- resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d"
- integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40=
-
-core-util-is@~1.0.0:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7"
- integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=
-
-crc-32@^1.2.0:
- version "1.2.0"
- resolved "https://registry.yarnpkg.com/crc-32/-/crc-32-1.2.0.tgz#cb2db6e29b88508e32d9dd0ec1693e7b41a18208"
- integrity sha512-1uBwHxF+Y/4yF5G48fwnKq6QsIXheor3ZLPT80yGBV1oEUwpPojlEhQbWKVw1VwcTQyMGHK1/XMmTjmlsmTTGA==
- dependencies:
- exit-on-epipe "~1.0.1"
- printj "~1.1.0"
-
-crc32-stream@^4.0.2:
- version "4.0.2"
- resolved "https://registry.yarnpkg.com/crc32-stream/-/crc32-stream-4.0.2.tgz#c922ad22b38395abe9d3870f02fa8134ed709007"
- integrity sha512-DxFZ/Hk473b/muq1VJ///PMNLj0ZMnzye9thBpmjpJKCc5eMgB95aK8zCGrGfQ90cWo561Te6HK9D+j4KPdM6w==
- dependencies:
- crc-32 "^1.2.0"
- readable-stream "^3.4.0"
-
-cross-spawn@^6.0.5:
- version "6.0.5"
- resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4"
- integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==
- dependencies:
- nice-try "^1.0.4"
- path-key "^2.0.1"
- semver "^5.5.0"
- shebang-command "^1.2.0"
- which "^1.2.9"
-
-crypto-random-string@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-2.0.0.tgz#ef2a7a966ec11083388369baa02ebead229b30d5"
- integrity sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA==
-
-db-errors@^0.2.3:
- version "0.2.3"
- resolved "https://registry.yarnpkg.com/db-errors/-/db-errors-0.2.3.tgz#a6a38952e00b20e790f2695a6446b3c65497ffa2"
- integrity sha512-OOgqgDuCavHXjYSJoV2yGhv6SeG8nk42aoCSoyXLZUH7VwFG27rxbavU1z+VrZbZjphw5UkDQwUlD21MwZpUng==
-
-debug@2.6.9, debug@^2.2.0, debug@^2.3.3:
- version "2.6.9"
- resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f"
- integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==
- dependencies:
- ms "2.0.0"
-
-debug@4.1.1, debug@^4.0.1:
- version "4.1.1"
- resolved "https://registry.yarnpkg.com/debug/-/debug-4.1.1.tgz#3b72260255109c6b589cee050f1d516139664791"
- integrity sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==
- dependencies:
- ms "^2.1.1"
-
-debug@^3.2.6:
- version "3.2.6"
- resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b"
- integrity sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==
- dependencies:
- ms "^2.1.1"
-
-decamelize@^1.2.0:
- version "1.2.0"
- resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290"
- integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=
-
-decode-uri-component@^0.2.0:
- version "0.2.0"
- resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545"
- integrity sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU=
-
-decompress-response@^3.3.0:
- version "3.3.0"
- resolved "https://registry.yarnpkg.com/decompress-response/-/decompress-response-3.3.0.tgz#80a4dd323748384bfa248083622aedec982adff3"
- integrity sha1-gKTdMjdIOEv6JICDYirt7Jgq3/M=
- dependencies:
- mimic-response "^1.0.0"
-
-deep-extend@^0.6.0:
- version "0.6.0"
- resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac"
- integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==
-
-deep-is@~0.1.3:
- version "0.1.3"
- resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34"
- integrity sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ=
-
-defer-to-connect@^1.0.1:
- version "1.1.3"
- resolved "https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-1.1.3.tgz#331ae050c08dcf789f8c83a7b81f0ed94f4ac591"
- integrity sha512-0ISdNousHvZT2EiFlZeZAHBUvSxmKswVCEf8hW7KWgG4a8MVEu/3Vb6uWYozkjylyCxe0JBIiRB1jV45S70WVQ==
-
-define-property@^0.2.5:
- version "0.2.5"
- resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116"
- integrity sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=
- dependencies:
- is-descriptor "^0.1.0"
-
-define-property@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6"
- integrity sha1-dp66rz9KY6rTr56NMEybvnm/sOY=
- dependencies:
- is-descriptor "^1.0.0"
-
-define-property@^2.0.2:
- version "2.0.2"
- resolved "https://registry.yarnpkg.com/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d"
- integrity sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ==
- dependencies:
- is-descriptor "^1.0.2"
- isobject "^3.0.1"
-
-delegates@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a"
- integrity sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o=
-
-depd@~1.1.2:
- version "1.1.2"
- resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9"
- integrity sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=
-
-destroy@~1.0.4:
- version "1.0.4"
- resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.0.4.tgz#978857442c44749e4206613e37946205826abd80"
- integrity sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA=
-
-detect-file@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/detect-file/-/detect-file-1.0.0.tgz#f0d66d03672a825cb1b73bdb3fe62310c8e552b7"
- integrity sha1-8NZtA2cqglyxtzvbP+YjEMjlUrc=
-
-detect-libc@^1.0.2:
- version "1.0.3"
- resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b"
- integrity sha1-+hN8S9aY7fVc1c0CrFWfkaTEups=
-
-dicer@0.3.0:
- version "0.3.0"
- resolved "https://registry.yarnpkg.com/dicer/-/dicer-0.3.0.tgz#eacd98b3bfbf92e8ab5c2fdb71aaac44bb06b872"
- integrity sha512-MdceRRWqltEG2dZqO769g27N/3PXfcKl04VhYnBlo2YhH7zPi88VebsjTKclaOyiuMaGU72hTfw3VkUitGcVCA==
- dependencies:
- streamsearch "0.1.2"
-
-doctrine@^3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961"
- integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==
- dependencies:
- esutils "^2.0.2"
-
-dot-prop@^5.2.0:
- version "5.2.0"
- resolved "https://registry.yarnpkg.com/dot-prop/-/dot-prop-5.2.0.tgz#c34ecc29556dc45f1f4c22697b6f4904e0cc4fcb"
- integrity sha512-uEUyaDKoSQ1M4Oq8l45hSE26SnTxL6snNnqvK/VWx5wJhmff5z0FUVJDKDanor/6w3kzE3i7XZOk+7wC0EXr1A==
- dependencies:
- is-obj "^2.0.0"
-
-duplexer3@^0.1.4:
- version "0.1.4"
- resolved "https://registry.yarnpkg.com/duplexer3/-/duplexer3-0.1.4.tgz#ee01dd1cac0ed3cbc7fdbea37dc0a8f1ce002ce2"
- integrity sha1-7gHdHKwO08vH/b6jfcCo8c4ALOI=
-
-ecdsa-sig-formatter@1.0.11:
- version "1.0.11"
- resolved "https://registry.yarnpkg.com/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz#ae0f0fa2d85045ef14a817daa3ce9acd0489e5bf"
- integrity sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==
- dependencies:
- safe-buffer "^5.0.1"
-
-ee-first@1.1.1:
- version "1.1.1"
- resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d"
- integrity sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=
-
-email-validator@^2.0.4:
- version "2.0.4"
- resolved "https://registry.yarnpkg.com/email-validator/-/email-validator-2.0.4.tgz#b8dfaa5d0dae28f1b03c95881d904d4e40bfe7ed"
- integrity sha512-gYCwo7kh5S3IDyZPLZf6hSS0MnZT8QmJFqYvbqlDZSbwdZlY6QZWxJ4i/6UhITOJ4XzyI647Bm2MXKCLqnJ4nQ==
-
-emoji-regex@^7.0.1:
- version "7.0.3"
- resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-7.0.3.tgz#933a04052860c85e83c122479c4748a8e4c72156"
- integrity sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==
-
-emoji-regex@^8.0.0:
- version "8.0.0"
- resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37"
- integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==
-
-encodeurl@~1.0.2:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59"
- integrity sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k=
-
-end-of-stream@^1.1.0, end-of-stream@^1.4.1:
- version "1.4.4"
- resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0"
- integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==
- dependencies:
- once "^1.4.0"
-
-error-ex@^1.3.1:
- version "1.3.2"
- resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf"
- integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==
- dependencies:
- is-arrayish "^0.2.1"
-
-escape-goat@^2.0.0:
- version "2.1.1"
- resolved "https://registry.yarnpkg.com/escape-goat/-/escape-goat-2.1.1.tgz#1b2dc77003676c457ec760b2dc68edb648188675"
- integrity sha512-8/uIhbG12Csjy2JEW7D9pHbreaVaS/OpN3ycnyvElTdwM5n6GY6W6e2IPemfvGZeUMqZ9A/3GqIZMgKnBhAw/Q==
-
-escape-html@~1.0.3:
- version "1.0.3"
- resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988"
- integrity sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg=
-
-escape-string-regexp@^1.0.5:
- version "1.0.5"
- resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4"
- integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=
-
-eslint-plugin-align-assignments@^1.1.2:
- version "1.1.2"
- resolved "https://registry.yarnpkg.com/eslint-plugin-align-assignments/-/eslint-plugin-align-assignments-1.1.2.tgz#83e1a8a826d4adf29e82b52d0bb39c88b301b576"
- integrity sha512-I1ZJgk9EjHfGVU9M2Ex8UkVkkjLL5Y9BS6VNnQHq79eHj2H4/Cgxf36lQSUTLgm2ntB03A2NtF+zg9fyi5vChg==
-
-eslint-scope@^5.0.0:
- version "5.1.0"
- resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-5.1.0.tgz#d0f971dfe59c69e0cada684b23d49dbf82600ce5"
- integrity sha512-iiGRvtxWqgtx5m8EyQUJihBloE4EnYeGE/bz1wSPwJE6tZuJUtHlhqDM4Xj2ukE8Dyy1+HCZ4hE0fzIVMzb58w==
- dependencies:
- esrecurse "^4.1.0"
- estraverse "^4.1.1"
-
-eslint-utils@^1.4.3:
- version "1.4.3"
- resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-1.4.3.tgz#74fec7c54d0776b6f67e0251040b5806564e981f"
- integrity sha512-fbBN5W2xdY45KulGXmLHZ3c3FHfVYmKg0IrAKGOkT/464PQsx2UeIzfz1RmEci+KLm1bBaAzZAh8+/E+XAeZ8Q==
- dependencies:
- eslint-visitor-keys "^1.1.0"
-
-eslint-visitor-keys@^1.1.0:
- version "1.3.0"
- resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz#30ebd1ef7c2fdff01c3a4f151044af25fab0523e"
- integrity sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==
-
-eslint@^6.8.0:
- version "6.8.0"
- resolved "https://registry.yarnpkg.com/eslint/-/eslint-6.8.0.tgz#62262d6729739f9275723824302fb227c8c93ffb"
- integrity sha512-K+Iayyo2LtyYhDSYwz5D5QdWw0hCacNzyq1Y821Xna2xSJj7cijoLLYmLxTQgcgZ9mC61nryMy9S7GRbYpI5Ig==
- dependencies:
- "@babel/code-frame" "^7.0.0"
- ajv "^6.10.0"
- chalk "^2.1.0"
- cross-spawn "^6.0.5"
- debug "^4.0.1"
- doctrine "^3.0.0"
- eslint-scope "^5.0.0"
- eslint-utils "^1.4.3"
- eslint-visitor-keys "^1.1.0"
- espree "^6.1.2"
- esquery "^1.0.1"
- esutils "^2.0.2"
- file-entry-cache "^5.0.1"
- functional-red-black-tree "^1.0.1"
- glob-parent "^5.0.0"
- globals "^12.1.0"
- ignore "^4.0.6"
- import-fresh "^3.0.0"
- imurmurhash "^0.1.4"
- inquirer "^7.0.0"
- is-glob "^4.0.0"
- js-yaml "^3.13.1"
- json-stable-stringify-without-jsonify "^1.0.1"
- levn "^0.3.0"
- lodash "^4.17.14"
- minimatch "^3.0.4"
- mkdirp "^0.5.1"
- natural-compare "^1.4.0"
- optionator "^0.8.3"
- progress "^2.0.0"
- regexpp "^2.0.1"
- semver "^6.1.2"
- strip-ansi "^5.2.0"
- strip-json-comments "^3.0.1"
- table "^5.2.3"
- text-table "^0.2.0"
- v8-compile-cache "^2.0.3"
-
-esm@^3.2.25:
- version "3.2.25"
- resolved "https://registry.yarnpkg.com/esm/-/esm-3.2.25.tgz#342c18c29d56157688ba5ce31f8431fbb795cc10"
- integrity sha512-U1suiZ2oDVWv4zPO56S0NcR5QriEahGtdN2OR6FiOG4WJvcjBVFB0qI4+eKoWFH483PKGuLuu6V8Z4T5g63UVA==
-
-espree@^6.1.2:
- version "6.2.1"
- resolved "https://registry.yarnpkg.com/espree/-/espree-6.2.1.tgz#77fc72e1fd744a2052c20f38a5b575832e82734a"
- integrity sha512-ysCxRQY3WaXJz9tdbWOwuWr5Y/XrPTGX9Kiz3yoUXwW0VZ4w30HTkQLaGx/+ttFjF8i+ACbArnB4ce68a9m5hw==
- dependencies:
- acorn "^7.1.1"
- acorn-jsx "^5.2.0"
- eslint-visitor-keys "^1.1.0"
-
-esprima@^4.0.0:
- version "4.0.1"
- resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71"
- integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==
-
-esquery@^1.0.1:
- version "1.3.1"
- resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.3.1.tgz#b78b5828aa8e214e29fb74c4d5b752e1c033da57"
- integrity sha512-olpvt9QG0vniUBZspVRN6lwB7hOZoTRtT+jzR+tS4ffYx2mzbw+z0XCOk44aaLYKApNX5nMm+E+P6o25ip/DHQ==
- dependencies:
- estraverse "^5.1.0"
-
-esrecurse@^4.1.0:
- version "4.2.1"
- resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.2.1.tgz#007a3b9fdbc2b3bb87e4879ea19c92fdbd3942cf"
- integrity sha512-64RBB++fIOAXPw3P9cy89qfMlvZEXZkqqJkjqqXIvzP5ezRZjW+lPWjw35UX/3EhUPFYbg5ER4JYgDw4007/DQ==
- dependencies:
- estraverse "^4.1.0"
-
-estraverse@^4.1.0, estraverse@^4.1.1:
- version "4.3.0"
- resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d"
- integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==
-
-estraverse@^5.1.0:
- version "5.2.0"
- resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.2.0.tgz#307df42547e6cc7324d3cf03c155d5cdb8c53880"
- integrity sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ==
-
-esutils@^2.0.2:
- version "2.0.3"
- resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64"
- integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==
-
-etag@~1.8.1:
- version "1.8.1"
- resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887"
- integrity sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc=
-
-exit-on-epipe@~1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/exit-on-epipe/-/exit-on-epipe-1.0.1.tgz#0bdd92e87d5285d267daa8171d0eb06159689692"
- integrity sha512-h2z5mrROTxce56S+pnvAV890uu7ls7f1kEvVGJbw1OlFH3/mlJ5bkXu0KRyW94v37zzHPiUd55iLn3DA7TjWpw==
-
-expand-brackets@^2.1.4:
- version "2.1.4"
- resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622"
- integrity sha1-t3c14xXOMPa27/D4OwQVGiJEliI=
- dependencies:
- debug "^2.3.3"
- define-property "^0.2.5"
- extend-shallow "^2.0.1"
- posix-character-classes "^0.1.0"
- regex-not "^1.0.0"
- snapdragon "^0.8.1"
- to-regex "^3.0.1"
-
-expand-tilde@^2.0.0, expand-tilde@^2.0.2:
- version "2.0.2"
- resolved "https://registry.yarnpkg.com/expand-tilde/-/expand-tilde-2.0.2.tgz#97e801aa052df02454de46b02bf621642cdc8502"
- integrity sha1-l+gBqgUt8CRU3kawK/YhZCzchQI=
- dependencies:
- homedir-polyfill "^1.0.1"
-
-express-fileupload@^1.1.9:
- version "1.1.9"
- resolved "https://registry.yarnpkg.com/express-fileupload/-/express-fileupload-1.1.9.tgz#e798e9318394ed5083e56217ad6cda576da465d2"
- integrity sha512-f2w0aoe7lj3NeD8a4MXmYQsqir3Z66I08l9AKq04QbFUAjeZNmPwTlR5Lx2NGwSu/PslsAjGC38MWzo5tTjoBg==
- dependencies:
- busboy "^0.3.1"
-
-express@^4.17.1:
- version "4.17.1"
- resolved "https://registry.yarnpkg.com/express/-/express-4.17.1.tgz#4491fc38605cf51f8629d39c2b5d026f98a4c134"
- integrity sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g==
- dependencies:
- accepts "~1.3.7"
- array-flatten "1.1.1"
- body-parser "1.19.0"
- content-disposition "0.5.3"
- content-type "~1.0.4"
- cookie "0.4.0"
- cookie-signature "1.0.6"
- debug "2.6.9"
- depd "~1.1.2"
- encodeurl "~1.0.2"
- escape-html "~1.0.3"
- etag "~1.8.1"
- finalhandler "~1.1.2"
- fresh "0.5.2"
- merge-descriptors "1.0.1"
- methods "~1.1.2"
- on-finished "~2.3.0"
- parseurl "~1.3.3"
- path-to-regexp "0.1.7"
- proxy-addr "~2.0.5"
- qs "6.7.0"
- range-parser "~1.2.1"
- safe-buffer "5.1.2"
- send "0.17.1"
- serve-static "1.14.1"
- setprototypeof "1.1.1"
- statuses "~1.5.0"
- type-is "~1.6.18"
- utils-merge "1.0.1"
- vary "~1.1.2"
-
-extend-shallow@^2.0.1:
- version "2.0.1"
- resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f"
- integrity sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=
- dependencies:
- is-extendable "^0.1.0"
-
-extend-shallow@^3.0.0, extend-shallow@^3.0.2:
- version "3.0.2"
- resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8"
- integrity sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg=
- dependencies:
- assign-symbols "^1.0.0"
- is-extendable "^1.0.1"
-
-extend@^3.0.0:
- version "3.0.2"
- resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa"
- integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==
-
-external-editor@^3.0.3:
- version "3.1.0"
- resolved "https://registry.yarnpkg.com/external-editor/-/external-editor-3.1.0.tgz#cb03f740befae03ea4d283caed2741a83f335495"
- integrity sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==
- dependencies:
- chardet "^0.7.0"
- iconv-lite "^0.4.24"
- tmp "^0.0.33"
-
-extglob@^2.0.4:
- version "2.0.4"
- resolved "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543"
- integrity sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw==
- dependencies:
- array-unique "^0.3.2"
- define-property "^1.0.0"
- expand-brackets "^2.1.4"
- extend-shallow "^2.0.1"
- fragment-cache "^0.2.1"
- regex-not "^1.0.0"
- snapdragon "^0.8.1"
- to-regex "^3.0.1"
-
-fast-deep-equal@^3.1.1:
- version "3.1.3"
- resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525"
- integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==
-
-fast-json-stable-stringify@^2.0.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633"
- integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==
-
-fast-levenshtein@~2.0.6:
- version "2.0.6"
- resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917"
- integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=
-
-figures@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/figures/-/figures-2.0.0.tgz#3ab1a2d2a62c8bfb431a0c94cb797a2fce27c962"
- integrity sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI=
- dependencies:
- escape-string-regexp "^1.0.5"
-
-figures@^3.0.0:
- version "3.2.0"
- resolved "https://registry.yarnpkg.com/figures/-/figures-3.2.0.tgz#625c18bd293c604dc4a8ddb2febf0c88341746af"
- integrity sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==
- dependencies:
- escape-string-regexp "^1.0.5"
-
-file-entry-cache@^5.0.1:
- version "5.0.1"
- resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-5.0.1.tgz#ca0f6efa6dd3d561333fb14515065c2fafdf439c"
- integrity sha512-bCg29ictuBaKUwwArK4ouCaqDgLZcysCFLmM/Yn/FDoqndh/9vNuQfXRDvTuXKLxfD/JtZQGKFT8MGcJBK644g==
- dependencies:
- flat-cache "^2.0.1"
-
-fill-range@^4.0.0:
- version "4.0.0"
- resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7"
- integrity sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc=
- dependencies:
- extend-shallow "^2.0.1"
- is-number "^3.0.0"
- repeat-string "^1.6.1"
- to-regex-range "^2.1.0"
-
-fill-range@^7.0.1:
- version "7.0.1"
- resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40"
- integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==
- dependencies:
- to-regex-range "^5.0.1"
-
-finalhandler@~1.1.2:
- version "1.1.2"
- resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.1.2.tgz#b7e7d000ffd11938d0fdb053506f6ebabe9f587d"
- integrity sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA==
- dependencies:
- debug "2.6.9"
- encodeurl "~1.0.2"
- escape-html "~1.0.3"
- on-finished "~2.3.0"
- parseurl "~1.3.3"
- statuses "~1.5.0"
- unpipe "~1.0.0"
-
-find-up@^2.0.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7"
- integrity sha1-RdG35QbHF93UgndaK3eSCjwMV6c=
- dependencies:
- locate-path "^2.0.0"
-
-find-up@^4.1.0:
- version "4.1.0"
- resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19"
- integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==
- dependencies:
- locate-path "^5.0.0"
- path-exists "^4.0.0"
-
-findup-sync@^3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/findup-sync/-/findup-sync-3.0.0.tgz#17b108f9ee512dfb7a5c7f3c8b27ea9e1a9c08d1"
- integrity sha512-YbffarhcicEhOrm4CtrwdKBdCuz576RLdhJDsIfvNtxUuhdRet1qZcsMjqbePtAseKdAnDyM/IyXbu7PRPRLYg==
- dependencies:
- detect-file "^1.0.0"
- is-glob "^4.0.0"
- micromatch "^3.0.4"
- resolve-dir "^1.0.1"
-
-fined@^1.0.1:
- version "1.2.0"
- resolved "https://registry.yarnpkg.com/fined/-/fined-1.2.0.tgz#d00beccf1aa2b475d16d423b0238b713a2c4a37b"
- integrity sha512-ZYDqPLGxDkDhDZBjZBb+oD1+j0rA4E0pXY50eplAAOPg2N/gUBSSk5IM1/QhPfyVo19lJ+CvXpqfvk+b2p/8Ng==
- dependencies:
- expand-tilde "^2.0.2"
- is-plain-object "^2.0.3"
- object.defaults "^1.1.0"
- object.pick "^1.2.0"
- parse-filepath "^1.0.1"
-
-flagged-respawn@^1.0.0:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/flagged-respawn/-/flagged-respawn-1.0.1.tgz#e7de6f1279ddd9ca9aac8a5971d618606b3aab41"
- integrity sha512-lNaHNVymajmk0OJMBn8fVUAU1BtDeKIqKoVhk4xAALB57aALg6b4W0MfJ/cUE0g9YBXy5XhSlPIpYIJ7HaY/3Q==
-
-flat-cache@^2.0.1:
- version "2.0.1"
- resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-2.0.1.tgz#5d296d6f04bda44a4630a301413bdbc2ec085ec0"
- integrity sha512-LoQe6yDuUMDzQAEH8sgmh4Md6oZnc/7PjtwjNFSzveXqSHt6ka9fPBuso7IGf9Rz4uqnSnWiFH2B/zj24a5ReA==
- dependencies:
- flatted "^2.0.0"
- rimraf "2.6.3"
- write "1.0.3"
-
-flatted@^2.0.0:
- version "2.0.2"
- resolved "https://registry.yarnpkg.com/flatted/-/flatted-2.0.2.tgz#4575b21e2bcee7434aa9be662f4b7b5f9c2b5138"
- integrity sha512-r5wGx7YeOwNWNlCA0wQ86zKyDLMQr+/RB8xy74M4hTphfmjlijTSSXGuH8rnvKZnfT9i+75zmd8jcKdMR4O6jA==
-
-for-in@^1.0.1, for-in@^1.0.2:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80"
- integrity sha1-gQaNKVqBQuwKxybG4iAMMPttXoA=
-
-for-own@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/for-own/-/for-own-1.0.0.tgz#c63332f415cedc4b04dbfe70cf836494c53cb44b"
- integrity sha1-xjMy9BXO3EsE2/5wz4NklMU8tEs=
- dependencies:
- for-in "^1.0.1"
-
-forwarded@~0.1.2:
- version "0.1.2"
- resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.1.2.tgz#98c23dab1175657b8c0573e8ceccd91b0ff18c84"
- integrity sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ=
-
-fragment-cache@^0.2.1:
- version "0.2.1"
- resolved "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19"
- integrity sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk=
- dependencies:
- map-cache "^0.2.2"
-
-fresh@0.5.2:
- version "0.5.2"
- resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7"
- integrity sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=
-
-fs-constants@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad"
- integrity sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==
-
-fs-minipass@^1.2.7:
- version "1.2.7"
- resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-1.2.7.tgz#ccff8570841e7fe4265693da88936c55aed7f7c7"
- integrity sha512-GWSSJGFy4e9GUeCcbIkED+bgAoFyj7XF1mV8rma3QW4NIqX9Kyx79N/PF61H5udOV3aY1IaMLs6pGbH71nlCTA==
- dependencies:
- minipass "^2.6.0"
-
-fs.realpath@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f"
- integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8=
-
-fsevents@~2.1.2:
- version "2.1.3"
- resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.1.3.tgz#fb738703ae8d2f9fe900c33836ddebee8b97f23e"
- integrity sha512-Auw9a4AxqWpa9GUfj370BMPzzyncfBABW8Mab7BGWBYDj4Isgq+cDKtx0i6u9jcX9pQDnswsaaOTgTmA5pEjuQ==
-
-functional-red-black-tree@^1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327"
- integrity sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=
-
-gauge@~2.7.3:
- version "2.7.4"
- resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7"
- integrity sha1-LANAXHU4w51+s3sxcCLjJfsBi/c=
- dependencies:
- aproba "^1.0.3"
- console-control-strings "^1.0.0"
- has-unicode "^2.0.0"
- object-assign "^4.1.0"
- signal-exit "^3.0.0"
- string-width "^1.0.1"
- strip-ansi "^3.0.1"
- wide-align "^1.1.0"
-
-get-caller-file@^2.0.1:
- version "2.0.5"
- resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e"
- integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==
-
-get-stream@^4.1.0:
- version "4.1.0"
- resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5"
- integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==
- dependencies:
- pump "^3.0.0"
-
-get-stream@^5.1.0:
- version "5.1.0"
- resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-5.1.0.tgz#01203cdc92597f9b909067c3e656cc1f4d3c4dc9"
- integrity sha512-EXr1FOzrzTfGeL0gQdeFEvOMm2mzMOglyiOXSTpPC+iAjAKftbr3jpCMWynogwYnM+eSj9sHGc6wjIcDvYiygw==
- dependencies:
- pump "^3.0.0"
-
-get-value@^2.0.3, get-value@^2.0.6:
- version "2.0.6"
- resolved "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28"
- integrity sha1-3BXKHGcjh8p2vTesCjlbogQqLCg=
-
-getopts@2.2.5:
- version "2.2.5"
- resolved "https://registry.yarnpkg.com/getopts/-/getopts-2.2.5.tgz#67a0fe471cacb9c687d817cab6450b96dde8313b"
- integrity sha512-9jb7AW5p3in+IiJWhQiZmmwkpLaR/ccTWdWQCtZM66HJcHHLegowh4q4tSD7gouUyeNvFWRavfK9GXosQHDpFA==
-
-glob-parent@^5.0.0, glob-parent@~5.1.0:
- version "5.1.1"
- resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.1.tgz#b6c1ef417c4e5663ea498f1c45afac6916bbc229"
- integrity sha512-FnI+VGOpnlGHWZxthPGR+QhR78fuiK0sNLkHQv+bL9fQi57lNNdquIbna/WrfROrolq8GK5Ek6BiMwqL/voRYQ==
- dependencies:
- is-glob "^4.0.1"
-
-glob@^7.1.3:
- version "7.1.6"
- resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6"
- integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==
- dependencies:
- fs.realpath "^1.0.0"
- inflight "^1.0.4"
- inherits "2"
- minimatch "^3.0.4"
- once "^1.3.0"
- path-is-absolute "^1.0.0"
-
-glob@^7.1.4:
- version "7.1.7"
- resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.7.tgz#3b193e9233f01d42d0b3f78294bbeeb418f94a90"
- integrity sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==
- dependencies:
- fs.realpath "^1.0.0"
- inflight "^1.0.4"
- inherits "2"
- minimatch "^3.0.4"
- once "^1.3.0"
- path-is-absolute "^1.0.0"
-
-global-dirs@^2.0.1:
- version "2.0.1"
- resolved "https://registry.yarnpkg.com/global-dirs/-/global-dirs-2.0.1.tgz#acdf3bb6685bcd55cb35e8a052266569e9469201"
- integrity sha512-5HqUqdhkEovj2Of/ms3IeS/EekcO54ytHRLV4PEY2rhRwrHXLQjeVEES0Lhka0xwNDtGYn58wyC4s5+MHsOO6A==
- dependencies:
- ini "^1.3.5"
-
-global-modules@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-1.0.0.tgz#6d770f0eb523ac78164d72b5e71a8877265cc3ea"
- integrity sha512-sKzpEkf11GpOFuw0Zzjzmt4B4UZwjOcG757PPvrfhxcLFbq0wpsgpOqxpxtxFiCG4DtG93M6XRVbF2oGdev7bg==
- dependencies:
- global-prefix "^1.0.1"
- is-windows "^1.0.1"
- resolve-dir "^1.0.0"
-
-global-prefix@^1.0.1:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-1.0.2.tgz#dbf743c6c14992593c655568cb66ed32c0122ebe"
- integrity sha1-2/dDxsFJklk8ZVVoy2btMsASLr4=
- dependencies:
- expand-tilde "^2.0.2"
- homedir-polyfill "^1.0.1"
- ini "^1.3.4"
- is-windows "^1.0.1"
- which "^1.2.14"
-
-globals@^12.1.0:
- version "12.4.0"
- resolved "https://registry.yarnpkg.com/globals/-/globals-12.4.0.tgz#a18813576a41b00a24a97e7f815918c2e19925f8"
- integrity sha512-BWICuzzDvDoH54NHKCseDanAhE3CeDorgDL5MT6LMXXj2WCnd9UC2szdk4AWLfjdgNBCXLUanXYcpBBKOSWGwg==
- dependencies:
- type-fest "^0.8.1"
-
-got@^9.6.0:
- version "9.6.0"
- resolved "https://registry.yarnpkg.com/got/-/got-9.6.0.tgz#edf45e7d67f99545705de1f7bbeeeb121765ed85"
- integrity sha512-R7eWptXuGYxwijs0eV+v3o6+XH1IqVK8dJOEecQfTmkncw9AV4dcw/Dhxi8MdlqPthxxpZyizMzyg8RTmEsG+Q==
- dependencies:
- "@sindresorhus/is" "^0.14.0"
- "@szmarczak/http-timer" "^1.1.2"
- cacheable-request "^6.0.0"
- decompress-response "^3.3.0"
- duplexer3 "^0.1.4"
- get-stream "^4.1.0"
- lowercase-keys "^1.0.1"
- mimic-response "^1.0.1"
- p-cancelable "^1.0.0"
- to-readable-stream "^1.0.0"
- url-parse-lax "^3.0.0"
-
-graceful-fs@^4.1.15, graceful-fs@^4.1.2:
- version "4.2.4"
- resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.4.tgz#2256bde14d3632958c465ebc96dc467ca07a29fb"
- integrity sha512-WjKPNJF79dtJAVniUlGGWHYGz2jWxT6VhN/4m1NdkbZ2nOsEF+cI1Edgql5zCRhs/VsQYRvrXctxktVXZUkixw==
-
-graceful-fs@^4.2.0:
- version "4.2.8"
- resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.8.tgz#e412b8d33f5e006593cbd3cee6df9f2cebbe802a"
- integrity sha512-qkIilPUYcNhJpd33n0GBXTB1MMPp14TxEsEs0pTrsSVucApsYzW5V+Q8Qxhik6KU3evy+qkAAowTByymK0avdg==
-
-gravatar@^1.8.0:
- version "1.8.1"
- resolved "https://registry.yarnpkg.com/gravatar/-/gravatar-1.8.1.tgz#743bbdf3185c3433172e00e0e6ff5f6b30c58997"
- integrity sha512-18frnfVp4kRYkM/eQW32Mfwlsh/KMbwd3S6nkescBZHioobflFEFHsvM71qZAkUSLNifyi2uoI+TuGxJAnQIOA==
- dependencies:
- blueimp-md5 "^2.16.0"
- email-validator "^2.0.4"
- querystring "0.2.0"
- yargs "^15.4.1"
-
-has-flag@^3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd"
- integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0=
-
-has-flag@^4.0.0:
- version "4.0.0"
- resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b"
- integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==
-
-has-unicode@^2.0.0:
- version "2.0.1"
- resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9"
- integrity sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk=
-
-has-value@^0.3.1:
- version "0.3.1"
- resolved "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f"
- integrity sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8=
- dependencies:
- get-value "^2.0.3"
- has-values "^0.1.4"
- isobject "^2.0.0"
-
-has-value@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177"
- integrity sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc=
- dependencies:
- get-value "^2.0.6"
- has-values "^1.0.0"
- isobject "^3.0.0"
-
-has-values@^0.1.4:
- version "0.1.4"
- resolved "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771"
- integrity sha1-bWHeldkd/Km5oCCJrThL/49it3E=
-
-has-values@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f"
- integrity sha1-lbC2P+whRmGab+V/51Yo1aOe/k8=
- dependencies:
- is-number "^3.0.0"
- kind-of "^4.0.0"
-
-has-yarn@^2.1.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/has-yarn/-/has-yarn-2.1.0.tgz#137e11354a7b5bf11aa5cb649cf0c6f3ff2b2e77"
- integrity sha512-UqBRqi4ju7T+TqGNdqAO0PaSVGsDGJUBQvk9eUWNGRY1CFGDzYhLWoM7JQEemnlvVcv/YEmc2wNW8BC24EnUsw==
-
-homedir-polyfill@^1.0.1:
- version "1.0.3"
- resolved "https://registry.yarnpkg.com/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz#743298cef4e5af3e194161fbadcc2151d3a058e8"
- integrity sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA==
- dependencies:
- parse-passwd "^1.0.0"
-
-http-cache-semantics@^4.0.0:
- version "4.1.0"
- resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.1.0.tgz#49e91c5cbf36c9b94bcfcd71c23d5249ec74e390"
- integrity sha512-carPklcUh7ROWRK7Cv27RPtdhYhUsela/ue5/jKzjegVvXDqM2ILE9Q2BGn9JZJh1g87cp56su/FgQSzcWS8cQ==
-
-http-errors@1.7.2:
- version "1.7.2"
- resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.2.tgz#4f5029cf13239f31036e5b2e55292bcfbcc85c8f"
- integrity sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg==
- dependencies:
- depd "~1.1.2"
- inherits "2.0.3"
- setprototypeof "1.1.1"
- statuses ">= 1.5.0 < 2"
- toidentifier "1.0.0"
-
-http-errors@~1.7.2:
- version "1.7.3"
- resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.3.tgz#6c619e4f9c60308c38519498c14fbb10aacebb06"
- integrity sha512-ZTTX0MWrsQ2ZAhA1cejAwDLycFsd7I7nVtnkT3Ol0aqodaKW+0CTZDQ1uBv5whptCnc8e8HeRRJxRs0kmm/Qfw==
- dependencies:
- depd "~1.1.2"
- inherits "2.0.4"
- setprototypeof "1.1.1"
- statuses ">= 1.5.0 < 2"
- toidentifier "1.0.0"
-
-iconv-lite@0.4.24, iconv-lite@^0.4.24, iconv-lite@^0.4.4:
- version "0.4.24"
- resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b"
- integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==
- dependencies:
- safer-buffer ">= 2.1.2 < 3"
-
-ieee754@^1.1.13:
- version "1.2.1"
- resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352"
- integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==
-
-ignore-by-default@^1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/ignore-by-default/-/ignore-by-default-1.0.1.tgz#48ca6d72f6c6a3af00a9ad4ae6876be3889e2b09"
- integrity sha1-SMptcvbGo68Aqa1K5odr44ieKwk=
-
-ignore-walk@^3.0.1:
- version "3.0.3"
- resolved "https://registry.yarnpkg.com/ignore-walk/-/ignore-walk-3.0.3.tgz#017e2447184bfeade7c238e4aefdd1e8f95b1e37"
- integrity sha512-m7o6xuOaT1aqheYHKf8W6J5pYH85ZI9w077erOzLje3JsB1gkafkAhHHY19dqjulgIZHFm32Cp5uNZgcQqdJKw==
- dependencies:
- minimatch "^3.0.4"
-
-ignore@^4.0.6:
- version "4.0.6"
- resolved "https://registry.yarnpkg.com/ignore/-/ignore-4.0.6.tgz#750e3db5862087b4737ebac8207ffd1ef27b25fc"
- integrity sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==
-
-import-fresh@^3.0.0:
- version "3.2.1"
- resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.2.1.tgz#633ff618506e793af5ac91bf48b72677e15cbe66"
- integrity sha512-6e1q1cnWP2RXD9/keSkxHScg508CdXqXWgWBaETNhyuBFz+kUZlKboh+ISK+bU++DmbHimVBrOz/zzPe0sZ3sQ==
- dependencies:
- parent-module "^1.0.0"
- resolve-from "^4.0.0"
-
-import-lazy@^2.1.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/import-lazy/-/import-lazy-2.1.0.tgz#05698e3d45c88e8d7e9d92cb0584e77f096f3e43"
- integrity sha1-BWmOPUXIjo1+nZLLBYTnfwlvPkM=
-
-imurmurhash@^0.1.4:
- version "0.1.4"
- resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea"
- integrity sha1-khi5srkoojixPcT7a21XbyMUU+o=
-
-inflight@^1.0.4:
- version "1.0.6"
- resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9"
- integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=
- dependencies:
- once "^1.3.0"
- wrappy "1"
-
-inherits@2, inherits@2.0.4, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.3, inherits@~2.0.4:
- version "2.0.4"
- resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
- integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
-
-inherits@2.0.3:
- version "2.0.3"
- resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de"
- integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=
-
-ini@^1.3.4, ini@^1.3.5, ini@~1.3.0:
- version "1.3.8"
- resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c"
- integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==
-
-inquirer@^7.0.0:
- version "7.3.3"
- resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-7.3.3.tgz#04d176b2af04afc157a83fd7c100e98ee0aad003"
- integrity sha512-JG3eIAj5V9CwcGvuOmoo6LB9kbAYT8HXffUl6memuszlwDC/qvFAJw49XJ5NROSFNPxp3iQg1GqkFhaY/CR0IA==
- dependencies:
- ansi-escapes "^4.2.1"
- chalk "^4.1.0"
- cli-cursor "^3.1.0"
- cli-width "^3.0.0"
- external-editor "^3.0.3"
- figures "^3.0.0"
- lodash "^4.17.19"
- mute-stream "0.0.8"
- run-async "^2.4.0"
- rxjs "^6.6.0"
- string-width "^4.1.0"
- strip-ansi "^6.0.0"
- through "^2.3.6"
-
-interpret@^2.0.0:
- version "2.2.0"
- resolved "https://registry.yarnpkg.com/interpret/-/interpret-2.2.0.tgz#1a78a0b5965c40a5416d007ad6f50ad27c417df9"
- integrity sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==
-
-ipaddr.js@1.9.1:
- version "1.9.1"
- resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3"
- integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==
-
-is-absolute@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/is-absolute/-/is-absolute-1.0.0.tgz#395e1ae84b11f26ad1795e73c17378e48a301576"
- integrity sha512-dOWoqflvcydARa360Gvv18DZ/gRuHKi2NU/wU5X1ZFzdYfH29nkiNZsF3mp4OJ3H4yo9Mx8A/uAGNzpzPN3yBA==
- dependencies:
- is-relative "^1.0.0"
- is-windows "^1.0.1"
-
-is-accessor-descriptor@^0.1.6:
- version "0.1.6"
- resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6"
- integrity sha1-qeEss66Nh2cn7u84Q/igiXtcmNY=
- dependencies:
- kind-of "^3.0.2"
-
-is-accessor-descriptor@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656"
- integrity sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==
- dependencies:
- kind-of "^6.0.0"
-
-is-arrayish@^0.2.1:
- version "0.2.1"
- resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d"
- integrity sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=
-
-is-binary-path@~2.1.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09"
- integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==
- dependencies:
- binary-extensions "^2.0.0"
-
-is-buffer@^1.1.5:
- version "1.1.6"
- resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be"
- integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==
-
-is-ci@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/is-ci/-/is-ci-2.0.0.tgz#6bc6334181810e04b5c22b3d589fdca55026404c"
- integrity sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w==
- dependencies:
- ci-info "^2.0.0"
-
-is-data-descriptor@^0.1.4:
- version "0.1.4"
- resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56"
- integrity sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y=
- dependencies:
- kind-of "^3.0.2"
-
-is-data-descriptor@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7"
- integrity sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==
- dependencies:
- kind-of "^6.0.0"
-
-is-descriptor@^0.1.0:
- version "0.1.6"
- resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca"
- integrity sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg==
- dependencies:
- is-accessor-descriptor "^0.1.6"
- is-data-descriptor "^0.1.4"
- kind-of "^5.0.0"
-
-is-descriptor@^1.0.0, is-descriptor@^1.0.2:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec"
- integrity sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==
- dependencies:
- is-accessor-descriptor "^1.0.0"
- is-data-descriptor "^1.0.0"
- kind-of "^6.0.2"
-
-is-extendable@^0.1.0, is-extendable@^0.1.1:
- version "0.1.1"
- resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89"
- integrity sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik=
-
-is-extendable@^1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4"
- integrity sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==
- dependencies:
- is-plain-object "^2.0.4"
-
-is-extglob@^2.1.1:
- version "2.1.1"
- resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2"
- integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=
-
-is-fullwidth-code-point@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb"
- integrity sha1-754xOG8DGn8NZDr4L95QxFfvAMs=
- dependencies:
- number-is-nan "^1.0.0"
-
-is-fullwidth-code-point@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f"
- integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=
-
-is-fullwidth-code-point@^3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d"
- integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==
-
-is-glob@^4.0.0, is-glob@^4.0.1, is-glob@~4.0.1:
- version "4.0.1"
- resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc"
- integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==
- dependencies:
- is-extglob "^2.1.1"
-
-is-installed-globally@^0.3.1:
- version "0.3.2"
- resolved "https://registry.yarnpkg.com/is-installed-globally/-/is-installed-globally-0.3.2.tgz#fd3efa79ee670d1187233182d5b0a1dd00313141"
- integrity sha512-wZ8x1js7Ia0kecP/CHM/3ABkAmujX7WPvQk6uu3Fly/Mk44pySulQpnHG46OMjHGXApINnV4QhY3SWnECO2z5g==
- dependencies:
- global-dirs "^2.0.1"
- is-path-inside "^3.0.1"
-
-is-npm@^4.0.0:
- version "4.0.0"
- resolved "https://registry.yarnpkg.com/is-npm/-/is-npm-4.0.0.tgz#c90dd8380696df87a7a6d823c20d0b12bbe3c84d"
- integrity sha512-96ECIfh9xtDDlPylNPXhzjsykHsMJZ18ASpaWzQyBr4YRTcVjUvzaHayDAES2oU/3KpljhHUjtSRNiDwi0F0ig==
-
-is-number@^3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195"
- integrity sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU=
- dependencies:
- kind-of "^3.0.2"
-
-is-number@^7.0.0:
- version "7.0.0"
- resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b"
- integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==
-
-is-obj@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-2.0.0.tgz#473fb05d973705e3fd9620545018ca8e22ef4982"
- integrity sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==
-
-is-path-inside@^3.0.1:
- version "3.0.2"
- resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.2.tgz#f5220fc82a3e233757291dddc9c5877f2a1f3017"
- integrity sha512-/2UGPSgmtqwo1ktx8NDHjuPwZWmHhO+gj0f93EkhLB5RgW9RZevWYYlIkS6zePc6U2WpOdQYIwHe9YC4DWEBVg==
-
-is-plain-object@^2.0.3, is-plain-object@^2.0.4:
- version "2.0.4"
- resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677"
- integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==
- dependencies:
- isobject "^3.0.1"
-
-is-relative@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/is-relative/-/is-relative-1.0.0.tgz#a1bb6935ce8c5dba1e8b9754b9b2dcc020e2260d"
- integrity sha512-Kw/ReK0iqwKeu0MITLFuj0jbPAmEiOsIwyIXvvbfa6QfmN9pkD1M+8pdk7Rl/dTKbH34/XBFMbgD4iMJhLQbGA==
- dependencies:
- is-unc-path "^1.0.0"
-
-is-stream@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.0.tgz#bde9c32680d6fae04129d6ac9d921ce7815f78e3"
- integrity sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw==
-
-is-typedarray@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a"
- integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=
-
-is-unc-path@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/is-unc-path/-/is-unc-path-1.0.0.tgz#d731e8898ed090a12c352ad2eaed5095ad322c9d"
- integrity sha512-mrGpVd0fs7WWLfVsStvgF6iEJnbjDFZh9/emhRDcGWTduTfNHd9CHeUwH3gYIjdbwo4On6hunkztwOaAw0yllQ==
- dependencies:
- unc-path-regex "^0.1.2"
-
-is-windows@^1.0.1, is-windows@^1.0.2:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d"
- integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==
-
-is-yarn-global@^0.3.0:
- version "0.3.0"
- resolved "https://registry.yarnpkg.com/is-yarn-global/-/is-yarn-global-0.3.0.tgz#d502d3382590ea3004893746754c89139973e232"
- integrity sha512-VjSeb/lHmkoyd8ryPVIKvOCn4D1koMqY+vqyjjUfc3xyKtP4dYOxM44sZrnqQSzSds3xyOrUTLTC9LVCVgLngw==
-
-isarray@1.0.0, isarray@~1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11"
- integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=
-
-isexe@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
- integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=
-
-isobject@^2.0.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89"
- integrity sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk=
- dependencies:
- isarray "1.0.0"
-
-isobject@^3.0.0, isobject@^3.0.1:
- version "3.0.1"
- resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df"
- integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8=
-
-js-tokens@^4.0.0:
- version "4.0.0"
- resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499"
- integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==
-
-js-yaml@^3.13.1:
- version "3.14.0"
- resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.0.tgz#a7a34170f26a21bb162424d8adacb4113a69e482"
- integrity sha512-/4IbIeHcD9VMHFqDR/gQ7EdZdLimOvW2DdcxFjdyyZ9NsbS+ccrXqVWDtab/lRl5AlUqmpBx8EhPaWR+OtY17A==
- dependencies:
- argparse "^1.0.7"
- esprima "^4.0.0"
-
-json-buffer@3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.0.tgz#5b1f397afc75d677bde8bcfc0e47e1f9a3d9a898"
- integrity sha1-Wx85evx11ne96Lz8Dkfh+aPZqJg=
-
-json-parse-better-errors@^1.0.1:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9"
- integrity sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==
-
-json-schema-ref-parser@^8.0.0:
- version "8.0.0"
- resolved "https://registry.yarnpkg.com/json-schema-ref-parser/-/json-schema-ref-parser-8.0.0.tgz#7c758fac2cf822c05e837abd0a13f8fa2c15ffd4"
- integrity sha512-2P4icmNkZLrBr6oa5gSZaDSol/oaBHYkoP/8dsw63E54NnHGRhhiFuy9yFoxPuSm+uHKmeGxAAWMDF16SCHhcQ==
- dependencies:
- "@apidevtools/json-schema-ref-parser" "8.0.0"
-
-json-schema-traverse@^0.4.1:
- version "0.4.1"
- resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660"
- integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==
-
-json-stable-stringify-without-jsonify@^1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651"
- integrity sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=
-
-json5@^2.1.1:
- version "2.1.3"
- resolved "https://registry.yarnpkg.com/json5/-/json5-2.1.3.tgz#c9b0f7fa9233bfe5807fe66fcf3a5617ed597d43"
- integrity sha512-KXPvOm8K9IJKFM0bmdn8QXh7udDh1g/giieX0NLCaMnb4hEiVFqnop2ImTXCc5e0/oHz3LTqmHGtExn5hfMkOA==
- dependencies:
- minimist "^1.2.5"
-
-jsonwebtoken@^8.5.1:
- version "8.5.1"
- resolved "https://registry.yarnpkg.com/jsonwebtoken/-/jsonwebtoken-8.5.1.tgz#00e71e0b8df54c2121a1f26137df2280673bcc0d"
- integrity sha512-XjwVfRS6jTMsqYs0EsuJ4LGxXV14zQybNd4L2r0UvbVnSF9Af8x7p5MzbJ90Ioz/9TI41/hTCvznF/loiSzn8w==
- dependencies:
- jws "^3.2.2"
- lodash.includes "^4.3.0"
- lodash.isboolean "^3.0.3"
- lodash.isinteger "^4.0.4"
- lodash.isnumber "^3.0.3"
- lodash.isplainobject "^4.0.6"
- lodash.isstring "^4.0.1"
- lodash.once "^4.0.0"
- ms "^2.1.1"
- semver "^5.6.0"
-
-jwa@^1.4.1:
- version "1.4.1"
- resolved "https://registry.yarnpkg.com/jwa/-/jwa-1.4.1.tgz#743c32985cb9e98655530d53641b66c8645b039a"
- integrity sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==
- dependencies:
- buffer-equal-constant-time "1.0.1"
- ecdsa-sig-formatter "1.0.11"
- safe-buffer "^5.0.1"
-
-jws@^3.2.2:
- version "3.2.2"
- resolved "https://registry.yarnpkg.com/jws/-/jws-3.2.2.tgz#001099f3639468c9414000e99995fa52fb478304"
- integrity sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==
- dependencies:
- jwa "^1.4.1"
- safe-buffer "^5.0.1"
-
-keyv@^3.0.0:
- version "3.1.0"
- resolved "https://registry.yarnpkg.com/keyv/-/keyv-3.1.0.tgz#ecc228486f69991e49e9476485a5be1e8fc5c4d9"
- integrity sha512-9ykJ/46SN/9KPM/sichzQ7OvXyGDYKGTaDlKMGCAlg2UK8KRy4jb0d8sFc+0Tt0YYnThq8X2RZgCg74RPxgcVA==
- dependencies:
- json-buffer "3.0.0"
-
-kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0:
- version "3.2.2"
- resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64"
- integrity sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=
- dependencies:
- is-buffer "^1.1.5"
-
-kind-of@^4.0.0:
- version "4.0.0"
- resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57"
- integrity sha1-IIE989cSkosgc3hpGkUGb65y3Vc=
- dependencies:
- is-buffer "^1.1.5"
-
-kind-of@^5.0.0:
- version "5.1.0"
- resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d"
- integrity sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw==
-
-kind-of@^6.0.0, kind-of@^6.0.2:
- version "6.0.3"
- resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd"
- integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==
-
-knex@^0.20.13:
- version "0.20.15"
- resolved "https://registry.yarnpkg.com/knex/-/knex-0.20.15.tgz#b7e9e1efd9cf35d214440d9439ed21153574679d"
- integrity sha512-WHmvgfQfxA5v8pyb9zbskxCS1L1WmYgUbwBhHojlkmdouUOazvroUWlCr6KIKMQ8anXZh1NXOOtIUMnxENZG5Q==
- dependencies:
- colorette "1.1.0"
- commander "^4.1.1"
- debug "4.1.1"
- esm "^3.2.25"
- getopts "2.2.5"
- inherits "~2.0.4"
- interpret "^2.0.0"
- liftoff "3.1.0"
- lodash "^4.17.15"
- mkdirp "^0.5.1"
- pg-connection-string "2.1.0"
- tarn "^2.0.0"
- tildify "2.0.0"
- uuid "^7.0.1"
- v8flags "^3.1.3"
-
-latest-version@^5.0.0:
- version "5.1.0"
- resolved "https://registry.yarnpkg.com/latest-version/-/latest-version-5.1.0.tgz#119dfe908fe38d15dfa43ecd13fa12ec8832face"
- integrity sha512-weT+r0kTkRQdCdYCNtkMwWXQTMEswKrFBkm4ckQOMVhhqhIMI1UT2hMj+1iigIhgSZm5gTmrRXBNoGUgaTY1xA==
- dependencies:
- package-json "^6.3.0"
-
-lazystream@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/lazystream/-/lazystream-1.0.0.tgz#f6995fe0f820392f61396be89462407bb77168e4"
- integrity sha1-9plf4PggOS9hOWvolGJAe7dxaOQ=
- dependencies:
- readable-stream "^2.0.5"
-
-levn@^0.3.0, levn@~0.3.0:
- version "0.3.0"
- resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee"
- integrity sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4=
- dependencies:
- prelude-ls "~1.1.2"
- type-check "~0.3.2"
-
-liftoff@3.1.0:
- version "3.1.0"
- resolved "https://registry.yarnpkg.com/liftoff/-/liftoff-3.1.0.tgz#c9ba6081f908670607ee79062d700df062c52ed3"
- integrity sha512-DlIPlJUkCV0Ips2zf2pJP0unEoT1kwYhiiPUGF3s/jtxTCjziNLoiVVh+jqWOWeFi6mmwQ5fNxvAUyPad4Dfog==
- dependencies:
- extend "^3.0.0"
- findup-sync "^3.0.0"
- fined "^1.0.1"
- flagged-respawn "^1.0.0"
- is-plain-object "^2.0.4"
- object.map "^1.0.0"
- rechoir "^0.6.2"
- resolve "^1.1.7"
-
-liquidjs@^9.11.10:
- version "9.15.0"
- resolved "https://registry.yarnpkg.com/liquidjs/-/liquidjs-9.15.0.tgz#03e8c13aeda89801a346c614b0802f320458d0ac"
- integrity sha512-wRPNfMx6X3GGEDqTlBpw7VMo8ylKkzLYTcd7eeaDeYnZyR5BqUgF9tZy3FdPCHV2N/BassGKmlmlpJiRXGFOqg==
-
-load-json-file@^4.0.0:
- version "4.0.0"
- resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-4.0.0.tgz#2f5f45ab91e33216234fd53adab668eb4ec0993b"
- integrity sha1-L19Fq5HjMhYjT9U62rZo607AmTs=
- dependencies:
- graceful-fs "^4.1.2"
- parse-json "^4.0.0"
- pify "^3.0.0"
- strip-bom "^3.0.0"
-
-locate-path@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e"
- integrity sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=
- dependencies:
- p-locate "^2.0.0"
- path-exists "^3.0.0"
-
-locate-path@^5.0.0:
- version "5.0.0"
- resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0"
- integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==
- dependencies:
- p-locate "^4.1.0"
-
-lodash.defaults@^4.2.0:
- version "4.2.0"
- resolved "https://registry.yarnpkg.com/lodash.defaults/-/lodash.defaults-4.2.0.tgz#d09178716ffea4dde9e5fb7b37f6f0802274580c"
- integrity sha1-0JF4cW/+pN3p5ft7N/bwgCJ0WAw=
-
-lodash.difference@^4.5.0:
- version "4.5.0"
- resolved "https://registry.yarnpkg.com/lodash.difference/-/lodash.difference-4.5.0.tgz#9ccb4e505d486b91651345772885a2df27fd017c"
- integrity sha1-nMtOUF1Ia5FlE0V3KIWi3yf9AXw=
-
-lodash.flatten@^4.4.0:
- version "4.4.0"
- resolved "https://registry.yarnpkg.com/lodash.flatten/-/lodash.flatten-4.4.0.tgz#f31c22225a9632d2bbf8e4addbef240aa765a61f"
- integrity sha1-8xwiIlqWMtK7+OSt2+8kCqdlph8=
-
-lodash.includes@^4.3.0:
- version "4.3.0"
- resolved "https://registry.yarnpkg.com/lodash.includes/-/lodash.includes-4.3.0.tgz#60bb98a87cb923c68ca1e51325483314849f553f"
- integrity sha1-YLuYqHy5I8aMoeUTJUgzFISfVT8=
-
-lodash.isboolean@^3.0.3:
- version "3.0.3"
- resolved "https://registry.yarnpkg.com/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz#6c2e171db2a257cd96802fd43b01b20d5f5870f6"
- integrity sha1-bC4XHbKiV82WgC/UOwGyDV9YcPY=
-
-lodash.isinteger@^4.0.4:
- version "4.0.4"
- resolved "https://registry.yarnpkg.com/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz#619c0af3d03f8b04c31f5882840b77b11cd68343"
- integrity sha1-YZwK89A/iwTDH1iChAt3sRzWg0M=
-
-lodash.isnumber@^3.0.3:
- version "3.0.3"
- resolved "https://registry.yarnpkg.com/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz#3ce76810c5928d03352301ac287317f11c0b1ffc"
- integrity sha1-POdoEMWSjQM1IwGsKHMX8RwLH/w=
-
-lodash.isplainobject@^4.0.6:
- version "4.0.6"
- resolved "https://registry.yarnpkg.com/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz#7c526a52d89b45c45cc690b88163be0497f550cb"
- integrity sha1-fFJqUtibRcRcxpC4gWO+BJf1UMs=
-
-lodash.isstring@^4.0.1:
- version "4.0.1"
- resolved "https://registry.yarnpkg.com/lodash.isstring/-/lodash.isstring-4.0.1.tgz#d527dfb5456eca7cc9bb95d5daeaf88ba54a5451"
- integrity sha1-1SfftUVuynzJu5XV2ur4i6VKVFE=
-
-lodash.once@^4.0.0:
- version "4.1.1"
- resolved "https://registry.yarnpkg.com/lodash.once/-/lodash.once-4.1.1.tgz#0dd3971213c7c56df880977d504c88fb471a97ac"
- integrity sha1-DdOXEhPHxW34gJd9UEyI+0cal6w=
-
-lodash.union@^4.6.0:
- version "4.6.0"
- resolved "https://registry.yarnpkg.com/lodash.union/-/lodash.union-4.6.0.tgz#48bb5088409f16f1821666641c44dd1aaae3cd88"
- integrity sha1-SLtQiECfFvGCFmZkHETdGqrjzYg=
-
-lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.19, lodash@^4.17.21:
- version "4.17.21"
- resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
- integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==
-
-lowercase-keys@^1.0.0, lowercase-keys@^1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.1.tgz#6f9e30b47084d971a7c820ff15a6c5167b74c26f"
- integrity sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA==
-
-lowercase-keys@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-2.0.0.tgz#2603e78b7b4b0006cbca2fbcc8a3202558ac9479"
- integrity sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==
-
-make-dir@^3.0.0:
- version "3.1.0"
- resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f"
- integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==
- dependencies:
- semver "^6.0.0"
-
-make-iterator@^1.0.0:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/make-iterator/-/make-iterator-1.0.1.tgz#29b33f312aa8f547c4a5e490f56afcec99133ad6"
- integrity sha512-pxiuXh0iVEq7VM7KMIhs5gxsfxCux2URptUQaXo4iZZJxBAzTPOLE2BumO5dbfVYq/hBJFBR/a1mFDmOx5AGmw==
- dependencies:
- kind-of "^6.0.2"
-
-map-cache@^0.2.0, map-cache@^0.2.2:
- version "0.2.2"
- resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf"
- integrity sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8=
-
-map-visit@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f"
- integrity sha1-7Nyo8TFE5mDxtb1B8S80edmN+48=
- dependencies:
- object-visit "^1.0.0"
-
-media-typer@0.3.0:
- version "0.3.0"
- resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748"
- integrity sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=
-
-merge-descriptors@1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61"
- integrity sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=
-
-methods@~1.1.2:
- version "1.1.2"
- resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee"
- integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=
-
-micromatch@^3.0.4:
- version "3.1.10"
- resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23"
- integrity sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==
- dependencies:
- arr-diff "^4.0.0"
- array-unique "^0.3.2"
- braces "^2.3.1"
- define-property "^2.0.2"
- extend-shallow "^3.0.2"
- extglob "^2.0.4"
- fragment-cache "^0.2.1"
- kind-of "^6.0.2"
- nanomatch "^1.2.9"
- object.pick "^1.3.0"
- regex-not "^1.0.0"
- snapdragon "^0.8.1"
- to-regex "^3.0.2"
-
-mime-db@1.44.0, "mime-db@>= 1.43.0 < 2":
- version "1.44.0"
- resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.44.0.tgz#fa11c5eb0aca1334b4233cb4d52f10c5a6272f92"
- integrity sha512-/NOTfLrsPBVeH7YtFPgsVWveuL+4SjjYxaQ1xtM1KMFj7HdxlBlxeyNLzhyJVx7r4rZGJAZ/6lkKCitSc/Nmpg==
-
-mime-types@~2.1.24:
- version "2.1.27"
- resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.27.tgz#47949f98e279ea53119f5722e0f34e529bec009f"
- integrity sha512-JIhqnCasI9yD+SsmkquHBxTSEuZdQX5BuQnS2Vc7puQQQ+8yiP5AY5uWhpdv4YL4VM5c6iliiYWPgJ/nJQLp7w==
- dependencies:
- mime-db "1.44.0"
-
-mime@1.6.0:
- version "1.6.0"
- resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1"
- integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==
-
-mimic-fn@^2.1.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b"
- integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==
-
-mimic-response@^1.0.0, mimic-response@^1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-1.0.1.tgz#4923538878eef42063cb8a3e3b0798781487ab1b"
- integrity sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==
-
-minimatch@^3.0.4:
- version "3.0.4"
- resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083"
- integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==
- dependencies:
- brace-expansion "^1.1.7"
-
-minimist@^1.2.0, minimist@^1.2.5:
- version "1.2.5"
- resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602"
- integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==
-
-minipass@^2.6.0, minipass@^2.9.0:
- version "2.9.0"
- resolved "https://registry.yarnpkg.com/minipass/-/minipass-2.9.0.tgz#e713762e7d3e32fed803115cf93e04bca9fcc9a6"
- integrity sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg==
- dependencies:
- safe-buffer "^5.1.2"
- yallist "^3.0.0"
-
-minizlib@^1.3.3:
- version "1.3.3"
- resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-1.3.3.tgz#2290de96818a34c29551c8a8d301216bd65a861d"
- integrity sha512-6ZYMOEnmVsdCeTJVE0W9ZD+pVnE8h9Hma/iOwwRDsdQoePpoX56/8B6z3P9VNwppJuBKNRuFDRNRqRWexT9G9Q==
- dependencies:
- minipass "^2.9.0"
-
-mixin-deep@^1.2.0:
- version "1.3.2"
- resolved "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.2.tgz#1120b43dc359a785dce65b55b82e257ccf479566"
- integrity sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA==
- dependencies:
- for-in "^1.0.2"
- is-extendable "^1.0.1"
-
-mkdirp@^0.5.1, mkdirp@^0.5.3, mkdirp@^0.5.5:
- version "0.5.5"
- resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def"
- integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==
- dependencies:
- minimist "^1.2.5"
-
-moment@^2.24.0:
- version "2.27.0"
- resolved "https://registry.yarnpkg.com/moment/-/moment-2.27.0.tgz#8bff4e3e26a236220dfe3e36de756b6ebaa0105d"
- integrity sha512-al0MUK7cpIcglMv3YF13qSgdAIqxHTO7brRtaz3DlSULbqfazqkc5kEjNrLDOM7fsjshoFIihnU8snrP7zUvhQ==
-
-ms@2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8"
- integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=
-
-ms@2.1.1:
- version "2.1.1"
- resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.1.tgz#30a5864eb3ebb0a66f2ebe6d727af06a09d86e0a"
- integrity sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==
-
-ms@^2.1.1:
- version "2.1.2"
- resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"
- integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==
-
-mute-stream@0.0.8:
- version "0.0.8"
- resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.8.tgz#1630c42b2251ff81e2a283de96a5497ea92e5e0d"
- integrity sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==
-
-mysql@^2.18.1:
- version "2.18.1"
- resolved "https://registry.yarnpkg.com/mysql/-/mysql-2.18.1.tgz#2254143855c5a8c73825e4522baf2ea021766717"
- integrity sha512-Bca+gk2YWmqp2Uf6k5NFEurwY/0td0cpebAucFpY/3jhrwrVGuxU2uQFCHjU19SJfje0yQvi+rVWdq78hR5lig==
- dependencies:
- bignumber.js "9.0.0"
- readable-stream "2.3.7"
- safe-buffer "5.1.2"
- sqlstring "2.3.1"
-
-nan@^2.12.1:
- version "2.14.1"
- resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.1.tgz#d7be34dfa3105b91494c3147089315eff8874b01"
- integrity sha512-isWHgVjnFjh2x2yuJ/tj3JbwoHu3UC2dX5G/88Cm24yB6YopVgxvBObDY7n5xW6ExmFhJpSEQqFPvq9zaXc8Jw==
-
-nanomatch@^1.2.9:
- version "1.2.13"
- resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119"
- integrity sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA==
- dependencies:
- arr-diff "^4.0.0"
- array-unique "^0.3.2"
- define-property "^2.0.2"
- extend-shallow "^3.0.2"
- fragment-cache "^0.2.1"
- is-windows "^1.0.2"
- kind-of "^6.0.2"
- object.pick "^1.3.0"
- regex-not "^1.0.0"
- snapdragon "^0.8.1"
- to-regex "^3.0.1"
-
-natural-compare@^1.4.0:
- version "1.4.0"
- resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7"
- integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=
-
-needle@^2.2.1, needle@^2.5.0:
- version "2.5.0"
- resolved "https://registry.yarnpkg.com/needle/-/needle-2.5.0.tgz#e6fc4b3cc6c25caed7554bd613a5cf0bac8c31c0"
- integrity sha512-o/qITSDR0JCyCKEQ1/1bnUXMmznxabbwi/Y4WwJElf+evwJNFNwIDMCCt5IigFVxgeGBJESLohGtIS9gEzo1fA==
- dependencies:
- debug "^3.2.6"
- iconv-lite "^0.4.4"
- sax "^1.2.4"
-
-negotiator@0.6.2:
- version "0.6.2"
- resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.2.tgz#feacf7ccf525a77ae9634436a64883ffeca346fb"
- integrity sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw==
-
-nice-try@^1.0.4:
- version "1.0.5"
- resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366"
- integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==
-
-node-addon-api@^3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-3.0.0.tgz#812446a1001a54f71663bed188314bba07e09247"
- integrity sha512-sSHCgWfJ+Lui/u+0msF3oyCgvdkhxDbkCS6Q8uiJquzOimkJBvX6hl5aSSA7DR1XbMpdM8r7phjcF63sF4rkKg==
-
-node-pre-gyp@0.15.0:
- version "0.15.0"
- resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.15.0.tgz#c2fc383276b74c7ffa842925241553e8b40f1087"
- integrity sha512-7QcZa8/fpaU/BKenjcaeFF9hLz2+7S9AqyXFhlH/rilsQ/hPZKK32RtR5EQHJElgu+q5RfbJ34KriI79UWaorA==
- dependencies:
- detect-libc "^1.0.2"
- mkdirp "^0.5.3"
- needle "^2.5.0"
- nopt "^4.0.1"
- npm-packlist "^1.1.6"
- npmlog "^4.0.2"
- rc "^1.2.7"
- rimraf "^2.6.1"
- semver "^5.3.0"
- tar "^4.4.2"
-
-node-pre-gyp@^0.11.0:
- version "0.11.0"
- resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.11.0.tgz#db1f33215272f692cd38f03238e3e9b47c5dd054"
- integrity sha512-TwWAOZb0j7e9eGaf9esRx3ZcLaE5tQ2lvYy1pb5IAaG1a2e2Kv5Lms1Y4hpj+ciXJRofIxxlt5haeQ/2ANeE0Q==
- dependencies:
- detect-libc "^1.0.2"
- mkdirp "^0.5.1"
- needle "^2.2.1"
- nopt "^4.0.1"
- npm-packlist "^1.1.6"
- npmlog "^4.0.2"
- rc "^1.2.7"
- rimraf "^2.6.1"
- semver "^5.3.0"
- tar "^4"
-
-node-rsa@^1.0.8:
- version "1.1.1"
- resolved "https://registry.yarnpkg.com/node-rsa/-/node-rsa-1.1.1.tgz#efd9ad382097782f506153398496f79e4464434d"
- integrity sha512-Jd4cvbJMryN21r5HgxQOpMEqv+ooke/korixNNK3mGqfGJmy0M77WDDzo/05969+OkMy3XW1UuZsSmW9KQm7Fw==
- dependencies:
- asn1 "^0.2.4"
-
-nodemon@^2.0.2:
- version "2.0.4"
- resolved "https://registry.yarnpkg.com/nodemon/-/nodemon-2.0.4.tgz#55b09319eb488d6394aa9818148c0c2d1c04c416"
- integrity sha512-Ltced+hIfTmaS28Zjv1BM552oQ3dbwPqI4+zI0SLgq+wpJhSyqgYude/aZa/3i31VCQWMfXJVxvu86abcam3uQ==
- dependencies:
- chokidar "^3.2.2"
- debug "^3.2.6"
- ignore-by-default "^1.0.1"
- minimatch "^3.0.4"
- pstree.remy "^1.1.7"
- semver "^5.7.1"
- supports-color "^5.5.0"
- touch "^3.1.0"
- undefsafe "^2.0.2"
- update-notifier "^4.0.0"
-
-nopt@^4.0.1:
- version "4.0.3"
- resolved "https://registry.yarnpkg.com/nopt/-/nopt-4.0.3.tgz#a375cad9d02fd921278d954c2254d5aa57e15e48"
- integrity sha512-CvaGwVMztSMJLOeXPrez7fyfObdZqNUK1cPAEzLHrTybIua9pMdmmPR5YwtfNftIOMv3DPUhFaxsZMNTQO20Kg==
- dependencies:
- abbrev "1"
- osenv "^0.1.4"
-
-nopt@~1.0.10:
- version "1.0.10"
- resolved "https://registry.yarnpkg.com/nopt/-/nopt-1.0.10.tgz#6ddd21bd2a31417b92727dd585f8a6f37608ebee"
- integrity sha1-bd0hvSoxQXuScn3Vhfim83YI6+4=
- dependencies:
- abbrev "1"
-
-normalize-path@^3.0.0, normalize-path@~3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65"
- integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==
-
-normalize-url@^4.1.0:
- version "4.5.1"
- resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-4.5.1.tgz#0dd90cf1288ee1d1313b87081c9a5932ee48518a"
- integrity sha512-9UZCFRHQdNrfTpGg8+1INIg93B6zE0aXMVFkw1WFwvO4SlZywU6aLg5Of0Ap/PgcbSw4LNxvMWXMeugwMCX0AA==
-
-npm-bundled@^1.0.1:
- version "1.1.1"
- resolved "https://registry.yarnpkg.com/npm-bundled/-/npm-bundled-1.1.1.tgz#1edd570865a94cdb1bc8220775e29466c9fb234b"
- integrity sha512-gqkfgGePhTpAEgUsGEgcq1rqPXA+tv/aVBlgEzfXwA1yiUJF7xtEt3CtVwOjNYQOVknDk0F20w58Fnm3EtG0fA==
- dependencies:
- npm-normalize-package-bin "^1.0.1"
-
-npm-normalize-package-bin@^1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/npm-normalize-package-bin/-/npm-normalize-package-bin-1.0.1.tgz#6e79a41f23fd235c0623218228da7d9c23b8f6e2"
- integrity sha512-EPfafl6JL5/rU+ot6P3gRSCpPDW5VmIzX959Ob1+ySFUuuYHWHekXpwdUZcKP5C+DS4GEtdJluwBjnsNDl+fSA==
-
-npm-packlist@^1.1.6:
- version "1.4.8"
- resolved "https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-1.4.8.tgz#56ee6cc135b9f98ad3d51c1c95da22bbb9b2ef3e"
- integrity sha512-5+AZgwru5IevF5ZdnFglB5wNlHG1AOOuw28WhUq8/8emhBmLv6jX5by4WJCh7lW0uSYZYS6DXqIsyZVIXRZU9A==
- dependencies:
- ignore-walk "^3.0.1"
- npm-bundled "^1.0.1"
- npm-normalize-package-bin "^1.0.1"
-
-npmlog@^4.0.2:
- version "4.1.2"
- resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-4.1.2.tgz#08a7f2a8bf734604779a9efa4ad5cc717abb954b"
- integrity sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg==
- dependencies:
- are-we-there-yet "~1.1.2"
- console-control-strings "~1.1.0"
- gauge "~2.7.3"
- set-blocking "~2.0.0"
-
-number-is-nan@^1.0.0:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d"
- integrity sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=
-
-object-assign@^4.1.0:
- version "4.1.1"
- resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863"
- integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=
-
-object-copy@^0.1.0:
- version "0.1.0"
- resolved "https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c"
- integrity sha1-fn2Fi3gb18mRpBupde04EnVOmYw=
- dependencies:
- copy-descriptor "^0.1.0"
- define-property "^0.2.5"
- kind-of "^3.0.3"
-
-object-visit@^1.0.0:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb"
- integrity sha1-95xEk68MU3e1n+OdOV5BBC3QRbs=
- dependencies:
- isobject "^3.0.0"
-
-object.defaults@^1.1.0:
- version "1.1.0"
- resolved "https://registry.yarnpkg.com/object.defaults/-/object.defaults-1.1.0.tgz#3a7f868334b407dea06da16d88d5cd29e435fecf"
- integrity sha1-On+GgzS0B96gbaFtiNXNKeQ1/s8=
- dependencies:
- array-each "^1.0.1"
- array-slice "^1.0.0"
- for-own "^1.0.0"
- isobject "^3.0.0"
-
-object.map@^1.0.0:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/object.map/-/object.map-1.0.1.tgz#cf83e59dc8fcc0ad5f4250e1f78b3b81bd801d37"
- integrity sha1-z4Plncj8wK1fQlDh94s7gb2AHTc=
- dependencies:
- for-own "^1.0.0"
- make-iterator "^1.0.0"
-
-object.pick@^1.2.0, object.pick@^1.3.0:
- version "1.3.0"
- resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747"
- integrity sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c=
- dependencies:
- isobject "^3.0.1"
-
-objection@^2.2.16:
- version "2.2.16"
- resolved "https://registry.yarnpkg.com/objection/-/objection-2.2.16.tgz#552ec6d625a7f80d6e204fc63732cbd3fc56f31c"
- integrity sha512-sq8erZdxW5ruPUK6tVvwDxyO16U49XAn/BmOm2zaNhNA2phOPCe2/7+R70nDEF1SFrgJOrwDu/PtoxybuJxnjQ==
- dependencies:
- ajv "^6.12.6"
- db-errors "^0.2.3"
-
-on-finished@~2.3.0:
- version "2.3.0"
- resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947"
- integrity sha1-IPEzZIGwg811M3mSoWlxqi2QaUc=
- dependencies:
- ee-first "1.1.1"
-
-on-headers@~1.0.2:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f"
- integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==
-
-once@^1.3.0, once@^1.3.1, once@^1.4.0:
- version "1.4.0"
- resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
- integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E=
- dependencies:
- wrappy "1"
-
-onetime@^5.1.0:
- version "5.1.1"
- resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.1.tgz#5c8016847b0d67fcedb7eef254751cfcdc7e9418"
- integrity sha512-ZpZpjcJeugQfWsfyQlshVoowIIQ1qBGSVll4rfDq6JJVO//fesjoX808hXWfBjY+ROZgpKDI5TRSRBSoJiZ8eg==
- dependencies:
- mimic-fn "^2.1.0"
-
-optionator@^0.8.3:
- version "0.8.3"
- resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495"
- integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==
- dependencies:
- deep-is "~0.1.3"
- fast-levenshtein "~2.0.6"
- levn "~0.3.0"
- prelude-ls "~1.1.2"
- type-check "~0.3.2"
- word-wrap "~1.2.3"
-
-os-homedir@^1.0.0:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3"
- integrity sha1-/7xJiDNuDoM94MFox+8VISGqf7M=
-
-os-tmpdir@^1.0.0, os-tmpdir@~1.0.2:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274"
- integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=
-
-osenv@^0.1.4:
- version "0.1.5"
- resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.5.tgz#85cdfafaeb28e8677f416e287592b5f3f49ea410"
- integrity sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g==
- dependencies:
- os-homedir "^1.0.0"
- os-tmpdir "^1.0.0"
-
-p-cancelable@^1.0.0:
- version "1.1.0"
- resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-1.1.0.tgz#d078d15a3af409220c886f1d9a0ca2e441ab26cc"
- integrity sha512-s73XxOZ4zpt1edZYZzvhqFa6uvQc1vwUa0K0BdtIZgQMAJj9IbebH+JkgKZc9h+B05PKHLOTl4ajG1BmNrVZlw==
-
-p-limit@^1.1.0:
- version "1.3.0"
- resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-1.3.0.tgz#b86bd5f0c25690911c7590fcbfc2010d54b3ccb8"
- integrity sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==
- dependencies:
- p-try "^1.0.0"
-
-p-limit@^2.2.0:
- version "2.3.0"
- resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1"
- integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==
- dependencies:
- p-try "^2.0.0"
-
-p-locate@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43"
- integrity sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=
- dependencies:
- p-limit "^1.1.0"
-
-p-locate@^4.1.0:
- version "4.1.0"
- resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07"
- integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==
- dependencies:
- p-limit "^2.2.0"
-
-p-try@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/p-try/-/p-try-1.0.0.tgz#cbc79cdbaf8fd4228e13f621f2b1a237c1b207b3"
- integrity sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=
-
-p-try@^2.0.0:
- version "2.2.0"
- resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6"
- integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==
-
-package-json@^6.3.0:
- version "6.5.0"
- resolved "https://registry.yarnpkg.com/package-json/-/package-json-6.5.0.tgz#6feedaca35e75725876d0b0e64974697fed145b0"
- integrity sha512-k3bdm2n25tkyxcjSKzB5x8kfVxlMdgsbPr0GkZcwHsLpba6cBjqCt1KlcChKEvxHIcTB1FVMuwoijZ26xex5MQ==
- dependencies:
- got "^9.6.0"
- registry-auth-token "^4.0.0"
- registry-url "^5.0.0"
- semver "^6.2.0"
-
-parent-module@^1.0.0:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2"
- integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==
- dependencies:
- callsites "^3.0.0"
-
-parse-filepath@^1.0.1:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/parse-filepath/-/parse-filepath-1.0.2.tgz#a632127f53aaf3d15876f5872f3ffac763d6c891"
- integrity sha1-pjISf1Oq89FYdvWHLz/6x2PWyJE=
- dependencies:
- is-absolute "^1.0.0"
- map-cache "^0.2.0"
- path-root "^0.1.1"
-
-parse-json@^4.0.0:
- version "4.0.0"
- resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-4.0.0.tgz#be35f5425be1f7f6c747184f98a788cb99477ee0"
- integrity sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=
- dependencies:
- error-ex "^1.3.1"
- json-parse-better-errors "^1.0.1"
-
-parse-passwd@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/parse-passwd/-/parse-passwd-1.0.0.tgz#6d5b934a456993b23d37f40a382d6f1666a8e5c6"
- integrity sha1-bVuTSkVpk7I9N/QKOC1vFmao5cY=
-
-parseurl@~1.3.3:
- version "1.3.3"
- resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4"
- integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==
-
-pascalcase@^0.1.1:
- version "0.1.1"
- resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14"
- integrity sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ=
-
-path-exists@^3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515"
- integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=
-
-path-exists@^4.0.0:
- version "4.0.0"
- resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3"
- integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==
-
-path-is-absolute@^1.0.0:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f"
- integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18=
-
-path-key@^2.0.1:
- version "2.0.1"
- resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40"
- integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=
-
-path-parse@^1.0.6:
- version "1.0.7"
- resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735"
- integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==
-
-path-root-regex@^0.1.0:
- version "0.1.2"
- resolved "https://registry.yarnpkg.com/path-root-regex/-/path-root-regex-0.1.2.tgz#bfccdc8df5b12dc52c8b43ec38d18d72c04ba96d"
- integrity sha1-v8zcjfWxLcUsi0PsONGNcsBLqW0=
-
-path-root@^0.1.1:
- version "0.1.1"
- resolved "https://registry.yarnpkg.com/path-root/-/path-root-0.1.1.tgz#9a4a6814cac1c0cd73360a95f32083c8ea4745b7"
- integrity sha1-mkpoFMrBwM1zNgqV8yCDyOpHRbc=
- dependencies:
- path-root-regex "^0.1.0"
-
-path-to-regexp@0.1.7:
- version "0.1.7"
- resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c"
- integrity sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=
-
-path@^0.12.7:
- version "0.12.7"
- resolved "https://registry.yarnpkg.com/path/-/path-0.12.7.tgz#d4dc2a506c4ce2197eb481ebfcd5b36c0140b10f"
- integrity sha1-1NwqUGxM4hl+tIHr/NWzbAFAsQ8=
- dependencies:
- process "^0.11.1"
- util "^0.10.3"
-
-pg-connection-string@2.1.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/pg-connection-string/-/pg-connection-string-2.1.0.tgz#e07258f280476540b24818ebb5dca29e101ca502"
- integrity sha512-bhlV7Eq09JrRIvo1eKngpwuqKtJnNhZdpdOlvrPrA4dxqXPjxSrbNrfnIDmTpwMyRszrcV4kU5ZA4mMsQUrjdg==
-
-picomatch@^2.0.4, picomatch@^2.2.1:
- version "2.2.2"
- resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.2.tgz#21f333e9b6b8eaff02468f5146ea406d345f4dad"
- integrity sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==
-
-pify@^3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176"
- integrity sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=
-
-pkg-conf@^2.1.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/pkg-conf/-/pkg-conf-2.1.0.tgz#2126514ca6f2abfebd168596df18ba57867f0058"
- integrity sha1-ISZRTKbyq/69FoWW3xi6V4Z/AFg=
- dependencies:
- find-up "^2.0.0"
- load-json-file "^4.0.0"
-
-posix-character-classes@^0.1.0:
- version "0.1.1"
- resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab"
- integrity sha1-AerA/jta9xoqbAL+q7jB/vfgDqs=
-
-prelude-ls@~1.1.2:
- version "1.1.2"
- resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54"
- integrity sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=
-
-prepend-http@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-2.0.0.tgz#e92434bfa5ea8c19f41cdfd401d741a3c819d897"
- integrity sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc=
-
-prettier@^2.0.4:
- version "2.0.5"
- resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.0.5.tgz#d6d56282455243f2f92cc1716692c08aa31522d4"
- integrity sha512-7PtVymN48hGcO4fGjybyBSIWDsLU4H4XlvOHfq91pz9kkGlonzwTfYkaIEwiRg/dAJF9YlbsduBAgtYLi+8cFg==
-
-printj@~1.1.0:
- version "1.1.2"
- resolved "https://registry.yarnpkg.com/printj/-/printj-1.1.2.tgz#d90deb2975a8b9f600fb3a1c94e3f4c53c78a222"
- integrity sha512-zA2SmoLaxZyArQTOPj5LXecR+RagfPSU5Kw1qP+jkWeNlrq+eJZyY2oS68SU1Z/7/myXM4lo9716laOFAVStCQ==
-
-process-nextick-args@~2.0.0:
- version "2.0.1"
- resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2"
- integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==
-
-process@^0.11.1:
- version "0.11.10"
- resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182"
- integrity sha1-czIwDoQBYb2j5podHZGn1LwW8YI=
-
-progress@^2.0.0:
- version "2.0.3"
- resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8"
- integrity sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==
-
-proxy-addr@~2.0.5:
- version "2.0.6"
- resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.6.tgz#fdc2336505447d3f2f2c638ed272caf614bbb2bf"
- integrity sha512-dh/frvCBVmSsDYzw6n926jv974gddhkFPfiN8hPOi30Wax25QZyZEGveluCgliBnqmuM+UJmBErbAUFIoDbjOw==
- dependencies:
- forwarded "~0.1.2"
- ipaddr.js "1.9.1"
-
-pstree.remy@^1.1.7:
- version "1.1.8"
- resolved "https://registry.yarnpkg.com/pstree.remy/-/pstree.remy-1.1.8.tgz#c242224f4a67c21f686839bbdb4ac282b8373d3a"
- integrity sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==
-
-pump@^3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64"
- integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==
- dependencies:
- end-of-stream "^1.1.0"
- once "^1.3.1"
-
-punycode@^2.1.0:
- version "2.1.1"
- resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec"
- integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==
-
-pupa@^2.0.1:
- version "2.0.1"
- resolved "https://registry.yarnpkg.com/pupa/-/pupa-2.0.1.tgz#dbdc9ff48ffbea4a26a069b6f9f7abb051008726"
- integrity sha512-hEJH0s8PXLY/cdXh66tNEQGndDrIKNqNC5xmrysZy3i5C3oEoLna7YAOad+7u125+zH1HNXUmGEkrhb3c2VriA==
- dependencies:
- escape-goat "^2.0.0"
-
-qs@6.7.0:
- version "6.7.0"
- resolved "https://registry.yarnpkg.com/qs/-/qs-6.7.0.tgz#41dc1a015e3d581f1621776be31afb2876a9b1bc"
- integrity sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ==
-
-querystring@0.2.0:
- version "0.2.0"
- resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620"
- integrity sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=
-
-range-parser@~1.2.1:
- version "1.2.1"
- resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031"
- integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==
-
-raw-body@2.4.0:
- version "2.4.0"
- resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.4.0.tgz#a1ce6fb9c9bc356ca52e89256ab59059e13d0332"
- integrity sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q==
- dependencies:
- bytes "3.1.0"
- http-errors "1.7.2"
- iconv-lite "0.4.24"
- unpipe "1.0.0"
-
-rc@^1.2.7, rc@^1.2.8:
- version "1.2.8"
- resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed"
- integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==
- dependencies:
- deep-extend "^0.6.0"
- ini "~1.3.0"
- minimist "^1.2.0"
- strip-json-comments "~2.0.1"
-
-readable-stream@2.3.7, readable-stream@^2.0.0, readable-stream@^2.0.5, readable-stream@^2.0.6:
- version "2.3.7"
- resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57"
- integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==
- dependencies:
- core-util-is "~1.0.0"
- inherits "~2.0.3"
- isarray "~1.0.0"
- process-nextick-args "~2.0.0"
- safe-buffer "~5.1.1"
- string_decoder "~1.1.1"
- util-deprecate "~1.0.1"
-
-readable-stream@^3.1.1, readable-stream@^3.4.0, readable-stream@^3.6.0:
- version "3.6.0"
- resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198"
- integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==
- dependencies:
- inherits "^2.0.3"
- string_decoder "^1.1.1"
- util-deprecate "^1.0.1"
-
-readdir-glob@^1.0.0:
- version "1.1.1"
- resolved "https://registry.yarnpkg.com/readdir-glob/-/readdir-glob-1.1.1.tgz#f0e10bb7bf7bfa7e0add8baffdc54c3f7dbee6c4"
- integrity sha512-91/k1EzZwDx6HbERR+zucygRFfiPl2zkIYZtv3Jjr6Mn7SkKcVct8aVO+sSRiGMc6fLf72du3d92/uY63YPdEA==
- dependencies:
- minimatch "^3.0.4"
-
-readdirp@~3.4.0:
- version "3.4.0"
- resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.4.0.tgz#9fdccdf9e9155805449221ac645e8303ab5b9ada"
- integrity sha512-0xe001vZBnJEK+uKcj8qOhyAKPzIT+gStxWr3LCB0DwcXR5NZJ3IaC+yGnHCYzB/S7ov3m3EEbZI2zeNvX+hGQ==
- dependencies:
- picomatch "^2.2.1"
-
-rechoir@^0.6.2:
- version "0.6.2"
- resolved "https://registry.yarnpkg.com/rechoir/-/rechoir-0.6.2.tgz#85204b54dba82d5742e28c96756ef43af50e3384"
- integrity sha1-hSBLVNuoLVdC4oyWdW70OvUOM4Q=
- dependencies:
- resolve "^1.1.6"
-
-regex-not@^1.0.0, regex-not@^1.0.2:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c"
- integrity sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A==
- dependencies:
- extend-shallow "^3.0.2"
- safe-regex "^1.1.0"
-
-regexpp@^2.0.1:
- version "2.0.1"
- resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-2.0.1.tgz#8d19d31cf632482b589049f8281f93dbcba4d07f"
- integrity sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw==
-
-registry-auth-token@^4.0.0:
- version "4.2.0"
- resolved "https://registry.yarnpkg.com/registry-auth-token/-/registry-auth-token-4.2.0.tgz#1d37dffda72bbecd0f581e4715540213a65eb7da"
- integrity sha512-P+lWzPrsgfN+UEpDS3U8AQKg/UjZX6mQSJueZj3EK+vNESoqBSpBUD3gmu4sF9lOsjXWjF11dQKUqemf3veq1w==
- dependencies:
- rc "^1.2.8"
-
-registry-url@^5.0.0:
- version "5.1.0"
- resolved "https://registry.yarnpkg.com/registry-url/-/registry-url-5.1.0.tgz#e98334b50d5434b81136b44ec638d9c2009c5009"
- integrity sha512-8acYXXTI0AkQv6RAOjE3vOaIXZkT9wo4LOFbBKYQEEnnMNBpKqdUrI6S4NT0KPIo/WVvJ5tE/X5LF/TQUf0ekw==
- dependencies:
- rc "^1.2.8"
-
-repeat-element@^1.1.2:
- version "1.1.3"
- resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.3.tgz#782e0d825c0c5a3bb39731f84efee6b742e6b1ce"
- integrity sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g==
-
-repeat-string@^1.6.1:
- version "1.6.1"
- resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637"
- integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc=
-
-require-directory@^2.1.1:
- version "2.1.1"
- resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42"
- integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I=
-
-require-main-filename@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b"
- integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==
-
-resolve-dir@^1.0.0, resolve-dir@^1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/resolve-dir/-/resolve-dir-1.0.1.tgz#79a40644c362be82f26effe739c9bb5382046f43"
- integrity sha1-eaQGRMNivoLybv/nOcm7U4IEb0M=
- dependencies:
- expand-tilde "^2.0.0"
- global-modules "^1.0.0"
-
-resolve-from@^4.0.0:
- version "4.0.0"
- resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6"
- integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==
-
-resolve-url@^0.2.1:
- version "0.2.1"
- resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a"
- integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo=
-
-resolve@^1.1.6, resolve@^1.1.7:
- version "1.17.0"
- resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.17.0.tgz#b25941b54968231cc2d1bb76a79cb7f2c0bf8444"
- integrity sha512-ic+7JYiV8Vi2yzQGFWOkiZD5Z9z7O2Zhm9XMaTxdJExKasieFCr+yXZ/WmXsckHiKl12ar0y6XiXDx3m4RHn1w==
- dependencies:
- path-parse "^1.0.6"
-
-responselike@^1.0.2:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/responselike/-/responselike-1.0.2.tgz#918720ef3b631c5642be068f15ade5a46f4ba1e7"
- integrity sha1-kYcg7ztjHFZCvgaPFa3lpG9Loec=
- dependencies:
- lowercase-keys "^1.0.0"
-
-restore-cursor@^3.1.0:
- version "3.1.0"
- resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-3.1.0.tgz#39f67c54b3a7a58cea5236d95cf0034239631f7e"
- integrity sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==
- dependencies:
- onetime "^5.1.0"
- signal-exit "^3.0.2"
-
-ret@~0.1.10:
- version "0.1.15"
- resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc"
- integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==
-
-rimraf@2.6.3:
- version "2.6.3"
- resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.3.tgz#b2d104fe0d8fb27cf9e0a1cda8262dd3833c6cab"
- integrity sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==
- dependencies:
- glob "^7.1.3"
-
-rimraf@^2.6.1:
- version "2.7.1"
- resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec"
- integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==
- dependencies:
- glob "^7.1.3"
-
-run-async@^2.4.0:
- version "2.4.1"
- resolved "https://registry.yarnpkg.com/run-async/-/run-async-2.4.1.tgz#8440eccf99ea3e70bd409d49aab88e10c189a455"
- integrity sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ==
-
-rxjs@^6.6.0:
- version "6.6.2"
- resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-6.6.2.tgz#8096a7ac03f2cc4fe5860ef6e572810d9e01c0d2"
- integrity sha512-BHdBMVoWC2sL26w//BCu3YzKT4s2jip/WhwsGEDmeKYBhKDZeYezVUnHatYB7L85v5xs0BAQmg6BEYJEKxBabg==
- dependencies:
- tslib "^1.9.0"
-
-safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1:
- version "5.1.2"
- resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d"
- integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==
-
-safe-buffer@^5.0.1, safe-buffer@^5.1.2, safe-buffer@^5.2.1, safe-buffer@~5.2.0:
- version "5.2.1"
- resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6"
- integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==
-
-safe-regex@^1.1.0:
- version "1.1.0"
- resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e"
- integrity sha1-QKNmnzsHfR6UPURinhV91IAjvy4=
- dependencies:
- ret "~0.1.10"
-
-"safer-buffer@>= 2.1.2 < 3", safer-buffer@~2.1.0:
- version "2.1.2"
- resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a"
- integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==
-
-sax@^1.2.4:
- version "1.2.4"
- resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9"
- integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==
-
-semver-diff@^3.1.1:
- version "3.1.1"
- resolved "https://registry.yarnpkg.com/semver-diff/-/semver-diff-3.1.1.tgz#05f77ce59f325e00e2706afd67bb506ddb1ca32b"
- integrity sha512-GX0Ix/CJcHyB8c4ykpHGIAvLyOwOobtM/8d+TQkAd81/bEjgPHrfba41Vpesr7jX/t8Uh+R3EX9eAS5be+jQYg==
- dependencies:
- semver "^6.3.0"
-
-semver@^5.3.0, semver@^5.5.0, semver@^5.6.0, semver@^5.7.1:
- version "5.7.1"
- resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7"
- integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==
-
-semver@^6.0.0, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0:
- version "6.3.0"
- resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d"
- integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==
-
-send@0.17.1:
- version "0.17.1"
- resolved "https://registry.yarnpkg.com/send/-/send-0.17.1.tgz#c1d8b059f7900f7466dd4938bdc44e11ddb376c8"
- integrity sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg==
- dependencies:
- debug "2.6.9"
- depd "~1.1.2"
- destroy "~1.0.4"
- encodeurl "~1.0.2"
- escape-html "~1.0.3"
- etag "~1.8.1"
- fresh "0.5.2"
- http-errors "~1.7.2"
- mime "1.6.0"
- ms "2.1.1"
- on-finished "~2.3.0"
- range-parser "~1.2.1"
- statuses "~1.5.0"
-
-serve-static@1.14.1:
- version "1.14.1"
- resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.14.1.tgz#666e636dc4f010f7ef29970a88a674320898b2f9"
- integrity sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg==
- dependencies:
- encodeurl "~1.0.2"
- escape-html "~1.0.3"
- parseurl "~1.3.3"
- send "0.17.1"
-
-set-blocking@^2.0.0, set-blocking@~2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7"
- integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc=
-
-set-value@^2.0.0, set-value@^2.0.1:
- version "2.0.1"
- resolved "https://registry.yarnpkg.com/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b"
- integrity sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw==
- dependencies:
- extend-shallow "^2.0.1"
- is-extendable "^0.1.1"
- is-plain-object "^2.0.3"
- split-string "^3.0.1"
-
-setprototypeof@1.1.1:
- version "1.1.1"
- resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.1.tgz#7e95acb24aa92f5885e0abef5ba131330d4ae683"
- integrity sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw==
-
-shebang-command@^1.2.0:
- version "1.2.0"
- resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea"
- integrity sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=
- dependencies:
- shebang-regex "^1.0.0"
-
-shebang-regex@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3"
- integrity sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=
-
-signal-exit@^3.0.0, signal-exit@^3.0.2:
- version "3.0.3"
- resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.3.tgz#a1410c2edd8f077b08b4e253c8eacfcaf057461c"
- integrity sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==
-
-signale@^1.4.0:
- version "1.4.0"
- resolved "https://registry.yarnpkg.com/signale/-/signale-1.4.0.tgz#c4be58302fb0262ac00fc3d886a7c113759042f1"
- integrity sha512-iuh+gPf28RkltuJC7W5MRi6XAjTDCAPC/prJUpQoG4vIP3MJZ+GTydVnodXA7pwvTKb2cA0m9OFZW/cdWy/I/w==
- dependencies:
- chalk "^2.3.2"
- figures "^2.0.0"
- pkg-conf "^2.1.0"
-
-slice-ansi@^2.1.0:
- version "2.1.0"
- resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-2.1.0.tgz#cacd7693461a637a5788d92a7dd4fba068e81636"
- integrity sha512-Qu+VC3EwYLldKa1fCxuuvULvSJOKEgk9pi8dZeCVK7TqBfUNTH4sFkk4joj8afVSfAYgJoSOetjx9QWOJ5mYoQ==
- dependencies:
- ansi-styles "^3.2.0"
- astral-regex "^1.0.0"
- is-fullwidth-code-point "^2.0.0"
-
-snapdragon-node@^2.0.1:
- version "2.1.1"
- resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b"
- integrity sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw==
- dependencies:
- define-property "^1.0.0"
- isobject "^3.0.0"
- snapdragon-util "^3.0.1"
-
-snapdragon-util@^3.0.1:
- version "3.0.1"
- resolved "https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2"
- integrity sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ==
- dependencies:
- kind-of "^3.2.0"
-
-snapdragon@^0.8.1:
- version "0.8.2"
- resolved "https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.2.tgz#64922e7c565b0e14204ba1aa7d6964278d25182d"
- integrity sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg==
- dependencies:
- base "^0.11.1"
- debug "^2.2.0"
- define-property "^0.2.5"
- extend-shallow "^2.0.1"
- map-cache "^0.2.2"
- source-map "^0.5.6"
- source-map-resolve "^0.5.0"
- use "^3.1.0"
-
-source-map-resolve@^0.5.0:
- version "0.5.3"
- resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.3.tgz#190866bece7553e1f8f267a2ee82c606b5509a1a"
- integrity sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw==
- dependencies:
- atob "^2.1.2"
- decode-uri-component "^0.2.0"
- resolve-url "^0.2.1"
- source-map-url "^0.4.0"
- urix "^0.1.0"
-
-source-map-url@^0.4.0:
- version "0.4.0"
- resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.0.tgz#3e935d7ddd73631b97659956d55128e87b5084a3"
- integrity sha1-PpNdfd1zYxuXZZlW1VEo6HtQhKM=
-
-source-map@^0.5.6:
- version "0.5.7"
- resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc"
- integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=
-
-split-string@^3.0.1, split-string@^3.0.2:
- version "3.1.0"
- resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2"
- integrity sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw==
- dependencies:
- extend-shallow "^3.0.0"
-
-sprintf-js@~1.0.2:
- version "1.0.3"
- resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c"
- integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=
-
-sqlite3@^4.1.1:
- version "4.2.0"
- resolved "https://registry.yarnpkg.com/sqlite3/-/sqlite3-4.2.0.tgz#49026d665e9fc4f922e56fb9711ba5b4c85c4901"
- integrity sha512-roEOz41hxui2Q7uYnWsjMOTry6TcNUNmp8audCx18gF10P2NknwdpF+E+HKvz/F2NvPKGGBF4NGc+ZPQ+AABwg==
- dependencies:
- nan "^2.12.1"
- node-pre-gyp "^0.11.0"
-
-sqlstring@2.3.1:
- version "2.3.1"
- resolved "https://registry.yarnpkg.com/sqlstring/-/sqlstring-2.3.1.tgz#475393ff9e91479aea62dcaf0ca3d14983a7fb40"
- integrity sha1-R1OT/56RR5rqYtyvDKPRSYOn+0A=
-
-static-extend@^0.1.1:
- version "0.1.2"
- resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6"
- integrity sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY=
- dependencies:
- define-property "^0.2.5"
- object-copy "^0.1.0"
-
-"statuses@>= 1.5.0 < 2", statuses@~1.5.0:
- version "1.5.0"
- resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c"
- integrity sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=
-
-streamsearch@0.1.2:
- version "0.1.2"
- resolved "https://registry.yarnpkg.com/streamsearch/-/streamsearch-0.1.2.tgz#808b9d0e56fc273d809ba57338e929919a1a9f1a"
- integrity sha1-gIudDlb8Jz2Am6VzOOkpkZoanxo=
-
-string-width@^1.0.1:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3"
- integrity sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=
- dependencies:
- code-point-at "^1.0.0"
- is-fullwidth-code-point "^1.0.0"
- strip-ansi "^3.0.0"
-
-"string-width@^1.0.2 || 2":
- version "2.1.1"
- resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e"
- integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==
- dependencies:
- is-fullwidth-code-point "^2.0.0"
- strip-ansi "^4.0.0"
-
-string-width@^3.0.0:
- version "3.1.0"
- resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961"
- integrity sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==
- dependencies:
- emoji-regex "^7.0.1"
- is-fullwidth-code-point "^2.0.0"
- strip-ansi "^5.1.0"
-
-string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0:
- version "4.2.0"
- resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.0.tgz#952182c46cc7b2c313d1596e623992bd163b72b5"
- integrity sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==
- dependencies:
- emoji-regex "^8.0.0"
- is-fullwidth-code-point "^3.0.0"
- strip-ansi "^6.0.0"
-
-string_decoder@^1.1.1:
- version "1.3.0"
- resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e"
- integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==
- dependencies:
- safe-buffer "~5.2.0"
-
-string_decoder@~1.1.1:
- version "1.1.1"
- resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8"
- integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==
- dependencies:
- safe-buffer "~5.1.0"
-
-strip-ansi@^3.0.0, strip-ansi@^3.0.1:
- version "3.0.1"
- resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf"
- integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=
- dependencies:
- ansi-regex "^2.0.0"
-
-strip-ansi@^4.0.0:
- version "4.0.0"
- resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f"
- integrity sha1-qEeQIusaw2iocTibY1JixQXuNo8=
- dependencies:
- ansi-regex "^3.0.0"
-
-strip-ansi@^5.1.0, strip-ansi@^5.2.0:
- version "5.2.0"
- resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae"
- integrity sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==
- dependencies:
- ansi-regex "^4.1.0"
-
-strip-ansi@^6.0.0:
- version "6.0.0"
- resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.0.tgz#0b1571dd7669ccd4f3e06e14ef1eed26225ae532"
- integrity sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==
- dependencies:
- ansi-regex "^5.0.0"
-
-strip-bom@^3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3"
- integrity sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=
-
-strip-json-comments@^3.0.1:
- version "3.1.1"
- resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006"
- integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==
-
-strip-json-comments@~2.0.1:
- version "2.0.1"
- resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a"
- integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo=
-
-supports-color@^5.3.0, supports-color@^5.5.0:
- version "5.5.0"
- resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f"
- integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==
- dependencies:
- has-flag "^3.0.0"
-
-supports-color@^7.1.0:
- version "7.1.0"
- resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.1.0.tgz#68e32591df73e25ad1c4b49108a2ec507962bfd1"
- integrity sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==
- dependencies:
- has-flag "^4.0.0"
-
-table@^5.2.3:
- version "5.4.6"
- resolved "https://registry.yarnpkg.com/table/-/table-5.4.6.tgz#1292d19500ce3f86053b05f0e8e7e4a3bb21079e"
- integrity sha512-wmEc8m4fjnob4gt5riFRtTu/6+4rSe12TpAELNSqHMfF3IqnA+CH37USM6/YR3qRZv7e56kAEAtd6nKZaxe0Ug==
- dependencies:
- ajv "^6.10.2"
- lodash "^4.17.14"
- slice-ansi "^2.1.0"
- string-width "^3.0.0"
-
-tar-stream@^2.2.0:
- version "2.2.0"
- resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-2.2.0.tgz#acad84c284136b060dc3faa64474aa9aebd77287"
- integrity sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==
- dependencies:
- bl "^4.0.3"
- end-of-stream "^1.4.1"
- fs-constants "^1.0.0"
- inherits "^2.0.3"
- readable-stream "^3.1.1"
-
-tar@^4, tar@^4.4.2:
- version "4.4.19"
- resolved "https://registry.yarnpkg.com/tar/-/tar-4.4.19.tgz#2e4d7263df26f2b914dee10c825ab132123742f3"
- integrity sha512-a20gEsvHnWe0ygBY8JbxoM4w3SJdhc7ZAuxkLqh+nvNQN2IOt0B5lLgM490X5Hl8FF0dl0tOf2ewFYAlIFgzVA==
- dependencies:
- chownr "^1.1.4"
- fs-minipass "^1.2.7"
- minipass "^2.9.0"
- minizlib "^1.3.3"
- mkdirp "^0.5.5"
- safe-buffer "^5.2.1"
- yallist "^3.1.1"
-
-tarn@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/tarn/-/tarn-2.0.0.tgz#c68499f69881f99ae955b4317ca7d212d942fdee"
- integrity sha512-7rNMCZd3s9bhQh47ksAQd92ADFcJUjjbyOvyFjNLwTPpGieFHMC84S+LOzw0fx1uh6hnDz/19r8CPMnIjJlMMA==
-
-temp-dir@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/temp-dir/-/temp-dir-1.0.0.tgz#0a7c0ea26d3a39afa7e0ebea9c1fc0bc4daa011d"
- integrity sha1-CnwOom06Oa+n4OvqnB/AvE2qAR0=
-
-temp-write@^4.0.0:
- version "4.0.0"
- resolved "https://registry.yarnpkg.com/temp-write/-/temp-write-4.0.0.tgz#cd2e0825fc826ae72d201dc26eef3bf7e6fc9320"
- integrity sha512-HIeWmj77uOOHb0QX7siN3OtwV3CTntquin6TNVg6SHOqCP3hYKmox90eeFOGaY1MqJ9WYDDjkyZrW6qS5AWpbw==
- dependencies:
- graceful-fs "^4.1.15"
- is-stream "^2.0.0"
- make-dir "^3.0.0"
- temp-dir "^1.0.0"
- uuid "^3.3.2"
-
-term-size@^2.1.0:
- version "2.2.0"
- resolved "https://registry.yarnpkg.com/term-size/-/term-size-2.2.0.tgz#1f16adedfe9bdc18800e1776821734086fcc6753"
- integrity sha512-a6sumDlzyHVJWb8+YofY4TW112G6p2FCPEAFk+59gIYHv3XHRhm9ltVQ9kli4hNWeQBwSpe8cRN25x0ROunMOw==
-
-text-table@^0.2.0:
- version "0.2.0"
- resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4"
- integrity sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=
-
-through@^2.3.6:
- version "2.3.8"
- resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5"
- integrity sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=
-
-tildify@2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/tildify/-/tildify-2.0.0.tgz#f205f3674d677ce698b7067a99e949ce03b4754a"
- integrity sha512-Cc+OraorugtXNfs50hU9KS369rFXCfgGLpfCfvlc+Ud5u6VWmUQsOAa9HbTvheQdYnrdJqqv1e5oIqXppMYnSw==
-
-tmp@^0.0.33:
- version "0.0.33"
- resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9"
- integrity sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==
- dependencies:
- os-tmpdir "~1.0.2"
-
-to-object-path@^0.3.0:
- version "0.3.0"
- resolved "https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af"
- integrity sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68=
- dependencies:
- kind-of "^3.0.2"
-
-to-readable-stream@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/to-readable-stream/-/to-readable-stream-1.0.0.tgz#ce0aa0c2f3df6adf852efb404a783e77c0475771"
- integrity sha512-Iq25XBt6zD5npPhlLVXGFN3/gyR2/qODcKNNyTMd4vbm39HUaOiAM4PMq0eMVC/Tkxz+Zjdsc55g9yyz+Yq00Q==
-
-to-regex-range@^2.1.0:
- version "2.1.1"
- resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38"
- integrity sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=
- dependencies:
- is-number "^3.0.0"
- repeat-string "^1.6.1"
-
-to-regex-range@^5.0.1:
- version "5.0.1"
- resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4"
- integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==
- dependencies:
- is-number "^7.0.0"
-
-to-regex@^3.0.1, to-regex@^3.0.2:
- version "3.0.2"
- resolved "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce"
- integrity sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw==
- dependencies:
- define-property "^2.0.2"
- extend-shallow "^3.0.2"
- regex-not "^1.0.2"
- safe-regex "^1.1.0"
-
-toidentifier@1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.0.tgz#7e1be3470f1e77948bc43d94a3c8f4d7752ba553"
- integrity sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==
-
-touch@^3.1.0:
- version "3.1.0"
- resolved "https://registry.yarnpkg.com/touch/-/touch-3.1.0.tgz#fe365f5f75ec9ed4e56825e0bb76d24ab74af83b"
- integrity sha512-WBx8Uy5TLtOSRtIq+M03/sKDrXCLHxwDcquSP2c43Le03/9serjQBIztjRz6FkJez9D/hleyAXTBGLwwZUw9lA==
- dependencies:
- nopt "~1.0.10"
-
-tslib@^1.9.0:
- version "1.13.0"
- resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.13.0.tgz#c881e13cc7015894ed914862d276436fa9a47043"
- integrity sha512-i/6DQjL8Xf3be4K/E6Wgpekn5Qasl1usyw++dAA35Ue5orEn65VIxOA+YvNNl9HV3qv70T7CNwjODHZrLwvd1Q==
-
-type-check@~0.3.2:
- version "0.3.2"
- resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72"
- integrity sha1-WITKtRLPHTVeP7eE8wgEsrUg23I=
- dependencies:
- prelude-ls "~1.1.2"
-
-type-fest@^0.11.0:
- version "0.11.0"
- resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.11.0.tgz#97abf0872310fed88a5c466b25681576145e33f1"
- integrity sha512-OdjXJxnCN1AvyLSzeKIgXTXxV+99ZuXl3Hpo9XpJAv9MBcHrrJOQ5kV7ypXOuQie+AmWG25hLbiKdwYTifzcfQ==
-
-type-fest@^0.8.1:
- version "0.8.1"
- resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.8.1.tgz#09e249ebde851d3b1e48d27c105444667f17b83d"
- integrity sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==
-
-type-is@~1.6.17, type-is@~1.6.18:
- version "1.6.18"
- resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131"
- integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==
- dependencies:
- media-typer "0.3.0"
- mime-types "~2.1.24"
-
-typedarray-to-buffer@^3.1.5:
- version "3.1.5"
- resolved "https://registry.yarnpkg.com/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080"
- integrity sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==
- dependencies:
- is-typedarray "^1.0.0"
-
-unc-path-regex@^0.1.2:
- version "0.1.2"
- resolved "https://registry.yarnpkg.com/unc-path-regex/-/unc-path-regex-0.1.2.tgz#e73dd3d7b0d7c5ed86fbac6b0ae7d8c6a69d50fa"
- integrity sha1-5z3T17DXxe2G+6xrCufYxqadUPo=
-
-undefsafe@^2.0.2:
- version "2.0.3"
- resolved "https://registry.yarnpkg.com/undefsafe/-/undefsafe-2.0.3.tgz#6b166e7094ad46313b2202da7ecc2cd7cc6e7aae"
- integrity sha512-nrXZwwXrD/T/JXeygJqdCO6NZZ1L66HrxM/Z7mIq2oPanoN0F1nLx3lwJMu6AwJY69hdixaFQOuoYsMjE5/C2A==
- dependencies:
- debug "^2.2.0"
-
-union-value@^1.0.0:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.1.tgz#0b6fe7b835aecda61c6ea4d4f02c14221e109847"
- integrity sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg==
- dependencies:
- arr-union "^3.1.0"
- get-value "^2.0.6"
- is-extendable "^0.1.1"
- set-value "^2.0.1"
-
-unique-string@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/unique-string/-/unique-string-2.0.0.tgz#39c6451f81afb2749de2b233e3f7c5e8843bd89d"
- integrity sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg==
- dependencies:
- crypto-random-string "^2.0.0"
-
-unpipe@1.0.0, unpipe@~1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec"
- integrity sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=
-
-unset-value@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559"
- integrity sha1-g3aHP30jNRef+x5vw6jtDfyKtVk=
- dependencies:
- has-value "^0.3.1"
- isobject "^3.0.0"
-
-update-notifier@^4.0.0:
- version "4.1.0"
- resolved "https://registry.yarnpkg.com/update-notifier/-/update-notifier-4.1.0.tgz#4866b98c3bc5b5473c020b1250583628f9a328f3"
- integrity sha512-w3doE1qtI0/ZmgeoDoARmI5fjDoT93IfKgEGqm26dGUOh8oNpaSTsGNdYRN/SjOuo10jcJGwkEL3mroKzktkew==
- dependencies:
- boxen "^4.2.0"
- chalk "^3.0.0"
- configstore "^5.0.1"
- has-yarn "^2.1.0"
- import-lazy "^2.1.0"
- is-ci "^2.0.0"
- is-installed-globally "^0.3.1"
- is-npm "^4.0.0"
- is-yarn-global "^0.3.0"
- latest-version "^5.0.0"
- pupa "^2.0.1"
- semver-diff "^3.1.1"
- xdg-basedir "^4.0.0"
-
-uri-js@^4.2.2:
- version "4.2.2"
- resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.2.2.tgz#94c540e1ff772956e2299507c010aea6c8838eb0"
- integrity sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ==
- dependencies:
- punycode "^2.1.0"
-
-urix@^0.1.0:
- version "0.1.0"
- resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72"
- integrity sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI=
-
-url-parse-lax@^3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/url-parse-lax/-/url-parse-lax-3.0.0.tgz#16b5cafc07dbe3676c1b1999177823d6503acb0c"
- integrity sha1-FrXK/Afb42dsGxmZF3gj1lA6yww=
- dependencies:
- prepend-http "^2.0.0"
-
-use@^3.1.0:
- version "3.1.1"
- resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f"
- integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ==
-
-util-deprecate@^1.0.1, util-deprecate@~1.0.1:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
- integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=
-
-util@^0.10.3:
- version "0.10.4"
- resolved "https://registry.yarnpkg.com/util/-/util-0.10.4.tgz#3aa0125bfe668a4672de58857d3ace27ecb76901"
- integrity sha512-0Pm9hTQ3se5ll1XihRic3FDIku70C+iHUdT/W926rSgHV5QgXsYbKZN8MSC3tJtSkhuROzvsQjAaFENRXr+19A==
- dependencies:
- inherits "2.0.3"
-
-utils-merge@1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713"
- integrity sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=
-
-uuid@^3.3.2:
- version "3.4.0"
- resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee"
- integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==
-
-uuid@^7.0.1:
- version "7.0.3"
- resolved "https://registry.yarnpkg.com/uuid/-/uuid-7.0.3.tgz#c5c9f2c8cf25dc0a372c4df1441c41f5bd0c680b"
- integrity sha512-DPSke0pXhTZgoF/d+WSt2QaKMCFSfx7QegxEWT+JOuHF5aWrKEn0G+ztjuJg/gG8/ItK+rbPCD/yNv8yyih6Cg==
-
-v8-compile-cache@^2.0.3:
- version "2.1.1"
- resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.1.1.tgz#54bc3cdd43317bca91e35dcaf305b1a7237de745"
- integrity sha512-8OQ9CL+VWyt3JStj7HX7/ciTL2V3Rl1Wf5OL+SNTm0yK1KvtReVulksyeRnCANHHuUxHlQig+JJDlUhBt1NQDQ==
-
-v8flags@^3.1.3:
- version "3.2.0"
- resolved "https://registry.yarnpkg.com/v8flags/-/v8flags-3.2.0.tgz#b243e3b4dfd731fa774e7492128109a0fe66d656"
- integrity sha512-mH8etigqMfiGWdeXpaaqGfs6BndypxusHHcv2qSHyZkGEznCd/qAXCWWRzeowtL54147cktFOC4P5y+kl8d8Jg==
- dependencies:
- homedir-polyfill "^1.0.1"
-
-vary@~1.1.2:
- version "1.1.2"
- resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc"
- integrity sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=
-
-which-module@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a"
- integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=
-
-which@^1.2.14, which@^1.2.9:
- version "1.3.1"
- resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a"
- integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==
- dependencies:
- isexe "^2.0.0"
-
-wide-align@^1.1.0:
- version "1.1.3"
- resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.3.tgz#ae074e6bdc0c14a431e804e624549c633b000457"
- integrity sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==
- dependencies:
- string-width "^1.0.2 || 2"
-
-widest-line@^3.1.0:
- version "3.1.0"
- resolved "https://registry.yarnpkg.com/widest-line/-/widest-line-3.1.0.tgz#8292333bbf66cb45ff0de1603b136b7ae1496eca"
- integrity sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg==
- dependencies:
- string-width "^4.0.0"
-
-word-wrap@~1.2.3:
- version "1.2.3"
- resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c"
- integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==
-
-wrap-ansi@^6.2.0:
- version "6.2.0"
- resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53"
- integrity sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==
- dependencies:
- ansi-styles "^4.0.0"
- string-width "^4.1.0"
- strip-ansi "^6.0.0"
-
-wrappy@1:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
- integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=
-
-write-file-atomic@^3.0.0:
- version "3.0.3"
- resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-3.0.3.tgz#56bd5c5a5c70481cd19c571bd39ab965a5de56e8"
- integrity sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==
- dependencies:
- imurmurhash "^0.1.4"
- is-typedarray "^1.0.0"
- signal-exit "^3.0.2"
- typedarray-to-buffer "^3.1.5"
-
-write@1.0.3:
- version "1.0.3"
- resolved "https://registry.yarnpkg.com/write/-/write-1.0.3.tgz#0800e14523b923a387e415123c865616aae0f5c3"
- integrity sha512-/lg70HAjtkUgWPVZhZcm+T4hkL8Zbtp1nFNOn3lRrxnlv50SRBv7cR7RqR+GMsd3hUXy9hWBo4CHTbFTcOYwig==
- dependencies:
- mkdirp "^0.5.1"
-
-xdg-basedir@^4.0.0:
- version "4.0.0"
- resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-4.0.0.tgz#4bc8d9984403696225ef83a1573cbbcb4e79db13"
- integrity sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q==
-
-y18n@^4.0.0:
- version "4.0.1"
- resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.1.tgz#8db2b83c31c5d75099bb890b23f3094891e247d4"
- integrity sha512-wNcy4NvjMYL8gogWWYAO7ZFWFfHcbdbE57tZO8e4cbpj8tfUcwrwqSl3ad8HxpYWCdXcJUCeKKZS62Av1affwQ==
-
-yallist@^3.0.0, yallist@^3.1.1:
- version "3.1.1"
- resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd"
- integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==
-
-yargs-parser@^18.1.2:
- version "18.1.3"
- resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-18.1.3.tgz#be68c4975c6b2abf469236b0c870362fab09a7b0"
- integrity sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==
- dependencies:
- camelcase "^5.0.0"
- decamelize "^1.2.0"
-
-yargs@^15.4.1:
- version "15.4.1"
- resolved "https://registry.yarnpkg.com/yargs/-/yargs-15.4.1.tgz#0d87a16de01aee9d8bec2bfbf74f67851730f4f8"
- integrity sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==
- dependencies:
- cliui "^6.0.0"
- decamelize "^1.2.0"
- find-up "^4.1.0"
- get-caller-file "^2.0.1"
- require-directory "^2.1.1"
- require-main-filename "^2.0.0"
- set-blocking "^2.0.0"
- string-width "^4.2.0"
- which-module "^2.0.0"
- y18n "^4.0.0"
- yargs-parser "^18.1.2"
-
-zip-stream@^4.1.0:
- version "4.1.0"
- resolved "https://registry.yarnpkg.com/zip-stream/-/zip-stream-4.1.0.tgz#51dd326571544e36aa3f756430b313576dc8fc79"
- integrity sha512-zshzwQW7gG7hjpBlgeQP9RuyPGNxvJdzR8SUM3QhxCnLjWN2E7j3dOvpeDcQoETfHx0urRS7EtmVToql7YpU4A==
- dependencies:
- archiver-utils "^2.1.0"
- compress-commons "^4.1.0"
- readable-stream "^3.6.0"
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 378fffbfc..a7002fa5a 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -1,63 +1,104 @@
# This is a Dockerfile intended to be built using `docker buildx`
# for multi-arch support. Building with `docker build` may have unexpected results.
-# This file assumes that the frontend has been built using ./scripts/frontend-build
+# This file assumes that these scripts have been run first:
+# - ./scripts/ci/build-frontend
-FROM nginxproxymanager/nginx-full:certbot-node
+FROM nginxproxymanager/testca AS testca
+FROM ghcr.io/letsencrypt/pebble AS pebbleca
+FROM jc21/gotools:latest AS gobuild
+
+SHELL ["/bin/bash", "-o", "pipefail", "-c"]
-ARG TARGETPLATFORM
-ARG BUILD_VERSION
ARG BUILD_COMMIT
-ARG BUILD_DATE
+ARG BUILD_VERSION
+ARG GOPRIVATE
+ARG GOPROXY
-ENV SUPPRESS_NO_CONFIG_WARNING=1 \
- S6_FIX_ATTRS_HIDDEN=1 \
+ENV BUILD_COMMIT="${BUILD_COMMIT:-dev}" \
+ BUILD_VERSION="${BUILD_VERSION:-0.0.0}" \
+ CGO_ENABLED=0 \
+ GOPRIVATE="${GOPRIVATE:-}" \
+ GOPROXY="${GOPROXY:-}"
+
+COPY scripts /scripts
+COPY backend /app
+WORKDIR /app
+
+ARG ARG TARGETPLATFORM
+RUN mkdir -p /dist \
+ && /scripts/go-multiarch-wrapper /dist/server /dist/ipranges
+
+#===============
+# Final image
+#===============
+
+FROM nginxproxymanager/nginx-full:acmesh AS final
+
+COPY --from=gobuild /dist/server /app/bin/server
+COPY --from=gobuild /dist/ipranges /app/bin/ipranges
+# these certs are used for testing in CI
+COPY --from=pebbleca /test/certs/pebble.minica.pem /etc/ssl/certs/pebble.minica.pem
+COPY --from=testca /home/step/certs/root_ca.crt /etc/ssl/certs/NginxProxyManager.crt
+
+# These acmesh vars are defined in the base image
+ENV ACMESH_CONFIG_HOME=/data/.acme.sh/config \
+ ACMESH_HOME=/data/.acme.sh \
+ CERT_HOME=/data/.acme.sh/certs \
+ LE_CONFIG_HOME=/data/.acme.sh/config \
+ LE_WORKING_DIR=/data/.acme.sh \
S6_BEHAVIOUR_IF_STAGE2_FAILS=1 \
- NODE_ENV=production \
- NPM_BUILD_VERSION="${BUILD_VERSION}" \
- NPM_BUILD_COMMIT="${BUILD_COMMIT}" \
- NPM_BUILD_DATE="${BUILD_DATE}"
-
-RUN echo "fs.file-max = 65535" > /etc/sysctl.conf \
- && apt-get update \
- && apt-get install -y --no-install-recommends jq logrotate \
+ S6_CMD_WAIT_FOR_SERVICES_MAXTIME=0 \
+ S6_FIX_ATTRS_HIDDEN=1 \
+ S6_KILL_FINISH_MAXTIME=10000 \
+ S6_VERBOSITY=1
+
+RUN echo "fs.file-max = 65535" > /etc/sysctl.conf
+
+# fail2ban
+RUN apt-get update \
+ && apt-get install -y --no-install-recommends fail2ban logrotate \
&& apt-get clean \
- && rm -rf /var/lib/apt/lists/*
+ && rm -rf /var/lib/apt/lists/* /etc/fail2ban
# s6 overlay
+ARG TARGETPLATFORM
COPY scripts/install-s6 /tmp/install-s6
-RUN /tmp/install-s6 "${TARGETPLATFORM}" && rm -f /tmp/install-s6
-
-EXPOSE 80 81 443
+RUN /tmp/install-s6 "${TARGETPLATFORM}" && rm -rf /tmp/*
-COPY backend /app
-COPY frontend/dist /app/frontend
-COPY global /app/global
+EXPOSE 80/tcp 81/tcp 443/tcp
-WORKDIR /app
-RUN yarn install
-
-# add late to limit cache-busting by modifications
COPY docker/rootfs /
# Remove frontend service not required for prod, dev nginx config as well
-RUN rm -rf /etc/services.d/frontend /etc/nginx/conf.d/dev.conf
+# and remove any other cruft
+RUN rm -rf /etc/services.d/frontend \
+ /etc/nginx/conf.d/dev.conf \
+ /var/cache/* \
+ /var/log/* \
+ /tmp/* \
+ /var/lib/dpkg/status-old
+
+VOLUME /data
-# Change permission of logrotate config file
-RUN chmod 644 /etc/logrotate.d/nginx-proxy-manager
+CMD [ "/init" ]
-# fix for pip installs
-# https://github.com/NginxProxyManager/nginx-proxy-manager/issues/1769
-RUN pip uninstall --yes setuptools \
- && pip install "setuptools==58.0.0"
+ARG NOW
+ARG BUILD_VERSION
+ARG BUILD_COMMIT
+ARG BUILD_DATE
-VOLUME [ "/data", "/etc/letsencrypt" ]
-ENTRYPOINT [ "/init" ]
+ENV NPM_BUILD_VERSION="${BUILD_VERSION:-0.0.0}" \
+ NPM_BUILD_COMMIT="${BUILD_COMMIT:-dev}" \
+ NPM_BUILD_DATE="${BUILD_DATE:-}"
LABEL org.label-schema.schema-version="1.0" \
org.label-schema.license="MIT" \
org.label-schema.name="nginx-proxy-manager" \
- org.label-schema.description="Docker container for managing Nginx proxy hosts with a simple, powerful interface " \
- org.label-schema.url="https://github.com/jc21/nginx-proxy-manager" \
- org.label-schema.vcs-url="https://github.com/jc21/nginx-proxy-manager.git" \
- org.label-schema.cmd="docker run --rm -ti jc21/nginx-proxy-manager:latest"
+ org.label-schema.description="Nginx Host Management and Proxy" \
+ org.label-schema.build-date="${NOW:-}" \
+ org.label-schema.version="${BUILD_VERSION:-0.0.0}" \
+ org.label-schema.url="https://nginxproxymanager.com" \
+ org.label-schema.vcs-url="https://github.com/NginxProxyManager/nginx-proxy-manager.git" \
+ org.label-schema.vcs-ref="${BUILD_COMMIT:-dev}" \
+ org.label-schema.cmd="docker run --rm -ti jc21/nginx-proxy-manager:${BUILD_VERSION:-0.0.0}"
diff --git a/docker/ci.env b/docker/ci.env
new file mode 100644
index 000000000..7128295dd
--- /dev/null
+++ b/docker/ci.env
@@ -0,0 +1,8 @@
+AUTHENTIK_SECRET_KEY=gl8woZe8L6IIX8SC0c5Ocsj0xPkX5uJo5DVZCFl+L/QGbzuplfutYuua2ODNLEiDD3aFd9H2ylJmrke0
+AUTHENTIK_REDIS__HOST=authentik-redis
+AUTHENTIK_POSTGRESQL__HOST=db-postgres
+AUTHENTIK_POSTGRESQL__USER=authentik
+AUTHENTIK_POSTGRESQL__NAME=authentik
+AUTHENTIK_POSTGRESQL__PASSWORD=07EKS5NLI6Tpv68tbdvrxfvj
+AUTHENTIK_BOOTSTRAP_PASSWORD=admin
+AUTHENTIK_BOOTSTRAP_EMAIL=admin@example.com
diff --git a/docker/ci/postgres/authentik.sql.gz b/docker/ci/postgres/authentik.sql.gz
new file mode 100644
index 000000000..49665d4e6
Binary files /dev/null and b/docker/ci/postgres/authentik.sql.gz differ
diff --git a/docker/dev/Dockerfile b/docker/dev/Dockerfile
index d2e2266a1..a59c636a1 100644
--- a/docker/dev/Dockerfile
+++ b/docker/dev/Dockerfile
@@ -1,29 +1,55 @@
-FROM nginxproxymanager/nginx-full:certbot-node
+FROM nginxproxymanager/testca AS testca
+FROM ghcr.io/letsencrypt/pebble AS pebbleca
+FROM nginxproxymanager/nginx-full:acmesh-golang
LABEL maintainer="Jamie Curnow "
-ENV S6_LOGGING=0 \
- SUPPRESS_NO_CONFIG_WARNING=1 \
- S6_FIX_ATTRS_HIDDEN=1
+SHELL ["/bin/bash", "-o", "pipefail", "-c"]
-RUN echo "fs.file-max = 65535" > /etc/sysctl.conf \
+ARG GOPROXY
+ARG GOPRIVATE
+
+ENV ACMESH_CONFIG_HOME=/data/.acme.sh/config \
+ ACMESH_HOME=/data/.acme.sh \
+ CERT_HOME=/data/.acme.sh/certs \
+ CGO_ENABLED=0 \
+ GOPROXY=$GOPROXY \
+ GOPRIVATE=$GOPRIVATE \
+ LE_CONFIG_HOME=/data/.acme.sh/config \
+ LE_WORKING_DIR=/data/.acme.sh \
+ S6_BEHAVIOUR_IF_STAGE2_FAILS=1 \
+ S6_CMD_WAIT_FOR_SERVICES_MAXTIME=0 \
+ S6_FIX_ATTRS_HIDDEN=1 \
+ S6_KILL_FINISH_MAXTIME=10000 \
+ S6_VERBOSITY=2
+
+RUN echo "fs.file-max = 65535" > /etc/sysctl.conf
+
+# node, fail2ban
+RUN curl -fsSL https://deb.nodesource.com/setup_20.x | bash - \
&& apt-get update \
- && apt-get install -y certbot jq python3-pip logrotate \
+ && apt-get install -y --no-install-recommends nodejs vim dnsutils fail2ban logrotate \
+ && npm install --location=global yarn \
&& apt-get clean \
- && rm -rf /var/lib/apt/lists/*
+ && rm -rf /var/lib/apt/lists/* /etc/fail2ban
# Task
RUN cd /usr \
&& curl -sL https://taskfile.dev/install.sh | sh \
&& cd /root
-COPY rootfs /
+COPY docker/rootfs /
RUN rm -f /etc/nginx/conf.d/production.conf
-RUN chmod 644 /etc/logrotate.d/nginx-proxy-manager
# s6 overlay
-RUN curl -L -o /tmp/s6-overlay-amd64.tar.gz "https://github.com/just-containers/s6-overlay/releases/download/v1.22.1.0/s6-overlay-amd64.tar.gz" \
- && tar -xzf /tmp/s6-overlay-amd64.tar.gz -C /
+COPY scripts/install-s6 /tmp/install-s6
+RUN /tmp/install-s6 && rm -rf /tmp/*
+
+# Fix for golang dev:
+RUN chown -R 1000:1000 /opt/go
-EXPOSE 80 81 443
-ENTRYPOINT [ "/init" ]
+COPY --from=pebbleca /test/certs/pebble.minica.pem /etc/ssl/certs/pebble.minica.pem
+COPY --from=testca /home/step/certs/root_ca.crt /etc/ssl/certs/NginxProxyManager.crt
+EXPOSE 80
+CMD [ "/init" ]
+HEALTHCHECK --interval=15s --timeout=3s CMD curl -f http://127.0.0.1:81/api || exit 1
diff --git a/docker/dev/dnsrouter-config.json b/docker/dev/dnsrouter-config.json
new file mode 100644
index 000000000..fbeab98e9
--- /dev/null
+++ b/docker/dev/dnsrouter-config.json
@@ -0,0 +1,31 @@
+{
+ "log": {
+ "format": "nice",
+ "level": "debug"
+ },
+ "servers": [
+ {
+ "host": "0.0.0.0",
+ "port": 53,
+ "upstreams": [
+ {
+ "regex": "website[0-9]+.example\\.com",
+ "upstream": "127.0.0.11"
+ },
+ {
+ "regex": ".*\\.example\\.com",
+ "upstream": "1.1.1.1"
+ },
+ {
+ "regex": "local",
+ "nxdomain": true
+ }
+ ],
+ "internal": null,
+ "default_upstream": "127.0.0.11"
+ }
+ ],
+ "cache": {
+ "disabled": true
+ }
+}
diff --git a/docker/dev/pdns-db.sql b/docker/dev/pdns-db.sql
new file mode 100644
index 000000000..c182cf785
--- /dev/null
+++ b/docker/dev/pdns-db.sql
@@ -0,0 +1,255 @@
+/*
+
+How this was generated:
+1. bring up an empty pdns stack
+2. use api to create a zone ...
+
+curl -X POST \
+ 'http://npm.dev:8081/api/v1/servers/localhost/zones' \
+ --header 'X-API-Key: npm' \
+ --header 'Content-Type: application/json' \
+ --data-raw '{
+ "name": "example.com.",
+ "kind": "Native",
+ "masters": [],
+ "nameservers": [
+ "ns1.pdns.",
+ "ns2.pdns."
+ ]
+}'
+
+3. Dump sql:
+
+docker exec -ti npm.pdns.db mysqldump -u pdns -p pdns
+
+*/
+
+----------------------------------------------------------------------
+
+/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
+/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
+/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
+/*!40101 SET NAMES utf8mb4 */;
+/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
+/*!40103 SET TIME_ZONE='+00:00' */;
+/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
+/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
+/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
+/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
+
+--
+-- Table structure for table `comments`
+--
+
+DROP TABLE IF EXISTS `comments`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `comments` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `domain_id` int(11) NOT NULL,
+ `name` varchar(255) NOT NULL,
+ `type` varchar(10) NOT NULL,
+ `modified_at` int(11) NOT NULL,
+ `account` varchar(40) CHARACTER SET utf8mb3 DEFAULT NULL,
+ `comment` text CHARACTER SET utf8mb3 NOT NULL,
+ PRIMARY KEY (`id`),
+ KEY `comments_name_type_idx` (`name`,`type`),
+ KEY `comments_order_idx` (`domain_id`,`modified_at`)
+) ENGINE=InnoDB DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `comments`
+--
+
+LOCK TABLES `comments` WRITE;
+/*!40000 ALTER TABLE `comments` DISABLE KEYS */;
+/*!40000 ALTER TABLE `comments` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `cryptokeys`
+--
+
+DROP TABLE IF EXISTS `cryptokeys`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `cryptokeys` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `domain_id` int(11) NOT NULL,
+ `flags` int(11) NOT NULL,
+ `active` tinyint(1) DEFAULT NULL,
+ `published` tinyint(1) DEFAULT 1,
+ `content` text DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ KEY `domainidindex` (`domain_id`)
+) ENGINE=InnoDB DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `cryptokeys`
+--
+
+LOCK TABLES `cryptokeys` WRITE;
+/*!40000 ALTER TABLE `cryptokeys` DISABLE KEYS */;
+/*!40000 ALTER TABLE `cryptokeys` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `domainmetadata`
+--
+
+DROP TABLE IF EXISTS `domainmetadata`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `domainmetadata` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `domain_id` int(11) NOT NULL,
+ `kind` varchar(32) DEFAULT NULL,
+ `content` text DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ KEY `domainmetadata_idx` (`domain_id`,`kind`)
+) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `domainmetadata`
+--
+
+LOCK TABLES `domainmetadata` WRITE;
+/*!40000 ALTER TABLE `domainmetadata` DISABLE KEYS */;
+INSERT INTO `domainmetadata` VALUES
+(1,1,'SOA-EDIT-API','DEFAULT');
+/*!40000 ALTER TABLE `domainmetadata` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `domains`
+--
+
+DROP TABLE IF EXISTS `domains`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `domains` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `name` varchar(255) NOT NULL,
+ `master` varchar(128) DEFAULT NULL,
+ `last_check` int(11) DEFAULT NULL,
+ `type` varchar(8) NOT NULL,
+ `notified_serial` int(10) unsigned DEFAULT NULL,
+ `account` varchar(40) CHARACTER SET utf8mb3 DEFAULT NULL,
+ `options` varchar(64000) DEFAULT NULL,
+ `catalog` varchar(255) DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `name_index` (`name`),
+ KEY `catalog_idx` (`catalog`)
+) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `domains`
+--
+
+LOCK TABLES `domains` WRITE;
+/*!40000 ALTER TABLE `domains` DISABLE KEYS */;
+INSERT INTO `domains` VALUES
+(1,'example.com','',NULL,'NATIVE',NULL,'',NULL,NULL);
+/*!40000 ALTER TABLE `domains` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `records`
+--
+
+DROP TABLE IF EXISTS `records`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `records` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `domain_id` int(11) DEFAULT NULL,
+ `name` varchar(255) DEFAULT NULL,
+ `type` varchar(10) DEFAULT NULL,
+ `content` varchar(64000) DEFAULT NULL,
+ `ttl` int(11) DEFAULT NULL,
+ `prio` int(11) DEFAULT NULL,
+ `disabled` tinyint(1) DEFAULT 0,
+ `ordername` varchar(255) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
+ `auth` tinyint(1) DEFAULT 1,
+ PRIMARY KEY (`id`),
+ KEY `nametype_index` (`name`,`type`),
+ KEY `domain_id` (`domain_id`),
+ KEY `ordername` (`ordername`)
+) ENGINE=InnoDB AUTO_INCREMENT=4 DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `records`
+--
+
+LOCK TABLES `records` WRITE;
+/*!40000 ALTER TABLE `records` DISABLE KEYS */;
+INSERT INTO `records` VALUES
+(1,1,'example.com','NS','ns1.pdns',1500,0,0,NULL,1),
+(2,1,'example.com','NS','ns2.pdns',1500,0,0,NULL,1),
+(3,1,'example.com','SOA','a.misconfigured.dns.server.invalid hostmaster.example.com 2023030501 10800 3600 604800 3600',1500,0,0,NULL,1);
+/*!40000 ALTER TABLE `records` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `supermasters`
+--
+
+DROP TABLE IF EXISTS `supermasters`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `supermasters` (
+ `ip` varchar(64) NOT NULL,
+ `nameserver` varchar(255) NOT NULL,
+ `account` varchar(40) CHARACTER SET utf8mb3 NOT NULL,
+ PRIMARY KEY (`ip`,`nameserver`)
+) ENGINE=InnoDB DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `supermasters`
+--
+
+LOCK TABLES `supermasters` WRITE;
+/*!40000 ALTER TABLE `supermasters` DISABLE KEYS */;
+/*!40000 ALTER TABLE `supermasters` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `tsigkeys`
+--
+
+DROP TABLE IF EXISTS `tsigkeys`;
+/*!40101 SET @saved_cs_client = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `tsigkeys` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `name` varchar(255) DEFAULT NULL,
+ `algorithm` varchar(50) DEFAULT NULL,
+ `secret` varchar(255) DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `namealgoindex` (`name`,`algorithm`)
+) ENGINE=InnoDB DEFAULT CHARSET=latin1;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `tsigkeys`
+--
+
+LOCK TABLES `tsigkeys` WRITE;
+/*!40000 ALTER TABLE `tsigkeys` DISABLE KEYS */;
+/*!40000 ALTER TABLE `tsigkeys` ENABLE KEYS */;
+UNLOCK TABLES;
+/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
+
+/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
+/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
+/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
+/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
+/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
+/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
+/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
diff --git a/docker/dev/pebble-config.json b/docker/dev/pebble-config.json
new file mode 100644
index 000000000..289d29069
--- /dev/null
+++ b/docker/dev/pebble-config.json
@@ -0,0 +1,12 @@
+{
+ "pebble": {
+ "listenAddress": "0.0.0.0:443",
+ "managementListenAddress": "0.0.0.0:15000",
+ "certificate": "test/certs/localhost/cert.pem",
+ "privateKey": "test/certs/localhost/key.pem",
+ "httpPort": 80,
+ "tlsPort": 443,
+ "ocspResponderURL": "",
+ "externalAccountBindingRequired": false
+ }
+}
\ No newline at end of file
diff --git a/docker/docker-compose.ci.mysql.yml b/docker/docker-compose.ci.mysql.yml
new file mode 100644
index 000000000..06ecdaa37
--- /dev/null
+++ b/docker/docker-compose.ci.mysql.yml
@@ -0,0 +1,30 @@
+# WARNING: This is a CI docker-compose file used for building and testing of the entire app, it should not be used for production.
+services:
+
+ cypress:
+ environment:
+ CYPRESS_stack: 'mysql'
+
+ fullstack:
+ environment:
+ NPM_DB_DRIVER: 'mysql'
+ NPM_DB_HOST: 'db-mysql'
+ NPM_DB_PORT: '3306'
+ NPM_DB_USERNAME: 'npm'
+ NPM_DB_PASSWORD: 'npmpass'
+ NPM_DB_NAME: 'npm'
+ depends_on:
+ - db-mysql
+
+ db-mysql:
+ image: mysql:latest
+ environment:
+ MYSQL_ROOT_PASSWORD: 'npm'
+ MYSQL_DATABASE: 'npm'
+ MYSQL_USER: 'npm'
+ MYSQL_PASSWORD: 'npmpass'
+ volumes:
+ - mysql_vol:/var/lib/mysql
+
+volumes:
+ mysql_vol:
diff --git a/docker/docker-compose.ci.postgres.yml b/docker/docker-compose.ci.postgres.yml
new file mode 100644
index 000000000..80f06d3ea
--- /dev/null
+++ b/docker/docker-compose.ci.postgres.yml
@@ -0,0 +1,78 @@
+# WARNING: This is a CI docker-compose file used for building and testing of the entire app, it should not be used for production.
+services:
+
+ cypress:
+ environment:
+ CYPRESS_stack: 'postgres'
+
+ fullstack:
+ environment:
+ NPM_DB_DRIVER: 'postgres'
+ NPM_DB_HOST: 'db-postgres'
+ NPM_DB_PORT: '5432'
+ NPM_DB_USERNAME: 'npm'
+ NPM_DB_PASSWORD: 'npmpass'
+ NPM_DB_NAME: 'npm'
+ NPM_DB_SSLMODE: 'disable'
+ depends_on:
+ - db-postgres
+ - authentik
+ - authentik-worker
+ - authentik-ldap
+
+ db-postgres:
+ image: postgres:latest
+ environment:
+ POSTGRES_USER: 'npm'
+ POSTGRES_PASSWORD: 'npmpass'
+ POSTGRES_DB: 'npm'
+ volumes:
+ - psql_vol:/var/lib/postgresql/data
+ - ./ci/postgres:/docker-entrypoint-initdb.d
+
+ authentik-redis:
+ image: 'redis:alpine'
+ command: --save 60 1 --loglevel warning
+ restart: unless-stopped
+ healthcheck:
+ test: ['CMD-SHELL', 'redis-cli ping | grep PONG']
+ start_period: 20s
+ interval: 30s
+ retries: 5
+ timeout: 3s
+ volumes:
+ - redis_vol:/data
+
+ authentik:
+ image: ghcr.io/goauthentik/server:2024.10.1
+ restart: unless-stopped
+ command: server
+ env_file:
+ - ci.env
+ depends_on:
+ - authentik-redis
+ - db-postgres
+
+ authentik-worker:
+ image: ghcr.io/goauthentik/server:2024.10.1
+ restart: unless-stopped
+ command: worker
+ env_file:
+ - ci.env
+ depends_on:
+ - authentik-redis
+ - db-postgres
+
+ authentik-ldap:
+ image: ghcr.io/goauthentik/ldap:2024.10.1
+ environment:
+ AUTHENTIK_HOST: 'http://authentik:9000'
+ AUTHENTIK_INSECURE: 'true'
+ AUTHENTIK_TOKEN: 'wKYZuRcI0ETtb8vWzMCr04oNbhrQUUICy89hSpDln1OEKLjiNEuQ51044Vkp'
+ restart: unless-stopped
+ depends_on:
+ - authentik
+
+volumes:
+ psql_vol:
+ redis_vol:
diff --git a/docker/docker-compose.ci.yml b/docker/docker-compose.ci.yml
index a8049ec81..21a343a9b 100644
--- a/docker/docker-compose.ci.yml
+++ b/docker/docker-compose.ci.yml
@@ -1,80 +1,96 @@
-# WARNING: This is a CI docker-compose file used for building and testing of the entire app, it should not be used for production.
-version: "3"
+# WARNING: This is a CI docker-compose file used for building
+# and testing of the entire app, it should not be used for production.
services:
- fullstack-mysql:
- image: ${IMAGE}:ci-${BUILD_NUMBER}
+ fullstack:
+ image: "${IMAGE}:${BRANCH_LOWER}-ci-${BUILD_NUMBER}"
environment:
- NODE_ENV: "development"
- FORCE_COLOR: 1
- DB_MYSQL_HOST: "db"
- DB_MYSQL_PORT: 3306
- DB_MYSQL_USER: "npm"
- DB_MYSQL_PASSWORD: "npm"
- DB_MYSQL_NAME: "npm"
+ NPM_DB_DRIVER: 'sqlite'
+ NPM_LOG_LEVEL: 'debug'
+ NPM_LOG_FORMAT: 'json'
+ NPM_DISABLE_IPV6: 'true'
volumes:
- - npm_data:/data
- expose:
- - 81
- - 80
- - 443
- depends_on:
- - db
- healthcheck:
- test: ["CMD", "/bin/check-health"]
- interval: 10s
- timeout: 3s
+ - '/etc/localtime:/etc/localtime:ro'
+ - 'npm_data_ci:/data'
+ - '../docs:/temp-docs'
+ - './dev/resolv.conf:/etc/resolv.conf:ro'
+ networks:
+ default:
+ aliases:
+ - website1.example.com
+ - website2.example.com
+ - website3.example.com
- fullstack-sqlite:
- image: ${IMAGE}:ci-${BUILD_NUMBER}
- environment:
- NODE_ENV: "development"
- FORCE_COLOR: 1
- DB_SQLITE_FILE: "/data/database.sqlite"
+ stepca:
+ image: jc21/testca
volumes:
- - npm_data:/data
- expose:
- - 81
- - 80
- - 443
- healthcheck:
- test: ["CMD", "/bin/check-health"]
- interval: 10s
- timeout: 3s
+ - ./dev/resolv.conf:/etc/resolv.conf:ro
+ - '/etc/localtime:/etc/localtime:ro'
+ networks:
+ default:
+ aliases:
+ - ca.internal
- db:
- image: jc21/mariadb-aria
- environment:
- MYSQL_ROOT_PASSWORD: "npm"
- MYSQL_DATABASE: "npm"
- MYSQL_USER: "npm"
- MYSQL_PASSWORD: "npm"
+ pdns:
+ image: pschiffe/pdns-mysql:4.8
volumes:
- - db_data:/var/lib/mysql
+ - '/etc/localtime:/etc/localtime:ro'
+ environment:
+ PDNS_master: 'yes'
+ PDNS_api: 'yes'
+ PDNS_api_key: 'npm'
+ PDNS_webserver: 'yes'
+ PDNS_webserver_address: '0.0.0.0'
+ PDNS_webserver_password: 'npm'
+ PDNS_webserver-allow-from: '127.0.0.0/8,192.0.0.0/8,10.0.0.0/8,172.0.0.0/8'
+ PDNS_version_string: 'anonymous'
+ PDNS_default_ttl: 1500
+ PDNS_allow_axfr_ips: '127.0.0.0/8,192.0.0.0/8,10.0.0.0/8,172.0.0.0/8'
+ PDNS_gmysql_host: pdns-db
+ PDNS_gmysql_port: 3306
+ PDNS_gmysql_user: pdns
+ PDNS_gmysql_password: pdns
+ PDNS_gmysql_dbname: pdns
+ depends_on:
+ - pdns-db
+ networks:
+ default:
+ aliases:
+ - ns1.pdns
+ - ns2.pdns
- cypress-mysql:
- image: ${IMAGE}-cypress:ci-${BUILD_NUMBER}
- build:
- context: ../test/
- dockerfile: cypress/Dockerfile
+ pdns-db:
+ image: mariadb
environment:
- CYPRESS_baseUrl: "http://fullstack-mysql:81"
+ MYSQL_ROOT_PASSWORD: 'pdns'
+ MYSQL_DATABASE: 'pdns'
+ MYSQL_USER: 'pdns'
+ MYSQL_PASSWORD: 'pdns'
+ volumes:
+ - 'pdns_mysql_vol:/var/lib/mysql'
+ - '/etc/localtime:/etc/localtime:ro'
+ - './dev/pdns-db.sql:/docker-entrypoint-initdb.d/01_init.sql:ro'
+
+ dnsrouter:
+ image: jc21/dnsrouter
volumes:
- - cypress-logs:/results
- command: cypress run --browser chrome --config-file=${CYPRESS_CONFIG:-cypress/config/ci.json}
+ - ./dev/dnsrouter-config.json.tmp:/dnsrouter-config.json:ro
+ entrypoint: /dnsrouter -v -c /dnsrouter-config.json
- cypress-sqlite:
- image: ${IMAGE}-cypress:ci-${BUILD_NUMBER}
+ cypress:
+ image: "${IMAGE}-cypress:ci-${BUILD_NUMBER}"
build:
- context: ../test/
- dockerfile: cypress/Dockerfile
+ context: ../
+ dockerfile: test/cypress/Dockerfile
environment:
- CYPRESS_baseUrl: "http://fullstack-sqlite:81"
+ CYPRESS_baseUrl: 'http://fullstack:81'
+ CYPRESS_stack: 'sqlite'
volumes:
- - cypress-logs:/results
- command: cypress run --browser chrome --config-file=${CYPRESS_CONFIG:-cypress/config/ci.json}
+ - 'cypress_logs:/results'
+ - './dev/resolv.conf:/etc/resolv.conf:ro'
+ command: cypress run --browser chrome --config-file=cypress/config/ci.js
volumes:
- cypress-logs:
- npm_data:
- db_data:
+ cypress_logs:
+ npm_data_ci:
+ pdns_mysql_vol:
diff --git a/docker/docker-compose.dev.yml b/docker/docker-compose.dev.yml
index 79fbd7999..d1ed520b2 100644
--- a/docker/docker-compose.dev.yml
+++ b/docker/docker-compose.dev.yml
@@ -1,62 +1,127 @@
-# WARNING: This is a DEVELOPMENT docker-compose file, it should not be used for production.
-version: "3.5"
+# WARNING: This is a DEVELOPMENT docker-compose file used for development of the entire app, it should not be used for production.
services:
+
npm:
image: nginxproxymanager:dev
- container_name: npm_core
+ container_name: 'npm.dev'
build:
- context: ./
- dockerfile: ./dev/Dockerfile
+ context: ../
+ dockerfile: ./docker/dev/Dockerfile
+ args:
+ GOPROXY: "${GOPROXY:-}"
+ GOPRIVATE: "${GOPRIVATE:-}"
ports:
- 3080:80
- 3081:81
- 3443:443
- networks:
- - nginx_proxy_manager
environment:
- NODE_ENV: "development"
- FORCE_COLOR: 1
- DEVELOPMENT: "true"
- DB_MYSQL_HOST: "db"
- DB_MYSQL_PORT: 3306
- DB_MYSQL_USER: "npm"
- DB_MYSQL_PASSWORD: "npm"
- DB_MYSQL_NAME: "npm"
- # DB_SQLITE_FILE: "/data/database.sqlite"
- # DISABLE_IPV6: "true"
+ #DEBUG: 'true'
+ DEVELOPMENT: 'true'
+ GOPROXY: "${GOPROXY:-}"
+ GOPRIVATE: "${GOPRIVATE:-}"
+ YARN_REGISTRY: "${YARN_REGISTRY:-}"
+ NPM_LOG_LEVEL: 'debug'
+ PUID: 1000
+ PGID: 1000
volumes:
- - npm_data:/data
- - le_data:/etc/letsencrypt
- - ../backend:/app
- - ../frontend:/app/frontend
- - ../global:/app/global
- depends_on:
- - db
+ - /etc/localtime:/etc/localtime:ro
+ - ../:/app
+ - ./rootfs/var/www/html:/var/www/html
+ - ./dev/resolv.conf:/etc/resolv.conf:ro
+ - npm_data_vol:/data
working_dir: /app
+ networks:
+ default:
+ aliases:
+ - website1.internal
+ - website2.internal
+ - website3.internal
+ restart: unless-stopped
+
+ npm-stepca:
+ image: jc21/testca
+ container_name: "npm.stepca"
+ volumes:
+ - ./dev/resolv.conf:/etc/resolv.conf:ro
+ networks:
+ default:
+ aliases:
+ - ca.internal
+
+ npm-pebble:
+ image: letsencrypt/pebble
+ container_name: "npm.pebble"
+ command: pebble -config /test/config/pebble-config.json
+ environment:
+ PEBBLE_VA_SLEEPTIME: 2
+ volumes:
+ - ./dev/pebble-config.json:/test/config/pebble-config.json
+ - ./dev/resolv.conf:/etc/resolv.conf:ro
+ networks:
+ default:
+ aliases:
+ # required for https cert dns san
+ - pebble
- db:
- image: jc21/mariadb-aria
- container_name: npm_db
+ npm-swagger:
+ image: swaggerapi/swagger-ui:latest
+ container_name: "npm.swagger"
ports:
- - 33306:3306
+ - 3001:80
+ environment:
+ URL: "http://${SWAGGER_PUBLIC_DOMAIN:-127.0.0.1:3081}/api/schema"
+ PORT: '80'
+ depends_on:
+ - npm
+
+ npm-pdns:
+ image: pschiffe/pdns-mysql:4.8
+ container_name: "npm.pdns"
+ volumes:
+ - '/etc/localtime:/etc/localtime:ro'
+ environment:
+ PDNS_master: 'yes'
+ PDNS_api: 'yes'
+ PDNS_api_key: 'npm'
+ PDNS_webserver: 'yes'
+ PDNS_webserver_address: '0.0.0.0'
+ PDNS_webserver_password: 'npm'
+ PDNS_webserver-allow-from: '127.0.0.0/8,192.0.0.0/8,10.0.0.0/8,172.0.0.0/8'
+ PDNS_version_string: 'anonymous'
+ PDNS_default_ttl: 1500
+ PDNS_allow_axfr_ips: '127.0.0.0/8,192.0.0.0/8,10.0.0.0/8,172.0.0.0/8'
+ PDNS_gmysql_host: npm-pdns-db
+ PDNS_gmysql_port: 3306
+ PDNS_gmysql_user: pdns
+ PDNS_gmysql_password: pdns
+ PDNS_gmysql_dbname: pdns
+ depends_on:
+ - npm-pdns-db
networks:
- - nginx_proxy_manager
+ default:
+ aliases:
+ - ns1.pdns
+ - ns2.pdns
+
+ npm-pdns-db:
+ image: mariadb:10.7.1
+ container_name: "npm.pdns.db"
environment:
- MYSQL_ROOT_PASSWORD: "npm"
- MYSQL_DATABASE: "npm"
- MYSQL_USER: "npm"
- MYSQL_PASSWORD: "npm"
+ MYSQL_ROOT_PASSWORD: 'pdns'
+ MYSQL_DATABASE: 'pdns'
+ MYSQL_USER: 'pdns'
+ MYSQL_PASSWORD: 'pdns'
+ volumes:
+ - npm_pdns_mysql_vol:/var/lib/mysql
+ - /etc/localtime:/etc/localtime:ro
+ - './dev/pdns-db.sql:/docker-entrypoint-initdb.d/01_init.sql:ro'
+
+ npm-dnsrouter:
+ image: jc21/dnsrouter
+ container_name: "npm.dnsrouter"
volumes:
- - db_data:/var/lib/mysql
+ - ./dev/dnsrouter-config.json.tmp:/dnsrouter-config.json:ro
volumes:
- npm_data:
- name: npm_core_data
- le_data:
- name: npm_le_data
- db_data:
- name: npm_db_data
-
-networks:
- nginx_proxy_manager:
- name: npm_network
+ npm_data_vol:
+ npm_pdns_mysql_vol:
diff --git a/docker/rootfs/bin/check-health b/docker/rootfs/bin/check-health
deleted file mode 100755
index bcf5552b3..000000000
--- a/docker/rootfs/bin/check-health
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/bash
-
-OK=$(curl --silent http://127.0.0.1:81/api/ | jq --raw-output '.status')
-
-if [ "$OK" == "OK" ]; then
- echo "OK"
- exit 0
-else
- echo "NOT OK"
- exit 1
-fi
diff --git a/docker/rootfs/bin/handle-ipv6-setting b/docker/rootfs/bin/handle-ipv6-setting
deleted file mode 100755
index 2aa0e41a9..000000000
--- a/docker/rootfs/bin/handle-ipv6-setting
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/bin/bash
-
-# This command reads the `DISABLE_IPV6` env var and will either enable
-# or disable ipv6 in all nginx configs based on this setting.
-
-# Lowercase
-DISABLE_IPV6=$(echo "${DISABLE_IPV6:-}" | tr '[:upper:]' '[:lower:]')
-
-CYAN='\E[1;36m'
-BLUE='\E[1;34m'
-YELLOW='\E[1;33m'
-RED='\E[1;31m'
-RESET='\E[0m'
-
-FOLDER=$1
-if [ "$FOLDER" == "" ]; then
- echo -e "${RED}❯ $0 requires a absolute folder path as the first argument!${RESET}"
- echo -e "${YELLOW} ie: $0 /data/nginx${RESET}"
- exit 1
-fi
-
-FILES=$(find "$FOLDER" -type f -name "*.conf")
-if [ "$DISABLE_IPV6" == "true" ] || [ "$DISABLE_IPV6" == "on" ] || [ "$DISABLE_IPV6" == "1" ] || [ "$DISABLE_IPV6" == "yes" ]; then
- # IPV6 is disabled
- echo "Disabling IPV6 in hosts"
- echo -e "${BLUE}❯ ${CYAN}Disabling IPV6 in hosts: ${YELLOW}${FOLDER}${RESET}"
-
- # Iterate over configs and run the regex
- for FILE in $FILES
- do
- echo -e " ${BLUE}❯ ${YELLOW}${FILE}${RESET}"
- sed -E -i 's/^([^#]*)listen \[::\]/\1#listen [::]/g' "$FILE"
- done
-
-else
- # IPV6 is enabled
- echo -e "${BLUE}❯ ${CYAN}Enabling IPV6 in hosts: ${YELLOW}${FOLDER}${RESET}"
-
- # Iterate over configs and run the regex
- for FILE in $FILES
- do
- echo -e " ${BLUE}❯ ${YELLOW}${FILE}${RESET}"
- sed -E -i 's/^(\s*)#listen \[::\]/\1listen [::]/g' "$FILE"
- done
-
-fi
diff --git a/docker/rootfs/etc/cont-finish.d/.gitignore b/docker/rootfs/etc/cont-finish.d/.gitignore
deleted file mode 100644
index d6b7ef32c..000000000
--- a/docker/rootfs/etc/cont-finish.d/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-*
-!.gitignore
diff --git a/docker/rootfs/etc/cont-init.d/.gitignore b/docker/rootfs/etc/cont-init.d/.gitignore
deleted file mode 100644
index f04f0f6e0..000000000
--- a/docker/rootfs/etc/cont-init.d/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-*
-!.gitignore
-!*.sh
diff --git a/docker/rootfs/etc/cont-init.d/01_perms.sh b/docker/rootfs/etc/cont-init.d/01_perms.sh
deleted file mode 100755
index e7875d329..000000000
--- a/docker/rootfs/etc/cont-init.d/01_perms.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/usr/bin/with-contenv bash
-set -e
-
-mkdir -p /data/logs
-echo "Changing ownership of /data/logs to $(id -u):$(id -g)"
-chown -R "$(id -u):$(id -g)" /data/logs
-
diff --git a/docker/rootfs/etc/cont-init.d/01_s6-secret-init.sh b/docker/rootfs/etc/cont-init.d/01_s6-secret-init.sh
deleted file mode 100644
index f145807ab..000000000
--- a/docker/rootfs/etc/cont-init.d/01_s6-secret-init.sh
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/with-contenv bash
-# ref: https://github.com/linuxserver/docker-baseimage-alpine/blob/master/root/etc/cont-init.d/01-envfile
-
-# in s6, environmental variables are written as text files for s6 to monitor
-# seach through full-path filenames for files ending in "__FILE"
-for FILENAME in $(find /var/run/s6/container_environment/ | grep "__FILE$"); do
- echo "[secret-init] Evaluating ${FILENAME##*/} ..."
-
- # set SECRETFILE to the contents of the full-path textfile
- SECRETFILE=$(cat ${FILENAME})
- # SECRETFILE=${FILENAME}
- # echo "[secret-init] Set SECRETFILE to ${SECRETFILE}" # DEBUG - rm for prod!
-
- # if SECRETFILE exists / is not null
- if [[ -f ${SECRETFILE} ]]; then
- # strip the appended "__FILE" from environmental variable name ...
- STRIPFILE=$(echo ${FILENAME} | sed "s/__FILE//g")
- # echo "[secret-init] Set STRIPFILE to ${STRIPFILE}" # DEBUG - rm for prod!
-
- # ... and set value to contents of secretfile
- # since s6 uses text files, this is effectively "export ..."
- printf $(cat ${SECRETFILE}) > ${STRIPFILE}
- # echo "[secret-init] Set ${STRIPFILE##*/} to $(cat ${STRIPFILE})" # DEBUG - rm for prod!"
- echo "[secret-init] Success! ${STRIPFILE##*/} set from ${FILENAME##*/}"
-
- else
- echo "[secret-init] cannot find secret in ${FILENAME}"
- fi
-done
diff --git a/docker/rootfs/etc/fix-attrs.d/.gitignore b/docker/rootfs/etc/fix-attrs.d/.gitignore
deleted file mode 100644
index d6b7ef32c..000000000
--- a/docker/rootfs/etc/fix-attrs.d/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-*
-!.gitignore
diff --git a/docker/rootfs/etc/letsencrypt.ini b/docker/rootfs/etc/letsencrypt.ini
deleted file mode 100644
index aae53b902..000000000
--- a/docker/rootfs/etc/letsencrypt.ini
+++ /dev/null
@@ -1,6 +0,0 @@
-text = True
-non-interactive = True
-webroot-path = /data/letsencrypt-acme-challenge
-key-type = ecdsa
-elliptic-curve = secp384r1
-preferred-chain = ISRG Root X1
diff --git a/docker/rootfs/etc/nginx/conf.d/default.conf b/docker/rootfs/etc/nginx/conf.d/default.conf
index 37d316db5..4d7007339 100644
--- a/docker/rootfs/etc/nginx/conf.d/default.conf
+++ b/docker/rootfs/etc/nginx/conf.d/default.conf
@@ -1,18 +1,10 @@
# "You are not configured" page, which is the default if another default doesn't exist
server {
- listen 80;
- listen [::]:80;
-
- set $forward_scheme "http";
- set $server "127.0.0.1";
- set $port "80";
-
- server_name localhost-nginx-proxy-manager;
- access_log /data/logs/fallback_access.log standard;
- error_log /data/logs/fallback_error.log warn;
- include conf.d/include/assets.conf;
+ listen 80 default;
+ server_name localhost;
+ include conf.d/include/acme-challenge.conf;
include conf.d/include/block-exploits.conf;
- include conf.d/include/letsencrypt-acme-challenge.conf;
+ access_log /data/logs/default.log proxy;
location / {
index index.html;
@@ -22,19 +14,11 @@ server {
# First 443 Host, which is the default if another default doesn't exist
server {
- listen 443 ssl;
- listen [::]:443 ssl;
-
- set $forward_scheme "https";
- set $server "127.0.0.1";
- set $port "443";
-
+ listen 443 ssl default;
server_name localhost;
- access_log /data/logs/fallback_access.log standard;
- error_log /dev/null crit;
- ssl_certificate /data/nginx/dummycert.pem;
- ssl_certificate_key /data/nginx/dummykey.pem;
include conf.d/include/ssl-ciphers.conf;
-
+ include conf.d/include/block-exploits.conf;
+ access_log /data/logs/default.log proxy;
+ ssl_reject_handshake on;
return 444;
}
diff --git a/docker/rootfs/etc/nginx/conf.d/dev.conf b/docker/rootfs/etc/nginx/conf.d/dev.conf
index edbdec8ac..34d0340da 100644
--- a/docker/rootfs/etc/nginx/conf.d/dev.conf
+++ b/docker/rootfs/etc/nginx/conf.d/dev.conf
@@ -1,29 +1,62 @@
server {
listen 81 default;
- listen [::]:81 default;
-
server_name nginxproxymanager-dev;
- root /app/frontend/dist;
- access_log /dev/null;
location /api {
return 302 /api/;
}
+ root /app/backend;
+ location /api/coverage {
+ try_files /index.html /coverage.html;
+ }
+
+ # go server
location /api/ {
+ add_header X-Served-By $host;
+ chunked_transfer_encoding off;
+ proxy_buffering off;
+ proxy_cache off;
+ proxy_http_version 1.1;
+ proxy_set_header Host $host;
+ proxy_set_header Connection '';
+ proxy_set_header X-Forwarded-Scheme $scheme;
+ proxy_set_header X-Forwarded-Proto $scheme;
+ proxy_set_header X-Forwarded-For $remote_addr;
+ proxy_set_header X-Accel-Buffering no;
+ proxy_pass http://127.0.0.1:3000/api/;
+ }
+
+ # go server
+ location /oauth/ {
+ add_header X-Served-By $host;
+ chunked_transfer_encoding off;
+ proxy_buffering off;
+ proxy_cache off;
+ proxy_http_version 1.1;
+ proxy_set_header Host $host;
+ proxy_set_header Connection '';
+ proxy_set_header X-Forwarded-Scheme $scheme;
+ proxy_set_header X-Forwarded-Proto $scheme;
+ proxy_set_header X-Forwarded-For $remote_addr;
+ proxy_set_header X-Accel-Buffering no;
+ proxy_pass http://127.0.0.1:3000/oauth/;
+ }
+
+ location ~ .html {
+ try_files $uri =404;
+ }
+
+ # vite dev server
+ location / {
add_header X-Served-By $host;
+ proxy_http_version 1.1;
proxy_set_header Host $host;
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection "Upgrade";
proxy_set_header X-Forwarded-Scheme $scheme;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-For $remote_addr;
- proxy_pass http://127.0.0.1:3000/;
-
- proxy_read_timeout 15m;
- proxy_send_timeout 15m;
- }
-
- location / {
- index index.html;
- try_files $uri $uri.html $uri/ /index.html;
+ proxy_pass http://127.0.0.1:5173;
}
}
diff --git a/docker/rootfs/etc/nginx/conf.d/include/.gitignore b/docker/rootfs/etc/nginx/conf.d/include/.gitignore
deleted file mode 100644
index 5291fe15e..000000000
--- a/docker/rootfs/etc/nginx/conf.d/include/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-resolvers.conf
diff --git a/docker/rootfs/etc/nginx/conf.d/include/acme-challenge.conf b/docker/rootfs/etc/nginx/conf.d/include/acme-challenge.conf
new file mode 100644
index 000000000..db408c777
--- /dev/null
+++ b/docker/rootfs/etc/nginx/conf.d/include/acme-challenge.conf
@@ -0,0 +1,17 @@
+# Rule for legitimate ACME Challenge requests (like /.well-known/acme-challenge/xxxxxxxxx)
+# We use ^~ here, so that we don't check other regexes (for speed-up). We actually MUST cancel
+# other regex checks, because in our other config files have regex rule that denies access to files with dotted names.
+location ^~ /.well-known/acme-challenge/ {
+ auth_basic off;
+ auth_request off;
+ allow all;
+ default_type "text/plain";
+ root "/data/.acme.sh/.well-known";
+}
+
+# Hide /acme-challenge subdirectory and return 404 on all requests.
+# It is somewhat more secure than letting Nginx return 403.
+# Ending slash is important!
+location = /.well-known/acme-challenge/ {
+ return 404;
+}
diff --git a/docker/rootfs/etc/nginx/conf.d/include/assets.conf b/docker/rootfs/etc/nginx/conf.d/include/assets.conf
index e95c2e8b7..7dd0f5cea 100644
--- a/docker/rootfs/etc/nginx/conf.d/include/assets.conf
+++ b/docker/rootfs/etc/nginx/conf.d/include/assets.conf
@@ -1,31 +1,31 @@
location ~* ^.*\.(css|js|jpe?g|gif|png|woff|eot|ttf|svg|ico|css\.map|js\.map)$ {
- if_modified_since off;
+ if_modified_since off;
- # use the public cache
- proxy_cache public-cache;
- proxy_cache_key $host$request_uri;
+ # use the public cache
+ proxy_cache public-cache;
+ proxy_cache_key $host$request_uri;
- # ignore these headers for media
- proxy_ignore_headers Set-Cookie Cache-Control Expires X-Accel-Expires;
+ # ignore these headers for media
+ proxy_ignore_headers Set-Cookie Cache-Control Expires X-Accel-Expires;
- # cache 200s and also 404s (not ideal but there are a few 404 images for some reason)
- proxy_cache_valid any 30m;
- proxy_cache_valid 404 1m;
+ # cache 200s and also 404s (not ideal but there are a few 404 images for some reason)
+ proxy_cache_valid any 30m;
+ proxy_cache_valid 404 1m;
- # strip this header to avoid If-Modified-Since requests
- proxy_hide_header Last-Modified;
- proxy_hide_header Cache-Control;
- proxy_hide_header Vary;
+ # strip this header to avoid If-Modified-Since requests
+ proxy_hide_header Last-Modified;
+ proxy_hide_header Cache-Control;
+ proxy_hide_header Vary;
- proxy_cache_bypass 0;
- proxy_no_cache 0;
+ proxy_cache_bypass 0;
+ proxy_no_cache 0;
- proxy_cache_use_stale error timeout updating http_500 http_502 http_503 http_504 http_404;
- proxy_connect_timeout 5s;
- proxy_read_timeout 45s;
+ proxy_cache_use_stale error timeout updating http_500 http_502 http_503 http_504 http_404;
+ proxy_connect_timeout 5s;
+ proxy_read_timeout 45s;
- expires @30m;
- access_log off;
+ expires @30m;
+ access_log off;
- include conf.d/include/proxy.conf;
+ include conf.d/include/proxy.conf;
}
diff --git a/docker/rootfs/etc/nginx/conf.d/include/block-exploits.conf b/docker/rootfs/etc/nginx/conf.d/include/block-exploits.conf
index 093bda235..22360fc1a 100644
--- a/docker/rootfs/etc/nginx/conf.d/include/block-exploits.conf
+++ b/docker/rootfs/etc/nginx/conf.d/include/block-exploits.conf
@@ -2,92 +2,92 @@
set $block_sql_injections 0;
if ($query_string ~ "union.*select.*\(") {
- set $block_sql_injections 1;
+ set $block_sql_injections 1;
}
if ($query_string ~ "union.*all.*select.*") {
- set $block_sql_injections 1;
+ set $block_sql_injections 1;
}
if ($query_string ~ "concat.*\(") {
- set $block_sql_injections 1;
+ set $block_sql_injections 1;
}
if ($block_sql_injections = 1) {
- return 403;
+ return 403;
}
## Block file injections
set $block_file_injections 0;
if ($query_string ~ "[a-zA-Z0-9_]=http://") {
- set $block_file_injections 1;
+ set $block_file_injections 1;
}
if ($query_string ~ "[a-zA-Z0-9_]=(\.\.//?)+") {
- set $block_file_injections 1;
+ set $block_file_injections 1;
}
if ($query_string ~ "[a-zA-Z0-9_]=/([a-z0-9_.]//?)+") {
- set $block_file_injections 1;
+ set $block_file_injections 1;
}
if ($block_file_injections = 1) {
- return 403;
+ return 403;
}
## Block common exploits
set $block_common_exploits 0;
if ($query_string ~ "(<|%3C).*script.*(>|%3E)") {
- set $block_common_exploits 1;
+ set $block_common_exploits 1;
}
if ($query_string ~ "GLOBALS(=|\[|\%[0-9A-Z]{0,2})") {
- set $block_common_exploits 1;
+ set $block_common_exploits 1;
}
if ($query_string ~ "_REQUEST(=|\[|\%[0-9A-Z]{0,2})") {
- set $block_common_exploits 1;
+ set $block_common_exploits 1;
}
if ($query_string ~ "proc/self/environ") {
- set $block_common_exploits 1;
+ set $block_common_exploits 1;
}
if ($query_string ~ "mosConfig_[a-zA-Z_]{1,21}(=|\%3D)") {
- set $block_common_exploits 1;
+ set $block_common_exploits 1;
}
if ($query_string ~ "base64_(en|de)code\(.*\)") {
- set $block_common_exploits 1;
+ set $block_common_exploits 1;
}
if ($block_common_exploits = 1) {
- return 403;
+ return 403;
}
## Block spam
set $block_spam 0;
if ($query_string ~ "\b(ultram|unicauca|valium|viagra|vicodin|xanax|ypxaieo)\b") {
- set $block_spam 1;
+ set $block_spam 1;
}
if ($query_string ~ "\b(erections|hoodia|huronriveracres|impotence|levitra|libido)\b") {
- set $block_spam 1;
+ set $block_spam 1;
}
if ($query_string ~ "\b(ambien|blue\spill|cialis|cocaine|ejaculation|erectile)\b") {
- set $block_spam 1;
+ set $block_spam 1;
}
if ($query_string ~ "\b(lipitor|phentermin|pro[sz]ac|sandyauer|tramadol|troyhamby)\b") {
- set $block_spam 1;
+ set $block_spam 1;
}
if ($block_spam = 1) {
- return 403;
+ return 403;
}
## Block user agents
@@ -95,42 +95,42 @@ set $block_user_agents 0;
# Disable Akeeba Remote Control 2.5 and earlier
if ($http_user_agent ~ "Indy Library") {
- set $block_user_agents 1;
+ set $block_user_agents 1;
}
# Common bandwidth hoggers and hacking tools.
if ($http_user_agent ~ "libwww-perl") {
- set $block_user_agents 1;
+ set $block_user_agents 1;
}
if ($http_user_agent ~ "GetRight") {
- set $block_user_agents 1;
+ set $block_user_agents 1;
}
if ($http_user_agent ~ "GetWeb!") {
- set $block_user_agents 1;
+ set $block_user_agents 1;
}
if ($http_user_agent ~ "Go!Zilla") {
- set $block_user_agents 1;
+ set $block_user_agents 1;
}
if ($http_user_agent ~ "Download Demon") {
- set $block_user_agents 1;
+ set $block_user_agents 1;
}
if ($http_user_agent ~ "Go-Ahead-Got-It") {
- set $block_user_agents 1;
+ set $block_user_agents 1;
}
if ($http_user_agent ~ "TurnitinBot") {
- set $block_user_agents 1;
+ set $block_user_agents 1;
}
if ($http_user_agent ~ "GrabNet") {
- set $block_user_agents 1;
+ set $block_user_agents 1;
}
if ($block_user_agents = 1) {
- return 403;
+ return 403;
}
diff --git a/docker/rootfs/etc/nginx/conf.d/include/force-ssl.conf b/docker/rootfs/etc/nginx/conf.d/include/force-ssl.conf
index 15f0d2856..5fd4810f8 100644
--- a/docker/rootfs/etc/nginx/conf.d/include/force-ssl.conf
+++ b/docker/rootfs/etc/nginx/conf.d/include/force-ssl.conf
@@ -1,3 +1,3 @@
if ($scheme = "http") {
- return 301 https://$host$request_uri;
+ return 301 https://$host$request_uri;
}
diff --git a/docker/rootfs/etc/nginx/conf.d/include/ip_ranges.conf b/docker/rootfs/etc/nginx/conf.d/include/ip_ranges.conf
deleted file mode 100644
index 342493254..000000000
--- a/docker/rootfs/etc/nginx/conf.d/include/ip_ranges.conf
+++ /dev/null
@@ -1,2 +0,0 @@
-# This should be left blank is it is populated programatically
-# by the application backend.
diff --git a/docker/rootfs/etc/nginx/conf.d/include/letsencrypt-acme-challenge.conf b/docker/rootfs/etc/nginx/conf.d/include/letsencrypt-acme-challenge.conf
deleted file mode 100644
index ff2a78274..000000000
--- a/docker/rootfs/etc/nginx/conf.d/include/letsencrypt-acme-challenge.conf
+++ /dev/null
@@ -1,30 +0,0 @@
-# Rule for legitimate ACME Challenge requests (like /.well-known/acme-challenge/xxxxxxxxx)
-# We use ^~ here, so that we don't check other regexes (for speed-up). We actually MUST cancel
-# other regex checks, because in our other config files have regex rule that denies access to files with dotted names.
-location ^~ /.well-known/acme-challenge/ {
- # Since this is for letsencrypt authentication of a domain and they do not give IP ranges of their infrastructure
- # we need to open up access by turning off auth and IP ACL for this location.
- auth_basic off;
- auth_request off;
- allow all;
-
- # Set correct content type. According to this:
- # https://community.letsencrypt.org/t/using-the-webroot-domain-verification-method/1445/29
- # Current specification requires "text/plain" or no content header at all.
- # It seems that "text/plain" is a safe option.
- default_type "text/plain";
-
- # This directory must be the same as in /etc/letsencrypt/cli.ini
- # as "webroot-path" parameter. Also don't forget to set "authenticator" parameter
- # there to "webroot".
- # Do NOT use alias, use root! Target directory is located here:
- # /var/www/common/letsencrypt/.well-known/acme-challenge/
- root /data/letsencrypt-acme-challenge;
-}
-
-# Hide /acme-challenge subdirectory and return 404 on all requests.
-# It is somewhat more secure than letting Nginx return 403.
-# Ending slash is important!
-location = /.well-known/acme-challenge/ {
- return 404;
-}
diff --git a/docker/rootfs/etc/nginx/conf.d/include/proxy.conf b/docker/rootfs/etc/nginx/conf.d/include/proxy.conf
index fcaaf0038..b84a45135 100644
--- a/docker/rootfs/etc/nginx/conf.d/include/proxy.conf
+++ b/docker/rootfs/etc/nginx/conf.d/include/proxy.conf
@@ -3,6 +3,4 @@ proxy_set_header Host $host;
proxy_set_header X-Forwarded-Scheme $scheme;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-For $remote_addr;
-proxy_set_header X-Real-IP $remote_addr;
-proxy_pass $forward_scheme://$server:$port$request_uri;
-
+proxy_pass $forward_scheme://$server:$port;
diff --git a/docker/rootfs/etc/nginx/conf.d/include/resolvers.conf b/docker/rootfs/etc/nginx/conf.d/include/resolvers.conf
new file mode 100644
index 000000000..ccd9dcef9
--- /dev/null
+++ b/docker/rootfs/etc/nginx/conf.d/include/resolvers.conf
@@ -0,0 +1 @@
+# Intentionally blank
diff --git a/docker/rootfs/etc/nginx/conf.d/production.conf b/docker/rootfs/etc/nginx/conf.d/production.conf
index 877e51dda..2881604f9 100644
--- a/docker/rootfs/etc/nginx/conf.d/production.conf
+++ b/docker/rootfs/etc/nginx/conf.d/production.conf
@@ -1,33 +1,20 @@
# Admin Interface
server {
listen 81 default;
- listen [::]:81 default;
-
server_name nginxproxymanager;
- root /app/frontend;
- access_log /dev/null;
-
- location /api {
- return 302 /api/;
- }
-
- location /api/ {
- add_header X-Served-By $host;
- proxy_set_header Host $host;
- proxy_set_header X-Forwarded-Scheme $scheme;
- proxy_set_header X-Forwarded-Proto $scheme;
- proxy_set_header X-Forwarded-For $remote_addr;
- proxy_pass http://127.0.0.1:3000/;
-
- proxy_read_timeout 15m;
- proxy_send_timeout 15m;
- }
location / {
- index index.html;
- if ($request_uri ~ ^/(.*)\.html$) {
- return 302 /$1;
- }
- try_files $uri $uri.html $uri/ /index.html;
+ add_header X-Served-By $host;
+ chunked_transfer_encoding off;
+ proxy_buffering off;
+ proxy_cache off;
+ proxy_http_version 1.1;
+ proxy_set_header Host $host;
+ proxy_set_header Connection '';
+ proxy_set_header X-Forwarded-Scheme $scheme;
+ proxy_set_header X-Forwarded-Proto $scheme;
+ proxy_set_header X-Forwarded-For $remote_addr;
+ proxy_set_header X-Accel-Buffering no;
+ proxy_pass http://localhost:3000/;
}
}
diff --git a/docker/rootfs/etc/nginx/nginx.conf b/docker/rootfs/etc/nginx/nginx.conf
index 4d5ee9017..3cafede3b 100644
--- a/docker/rootfs/etc/nginx/nginx.conf
+++ b/docker/rootfs/etc/nginx/nginx.conf
@@ -1,7 +1,7 @@
# run nginx in foreground
daemon off;
-
-user root;
+pid /run/nginx/nginx.pid;
+user npm;
# Set number of worker processes automatically based on number of CPU cores.
worker_processes auto;
@@ -26,15 +26,12 @@ http {
tcp_nopush on;
tcp_nodelay on;
client_body_temp_path /tmp/nginx/body 1 2;
- keepalive_timeout 90s;
- proxy_connect_timeout 90s;
- proxy_send_timeout 90s;
- proxy_read_timeout 90s;
+ keepalive_timeout 65;
ssl_prefer_server_ciphers on;
gzip on;
proxy_ignore_client_abort off;
- client_max_body_size 2000m;
- server_names_hash_bucket_size 1024;
+ client_max_body_size 200m;
+ server_names_hash_bucket_size 64;
proxy_http_version 1.1;
proxy_set_header X-Forwarded-Scheme $scheme;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
@@ -43,7 +40,7 @@ http {
proxy_cache_path /var/lib/nginx/cache/public levels=1:2 keys_zone=public-cache:30m max_size=192m;
proxy_cache_path /var/lib/nginx/cache/private levels=1:2 keys_zone=private-cache:5m max_size=1024m;
- log_format proxy '[$time_local] $upstream_cache_status $upstream_status $status - $request_method $scheme $host "$request_uri" [Client $remote_addr] [Length $body_bytes_sent] [Gzip $gzip_ratio] [Sent-to $server] "$http_user_agent" "$http_referer"';
+ log_format proxy '[$time_local] $upstream_cache_status $upstream_status $status - $request_method $scheme $host "$request_uri" [Client $remote_addr] [Length $body_bytes_sent] [Gzip $gzip_ratio] "$http_user_agent" "$http_referer"';
log_format standard '[$time_local] $status - $request_method $scheme $host "$request_uri" [Client $remote_addr] [Length $body_bytes_sent] [Gzip $gzip_ratio] "$http_user_agent" "$http_referer"';
access_log /data/logs/fallback_access.log proxy;
@@ -57,13 +54,13 @@ http {
}
# Real IP Determination
-
+
# Local subnets:
set_real_ip_from 10.0.0.0/8;
set_real_ip_from 172.16.0.0/12; # Includes Docker subnet
set_real_ip_from 192.168.0.0/16;
# NPM generated CDN ip ranges:
- include conf.d/include/ip_ranges.conf;
+ include conf.d/include/ipranges.conf;
# always put the following 2 lines after ip subnets:
real_ip_header X-Real-IP;
real_ip_recursive on;
@@ -74,10 +71,9 @@ http {
# Files generated by NPM
include /etc/nginx/conf.d/*.conf;
include /data/nginx/default_host/*.conf;
- include /data/nginx/proxy_host/*.conf;
- include /data/nginx/redirection_host/*.conf;
- include /data/nginx/dead_host/*.conf;
- include /data/nginx/temp/*.conf;
+ include /data/nginx/upstreams/*.conf;
+ include /data/nginx/hosts/*.conf;
+ include /data/nginx/streams/*.conf;
# Custom
include /data/nginx/custom/http[.]conf;
diff --git a/docker/rootfs/etc/s6-overlay/s6-rc.d/backend/dependencies.d/prepare b/docker/rootfs/etc/s6-overlay/s6-rc.d/backend/dependencies.d/prepare
new file mode 100644
index 000000000..e69de29bb
diff --git a/docker/rootfs/etc/s6-overlay/s6-rc.d/backend/run b/docker/rootfs/etc/s6-overlay/s6-rc.d/backend/run
new file mode 100755
index 000000000..c90b7219b
--- /dev/null
+++ b/docker/rootfs/etc/s6-overlay/s6-rc.d/backend/run
@@ -0,0 +1,26 @@
+#!/command/with-contenv bash
+# shellcheck shell=bash
+
+set -e
+
+. /bin/common.sh
+
+if [ "$(is_true "$DEBUG")" = '1' ]; then
+ set -x
+fi
+
+log_info 'Starting backend ...'
+
+if [ "$(is_true "$DEVELOPMENT")" = '1' ]; then
+ HOME=$NPMHOME
+ GOPATH="$HOME/go"
+ mkdir -p "$GOPATH"
+ chown -R "$PUID:$PGID" "$GOPATH"
+ export HOME GOPATH
+ rm -rf /app/backend/.task
+ cd /app/backend || exit 1
+ exec s6-setuidgid "$PUID:$PGID" task -w
+else
+ cd /app/bin || exit 1
+ exec s6-setuidgid "$PUID:$PGID" /app/bin/server
+fi
diff --git a/docker/rootfs/etc/s6-overlay/s6-rc.d/backend/type b/docker/rootfs/etc/s6-overlay/s6-rc.d/backend/type
new file mode 100644
index 000000000..5883cff0c
--- /dev/null
+++ b/docker/rootfs/etc/s6-overlay/s6-rc.d/backend/type
@@ -0,0 +1 @@
+longrun
diff --git a/docker/rootfs/etc/s6-overlay/s6-rc.d/fail2ban/dependencies.d/prepare b/docker/rootfs/etc/s6-overlay/s6-rc.d/fail2ban/dependencies.d/prepare
new file mode 100644
index 000000000..e69de29bb
diff --git a/docker/rootfs/etc/s6-overlay/s6-rc.d/fail2ban/run b/docker/rootfs/etc/s6-overlay/s6-rc.d/fail2ban/run
new file mode 100755
index 000000000..adf64ce27
--- /dev/null
+++ b/docker/rootfs/etc/s6-overlay/s6-rc.d/fail2ban/run
@@ -0,0 +1,11 @@
+#!/command/with-contenv bash
+# shellcheck shell=bash
+
+. /bin/common.sh
+
+if [ "$(is_true "$DEBUG")" = '1' ]; then
+ set -x
+fi
+
+log_info 'Starting fail2ban ...'
+exec /usr/bin/fail2ban-client -c /fail2ban -x -vv -f start
diff --git a/docker/rootfs/etc/s6-overlay/s6-rc.d/fail2ban/type b/docker/rootfs/etc/s6-overlay/s6-rc.d/fail2ban/type
new file mode 100644
index 000000000..5883cff0c
--- /dev/null
+++ b/docker/rootfs/etc/s6-overlay/s6-rc.d/fail2ban/type
@@ -0,0 +1 @@
+longrun
diff --git a/docker/rootfs/etc/s6-overlay/s6-rc.d/frontend/dependencies.d/prepare b/docker/rootfs/etc/s6-overlay/s6-rc.d/frontend/dependencies.d/prepare
new file mode 100644
index 000000000..e69de29bb
diff --git a/docker/rootfs/etc/s6-overlay/s6-rc.d/frontend/run b/docker/rootfs/etc/s6-overlay/s6-rc.d/frontend/run
new file mode 100755
index 000000000..4f92a9e9f
--- /dev/null
+++ b/docker/rootfs/etc/s6-overlay/s6-rc.d/frontend/run
@@ -0,0 +1,26 @@
+#!/command/with-contenv bash
+# shellcheck shell=bash
+
+set -e
+
+. /bin/common.sh
+
+if [ "$(is_true "$DEBUG")" = '1' ]; then
+ set -x
+fi
+
+# This service is DEVELOPMENT only.
+if [ "$(is_true "$DEVELOPMENT")" = '1' ]; then
+ CI=true
+ HOME=$NPMHOME
+ export CI
+ export HOME
+
+ cd /app/frontend || exit 1
+
+ log_info 'Starting frontend ...'
+ s6-setuidgid "$PUID:$PGID" yarn install
+ exec s6-setuidgid "$PUID:$PGID" yarn dev
+else
+ exit 0
+fi
diff --git a/docker/rootfs/etc/s6-overlay/s6-rc.d/frontend/type b/docker/rootfs/etc/s6-overlay/s6-rc.d/frontend/type
new file mode 100644
index 000000000..5883cff0c
--- /dev/null
+++ b/docker/rootfs/etc/s6-overlay/s6-rc.d/frontend/type
@@ -0,0 +1 @@
+longrun
diff --git a/docker/rootfs/etc/s6-overlay/s6-rc.d/nginx/dependencies.d/prepare b/docker/rootfs/etc/s6-overlay/s6-rc.d/nginx/dependencies.d/prepare
new file mode 100644
index 000000000..e69de29bb
diff --git a/docker/rootfs/etc/s6-overlay/s6-rc.d/nginx/run b/docker/rootfs/etc/s6-overlay/s6-rc.d/nginx/run
new file mode 100755
index 000000000..1f81e4cbb
--- /dev/null
+++ b/docker/rootfs/etc/s6-overlay/s6-rc.d/nginx/run
@@ -0,0 +1,13 @@
+#!/command/with-contenv bash
+# shellcheck shell=bash
+
+set -e
+
+. /bin/common.sh
+
+if [ "$(is_true "$DEBUG")" = '1' ]; then
+ set -x
+fi
+
+log_info 'Starting nginx ...'
+exec s6-setuidgid "$PUID:$PGID" nginx
diff --git a/docker/rootfs/etc/s6-overlay/s6-rc.d/nginx/type b/docker/rootfs/etc/s6-overlay/s6-rc.d/nginx/type
new file mode 100644
index 000000000..5883cff0c
--- /dev/null
+++ b/docker/rootfs/etc/s6-overlay/s6-rc.d/nginx/type
@@ -0,0 +1 @@
+longrun
diff --git a/docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/00-all.sh b/docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/00-all.sh
new file mode 100755
index 000000000..0ce831d0e
--- /dev/null
+++ b/docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/00-all.sh
@@ -0,0 +1,22 @@
+#!/command/with-contenv bash
+# shellcheck shell=bash
+
+set -e
+
+. /bin/common.sh
+
+if [ "$(id -u)" != "0" ]; then
+ log_fatal "This docker container must be run as root, do not specify a user.\nYou can specify PUID and PGID env vars to run processes as that user and group after initialization."
+fi
+
+if [ "$(is_true "$DEBUG")" = '1' ]; then
+ set -x
+fi
+
+. /etc/s6-overlay/s6-rc.d/prepare/10-usergroup.sh
+. /etc/s6-overlay/s6-rc.d/prepare/20-paths.sh
+. /etc/s6-overlay/s6-rc.d/prepare/30-ownership.sh
+. /etc/s6-overlay/s6-rc.d/prepare/40-dynamic.sh
+. /etc/s6-overlay/s6-rc.d/prepare/50-ipv46.sh
+. /etc/s6-overlay/s6-rc.d/prepare/60-fail2ban.sh
+. /etc/s6-overlay/s6-rc.d/prepare/90-banner.sh
diff --git a/docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/10-usergroup.sh b/docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/10-usergroup.sh
new file mode 100755
index 000000000..ea1001938
--- /dev/null
+++ b/docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/10-usergroup.sh
@@ -0,0 +1,40 @@
+#!/command/with-contenv bash
+# shellcheck shell=bash
+
+set -e
+
+log_info "Configuring $NPMUSER user ..."
+
+if id -u "$NPMUSER" 2>/dev/null; then
+ # user already exists
+ usermod -u "$PUID" "$NPMUSER"
+else
+ # Add user
+ useradd -o -u "$PUID" -U -d "$NPMHOME" -s /bin/false "$NPMUSER"
+fi
+
+log_info "Configuring $NPMGROUP group ..."
+if [ "$(get_group_id "$NPMGROUP")" = '' ]; then
+ # Add group. This will not set the id properly if it's already taken
+ groupadd -f -g "$PGID" "$NPMGROUP"
+else
+ groupmod -o -g "$PGID" "$NPMGROUP"
+fi
+
+# Set the group ID and check it
+groupmod -o -g "$PGID" "$NPMGROUP"
+if [ "$(get_group_id "$NPMGROUP")" != "$PGID" ]; then
+ echo "ERROR: Unable to set group id properly"
+ exit 1
+fi
+
+# Set the group against the user and check it
+usermod -G "$PGID" "$NPMGROUP"
+if [ "$(id -g "$NPMUSER")" != "$PGID" ] ; then
+ echo "ERROR: Unable to set group against the user properly"
+ exit 1
+fi
+
+# Home for user
+mkdir -p "$NPMHOME"
+chown -R "$PUID:$PGID" "$NPMHOME"
diff --git a/docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/20-paths.sh b/docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/20-paths.sh
new file mode 100755
index 000000000..0be2af72f
--- /dev/null
+++ b/docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/20-paths.sh
@@ -0,0 +1,27 @@
+#!/command/with-contenv bash
+# shellcheck shell=bash
+
+set -e
+
+log_info 'Checking paths ...'
+
+# Ensure /data is mounted
+if [ ! -d '/data' ]; then
+ log_fatal '/data is not mounted! Check your docker configuration.'
+fi
+
+# Create required folders
+mkdir -p \
+ /data/logs \
+ /data/nginx \
+ /run/nginx \
+ /tmp/nginx/body \
+ /var/log/nginx \
+ /var/lib/nginx/cache/public \
+ /var/lib/nginx/cache/private \
+ /var/cache/nginx/proxy_temp
+
+touch /var/log/nginx/error.log || true
+chmod 777 /var/log/nginx/error.log || true
+chmod -R 777 /var/cache/nginx || true
+chmod 644 /etc/logrotate.d/nginx-proxy-manager
diff --git a/docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/30-ownership.sh b/docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/30-ownership.sh
new file mode 100755
index 000000000..4a02b5ff0
--- /dev/null
+++ b/docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/30-ownership.sh
@@ -0,0 +1,23 @@
+#!/command/with-contenv bash
+# shellcheck shell=bash
+
+set -e
+
+log_info 'Setting ownership ...'
+
+# root
+chown root /tmp/nginx
+
+# npm user and group
+chown -R "$PUID:$PGID" /data
+chown -R "$PUID:$PGID" /run/nginx
+chown -R "$PUID:$PGID" /tmp/nginx
+chown -R "$PUID:$PGID" /var/cache/nginx
+chown -R "$PUID:$PGID" /var/lib/logrotate
+chown -R "$PUID:$PGID" /var/lib/nginx
+chown -R "$PUID:$PGID" /var/log/nginx
+
+# Don't chown entire /etc/nginx folder as this causes crashes on some systems
+chown -R "$PUID:$PGID" /etc/nginx/nginx
+chown -R "$PUID:$PGID" /etc/nginx/nginx.conf
+chown -R "$PUID:$PGID" /etc/nginx/conf.d
diff --git a/docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/40-dynamic.sh b/docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/40-dynamic.sh
new file mode 100755
index 000000000..5a8741355
--- /dev/null
+++ b/docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/40-dynamic.sh
@@ -0,0 +1,34 @@
+#!/command/with-contenv bash
+# shellcheck shell=bash
+
+set -e
+
+log_info 'Dynamic resolvers ...'
+
+# Dynamically generate resolvers file, if resolver is IPv6, enclose in `[]`
+# thanks @tfmm
+if [ "$(is_true "$NPM_DISABLE_IPV6")" = '1' ]; then
+ echo resolver "$(awk 'BEGIN{ORS=" "} $1=="nameserver" { sub(/%.*$/,"",$2); print ($2 ~ ":")? "["$2"]": $2}' /etc/resolv.conf) ipv6=off valid=10s;" > /etc/nginx/conf.d/include/resolvers.conf
+else
+ echo resolver "$(awk 'BEGIN{ORS=" "} $1=="nameserver" { sub(/%.*$/,"",$2); print ($2 ~ ":")? "["$2"]": $2}' /etc/resolv.conf) valid=10s;" > /etc/nginx/conf.d/include/resolvers.conf
+fi
+
+# Fire off acme.sh wrapper script to "install" itself if required
+acme.sh -h > /dev/null 2>&1
+
+# Generate IP Ranges from online CDN services
+# continue on error, as this could be due to network errors
+# and can be attempted again with a docker restart
+rm -rf /etc/nginx/conf.d/include/ipranges.conf
+set +e
+RC=0
+if [ "$(is_true "$DEVELOPMENT")" = '1' ]; then
+ echo '# ignored in development mode' > /etc/nginx/conf.d/include/ipranges.conf
+else
+ /app/bin/ipranges > /etc/nginx/conf.d/include/ipranges.conf
+ RC=$?
+fi
+if [ "$RC" != '0' ]; then
+ log_warn 'Generation of IP Ranges file has an error. Check output of /etc/nginx/conf.d/include/ipranges.conf for more information.'
+fi
+set -e
diff --git a/docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/50-ipv46.sh b/docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/50-ipv46.sh
new file mode 100755
index 000000000..0199e4e59
--- /dev/null
+++ b/docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/50-ipv46.sh
@@ -0,0 +1,58 @@
+#!/command/with-contenv bash
+# shellcheck shell=bash
+
+# This command reads the `NPM_DISABLE_IPV4` and `NPM_DISABLE_IPV6`` env vars and will either enable
+# or disable ipv6 in all nginx configs based on this setting.
+
+set -e
+
+log_info 'IPv4/IPv6 ...'
+
+DIS_4=$(is_true "$NPM_DISABLE_IPV4")
+DIS_6=$(is_true "$NPM_DISABLE_IPV6")
+
+# Ensure someone didn't misconfigure the settings
+if [ "$DIS_4" = "1" ] && [ "$DIS_6" = "1" ]; then
+ log_fatal 'NPM_DISABLE_IPV4 and NPM_DISABLE_IPV6 cannot both be set!'
+fi
+
+process_folder () {
+ FILES=$(find "$1" -type f -name "*.conf")
+ SED_REGEX=
+
+ # IPV4 ...
+ if [ "$DIS_4" = "1" ]; then
+ echo "Disabling IPV4 in hosts in: $1"
+ SED_REGEX='s/^([^#]*)listen ([0-9]+)/\1#listen \2/g'
+ else
+ echo "Enabling IPV4 in hosts in: $1"
+ SED_REGEX='s/^(\s*)#listen ([0-9]+)/\1listen \2/g'
+ fi
+
+ for FILE in $FILES
+ do
+ echo " - ${FILE}"
+ echo "$(sed -E "$SED_REGEX" "$FILE")" > $FILE
+ done
+
+ # IPV6 ...
+ if [ "$DIS_6" = "1" ]; then
+ echo "Disabling IPV6 in hosts in: $1"
+ SED_REGEX='s/^([^#]*)listen \[::\]/\1#listen [::]/g'
+ else
+ echo "Enabling IPV6 in hosts in: $1"
+ SED_REGEX='s/^(\s*)#listen \[::\]/\1listen [::]/g'
+ fi
+
+ for FILE in $FILES
+ do
+ echo " - ${FILE}"
+ echo "$(sed -E "$SED_REGEX" "$FILE")" > $FILE
+ done
+
+ # ensure the files are still owned by the npm user
+ chown -R "$PUID:$PGID" "$1"
+}
+
+process_folder /etc/nginx/conf.d
+process_folder /data/nginx
diff --git a/docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/60-fail2ban.sh b/docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/60-fail2ban.sh
new file mode 100755
index 000000000..70071f11b
--- /dev/null
+++ b/docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/60-fail2ban.sh
@@ -0,0 +1,13 @@
+#!/command/with-contenv bash
+# shellcheck shell=bash
+
+set -e
+
+log_info 'Fail2ban configuration ...'
+
+mkdir -p /fail2ban/{action.d,filter.d,jail.d,log}
+chown -R "$PUID:$PGID" /fail2ban
+mkdir -p /var/run/fail2ban
+mkdir -p /data/logs/fail2ban
+chown nobody:nogroup /data/logs/fail2ban
+chmod 02755 /data/logs/fail2ban
diff --git a/docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/90-banner.sh b/docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/90-banner.sh
new file mode 100755
index 000000000..4e69c0f83
--- /dev/null
+++ b/docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/90-banner.sh
@@ -0,0 +1,24 @@
+#!/command/with-contenv bash
+# shellcheck shell=bash
+
+set -e
+set +x
+
+. /etc/os-release
+
+echo "
+-------------------------------------
+ _ _ ____ __ __
+| \ | | _ \| \/ |
+| \| | |_) | |\/| |
+| |\ | __/| | | |
+|_| \_|_| |_| |_|
+-------------------------------------
+Version: ${NPM_BUILD_VERSION:-3.0.0-dev} (${NPM_BUILD_COMMIT:-dev}) ${NPM_BUILD_DATE:-0000-00-00}
+User: $NPMUSER PUID:$PUID ID:$(id -u "$NPMUSER") GROUP:$(id -g "$NPMUSER")
+Group: $NPMGROUP PGID:$PGID ID:$(get_group_id "$NPMGROUP")
+OpenResty: ${OPENRESTY_VERSION:-unknown}
+Debian: ${VERSION_ID:-unknown}
+Kernel: $(uname -r)
+-------------------------------------
+"
diff --git a/docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/dependencies.d/base b/docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/dependencies.d/base
new file mode 100644
index 000000000..e69de29bb
diff --git a/docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/type b/docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/type
new file mode 100644
index 000000000..bdd22a185
--- /dev/null
+++ b/docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/type
@@ -0,0 +1 @@
+oneshot
diff --git a/docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/up b/docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/up
new file mode 100644
index 000000000..896a01b60
--- /dev/null
+++ b/docker/rootfs/etc/s6-overlay/s6-rc.d/prepare/up
@@ -0,0 +1,2 @@
+# shellcheck shell=bash
+/etc/s6-overlay/s6-rc.d/prepare/00-all.sh
diff --git a/docker/rootfs/etc/s6-overlay/s6-rc.d/user/contents.d/backend b/docker/rootfs/etc/s6-overlay/s6-rc.d/user/contents.d/backend
new file mode 100644
index 000000000..e69de29bb
diff --git a/docker/rootfs/etc/s6-overlay/s6-rc.d/user/contents.d/fail2ban b/docker/rootfs/etc/s6-overlay/s6-rc.d/user/contents.d/fail2ban
new file mode 100644
index 000000000..e69de29bb
diff --git a/docker/rootfs/etc/s6-overlay/s6-rc.d/user/contents.d/frontend b/docker/rootfs/etc/s6-overlay/s6-rc.d/user/contents.d/frontend
new file mode 100644
index 000000000..e69de29bb
diff --git a/docker/rootfs/etc/s6-overlay/s6-rc.d/user/contents.d/nginx b/docker/rootfs/etc/s6-overlay/s6-rc.d/user/contents.d/nginx
new file mode 100644
index 000000000..e69de29bb
diff --git a/docker/rootfs/etc/s6-overlay/s6-rc.d/user/contents.d/prepare b/docker/rootfs/etc/s6-overlay/s6-rc.d/user/contents.d/prepare
new file mode 100644
index 000000000..e69de29bb
diff --git a/docker/rootfs/etc/services.d/frontend/finish b/docker/rootfs/etc/services.d/frontend/finish
deleted file mode 100755
index bca9a35db..000000000
--- a/docker/rootfs/etc/services.d/frontend/finish
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/execlineb -S1
-if { s6-test ${1} -ne 0 }
-if { s6-test ${1} -ne 256 }
-
-s6-svscanctl -t /var/run/s6/services
-
diff --git a/docker/rootfs/etc/services.d/frontend/run b/docker/rootfs/etc/services.d/frontend/run
deleted file mode 100755
index a666d53ef..000000000
--- a/docker/rootfs/etc/services.d/frontend/run
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/usr/bin/with-contenv bash
-
-# This service is DEVELOPMENT only.
-
-if [ "$DEVELOPMENT" == "true" ]; then
- cd /app/frontend || exit 1
- # If yarn install fails: add --verbose --network-concurrency 1
- yarn install
- yarn watch
-else
- exit 0
-fi
diff --git a/docker/rootfs/etc/services.d/manager/finish b/docker/rootfs/etc/services.d/manager/finish
deleted file mode 100755
index 7d442d6af..000000000
--- a/docker/rootfs/etc/services.d/manager/finish
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/with-contenv bash
-
-s6-svscanctl -t /var/run/s6/services
diff --git a/docker/rootfs/etc/services.d/manager/run b/docker/rootfs/etc/services.d/manager/run
deleted file mode 100755
index e365f4fbb..000000000
--- a/docker/rootfs/etc/services.d/manager/run
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/with-contenv bash
-
-mkdir -p /data/letsencrypt-acme-challenge
-
-cd /app || echo
-
-if [ "$DEVELOPMENT" == "true" ]; then
- cd /app || exit 1
- # If yarn install fails: add --verbose --network-concurrency 1
- yarn install
- node --max_old_space_size=250 --abort_on_uncaught_exception node_modules/nodemon/bin/nodemon.js
-else
- cd /app || exit 1
- while :
- do
- node --abort_on_uncaught_exception --max_old_space_size=250 index.js
- sleep 1
- done
-fi
diff --git a/docker/rootfs/etc/services.d/nginx/finish b/docker/rootfs/etc/services.d/nginx/finish
deleted file mode 120000
index 63b10de42..000000000
--- a/docker/rootfs/etc/services.d/nginx/finish
+++ /dev/null
@@ -1 +0,0 @@
-/bin/true
\ No newline at end of file
diff --git a/docker/rootfs/etc/services.d/nginx/run b/docker/rootfs/etc/services.d/nginx/run
deleted file mode 100755
index 51ca5ea18..000000000
--- a/docker/rootfs/etc/services.d/nginx/run
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/usr/bin/with-contenv bash
-
-# Create required folders
-mkdir -p /tmp/nginx/body \
- /run/nginx \
- /var/log/nginx \
- /data/nginx \
- /data/custom_ssl \
- /data/logs \
- /data/access \
- /data/nginx/default_host \
- /data/nginx/default_www \
- /data/nginx/proxy_host \
- /data/nginx/redirection_host \
- /data/nginx/stream \
- /data/nginx/dead_host \
- /data/nginx/temp \
- /var/lib/nginx/cache/public \
- /var/lib/nginx/cache/private \
- /var/cache/nginx/proxy_temp
-
-touch /var/log/nginx/error.log && chmod 777 /var/log/nginx/error.log && chmod -R 777 /var/cache/nginx
-chown root /tmp/nginx
-
-# Dynamically generate resolvers file, if resolver is IPv6, enclose in `[]`
-# thanks @tfmm
-echo resolver "$(awk 'BEGIN{ORS=" "} $1=="nameserver" { sub(/%.*$/,"",$2); print ($2 ~ ":")? "["$2"]": $2}' /etc/resolv.conf) valid=10s;" > /etc/nginx/conf.d/include/resolvers.conf
-
-# Generate dummy self-signed certificate.
-if [ ! -f /data/nginx/dummycert.pem ] || [ ! -f /data/nginx/dummykey.pem ]
-then
- echo "Generating dummy SSL certificate..."
- openssl req \
- -new \
- -newkey rsa:2048 \
- -days 3650 \
- -nodes \
- -x509 \
- -subj '/O=localhost/OU=localhost/CN=localhost' \
- -keyout /data/nginx/dummykey.pem \
- -out /data/nginx/dummycert.pem
- echo "Complete"
-fi
-
-# Handle IPV6 settings
-/bin/handle-ipv6-setting /etc/nginx/conf.d
-/bin/handle-ipv6-setting /data/nginx
-
-exec nginx
diff --git a/docker/rootfs/fail2ban/action.d/abuseipdb.conf b/docker/rootfs/fail2ban/action.d/abuseipdb.conf
new file mode 100644
index 000000000..1702884fa
--- /dev/null
+++ b/docker/rootfs/fail2ban/action.d/abuseipdb.conf
@@ -0,0 +1,105 @@
+## Version 2022/08/06
+# Fail2ban configuration file
+#
+# Action to report IP address to abuseipdb.com
+# You must sign up to obtain an API key from abuseipdb.com.
+#
+# NOTE: These reports may include sensitive Info.
+# If you want cleaner reports that ensure no user data see the helper script at the below website.
+#
+# IMPORTANT:
+#
+# Reporting an IP of abuse is a serious complaint. Make sure that it is
+# serious. Fail2ban developers and network owners recommend you only use this
+# action for:
+# * The recidive where the IP has been banned multiple times
+# * Where maxretry has been set quite high, beyond the normal user typing
+# password incorrectly.
+# * For filters that have a low likelihood of receiving human errors
+#
+# This action relies on a api_key being added to the above action conf,
+# and the appropriate categories set.
+#
+# Example, for ssh bruteforce (in section [sshd] of `jail.local`):
+# action = %(known/action)s
+# abuseipdb[abuseipdb_apikey="my-api-key", abuseipdb_category="18,22"]
+#
+# See below for categories.
+#
+# Added to fail2ban by Andrew James Collett (ajcollett)
+
+## abuseIPDB Categories, `the abuseipdb_category` MUST be set in the jail.conf action call.
+# Example, for ssh bruteforce: action = %(action_abuseipdb)s[abuseipdb_category="18,22"]
+# ID Title Description
+# 3 Fraud Orders
+# 4 DDoS Attack
+# 9 Open Proxy
+# 10 Web Spam
+# 11 Email Spam
+# 14 Port Scan
+# 18 Brute-Force
+# 19 Bad Web Bot
+# 20 Exploited Host
+# 21 Web App Attack
+# 22 SSH Secure Shell (SSH) abuse. Use this category in combination with more specific categories.
+# 23 IoT Targeted
+# See https://abuseipdb.com/categories for more descriptions
+
+[Definition]
+
+# bypass action for restored tickets
+norestored = 1
+
+# Option: actionstart
+# Notes.: command executed on demand at the first ban (or at the start of Fail2Ban if actionstart_on_demand is set to false).
+# Values: CMD
+#
+actionstart =
+
+# Option: actionstop
+# Notes.: command executed at the stop of jail (or at the end of Fail2Ban)
+# Values: CMD
+#
+actionstop =
+
+# Option: actioncheck
+# Notes.: command executed once before each actionban command
+# Values: CMD
+#
+actioncheck =
+
+# Option: actionban
+# Notes.: command executed when banning an IP. Take care that the
+# command is executed with Fail2Ban user rights.
+#
+# ** IMPORTANT! **
+#
+# By default, this posts directly to AbuseIPDB's API, unfortunately
+# this results in a lot of backslashes/escapes appearing in the
+# reports. This also may include info like your hostname.
+# If you have your own web server with PHP available, you can
+# use my (Shaun's) helper PHP script by commenting out the first #actionban
+# line below, uncommenting the second one, and pointing the URL at
+# wherever you install the helper script. For the PHP helper script, see
+#
+#
+# Tags: See jail.conf(5) man page
+# Values: CMD
+#
+actionban = lgm=$(printf '%%.1000s\n...' ""); curl -sSf "https://api.abuseipdb.com/api/v2/report" -H "Accept: application/json" -H "Key: " --data-urlencode "comment=$lgm" --data-urlencode "ip=" --data "categories="
+
+# Option: actionunban
+# Notes.: command executed when unbanning an IP. Take care that the
+# command is executed with Fail2Ban user rights.
+# Tags: See jail.conf(5) man page
+# Values: CMD
+#
+actionunban =
+
+[Init]
+# Option: abuseipdb_apikey
+# Notes Your API key from abuseipdb.com
+# Values: STRING Default: None
+# Register for abuseipdb [https://www.abuseipdb.com], get api key and set below.
+# You will need to set the category in the action call.
+abuseipdb_apikey =
diff --git a/docker/rootfs/fail2ban/action.d/apf.conf b/docker/rootfs/fail2ban/action.d/apf.conf
new file mode 100644
index 000000000..5ce02626e
--- /dev/null
+++ b/docker/rootfs/fail2ban/action.d/apf.conf
@@ -0,0 +1,26 @@
+## Version 2022/08/06
+# Fail2Ban configuration file
+# https://www.rfxn.com/projects/advanced-policy-firewall/
+#
+# Note: APF doesn't play nicely with other actions. It has been observed to
+# remove bans created by other iptables based actions. If you are going to use
+# this action, use it for all of your jails.
+#
+# DON'T MIX APF and other IPTABLES based actions
+[Definition]
+
+actionstart =
+actionstop =
+actioncheck =
+actionban = apf --deny "banned by Fail2Ban "
+actionunban = apf --remove
+
+[Init]
+
+# Name used in APF configuration
+#
+name = default
+
+# DEV NOTES:
+#
+# Author: Mark McKinstry
diff --git a/docker/rootfs/fail2ban/action.d/apprise-api.conf b/docker/rootfs/fail2ban/action.d/apprise-api.conf
new file mode 100644
index 000000000..767aafc36
--- /dev/null
+++ b/docker/rootfs/fail2ban/action.d/apprise-api.conf
@@ -0,0 +1,60 @@
+## Version 2022/08/06
+# Fail2Ban action configuration for apprise-api
+# Author: Roxedus https://github.com/Roxedus
+# Modified by: nemchik https://github.com/nemchik
+
+[Definition]
+
+# Option: actionstart
+# Notes.: command executed once at the start of Fail2Ban.
+# Values: CMD
+#
+actionstart = curl -X POST -d '{"tag": "", "type": "info", "body": "The jail as been started successfully."}' \
+ -H "Content-Type: application/json" \
+
+
+# Option: actionstop
+# Notes.: command executed once at the end of Fail2Ban
+# Values: CMD
+#
+actionstop = curl -X POST -d '{"tag": "", "type": "info", "body": "The jail has been stopped."}' \
+ -H "Content-Type: application/json" \
+
+
+# Option: actioncheck
+# Notes.: command executed once before each actionban command
+# Values: CMD
+#
+actioncheck =
+
+# Option: actionban
+# Notes.: command executed when banning an IP. Take care that the
+# command is executed with Fail2Ban user rights.
+# Tags: See jail.conf(5) man page
+# Values: CMD
+#
+
+actionban = curl -X POST -d '{"tag": "", "type": "warning", "body": "The IP has just been banned from after attempts."}' \
+ -H "Content-Type: application/json" \
+
+
+# Option: actionunban
+# Notes.: command executed when unbanning an IP. Take care that the
+# command is executed with Fail2Ban user rights.
+# Tags: See jail.conf(5) man page
+# Values: CMD
+#
+
+actionunban = curl -X POST -d '{"tag": "", "type": "success", "body": "The IP has just been unbanned from ."}' \
+ -H "Content-Type: application/json" \
+
+
+[Init]
+
+proto = http
+host = apprise
+port = 8000
+key = apprise
+url = ://:/notify/
+#tag = fail2ban
+tag = all
diff --git a/docker/rootfs/fail2ban/action.d/apprise.conf b/docker/rootfs/fail2ban/action.d/apprise.conf
new file mode 100644
index 000000000..3a6bdebe4
--- /dev/null
+++ b/docker/rootfs/fail2ban/action.d/apprise.conf
@@ -0,0 +1,50 @@
+## Version 2022/08/06
+# Fail2Ban configuration file
+#
+# Author: Chris Caron
+#
+#
+
+[Definition]
+
+# Option: actionstart
+# Notes.: command executed once at the start of Fail2Ban.
+# Values: CMD
+#
+actionstart = printf %%b "The jail as been started successfully." | -t "[Fail2Ban] : started on `uname -n`"
+
+# Option: actionstop
+# Notes.: command executed once at the end of Fail2Ban
+# Values: CMD
+#
+actionstop = printf %%b "The jail has been stopped." | -t "[Fail2Ban] : stopped on `uname -n`"
+
+# Option: actioncheck
+# Notes.: command executed once before each actionban command
+# Values: CMD
+#
+actioncheck =
+
+# Option: actionban
+# Notes.: command executed when banning an IP. Take care that the
+# command is executed with Fail2Ban user rights.
+# Tags: See jail.conf(5) man page
+# Values: CMD
+#
+actionban = printf %%b "The IP has just been banned by Fail2Ban after attempts against " | -n "warning" -t "[Fail2Ban] : banned from `uname -n`"
+
+# Option: actionunban
+# Notes.: command executed when unbanning an IP. Take care that the
+# command is executed with Fail2Ban user rights.
+# Tags: See jail.conf(5) man page
+# Values: CMD
+#
+actionunban =
+
+[Init]
+
+# Define location of the default apprise configuration file to use
+#
+config = /etc/fail2ban/apprise.conf
+#
+apprise = apprise -c ""
diff --git a/docker/rootfs/fail2ban/action.d/blocklist_de.conf b/docker/rootfs/fail2ban/action.d/blocklist_de.conf
new file mode 100644
index 000000000..d2b0b6805
--- /dev/null
+++ b/docker/rootfs/fail2ban/action.d/blocklist_de.conf
@@ -0,0 +1,85 @@
+## Version 2022/08/06
+# Fail2Ban configuration file
+#
+# Author: Steven Hiscocks
+#
+#
+
+# Action to report IP address to blocklist.de
+# Blocklist.de must be signed up to at www.blocklist.de
+# Once registered, one or more servers can be added.
+# This action requires the server 'email address' and the associated apikey.
+#
+# From blocklist.de:
+# www.blocklist.de is a free and voluntary service provided by a
+# Fraud/Abuse-specialist, whose servers are often attacked on SSH-,
+# Mail-Login-, FTP-, Webserver- and other services.
+# The mission is to report all attacks to the abuse departments of the
+# infected PCs/servers to ensure that the responsible provider can inform
+# the customer about the infection and disable them
+#
+# IMPORTANT:
+#
+# Reporting an IP of abuse is a serious complaint. Make sure that it is
+# serious. Fail2ban developers and network owners recommend you only use this
+# action for:
+# * The recidive where the IP has been banned multiple times
+# * Where maxretry has been set quite high, beyond the normal user typing
+# password incorrectly.
+# * For filters that have a low likelihood of receiving human errors
+#
+
+[Definition]
+
+# Option: actionstart
+# Notes.: command executed on demand at the first ban (or at the start of Fail2Ban if actionstart_on_demand is set to false).
+# Values: CMD
+#
+actionstart =
+
+# Option: actionstop
+# Notes.: command executed at the stop of jail (or at the end of Fail2Ban)
+# Values: CMD
+#
+actionstop =
+
+# Option: actioncheck
+# Notes.: command executed once before each actionban command
+# Values: CMD
+#
+actioncheck =
+
+# Option: actionban
+# Notes.: command executed when banning an IP. Take care that the
+# command is executed with Fail2Ban user rights.
+# Tags: See jail.conf(5) man page
+# Values: CMD
+#
+actionban = curl --fail --data-urlencode "server=" --data "apikey=" --data "service=" --data "ip=" --data-urlencode "logs= " --data 'format=text' --user-agent "" "https://www.blocklist.de/en/httpreports.html"
+
+# Option: actionunban
+# Notes.: command executed when unbanning an IP. Take care that the
+# command is executed with Fail2Ban user rights.
+# Tags: See jail.conf(5) man page
+# Values: CMD
+#
+actionunban =
+
+# Option: email
+# Notes server email address, as per blocklist.de account
+# Values: STRING Default: None
+#
+#email =
+
+# Option: apikey
+# Notes your user blocklist.de user account apikey
+# Values: STRING Default: None
+#
+#apikey =
+
+# Option: service
+# Notes service name you are reporting on, typically aligns with filter name
+# see http://www.blocklist.de/en/httpreports.html for full list
+# Values: STRING Default: None
+#
+#service =
diff --git a/docker/rootfs/fail2ban/action.d/bsd-ipfw.conf b/docker/rootfs/fail2ban/action.d/bsd-ipfw.conf
new file mode 100644
index 000000000..9097ed437
--- /dev/null
+++ b/docker/rootfs/fail2ban/action.d/bsd-ipfw.conf
@@ -0,0 +1,95 @@
+## Version 2022/08/06
+# Fail2Ban configuration file
+#
+# Author: Nick Munger
+# Modified by: Ken Menzel
+# Daniel Black (start/stop)
+# Fabian Wenk (many ideas as per fail2ban users list)
+#
+# Ensure firewall_enable="YES" in the top of /etc/rc.conf
+#
+
+[Definition]
+
+# Option: actionstart
+# Notes.: command executed on demand at the first ban (or at the start of Fail2Ban if actionstart_on_demand is set to false).
+# Values: CMD
+#
+actionstart = ipfw show | fgrep -c -m 1 -s 'table()' > /dev/null 2>&1 || (
+ num=$(ipfw show | awk 'BEGIN { b = } { if ($1 == b) { b = $1 + 1 } } END { print b }');
+ ipfw -q add "$num" from table\(\) to me ; echo "$num" > ""
+ )
+
+
+# Option: actionstop
+# Notes.: command executed at the stop of jail (or at the end of Fail2Ban)
+# Values: CMD
+#
+actionstop = [ ! -f ] || ( read num < "" ipfw -q delete $num rm "" )
+
+
+# Option: actioncheck
+# Notes.: command executed once before each actionban command
+# Values: CMD
+#
+actioncheck =
+
+
+# Option: actionban
+# Notes.: command executed when banning an IP. Take care that the
+# command is executed with Fail2Ban user rights.
+# Tags: See jail.conf(5) man page
+# Values: CMD
+#
+# requires an ipfw rule like "deny ip from table(1) to me"
+actionban = e=`ipfw table add 2>&1`; x=$?; [ $x -eq 0 -o "$e" = 'ipfw: setsockopt(IP_FW_TABLE_XADD): File exists' ] || echo "$e" | grep -q "record already exists" || { echo "$e" 1>&2; exit $x; }
+
+
+# Option: actionunban
+# Notes.: command executed when unbanning an IP. Take care that the
+# command is executed with Fail2Ban user rights.
+# Tags: See jail.conf(5) man page
+# Values: CMD
+#
+actionunban = e=`ipfw table delete 2>&1`; x=$?; [ $x -eq 0 -o "$e" = 'ipfw: setsockopt(IP_FW_TABLE_XDEL): No such process' ] || echo "$e" | grep -q "record not found" || { echo "$e" 1>&2; exit $x; }
+
+[Init]
+# Option: table
+# Notes: The ipfw table to use. If a ipfw rule using this table already exists,
+# this action will not create a ipfw rule to block it and the following
+# options will have no effect.
+# Values: NUM
+table = 1
+
+# Option: port
+# Notes.: Specifies port to monitor. Blank indicate block all ports.
+# Values: [ NUM | STRING ]
+#
+port =
+
+# Option: startstatefile
+# Notes: A file to indicate that the table rule that was added. Ensure it is unique per table.
+# Values: STRING
+startstatefile = /var/run/fail2ban/ipfw-started-table_
+
+# Option: block
+# Notes: This is how much to block.
+# Can be "ip", "tcp", "udp" or various other options.
+# Values: STRING
+block = ip
+
+# Option: blocktype
+# Notes.: How to block the traffic. Use a action from man 5 ipfw
+# Common values: deny, unreach port, reset
+# ACTION defination at the top of man ipfw for allowed values.
+# Values: STRING
+#
+blocktype = unreach port
+
+# Option: lowest_rule_num
+# Notes: When fail2ban starts with action and there is no rule for the given table yet
+# then fail2ban will start looking for an empty slot starting with this rule number.
+# Values: NUM
+lowest_rule_num = 111
+
+
diff --git a/docker/rootfs/fail2ban/action.d/cloudflare-token.conf b/docker/rootfs/fail2ban/action.d/cloudflare-token.conf
new file mode 100644
index 000000000..8b83abf9a
--- /dev/null
+++ b/docker/rootfs/fail2ban/action.d/cloudflare-token.conf
@@ -0,0 +1,93 @@
+## Version 2022/12/15
+#
+# Author: Logic-32
+#
+# IMPORTANT
+#
+# Please set jail.local's permission to 640 because it contains your CF API token.
+#
+# This action depends on curl.
+#
+# To get your Cloudflare API token: https://developers.cloudflare.com/api/tokens/create/
+#
+# Cloudflare Firewall API: https://developers.cloudflare.com/firewall/api/cf-firewall-rules/endpoints/
+
+[Definition]
+
+# Option: actionstart
+# Notes.: command executed on demand at the first ban (or at the start of Fail2Ban if actionstart_on_demand is set to false).
+# Values: CMD
+#
+actionstart =
+
+# Option: actionstop
+# Notes.: command executed at the stop of jail (or at the end of Fail2Ban)
+# Values: CMD
+#
+actionstop =
+
+# Option: actioncheck
+# Notes.: command executed once before each actionban command
+# Values: CMD
+#
+actioncheck =
+
+# Option: actionban
+# Notes.: command executed when banning an IP. Take care that the
+# command is executed with Fail2Ban user rights.
+# Tags: IP address
+# number of failures
+# unix timestamp of the ban time
+# Values: CMD
+actionban = curl -s -X POST "<_cf_api_url>" \
+ <_cf_api_prms> \
+ --data '{"mode":"","configuration":{"target":"","value":""},"notes":""}'
+
+# Option: actionunban
+# Notes.: command executed when unbanning an IP. Take care that the
+# command is executed with Fail2Ban user rights.
+# Tags: IP address
+# number of failures
+# unix timestamp of the ban time
+# Values: CMD
+#
+actionunban = id=$(curl -s -X GET "<_cf_api_url>?mode=¬es=&configuration.target=&configuration.value=" \
+ <_cf_api_prms> \
+ | awk -F"[,:}]" '{for(i=1;i<=NF;i++){if($i~/'id'\042/){print $(i+1)}}}' \
+ | tr -d ' "' \
+ | head -n 1)
+ if [ -z "$id" ]; then echo ": id for cannot be found using target "; exit 0; fi; \
+ curl -s -X DELETE "<_cf_api_url>/$id" \
+ <_cf_api_prms> \
+ --data '{"cascade": "none"}'
+
+_cf_api_url = https://api.cloudflare.com/client/v4/zones//firewall/access_rules/rules
+_cf_api_prms = -H "Authorization: Bearer " -H "Content-Type: application/json"
+
+[Init]
+
+# Declare your Cloudflare Authorization Bearer Token in the [DEFAULT] section of your jail.local file.
+
+# The Cloudflare of hte domain you want to manage.
+#
+# cfzone =
+
+# Your personal Cloudflare token. Ideally restricted to just have "Zone.Firewall Services" permissions.
+#
+# cftoken =
+
+# Target of the firewall rule. Default is "ip" (v4).
+#
+cftarget = ip
+
+# The firewall mode Cloudflare should use. Default is "block" (deny access).
+# Consider also "js_challenge" or other "allowed_modes" if you want.
+#
+cfmode = block
+
+# The message to include in the firewall IP banning rule.
+#
+notes = Fail2Ban
+
+[Init?family=inet6]
+cftarget = ip6
diff --git a/docker/rootfs/fail2ban/action.d/cloudflare.conf b/docker/rootfs/fail2ban/action.d/cloudflare.conf
new file mode 100644
index 000000000..e79eb751d
--- /dev/null
+++ b/docker/rootfs/fail2ban/action.d/cloudflare.conf
@@ -0,0 +1,89 @@
+## Version 2022/08/06
+#
+# Author: Mike Rushton
+#
+# IMPORTANT
+#
+# Please set jail.local's permission to 640 because it contains your CF API key.
+#
+# This action depends on curl (and optionally jq).
+# Referenced from http://www.normyee.net/blog/2012/02/02/adding-cloudflare-support-to-fail2ban by NORM YEE
+#
+# To get your CloudFlare API Key: https://www.cloudflare.com/a/account/my-account
+#
+# CloudFlare API error codes: https://www.cloudflare.com/docs/host-api.html#s4.2
+
+[Definition]
+
+# Option: actionstart
+# Notes.: command executed on demand at the first ban (or at the start of Fail2Ban if actionstart_on_demand is set to false).
+# Values: CMD
+#
+actionstart =
+
+# Option: actionstop
+# Notes.: command executed at the stop of jail (or at the end of Fail2Ban)
+# Values: CMD
+#
+actionstop =
+
+# Option: actioncheck
+# Notes.: command executed once before each actionban command
+# Values: CMD
+#
+actioncheck =
+
+# Option: actionban
+# Notes.: command executed when banning an IP. Take care that the
+# command is executed with Fail2Ban user rights.
+# Tags: IP address
+# number of failures
+# unix timestamp of the ban time
+# Values: CMD
+#
+# API v1
+#actionban = curl -s -o /dev/null https://www.cloudflare.com/api_json.html -d 'a=ban' -d 'tkn=' -d 'email=' -d 'key='
+# API v4
+actionban = curl -s -o /dev/null -X POST <_cf_api_prms> \
+ -d '{"mode":"block","configuration":{"target":"","value":""},"notes":"Fail2Ban "}' \
+ <_cf_api_url>
+
+# Option: actionunban
+# Notes.: command executed when unbanning an IP. Take care that the
+# command is executed with Fail2Ban user rights.
+# Tags: IP address
+# number of failures
+# unix timestamp of the ban time
+# Values: CMD
+#
+# API v1
+#actionunban = curl -s -o /dev/null https://www.cloudflare.com/api_json.html -d 'a=nul' -d 'tkn=' -d 'email=' -d 'key='
+# API v4
+actionunban = id=$(curl -s -X GET <_cf_api_prms> \
+ "<_cf_api_url>?mode=block&configuration_target=&configuration_value=&page=1&per_page=1¬es=Fail2Ban%%20" \
+ | { jq -r '.result[0].id' 2>/dev/null || tr -d '\n' | sed -nE 's/^.*"result"\s*:\s*\[\s*\{\s*"id"\s*:\s*"([^"]+)".*$/\1/p'; })
+ if [ -z "$id" ]; then echo ": id for cannot be found"; exit 0; fi;
+ curl -s -o /dev/null -X DELETE <_cf_api_prms> "<_cf_api_url>/$id"
+
+_cf_api_url = https://api.cloudflare.com/client/v4/user/firewall/access_rules/rules
+_cf_api_prms = -H 'X-Auth-Email: ' -H 'X-Auth-Key: ' -H 'Content-Type: application/json'
+
+[Init]
+
+# If you like to use this action with mailing whois lines, you could use the composite action
+# action_cf_mwl predefined in jail.conf, just define in your jail:
+#
+# action = %(action_cf_mwl)s
+# # Your CF account e-mail
+# cfemail =
+# # Your CF API Key
+# cfapikey =
+
+cftoken =
+
+cfuser =
+
+cftarget = ip
+
+[Init?family=inet6]
+cftarget = ip6
diff --git a/docker/rootfs/fail2ban/action.d/complain.conf b/docker/rootfs/fail2ban/action.d/complain.conf
new file mode 100644
index 000000000..50caf8c1d
--- /dev/null
+++ b/docker/rootfs/fail2ban/action.d/complain.conf
@@ -0,0 +1,122 @@
+## Version 2022/08/06
+# Fail2Ban configuration file
+#
+# Author: Russell Odom , Daniel Black
+# Sends a complaint e-mail to addresses listed in the whois record for an
+# offending IP address.
+# This uses the https://abusix.com/contactdb.html to lookup abuse contacts.
+#
+# DEPENDENCIES:
+# This requires the dig command from bind-utils
+#
+# You should provide the in the jail config - lines from the log
+# matching the given IP address will be provided in the complaint as evidence.
+#
+# WARNING
+# -------
+#
+# Please do not use this action unless you are certain that fail2ban
+# does not result in "false positives" for your deployment. False
+# positive reports could serve a mis-favor to the original cause by
+# flooding corresponding contact addresses, and complicating the work
+# of administration personnel responsible for handling (verified) legit
+# complains.
+#
+# Please consider using e.g. sendmail-whois-lines.conf action which
+# would send the reports with relevant information to you, so the
+# report could be first reviewed and then forwarded to a corresponding
+# contact if legit.
+#
+
+
+[INCLUDES]
+
+before = helpers-common.conf
+
+[Definition]
+
+# Used in test cases for coverage internal transformations
+debug = 0
+
+# bypass ban/unban for restored tickets
+norestored = 1
+
+# Option: actionstart
+# Notes.: command executed on demand at the first ban (or at the start of Fail2Ban if actionstart_on_demand is set to false).
+# Values: CMD
+#
+actionstart =
+
+# Option: actionstop
+# Notes.: command executed at the stop of jail (or at the end of Fail2Ban)
+# Values: CMD
+#
+actionstop =
+
+# Option: actioncheck
+# Notes.: command executed once before each actionban command
+# Values: CMD
+#
+actioncheck =
+
+# Option: actionban
+# Notes.: command executed when banning an IP. Take care that the
+# command is executed with Fail2Ban user rights.
+# Tags: See jail.conf(5) man page
+# Values: CMD
+#
+actionban = oifs=${IFS};
+ RESOLVER_ADDR="%(addr_resolver)s"
+ if [ "" -gt 0 ]; then echo "try to resolve $RESOLVER_ADDR"; fi
+ ADDRESSES=$(dig +short -t txt -q $RESOLVER_ADDR | tr -d '"')
+ IFS=,; ADDRESSES=$(echo $ADDRESSES)
+ IFS=${oifs}
+ IP=
+ if [ ! -z "$ADDRESSES" ]; then
+ ( printf %%b "\n"; date '+Note: Local timezone is %%z (%%Z)';
+ printf %%b "\nLines containing failures of (max )\n";
+ %(_grep_logs)s;
+ ) | "Abuse from " $ADDRESSES
+ fi
+
+# Option: actionunban
+# Notes.: command executed when unbanning an IP. Take care that the
+# command is executed with Fail2Ban user rights.
+# Tags: See jail.conf(5) man page
+# Values: CMD
+#
+actionunban =
+
+# Server as resolver used in dig command
+#
+addr_resolver = abuse-contacts.abusix.org
+
+# Default message used for abuse content
+#
+message = Dear Sir/Madam,\n\nWe have detected abuse from the IP address $IP, which according to a abusix.com is on your network. We would appreciate if you would investigate and take action as appropriate.\n\nLog lines are given below, but please ask if you require any further information.\n\n(If you are not the correct person to contact about this please accept our apologies - your e-mail address was extracted from the whois record by an automated process.)\n\n This mail was generated by Fail2Ban.\nThe recipient address of this report was provided by the Abuse Contact DB by abusix.com. abusix.com does not maintain the content of the database. All information which we pass out, derives from the RIR databases and is processed for ease of use. If you want to change or report non working abuse contacts please contact the appropriate RIR. If you have any further question, contact abusix.com directly via email (info@abusix.com). Information about the Abuse Contact Database can be found here: https://abusix.com/global-reporting/abuse-contact-db\nabusix.com is neither responsible nor liable for the content or accuracy of this message.\n
+
+# Path to the log files which contain relevant lines for the abuser IP
+#
+logpath = /dev/null
+
+# Option: mailcmd
+# Notes.: Your system mail command. Is passed 2 args: subject and recipient
+# Values: CMD
+#
+mailcmd = mail -E 'set escape' -s
+
+# Option: mailargs
+# Notes.: Additional arguments to mail command. e.g. for standard Unix mail:
+# CC reports to another address:
+# -c me@example.com
+# Appear to come from a different address - the '--' indicates
+# arguments to be passed to Sendmail:
+# -- -f me@example.com
+# Values: [ STRING ]
+#
+mailargs =
+
+# Number of log lines to include in the email
+#
+#grepmax = 1000
+#grepopts = -m
diff --git a/docker/rootfs/fail2ban/action.d/discord-webhook.conf b/docker/rootfs/fail2ban/action.d/discord-webhook.conf
new file mode 100644
index 000000000..dab6086ae
--- /dev/null
+++ b/docker/rootfs/fail2ban/action.d/discord-webhook.conf
@@ -0,0 +1,44 @@
+## Version 2022/08/06
+# Author: Gilbn from https://technicalramblings.com
+# Adapted Source: https://gist.github.com/sander1/075736a42db2c66bc6ce0fab159ca683
+# Create the Discord Webhook in: Server settings -> Webhooks -> Create Webhooks
+
+[Definition]
+
+# Notify on Startup
+actionstart = curl -X POST "" \
+ -H "Content-Type: application/json" \
+ -d '{"username":"", "content":":white_check_mark: The **[]** jail has started"}'
+
+# Notify on Shutdown
+actionstop = curl -X POST "" \
+ -H "Content-Type: application/json" \
+ -d '{"username":"", "content":":no_entry: The **[]** jail has been stopped"}'
+
+#
+actioncheck =
+
+# Notify on Banned
+actionban = curl -X POST "" \
+ -H "Content-Type: application/json" \
+ -d '{"username":"", "content":" :bell: **[]** :hammer:**BANNED**:hammer: IP: []() for **** seconds after **** failure(s). If you want to unban the IP run: `fail2ban-client unban `"}'
+
+# Notify on Unbanned
+actionunban = curl -X POST "" \
+ -H "Content-Type: application/json" \
+ -d '{"username":"", "content":":bell: **[]** **UNBANNED** IP: []()"}'
+[Init]
+
+# Discord Webhook URL
+webhook = https://discordapp.com/api/webhooks/XXXXXXXXXXXXXXX/XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
+
+# Discord Bot Username
+botname = Fail2Ban
+
+# User ID to ping
+# ex: discord_userid = "<@!1234567890>"
+discord_userid =
+
+# URL prefix for an IP checking website
+# abuseipdb is used by default since there is also an action to report an IP to their API
+url_check_ip = https://www.abuseipdb.com/check/
diff --git a/docker/rootfs/fail2ban/action.d/dshield.conf b/docker/rootfs/fail2ban/action.d/dshield.conf
new file mode 100644
index 000000000..8e7a5a5e0
--- /dev/null
+++ b/docker/rootfs/fail2ban/action.d/dshield.conf
@@ -0,0 +1,208 @@
+## Version 2022/08/06
+# Fail2Ban configuration file
+#
+# Author: Russell Odom
+# Submits attack reports to DShield (http://www.dshield.org/)
+#
+# You MUST configure at least:
+# (the port that's being attacked - use number not name).
+#
+# You SHOULD also provide:
+# (your public IP address, if it's not the address of eth0)
+# (your DShield userID, if you have one - recommended, but reports will
+# be used anonymously if not)
+# (the protocol in use - defaults to tcp)
+#
+# Best practice is to provide and in jail.conf like this:
+# action = dshield[port=1234,protocol=tcp]
+#
+# ...and create "dshield.local" with contents something like this:
+# [Init]
+# myip = 10.0.0.1
+# userid = 12345
+#
+# Other useful configuration values are (you can use for specifying
+# a different sender address for the report e-mails, which should match what is
+# configured at DShield), and // (to
+# configure how often the buffer is flushed).
+#
+
+[Definition]
+
+# bypass ban/unban for restored tickets
+norestored = 1
+
+# Option: actionstart
+# Notes.: command executed on demand at the first ban (or at the start of Fail2Ban if actionstart_on_demand is set to false).
+# Values: CMD
+#
+actionstart =
+
+# Option: actionstop
+# Notes.: command executed at the stop of jail (or at the end of Fail2Ban)
+# Values: CMD
+#
+actionstop = if [ -f .buffer ]; then
+ cat .buffer | "FORMAT DSHIELD USERID TZ `date +%%z | sed 's/\([+-]..\)\(..\)/\1:\2/'` Fail2Ban"
+ date +%%s > .lastsent
+ fi
+ rm -f .buffer .first
+
+# Option: actioncheck
+# Notes.: command executed once before each actionban command
+# Values: CMD
+#
+actioncheck =
+
+# Option: actionban
+# Notes.: command executed when banning an IP. Take care that the
+# command is executed with Fail2Ban user rights.
+# Tags: See jail.conf(5) man page
+# Values: CMD
+#
+# See http://www.dshield.org/specs.html for more on report format/notes
+#
+# Note: We are currently using for the timestamp because no tag is
+# available to indicate the timestamp of the log message(s) which triggered the
+# ban. Therefore the timestamps we are using in the report, whilst often only a
+# few seconds out, are incorrect. See
+# http://sourceforge.net/tracker/index.php?func=detail&aid=2017795&group_id=121032&atid=689047
+#
+actionban = TZONE=`date +%%z | sed 's/\([+-]..\)\(..\)/\1:\2/'`
+ DATETIME="`perl -e '@t=localtime();printf "%%4d-%%02d-%%02d %%02d:%%02d:%%02d",1900+$t[5],$t[4]+1,$t[3],$t[2],$t[1],$t[0]'` $TZONE"
+ PROTOCOL=`awk '{IGNORECASE=1;if($1==""){print $2;exit}}' /etc/protocols`
+ if [ -z "$PROTOCOL" ]; then PROTOCOL=; fi
+ printf %%b "$DATETIME\t\t\t\t\t\t\t$PROTOCOL\t\n" >> .buffer
+ NOW=`date +%%s`
+ if [ ! -f .first ]; then
+ echo | cut -d. -f1 > .first
+ fi
+ if [ ! -f .lastsent ]; then
+ echo 0 > .lastsent
+ fi
+ LOGAGE=$(($NOW - `cat .first`))
+ LASTREPORT=$(($NOW - `cat .lastsent`))
+ LINES=$( wc -l .buffer | awk '{ print $1 }' )
+ if [ $LINES -ge && $LASTREPORT -gt ] || [ $LOGAGE -gt ]; then
+ cat .buffer | "FORMAT DSHIELD USERID TZ $TZONE Fail2Ban"
+ rm -f .buffer .first
+ echo $NOW > .lastsent
+ fi
+
+# Option: actionunban
+# Notes.: command executed when unbanning an IP. Take care that the
+# command is executed with Fail2Ban user rights.
+# Tags: See jail.conf(5) man page
+# Values: CMD
+#
+actionunban = if [ -f .first ]; then
+ NOW=`date +%%s`
+ LOGAGE=$(($NOW - `cat .first`))
+ if [ $LOGAGE -gt ]; then
+ cat .buffer | "FORMAT DSHIELD USERID TZ `date +%%z | sed 's/\([+-]..\)\(..\)/\1:\2/'` Fail2Ban"
+ rm -f .buffer .first
+ echo $NOW > .lastsent
+ fi
+ fi
+
+
+[Init]
+# Option: port
+# Notes.: The target port for the attack (numerical). MUST be provided in the
+# jail config, as it cannot be detected here.
+# Values: [ NUM ]
+#
+port = ???
+
+# Option: userid
+# Notes.: Your DShield user ID. Should be provided either in the jail config or
+# in a .local file.
+# Register at https://secure.dshield.org/register.html
+# Values: [ NUM ]
+#
+userid = 0
+
+# Option: myip
+# Notes.: The target IP for the attack (your public IP). Should be provided
+# either in the jail config or in a .local file unless your PUBLIC IP
+# is the first IP assigned to eth0
+# Values: [ an IP address ] Default: Tries to find the IP address of eth0,
+# which in most cases will be a private IP, and therefore incorrect
+#
+myip = `ip -4 addr show dev eth0 | grep inet | head -n 1 | sed -r 's/.*inet ([0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}).*/\1/'`
+
+# Option: protocol
+# Notes.: The protocol over which the attack is happening
+# Values: [ tcp | udp | icmp | (any other protocol name from /etc/protocols) | NUM ] Default: tcp
+#
+protocol = tcp
+
+# Option: lines
+# Notes.: How many lines to buffer before making a report. Regardless of this,
+# reports are sent a minimum of apart, or if the
+# buffer contains an event over old, or on shutdown
+# Values: [ NUM ]
+#
+lines = 50
+
+# Option: minreportinterval
+# Notes.: Minimum period (in seconds) that must elapse before we submit another
+# batch of reports. DShield request a minimum of 1 hour (3600 secs)
+# between reports.
+# Values: [ NUM ]
+#
+minreportinterval = 3600
+
+# Option: maxbufferage
+# Notes.: Maximum age (in seconds) of the oldest report in the buffer before we
+# submit the batch, even if we haven't reached yet. Note that
+# this is only checked on each ban/unban, and that we always send
+# anything in the buffer on shutdown. Must be greater than
+# Values: [ NUM ]
+#
+maxbufferage = 21600
+
+# Option: srcport
+# Notes.: The source port of the attack. You're unlikely to have this info, so
+# you can leave the default
+# Values: [ NUM ]
+#
+srcport = ???
+
+# Option: tcpflags
+# Notes.: TCP flags on attack. You're unlikely to have this info, so you can
+# leave empty
+# Values: [ STRING ]
+#
+tcpflags =
+
+# Option: mailcmd
+# Notes.: Your system mail command. Is passed 2 args: subject and recipient
+# Values: CMD
+#
+mailcmd = mail -E 'set escape' -s
+
+# Option: mailargs
+# Notes.: Additional arguments to mail command. e.g. for standard Unix mail:
+# CC reports to another address:
+# -c me@example.com
+# Appear to come from a different address (the From address must match
+# the one configured at DShield - the '--' indicates arguments to be
+# passed to Sendmail):
+# -- -f me@example.com
+# Values: [ STRING ]
+#
+mailargs =
+
+# Option: dest
+# Notes.: Destination e-mail address for reports
+# Values: [ STRING ]
+#
+dest = reports@dshield.org
+
+# Option: tmpfile
+# Notes.: Base name of temporary files used for buffering
+# Values: [ STRING ]
+#
+tmpfile = /var/run/fail2ban/tmp-dshield
+
diff --git a/docker/rootfs/fail2ban/action.d/dummy.conf b/docker/rootfs/fail2ban/action.d/dummy.conf
new file mode 100644
index 000000000..cc3af1151
--- /dev/null
+++ b/docker/rootfs/fail2ban/action.d/dummy.conf
@@ -0,0 +1,64 @@
+## Version 2022/08/06
+# Fail2Ban configuration file
+#
+# Author: Cyril Jaquier
+#
+#
+
+[Definition]
+
+# Option: actionstart
+# Notes.: command executed on demand at the first ban (or at the start of Fail2Ban if actionstart_on_demand is set to false).
+# Values: CMD
+#
+actionstart = if [ ! -z '' ]; then touch ; fi;
+ printf %%b "\n"
+ echo "%(debug)s started"
+
+# Option: actionflush
+# Notes.: command executed once to flush (clear) all IPS, by shutdown (resp. by stop of the jail or this action)
+# Values: CMD
+#
+actionflush = printf %%b "-*\n"
+ echo "%(debug)s clear all"
+
+# Option: actionstop
+# Notes.: command executed at the stop of jail (or at the end of Fail2Ban)
+# Values: CMD
+#
+actionstop = if [ ! -z '' ]; then rm -f ; fi;
+ echo "%(debug)s stopped"
+
+# Option: actioncheck
+# Notes.: command executed once before each actionban command
+# Values: CMD
+#
+actioncheck =
+
+# Option: actionban
+# Notes.: command executed when banning an IP. Take care that the
+# command is executed with Fail2Ban user rights.
+# Tags: See jail.conf(5) man page
+# Values: CMD
+#
+actionban = printf %%b "+\n"
+ echo "%(debug)s banned (family: )"
+
+# Option: actionunban
+# Notes.: command executed when unbanning an IP. Take care that the
+# command is executed with Fail2Ban user rights.
+# Tags: See jail.conf(5) man page
+# Values: CMD
+#
+actionunban = printf %%b "-\n"
+ echo "%(debug)s unbanned (family: )"
+
+
+debug = [] --
+
+[Init]
+
+init = 123
+
+target = /var/run/fail2ban/fail2ban.dummy
+to_target = >>
diff --git a/docker/rootfs/fail2ban/action.d/firewallcmd-allports.conf b/docker/rootfs/fail2ban/action.d/firewallcmd-allports.conf
new file mode 100644
index 000000000..34dfda950
--- /dev/null
+++ b/docker/rootfs/fail2ban/action.d/firewallcmd-allports.conf
@@ -0,0 +1,46 @@
+## Version 2022/08/06
+# Fail2Ban configuration file
+#
+# Author: Donald Yandt
+# Because of the --remove-rules in stop this action requires firewalld-0.3.8+
+
+
+[INCLUDES]
+
+before = firewallcmd-common.conf
+
+[Definition]
+
+actionstart = firewall-cmd --direct --add-chain filter f2b-
+ firewall-cmd --direct --add-rule filter f2b- 1000 -j RETURN
+ firewall-cmd --direct --add-rule filter 0 -j f2b-
+
+actionstop = firewall-cmd --direct --remove-rule filter 0 -j f2b-
+ firewall-cmd --direct --remove-rules filter f2b-
+ firewall-cmd --direct --remove-chain filter f2b-
+
+
+# Example actioncheck: firewall-cmd --direct --get-chains ipv4 filter | sed -e 's, ,\n,g' | grep -q '^f2b-recidive$'
+
+actioncheck = firewall-cmd --direct --get-chains filter | sed -e 's, ,\n,g' | grep -q '^f2b-$'
+
+actionban = firewall-cmd --direct --add-rule filter f2b- 0 -s -j
+
+actionunban = firewall-cmd --direct --remove-rule filter f2b- 0 -s -j
+
+# DEV NOTES:
+#
+# Author: Donald Yandt
+# Uses "FirewallD" instead of the "iptables daemon".
+#
+#
+# Output:
+
+# actionstart:
+# $ firewall-cmd --direct --add-chain ipv4 filter f2b-recidive
+# success
+# $ firewall-cmd --direct --add-rule ipv4 filter f2b-recidive 1000 -j RETURN
+# success
+# $ sudo firewall-cmd --direct --add-rule ipv4 filter INPUT_direct 0 -j f2b-recidive
+# success
+
diff --git a/docker/rootfs/fail2ban/action.d/firewallcmd-common.conf b/docker/rootfs/fail2ban/action.d/firewallcmd-common.conf
new file mode 100644
index 000000000..6eacc284e
--- /dev/null
+++ b/docker/rootfs/fail2ban/action.d/firewallcmd-common.conf
@@ -0,0 +1,77 @@
+## Version 2022/08/06
+# Fail2Ban configuration file
+#
+# Author: Donald Yandt
+#
+
+[Init]
+
+# Option: name
+# Notes Default name of the chain
+# Values: STRING
+name = default
+
+# Option port
+# Notes Can also use port numbers separated by a comma and in rich-rules comma and/or space.
+# Value STRING Default: 1:65535
+port = 1:65535
+
+# Option: protocol
+# Notes [ tcp | udp | icmp | all ]
+# Values: STRING Default: tcp
+protocol = tcp
+
+# Option: family(ipv4)
+# Notes specifies the socket address family type
+# Values: STRING
+family = ipv4
+
+# Option: chain
+# Notes specifies the firewalld chain to which the Fail2Ban rules should be
+# added
+# Values: STRING Default: INPUT_direct
+chain = INPUT_direct
+
+# Option: zone
+# Notes use command firewall-cmd --get-active-zones to see a list of all active zones. See firewalld man pages for more information on zones
+# Values: STRING Default: public
+zone = public
+
+# Option: service
+# Notes use command firewall-cmd --get-services to see a list of services available
+# Examples services: amanda-client amanda-k5-client bacula bacula-client dhcp dhcpv6 dhcpv6-client dns freeipa-ldap freeipa-ldaps
+# freeipa-replication ftp high-availability http https imaps ipp ipp-client ipsec iscsi-target kadmin kerberos
+# kpasswd ldap ldaps libvirt libvirt-tls mdns mosh mountd ms-wbt mysql nfs ntp openvpn pmcd pmproxy pmwebapi pmwebapis pop3s
+# postgresql privoxy proxy-dhcp puppetmaster radius rpc-bind rsyncd samba samba-client sane smtp squid ssh synergy
+# telnet tftp tftp-client tinc tor-socks transmission-client vdsm vnc-server wbem-https xmpp-bosh xmpp-client xmpp-local xmpp-server
+# Values: STRING Default: ssh
+service = ssh
+
+# Option: rejecttype (ipv4)
+# Notes See iptables/firewalld man pages for ipv4 reject types.
+# Values: STRING
+rejecttype = icmp-port-unreachable
+
+# Option: blocktype (ipv4/ipv6)
+# Notes See iptables/firewalld man pages for jump targets. Common values are REJECT,
+# REJECT --reject-with icmp-port-unreachable, DROP
+# Values: STRING
+blocktype = REJECT --reject-with
+
+# Option: rich-blocktype (ipv4/ipv6)
+# Notes See firewalld man pages for jump targets. Common values are reject,
+# reject type="icmp-port-unreachable", drop
+# Values: STRING
+rich-blocktype = reject type=''
+
+[Init?family=inet6]
+
+# Option: family(ipv6)
+# Notes specifies the socket address family type
+# Values: STRING
+family = ipv6
+
+# Option: rejecttype (ipv6)
+# Note: See iptables/firewalld man pages for ipv6 reject types.
+# Values: STRING
+rejecttype = icmp6-port-unreachable
diff --git a/docker/rootfs/fail2ban/action.d/firewallcmd-ipset.conf b/docker/rootfs/fail2ban/action.d/firewallcmd-ipset.conf
new file mode 100644
index 000000000..ebb9ef927
--- /dev/null
+++ b/docker/rootfs/fail2ban/action.d/firewallcmd-ipset.conf
@@ -0,0 +1,122 @@
+## Version 2022/08/06
+# Fail2Ban action file for firewall-cmd/ipset
+#
+# This requires:
+# ipset (package: ipset)
+# firewall-cmd (package: firewalld)
+#
+# This is for ipset protocol 6 (and hopefully later) (ipset v6.14).
+# Use ipset -V to see the protocol and version.
+#
+# IPset was a feature introduced in the linux kernel 2.6.39 and 3.0.0 kernels.
+#
+# If you are running on an older kernel you make need to patch in external
+# modules.
+
+[INCLUDES]
+
+before = firewallcmd-common.conf
+
+[Definition]
+
+actionstart = /actionstart>
+ firewall-cmd --direct --add-rule