diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
index 9abd7c6..5e6cec4 100644
--- a/.devcontainer/devcontainer.json
+++ b/.devcontainer/devcontainer.json
@@ -13,16 +13,14 @@
},
// run arguments passed to docker
- "runArgs": [
- "--security-opt", "label=disable"
- ],
+ "runArgs": ["--security-opt", "label=disable"],
"containerEnv": {
- // extensions to preload before other extensions
+ // extensions to preload before other extensions
"PRELOAD_EXTENSIONS": "arrterian.nix-env-selector"
},
- // disable command overriding and updating remote user ID
+ // disable command overriding and updating remote user ID
"overrideCommand": false,
"userEnvProbe": "loginShell",
"updateRemoteUserUID": false,
@@ -31,18 +29,14 @@
"onCreateCommand": "nix-shell --command 'echo done building nix dev environment'",
// Use 'forwardPorts' to make a list of ports inside the container available locally.
- "forwardPorts": [
- 3000
- ],
+ "forwardPorts": [8000],
"customizations": {
"vscode": {
- "extensions": [
- "arrterian.nix-env-selector"
- ]
+ "extensions": ["arrterian.nix-env-selector"]
}
}
// Use 'postCreateCommand' to run commands after the container is created.
// "postCreateCommand": "go version",
-}
\ No newline at end of file
+}
diff --git a/.forgejo/workflows/release.yml b/.forgejo/workflows/release.yml
new file mode 100644
index 0000000..a8c4cd9
--- /dev/null
+++ b/.forgejo/workflows/release.yml
@@ -0,0 +1,43 @@
+name: Release code
+
+on:
+ push:
+ tags:
+ - "**"
+
+jobs:
+ build:
+ runs-on: docker
+ services:
+ dind:
+ image: docker:dind
+ env:
+ DOCKER_TLS_CERTDIR: ""
+
+ steps:
+ - name: Checkout code using Git
+ uses: actions/checkout@main
+
+ - name: Install Docker
+ run: |
+ apt update
+ apt-get install -y docker.io
+ docker context create forgejo --docker host=tcp://dind:2375
+ docker context use forgejo
+
+ - name: Log in to our container registry
+ uses: https://github.com/docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: imterah
+ password: ${{secrets.ACTIONS_PACKAGES_DEPL_KEY}}
+
+ - name: Build Docker image
+ run: |
+ docker build . --tag ghcr.io/imterah/hermes:$GITHUB_REF_NAME
+
+ - name: Upload Docker image
+ run: |
+ docker tag ghcr.io/imterah/hermes:$GITHUB_REF_NAME ghcr.io/imterah/hermes:latest
+ docker push ghcr.io/imterah/hermes:$GITHUB_REF_NAME
+ docker push ghcr.io/imterah/hermes:latest
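
For context, the new Forgejo workflow above fires on any tag push and publishes `ghcr.io/imterah/hermes` for that tag as well as `latest`. A minimal sketch of exercising it (the tag name below is illustrative):

```bash
# Tag the commit to release and push the tag; the workflow then builds
# ghcr.io/imterah/hermes:<tag> and re-tags it as :latest.
git tag v1.2.3
git push origin v1.2.3
```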
diff --git a/.gitconfig b/.gitconfig
deleted file mode 100644
index 39e61fd..0000000
--- a/.gitconfig
+++ /dev/null
@@ -1,2 +0,0 @@
-[core]
- hooksPath = .githooks/
\ No newline at end of file
diff --git a/.githooks/pre-commit b/.githooks/pre-commit
deleted file mode 100755
index ec1c700..0000000
--- a/.githooks/pre-commit
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/usr/bin/env bash
-shopt -s globstar
-set -e
-
-ROOT="$(git rev-parse --show-toplevel)"
-
-pushd $ROOT/api
-npx eslint src
-popd
-
-pushd $ROOT/lom
-npx eslint src
-popd
-
-# Formatting step
-$ROOT/api/node_modules/.bin/prettier --ignore-unknown --write $ROOT/{api,lom}/{eslint.config.js,src/**/*.ts}
-git update-index --again
-exit 0
\ No newline at end of file
diff --git a/.github/labeler.yml b/.github/labeler.yml
deleted file mode 100644
index 5991ffd..0000000
--- a/.github/labeler.yml
+++ /dev/null
@@ -1,16 +0,0 @@
-modifies labeler:
- - .github/labeler.yml
-modifies ci:
- - .github/workflows/*.yml
-modifies docker:
- - '**/Dockerfile'
- - '**/docker-compose.yml'
- - '**/prod-docker.env'
-modifies api:
- - api/**/*
-modifies lom:
- - lom/**/*
-modifies gui:
- - gui/**/*
-modifies nix:
- - '**/*.nix'
\ No newline at end of file
diff --git a/.github/workflows/api-testing.yml b/.github/workflows/api-testing.yml
deleted file mode 100644
index 0438029..0000000
--- a/.github/workflows/api-testing.yml
+++ /dev/null
@@ -1,61 +0,0 @@
-name: CI Testing (API)
-
-on:
- pull_request:
- paths:
- - "api/**"
- push:
- paths:
- - "api/**"
-
-defaults:
- run:
- working-directory: api
-
-env:
- DATABASE_URL: "postgresql://nextnet:nextnet@localhost:5432/nextnet?schema=nextnet"
-
-jobs:
- test:
- runs-on: ubuntu-latest
-
- services:
- postgres:
- image: postgres
- env:
- POSTGRES_PASSWORD: nextnet
- POSTGRES_USER: nextnet
- POSTGRES_DB: nextnet
- options: >-
- --health-cmd pg_isready
- --health-interval 10s
- --health-timeout 5s
- --health-retries 5
- ports:
- - 5432:5432
-
- steps:
- - name: Checkout code using Git
- uses: actions/checkout@main
-
- - name: Install Node
- uses: actions/setup-node@v4
- with:
- node-version: 20.x
-
- - name: Install dependencies
- run: npm install --save-dev
-
- - name: Install prisma
- run: npx prisma migrate dev
-
- - name: Build source
- run: npm run build
-
- - name: Run eslint
- run: npx eslint src
-
- - name: Run prettier to verify if we're formatted or not
- uses: creyD/prettier_action@v4.3
- with:
- dry: true
\ No newline at end of file
diff --git a/.github/workflows/label.yml b/.github/workflows/label.yml
deleted file mode 100644
index 016d996..0000000
--- a/.github/workflows/label.yml
+++ /dev/null
@@ -1,13 +0,0 @@
-name: Label Issues / Pull Requests
-on: [pull_request_target]
-
-jobs:
- label:
- runs-on: ubuntu-latest
- permissions:
- contents: read
- pull-requests: write
- steps:
- - uses: actions/labeler@v4
- with:
- repo-token: "${{ secrets.GITHUB_TOKEN }}"
\ No newline at end of file
diff --git a/.github/workflows/lom-testing.yml b/.github/workflows/lom-testing.yml
deleted file mode 100644
index fbfb6c8..0000000
--- a/.github/workflows/lom-testing.yml
+++ /dev/null
@@ -1,40 +0,0 @@
-name: CI Testing (LOM)
-
-on:
- pull_request:
- paths:
- - "lom/**"
- push:
- paths:
- - "lom/**"
-
-defaults:
- run:
- working-directory: lom
-
-jobs:
- test:
- runs-on: ubuntu-latest
-
- steps:
- - name: Checkout code using Git
- uses: actions/checkout@main
-
- - name: Install Node
- uses: actions/setup-node@v4
- with:
- node-version: 20.x
-
- - name: Install dependencies
- run: npm install --save-dev
-
- - name: Build source
- run: npm run build
-
- - name: Run eslint
- run: npx eslint src
-
- - name: Run prettier to verify if we're formatted or not
- uses: creyD/prettier_action@v4.3
- with:
- dry: true
\ No newline at end of file
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
deleted file mode 100644
index dd75183..0000000
--- a/.github/workflows/release.yml
+++ /dev/null
@@ -1,110 +0,0 @@
-name: Release code
-
-on:
- pull_request:
- types:
- - closed
- paths:
- - VERSION
- workflow_dispatch: null
- push:
- branches: dev
- paths:
- - VERSION
- tags-ignore:
- - '*'
-
-jobs:
- build:
- if: >-
- github.event.pull_request.merged == true || github.event_name == 'workflow_dispatch' || github.event_name == 'push'
- runs-on: ubuntu-latest
- permissions:
- packages: write
- contents: write
-
- steps:
- - name: Checkout code using Git
- uses: actions/checkout@main
-
- - name: Get version information
- id: get_version
- run: echo "version=v$(cat VERSION)" >> $GITHUB_OUTPUT
-
- - name: Make tag on Git
- uses: mathieudutour/github-tag-action@v6.2
- with:
- github_token: ${{secrets.GITHUB_TOKEN}}
- custom_tag: ${{ steps.get_version.outputs.version }}
- tag_prefix: ''
-
- - name: Get previous Git tag
- id: get_prev_version
- run: echo "version=$(git describe --abbrev=0 --tags "$(git describe --abbrev=0 --tags)~") >> $GITHUB_OUTPUT"
-
- - name: Make sparse changelog (1/2)
- uses: heinrichreimer/github-changelog-generator-action@v2.1.1
- with:
- token: '${{secrets.GITHUB_TOKEN}}'
- issues: true
- issuesWoLabels: true
- pullRequests: true
- prWoLabels: true
- sinceTag: ${{steps.get_prev_version.outputs.version}}
- addSections: >-
- {"documentation":{"prefix":"**Documentation:**","labels":["documentation"]}}
-
- - name: Make sparse changelog (2/2)
- run: |
- mv CHANGELOG.md SPARSE_CHANGELOG.md
-
- - name: Make full changelog
- uses: heinrichreimer/github-changelog-generator-action@v2.1.1
- with:
- token: '${{secrets.GITHUB_TOKEN}}'
- issues: true
- issuesWoLabels: true
- pullRequests: true
- prWoLabels: true
- addSections: >-
- {"documentation":{"prefix":"**Documentation:**","labels":["documentation"]}}
-
- - name: Update changelog
- uses: stefanzweifel/git-auto-commit-action@v4
- with:
- commit_message: >-
- chore: Update changelog for tag ${{steps.get_version.outputs.version}}.
- file_pattern: CHANGELOG.md
-
- - name: Release on GitHub
- uses: softprops/action-gh-release@v2
- with:
- body_path: SPARSE_CHANGELOG.md
- files: |
- LICENSE
- docker-compose.yml
- repository: imterah/nextnet
- token: ${{ secrets.GITHUB_TOKEN }}
- tag_name: ${{ steps.get_version.outputs.version }}
-
- - name: Log in to GitHub container registry
- uses: docker/login-action@v3
- with:
- registry: ghcr.io
- username: ${{github.actor}}
- password: ${{secrets.GITHUB_TOKEN}}
-
- - name: Build all docker images
- run: |
- docker build ./api --tag ghcr.io/imterah/nextnet:$(cat VERSION)
- docker build ./lom --tag ghcr.io/imterah/nextnet-lom:$(cat VERSION)
-
- - name: Publish all docker images
- run: |
- docker tag ghcr.io/imterah/nextnet:$(cat VERSION) ghcr.io/imterah/nextnet:latest
- docker push ghcr.io/imterah/nextnet:$(cat VERSION)
- docker push ghcr.io/imterah/nextnet:latest
-
- docker tag ghcr.io/imterah/nextnet-lom:$(cat VERSION) ghcr.io/imterah/nextnet-lom:latest
- docker push ghcr.io/imterah/nextnet-lom:$(cat VERSION)
- docker push ghcr.io/imterah/nextnet-lom:latest
diff --git a/.gitignore b/.gitignore
index 9c54740..d5920a3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,14 @@
-# LOM
-lom/keys
+# Go artifacts
+backend/api/api
+backend/sshbackend/sshbackend
+backend/dummybackend/dummybackend
+backend/sshappbackend/local-code/remote-bin
+backend/sshappbackend/local-code/sshappbackend
+backend/externalbackendlauncher/externalbackendlauncher
+frontend/frontend
+
+# Backup artifacts
+*.json.gz
# Output
out
@@ -135,4 +144,4 @@ dist
.yarn/install-state.gz
.pnp.*
-.tmp
\ No newline at end of file
+.tmp
diff --git a/.prettierrc b/.prettierrc
deleted file mode 100644
index 57562cf..0000000
--- a/.prettierrc
+++ /dev/null
@@ -1,16 +0,0 @@
-{
- "arrowParens": "avoid",
- "bracketSpacing": true,
- "htmlWhitespaceSensitivity": "css",
- "insertPragma": false,
- "jsxSingleQuote": false,
- "printWidth": 80,
- "proseWrap": "always",
- "quoteProps": "as-needed",
- "requirePragma": false,
- "semi": true,
- "singleQuote": false,
- "tabWidth": 2,
- "trailingComma": "all",
- "useTabs": false
-}
\ No newline at end of file
diff --git a/.vscode/extensions.json b/.vscode/extensions.json
index 8010c71..23e7cba 100644
--- a/.vscode/extensions.json
+++ b/.vscode/extensions.json
@@ -1,10 +1,3 @@
{
- "recommendations": [
- "bbenoist.Nix",
- "Prisma.prisma",
-
- "rust-lang.rust-analyzer",
- "tamasfe.even-better-toml",
- "dustypomerleau.rust-syntax",
- ]
-}
\ No newline at end of file
+ "recommendations": ["bbenoist.Nix", "Prisma.prisma", "golang.go"]
+}
diff --git a/.vscode/settings.json b/.vscode/settings.json
index 2ef2b25..1ee37f0 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -11,5 +11,8 @@
"editor.tabSize": 2
},
- "rust-analyzer.linkedProjects": ["./gui/Cargo.toml"]
-}
\ No newline at end of file
+ "[go]": {
+ "editor.insertSpaces": false,
+ "editor.tabSize": 4
+ }
+}
diff --git a/CHANGELOG.md b/CHANGELOG.md
deleted file mode 100644
index c1fa49f..0000000
--- a/CHANGELOG.md
+++ /dev/null
@@ -1,49 +0,0 @@
-# Changelog
-
-## [v1.1.2](https://github.com/imterah/nextnet/tree/v1.1.2) (2024-09-29)
-
-## [v1.1.1](https://github.com/imterah/nextnet/tree/v1.1.1) (2024-09-29)
-
-## [v1.1.0](https://github.com/imterah/nextnet/tree/v1.1.0) (2024-09-22)
-
-**Fixed bugs:**
-
-- Desktop app fails to build on macOS w/ `nix-shell` [\#1](https://github.com/imterah/nextnet/issues/1)
-
-**Merged pull requests:**
-
-- chore\(deps\): bump find-my-way from 8.1.0 to 8.2.2 in /api [\#17](https://github.com/imterah/nextnet/pull/17)
-- chore\(deps\): bump axios from 1.6.8 to 1.7.4 in /lom [\#16](https://github.com/imterah/nextnet/pull/16)
-- chore\(deps\): bump micromatch from 4.0.5 to 4.0.8 in /lom [\#15](https://github.com/imterah/nextnet/pull/15)
-- chore\(deps\): bump braces from 3.0.2 to 3.0.3 in /lom [\#13](https://github.com/imterah/nextnet/pull/13)
-- chore\(deps-dev\): bump braces from 3.0.2 to 3.0.3 in /api [\#11](https://github.com/imterah/nextnet/pull/11)
-- chore\(deps\): bump ws from 8.17.0 to 8.17.1 in /api [\#10](https://github.com/imterah/nextnet/pull/10)
-
-## [v1.0.1](https://github.com/imterah/nextnet/tree/v1.0.1) (2024-05-18)
-
-**Merged pull requests:**
-
-- Adds public key authentication [\#6](https://github.com/imterah/nextnet/pull/6)
-- Add support for eslint [\#5](https://github.com/imterah/nextnet/pull/5)
-
-## [v1.0.0](https://github.com/imterah/nextnet/tree/v1.0.0) (2024-05-10)
-
-## [v0.1.1](https://github.com/imterah/nextnet/tree/v0.1.1) (2024-05-05)
-
-## [v0.1.0](https://github.com/imterah/nextnet/tree/v0.1.0) (2024-05-05)
-
-**Implemented enhancements:**
-
-- \(potentially\) Migrate nix shell to nix flake [\#2](https://github.com/imterah/nextnet/issues/2)
-
-**Closed issues:**
-
-- add precommit hooks [\#3](https://github.com/imterah/nextnet/issues/3)
-
-**Merged pull requests:**
-
-- Reimplements PassyFire as a possible backend [\#4](https://github.com/imterah/nextnet/pull/4)
-
-
-
-\* *This Changelog was automatically generated by [github_changelog_generator](https://github.com/github-changelog-generator/github-changelog-generator)*
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..ae9c525
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,11 @@
+FROM golang:latest AS build
+WORKDIR /build
+COPY . /build
+RUN cd backend; bash build.sh
+FROM busybox:stable-glibc AS run
+WORKDIR /app
+COPY --from=build /build/backend/backends.prod.json /app/backends.json
+COPY --from=build /build/backend/api/api /app/hermes
+COPY --from=build /build/backend/sshbackend/sshbackend /app/sshbackend
+COPY --from=build /build/backend/sshappbackend/local-code/sshappbackend /app/sshappbackend
+ENTRYPOINT ["/app/hermes", "--backends-path", "/app/backends.json"]
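
The new Dockerfile above is a two-stage build: the Go binaries are compiled in the `golang` stage and only the resulting executables plus `backends.prod.json` are copied into a BusyBox runtime image. A minimal local smoke test, assuming Docker is available (the `hermes:local` tag is illustrative):

```bash
# Build the multi-stage image locally.
docker build . --tag hermes:local
# List what actually landed in the runtime stage.
docker run --rm --entrypoint ls hermes:local /app
```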
diff --git a/LICENSE b/LICENSE
index 8914588..a085e23 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,6 +1,6 @@
BSD 3-Clause License
-Copyright (c) 2024, Greyson
+Copyright (c) 2024, Tera
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
diff --git a/README.md b/README.md
index ded45f1..323855d 100644
--- a/README.md
+++ b/README.md
@@ -1,43 +1,46 @@
-NextNet
-
-**NextNet is a dashboard to manage portforwarding technologies.**
+Hermes
+
+Port forwarding across boundaries.
Local Development
> [!NOTE]
-> Using [nix](https://builtwithnix.org) is recommended. If you're not using Nix, install PostgreSQL, Node.JS, and `lsof`.
+> Using [Nix](https://builtwithnix.org) is recommended for the development environment. If you're not using it, install Go. For legacy maintenance tasks, install Node.js.
-1. First, check if you have a working Nix environment if you're using Nix.
+1. Firstly, if you're using Nix, check that you have a working Nix environment.
-2. Run `nix-shell`, or alternatively `source init.sh` if you're not using Nix.
+2. Secondly, run `nix-shell`, or alternatively `source init.sh` if you're not using Nix.
API Development
-1. After that, run the project in development mode: `npm run dev`.
+1. After that, run the backend build script: `./build.sh`.
-2. If you want to explore your database, run `npx prisma studio` to open the database editor.
+2. Then go into the `api/` directory and start it up: `go run . -b ../backends.dev.json`.
Production Deployment
-> [!WARNING]
-> Deploying using docker compose is the only officially supported deployment method. Here be dragons!
+> [!WARNING]
+> Deploying using [Docker Compose](https://docs.docker.com/compose/) is the only officially supported deployment method.
1. Copy and change the default password (or username & db name too) from the template file `prod-docker.env`:
- ```bash
- sed "s/POSTGRES_PASSWORD=nextnet/POSTGRES_PASSWORD=$(head -c 500 /dev/random | sha512sum | cut -d " " -f 1)/g" prod-docker.env > .env
- ```
-
+ ```bash
+ sed -e "s/POSTGRES_PASSWORD=hermes/POSTGRES_PASSWORD=$(head -c 500 /dev/random | sha512sum | cut -d " " -f 1)/g" -e "s/JWT_SECRET=hermes/JWT_SECRET=$(head -c 500 /dev/random | sha512sum | cut -d " " -f 1)/g" prod-docker.env > .env
+ ```
+
2. Build the docker stack: `docker compose --env-file .env up -d`
Troubleshooting
-* I'm using the SSH tunneling, and I can't reach any of the tunnels publicly.
+This has been moved [here](docs/troubleshooting.md).
- - Be sure to enable GatewayPorts in your sshd config (in `/etc/ssh/sshd_config` on most systems). Also, be sure to check your firewall rules on your system and your network.
\ No newline at end of file
+Documentation
+
+Go to the `docs/` folder.
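
As a worked example of the production steps in the README above (both commands are taken from the README; paths assume the repository root):

```bash
# Generate .env with randomized credentials, then bring the stack up.
sed -e "s/POSTGRES_PASSWORD=hermes/POSTGRES_PASSWORD=$(head -c 500 /dev/random | sha512sum | cut -d " " -f 1)/g" \
    -e "s/JWT_SECRET=hermes/JWT_SECRET=$(head -c 500 /dev/random | sha512sum | cut -d " " -f 1)/g" \
    prod-docker.env > .env
docker compose --env-file .env up -d
```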
diff --git a/VERSION b/VERSION
deleted file mode 100644
index 45a1b3f..0000000
--- a/VERSION
+++ /dev/null
@@ -1 +0,0 @@
-1.1.2
diff --git a/api/Dockerfile b/api/Dockerfile
deleted file mode 100644
index 9fe5b69..0000000
--- a/api/Dockerfile
+++ /dev/null
@@ -1,17 +0,0 @@
-FROM node:20.11.1-bookworm
-LABEL org.opencontainers.image.source="https://github.com/greysoh/nextnet"
-WORKDIR /app/
-COPY src /app/src
-COPY prisma /app/prisma
-COPY docker-entrypoint.sh /app/
-COPY tsconfig.json /app/
-COPY package.json /app/
-COPY package-lock.json /app/
-COPY srcpatch.sh /app/
-RUN sh srcpatch.sh
-RUN npm install --save-dev
-RUN npm run build
-RUN rm srcpatch.sh out/**/*.ts out/**/*.map
-RUN rm -rf src
-RUN npm prune --production
-ENTRYPOINT sh docker-entrypoint.sh
\ No newline at end of file
diff --git a/api/dev.env b/api/dev.env
deleted file mode 100644
index 750a474..0000000
--- a/api/dev.env
+++ /dev/null
@@ -1,7 +0,0 @@
-# Environment variables declared in this file are automatically made available to Prisma.
-# See the documentation for more detail: https://pris.ly/d/prisma-schema#accessing-environment-variables-from-the-schema
-
-# Prisma supports the native connection string format for PostgreSQL, MySQL, SQLite, SQL Server, MongoDB and CockroachDB.
-# See the documentation for all the connection string options: https://pris.ly/d/connection-strings
-
-DATABASE_URL="postgresql://nextnet:nextnet@localhost:5432/nextnet?schema=nextnet"
\ No newline at end of file
diff --git a/api/docker-entrypoint.sh b/api/docker-entrypoint.sh
deleted file mode 100644
index b76e0e1..0000000
--- a/api/docker-entrypoint.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/bash
-export NODE_ENV="production"
-
-if [[ "$DATABASE_URL" == "" ]]; then
- export DATABASE_URL="postgresql://$POSTGRES_USERNAME:$POSTGRES_PASSWORD@nextnet-postgres:5432/$POSTGRES_DB?schema=nextnet"
-fi
-
-echo "Welcome to NextNet."
-echo "Running database migrations..."
-npx prisma migrate deploy
-echo "Starting application..."
-npm start
diff --git a/api/eslint.config.js b/api/eslint.config.js
deleted file mode 100644
index 8cd9d68..0000000
--- a/api/eslint.config.js
+++ /dev/null
@@ -1,19 +0,0 @@
-import globals from "globals";
-import pluginJs from "@eslint/js";
-import tseslint from "typescript-eslint";
-
-export default [
- pluginJs.configs.recommended,
- ...tseslint.configs.recommended,
-
- {
- languageOptions: {
- globals: globals.node,
- },
-
- rules: {
- "no-unused-vars": "off",
- "@typescript-eslint/no-unused-vars": "off",
- },
- },
-];
diff --git a/api/init.sh b/api/init.sh
deleted file mode 100755
index ac23849..0000000
--- a/api/init.sh
+++ /dev/null
@@ -1,32 +0,0 @@
-if [ ! -d ".tmp" ]; then
- echo "Please wait while I initialize the backend source for you..."
- cp dev.env .env
- mkdir .tmp
-fi
-
-lsof -i:5432 | grep postgres 2> /dev/null > /dev/null
-IS_PG_RUNNING=$?
-
-if [ ! -f ".tmp/ispginit" ]; then
- if [[ "$IS_PG_RUNNING" == 0 ]]; then
- kill -9 $(lsof -t -i:5432) > /dev/null 2> /dev/null
- fi
-
- echo " - Database not initialized! Initializing database..."
- mkdir .tmp/pglock
-
- initdb -D .tmp/db
- pg_ctl -D .tmp/db -l .tmp/logfile -o "--unix_socket_directories='$PWD/.tmp/pglock/'" start
- createdb -h localhost -p 5432 nextnet
-
- psql -h localhost -p 5432 nextnet -c "CREATE ROLE nextnet WITH LOGIN SUPERUSER PASSWORD 'nextnet';"
-
- npm install --save-dev
- npx prisma migrate dev
-
- touch .tmp/ispginit
-elif [[ "$IS_PG_RUNNING" == 1 ]]; then
- pg_ctl -D .tmp/db -l .tmp/logfile -o "--unix_socket_directories='$PWD/.tmp/pglock/'" start
-fi
-
-source .env # Make sure we actually load correctly
\ No newline at end of file
diff --git a/api/package-lock.json b/api/package-lock.json
deleted file mode 100644
index e2656bd..0000000
--- a/api/package-lock.json
+++ /dev/null
@@ -1,3187 +0,0 @@
-{
- "name": "nextnet",
- "version": "1.1.2",
- "lockfileVersion": 3,
- "requires": true,
- "packages": {
- "": {
- "name": "nextnet",
- "version": "1.1.2",
- "license": "BSD-3-Clause",
- "dependencies": {
- "@fastify/websocket": "^10.0.1",
- "@prisma/client": "^5.13.0",
- "bcrypt": "^5.1.1",
- "fastify": "^4.26.2",
- "node-ssh": "^13.2.0"
- },
- "devDependencies": {
- "@eslint/js": "^9.2.0",
- "@types/bcrypt": "^5.0.2",
- "@types/node": "^20.12.7",
- "@types/ssh2": "^1.15.0",
- "@types/ws": "^8.5.10",
- "eslint": "^8.57.0",
- "globals": "^15.2.0",
- "nodemon": "^3.0.3",
- "pino-pretty": "^11.0.0",
- "prettier": "^3.2.5",
- "prisma": "^5.13.0",
- "typescript": "^5.3.3",
- "typescript-eslint": "^7.8.0"
- }
- },
- "node_modules/@eslint-community/eslint-utils": {
- "version": "4.4.0",
- "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz",
- "integrity": "sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==",
- "dev": true,
- "dependencies": {
- "eslint-visitor-keys": "^3.3.0"
- },
- "engines": {
- "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
- },
- "peerDependencies": {
- "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0"
- }
- },
- "node_modules/@eslint-community/regexpp": {
- "version": "4.10.0",
- "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.10.0.tgz",
- "integrity": "sha512-Cu96Sd2By9mCNTx2iyKOmq10v22jUVQv0lQnlGNy16oE9589yE+QADPbrMGCkA51cKZSg3Pu/aTJVTGfL/qjUA==",
- "dev": true,
- "engines": {
- "node": "^12.0.0 || ^14.0.0 || >=16.0.0"
- }
- },
- "node_modules/@eslint/eslintrc": {
- "version": "2.1.4",
- "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz",
- "integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==",
- "dev": true,
- "dependencies": {
- "ajv": "^6.12.4",
- "debug": "^4.3.2",
- "espree": "^9.6.0",
- "globals": "^13.19.0",
- "ignore": "^5.2.0",
- "import-fresh": "^3.2.1",
- "js-yaml": "^4.1.0",
- "minimatch": "^3.1.2",
- "strip-json-comments": "^3.1.1"
- },
- "engines": {
- "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
- },
- "funding": {
- "url": "https://opencollective.com/eslint"
- }
- },
- "node_modules/@eslint/eslintrc/node_modules/ajv": {
- "version": "6.12.6",
- "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
- "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
- "dev": true,
- "dependencies": {
- "fast-deep-equal": "^3.1.1",
- "fast-json-stable-stringify": "^2.0.0",
- "json-schema-traverse": "^0.4.1",
- "uri-js": "^4.2.2"
- },
- "funding": {
- "type": "github",
- "url": "https://github.com/sponsors/epoberezkin"
- }
- },
- "node_modules/@eslint/eslintrc/node_modules/globals": {
- "version": "13.24.0",
- "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz",
- "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==",
- "dev": true,
- "dependencies": {
- "type-fest": "^0.20.2"
- },
- "engines": {
- "node": ">=8"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/@eslint/eslintrc/node_modules/json-schema-traverse": {
- "version": "0.4.1",
- "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
- "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==",
- "dev": true
- },
- "node_modules/@eslint/js": {
- "version": "9.2.0",
- "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.2.0.tgz",
- "integrity": "sha512-ESiIudvhoYni+MdsI8oD7skpprZ89qKocwRM2KEvhhBJ9nl5MRh7BXU5GTod7Mdygq+AUl+QzId6iWJKR/wABA==",
- "dev": true,
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- }
- },
- "node_modules/@fastify/ajv-compiler": {
- "version": "3.5.0",
- "resolved": "https://registry.npmjs.org/@fastify/ajv-compiler/-/ajv-compiler-3.5.0.tgz",
- "integrity": "sha512-ebbEtlI7dxXF5ziNdr05mOY8NnDiPB1XvAlLHctRt/Rc+C3LCOVW5imUVX+mhvUhnNzmPBHewUkOFgGlCxgdAA==",
- "dependencies": {
- "ajv": "^8.11.0",
- "ajv-formats": "^2.1.1",
- "fast-uri": "^2.0.0"
- }
- },
- "node_modules/@fastify/error": {
- "version": "3.4.1",
- "resolved": "https://registry.npmjs.org/@fastify/error/-/error-3.4.1.tgz",
- "integrity": "sha512-wWSvph+29GR783IhmvdwWnN4bUxTD01Vm5Xad4i7i1VuAOItLvbPAb69sb0IQ2N57yprvhNIwAP5B6xfKTmjmQ=="
- },
- "node_modules/@fastify/fast-json-stringify-compiler": {
- "version": "4.3.0",
- "resolved": "https://registry.npmjs.org/@fastify/fast-json-stringify-compiler/-/fast-json-stringify-compiler-4.3.0.tgz",
- "integrity": "sha512-aZAXGYo6m22Fk1zZzEUKBvut/CIIQe/BapEORnxiD5Qr0kPHqqI69NtEMCme74h+at72sPhbkb4ZrLd1W3KRLA==",
- "dependencies": {
- "fast-json-stringify": "^5.7.0"
- }
- },
- "node_modules/@fastify/merge-json-schemas": {
- "version": "0.1.1",
- "resolved": "https://registry.npmjs.org/@fastify/merge-json-schemas/-/merge-json-schemas-0.1.1.tgz",
- "integrity": "sha512-fERDVz7topgNjtXsJTTW1JKLy0rhuLRcquYqNR9rF7OcVpCa2OVW49ZPDIhaRRCaUuvVxI+N416xUoF76HNSXA==",
- "dependencies": {
- "fast-deep-equal": "^3.1.3"
- }
- },
- "node_modules/@fastify/websocket": {
- "version": "10.0.1",
- "resolved": "https://registry.npmjs.org/@fastify/websocket/-/websocket-10.0.1.tgz",
- "integrity": "sha512-8/pQIxTPRD8U94aILTeJ+2O3el/r19+Ej5z1O1mXlqplsUH7KzCjAI0sgd5DM/NoPjAi5qLFNIjgM5+9/rGSNw==",
- "dependencies": {
- "duplexify": "^4.1.2",
- "fastify-plugin": "^4.0.0",
- "ws": "^8.0.0"
- }
- },
- "node_modules/@humanwhocodes/config-array": {
- "version": "0.11.14",
- "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.14.tgz",
- "integrity": "sha512-3T8LkOmg45BV5FICb15QQMsyUSWrQ8AygVfC7ZG32zOalnqrilm018ZVCw0eapXux8FtA33q8PSRSstjee3jSg==",
- "dev": true,
- "dependencies": {
- "@humanwhocodes/object-schema": "^2.0.2",
- "debug": "^4.3.1",
- "minimatch": "^3.0.5"
- },
- "engines": {
- "node": ">=10.10.0"
- }
- },
- "node_modules/@humanwhocodes/module-importer": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz",
- "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==",
- "dev": true,
- "engines": {
- "node": ">=12.22"
- },
- "funding": {
- "type": "github",
- "url": "https://github.com/sponsors/nzakas"
- }
- },
- "node_modules/@humanwhocodes/object-schema": {
- "version": "2.0.3",
- "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz",
- "integrity": "sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==",
- "dev": true
- },
- "node_modules/@mapbox/node-pre-gyp": {
- "version": "1.0.11",
- "resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.11.tgz",
- "integrity": "sha512-Yhlar6v9WQgUp/He7BdgzOz8lqMQ8sU+jkCq7Wx8Myc5YFJLbEe7lgui/V7G1qB1DJykHSGwreceSaD60Y0PUQ==",
- "dependencies": {
- "detect-libc": "^2.0.0",
- "https-proxy-agent": "^5.0.0",
- "make-dir": "^3.1.0",
- "node-fetch": "^2.6.7",
- "nopt": "^5.0.0",
- "npmlog": "^5.0.1",
- "rimraf": "^3.0.2",
- "semver": "^7.3.5",
- "tar": "^6.1.11"
- },
- "bin": {
- "node-pre-gyp": "bin/node-pre-gyp"
- }
- },
- "node_modules/@mapbox/node-pre-gyp/node_modules/nopt": {
- "version": "5.0.0",
- "resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz",
- "integrity": "sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==",
- "dependencies": {
- "abbrev": "1"
- },
- "bin": {
- "nopt": "bin/nopt.js"
- },
- "engines": {
- "node": ">=6"
- }
- },
- "node_modules/@nodelib/fs.scandir": {
- "version": "2.1.5",
- "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
- "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==",
- "dev": true,
- "dependencies": {
- "@nodelib/fs.stat": "2.0.5",
- "run-parallel": "^1.1.9"
- },
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/@nodelib/fs.stat": {
- "version": "2.0.5",
- "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz",
- "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==",
- "dev": true,
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/@nodelib/fs.walk": {
- "version": "1.2.8",
- "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz",
- "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==",
- "dev": true,
- "dependencies": {
- "@nodelib/fs.scandir": "2.1.5",
- "fastq": "^1.6.0"
- },
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/@prisma/client": {
- "version": "5.13.0",
- "resolved": "https://registry.npmjs.org/@prisma/client/-/client-5.13.0.tgz",
- "integrity": "sha512-uYdfpPncbZ/syJyiYBwGZS8Gt1PTNoErNYMuqHDa2r30rNSFtgTA/LXsSk55R7pdRTMi5pHkeP9B14K6nHmwkg==",
- "hasInstallScript": true,
- "engines": {
- "node": ">=16.13"
- },
- "peerDependencies": {
- "prisma": "*"
- },
- "peerDependenciesMeta": {
- "prisma": {
- "optional": true
- }
- }
- },
- "node_modules/@prisma/debug": {
- "version": "5.13.0",
- "resolved": "https://registry.npmjs.org/@prisma/debug/-/debug-5.13.0.tgz",
- "integrity": "sha512-699iqlEvzyCj9ETrXhs8o8wQc/eVW+FigSsHpiskSFydhjVuwTJEfj/nIYqTaWFYuxiWQRfm3r01meuW97SZaQ==",
- "devOptional": true
- },
- "node_modules/@prisma/engines": {
- "version": "5.13.0",
- "resolved": "https://registry.npmjs.org/@prisma/engines/-/engines-5.13.0.tgz",
- "integrity": "sha512-hIFLm4H1boj6CBZx55P4xKby9jgDTeDG0Jj3iXtwaaHmlD5JmiDkZhh8+DYWkTGchu+rRF36AVROLnk0oaqhHw==",
- "devOptional": true,
- "hasInstallScript": true,
- "dependencies": {
- "@prisma/debug": "5.13.0",
- "@prisma/engines-version": "5.13.0-23.b9a39a7ee606c28e3455d0fd60e78c3ba82b1a2b",
- "@prisma/fetch-engine": "5.13.0",
- "@prisma/get-platform": "5.13.0"
- }
- },
- "node_modules/@prisma/engines-version": {
- "version": "5.13.0-23.b9a39a7ee606c28e3455d0fd60e78c3ba82b1a2b",
- "resolved": "https://registry.npmjs.org/@prisma/engines-version/-/engines-version-5.13.0-23.b9a39a7ee606c28e3455d0fd60e78c3ba82b1a2b.tgz",
- "integrity": "sha512-AyUuhahTINGn8auyqYdmxsN+qn0mw3eg+uhkp8zwknXYIqoT3bChG4RqNY/nfDkPvzWAPBa9mrDyBeOnWSgO6A==",
- "devOptional": true
- },
- "node_modules/@prisma/fetch-engine": {
- "version": "5.13.0",
- "resolved": "https://registry.npmjs.org/@prisma/fetch-engine/-/fetch-engine-5.13.0.tgz",
- "integrity": "sha512-Yh4W+t6YKyqgcSEB3odBXt7QyVSm0OQlBSldQF2SNXtmOgMX8D7PF/fvH6E6qBCpjB/yeJLy/FfwfFijoHI6sA==",
- "devOptional": true,
- "dependencies": {
- "@prisma/debug": "5.13.0",
- "@prisma/engines-version": "5.13.0-23.b9a39a7ee606c28e3455d0fd60e78c3ba82b1a2b",
- "@prisma/get-platform": "5.13.0"
- }
- },
- "node_modules/@prisma/get-platform": {
- "version": "5.13.0",
- "resolved": "https://registry.npmjs.org/@prisma/get-platform/-/get-platform-5.13.0.tgz",
- "integrity": "sha512-B/WrQwYTzwr7qCLifQzYOmQhZcFmIFhR81xC45gweInSUn2hTEbfKUPd2keAog+y5WI5xLAFNJ3wkXplvSVkSw==",
- "devOptional": true,
- "dependencies": {
- "@prisma/debug": "5.13.0"
- }
- },
- "node_modules/@types/bcrypt": {
- "version": "5.0.2",
- "resolved": "https://registry.npmjs.org/@types/bcrypt/-/bcrypt-5.0.2.tgz",
- "integrity": "sha512-6atioO8Y75fNcbmj0G7UjI9lXN2pQ/IGJ2FWT4a/btd0Lk9lQalHLKhkgKVZ3r+spnmWUKfbMi1GEe9wyHQfNQ==",
- "dev": true,
- "dependencies": {
- "@types/node": "*"
- }
- },
- "node_modules/@types/json-schema": {
- "version": "7.0.15",
- "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz",
- "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==",
- "dev": true
- },
- "node_modules/@types/node": {
- "version": "20.12.7",
- "resolved": "https://registry.npmjs.org/@types/node/-/node-20.12.7.tgz",
- "integrity": "sha512-wq0cICSkRLVaf3UGLMGItu/PtdY7oaXaI/RVU+xliKVOtRna3PRY57ZDfztpDL0n11vfymMUnXv8QwYCO7L1wg==",
- "dev": true,
- "dependencies": {
- "undici-types": "~5.26.4"
- }
- },
- "node_modules/@types/semver": {
- "version": "7.5.8",
- "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.8.tgz",
- "integrity": "sha512-I8EUhyrgfLrcTkzV3TSsGyl1tSuPrEDzr0yd5m90UgNxQkyDXULk3b6MlQqTCpZpNtWe1K0hzclnZkTcLBe2UQ==",
- "dev": true
- },
- "node_modules/@types/ssh2": {
- "version": "1.15.0",
- "resolved": "https://registry.npmjs.org/@types/ssh2/-/ssh2-1.15.0.tgz",
- "integrity": "sha512-YcT8jP5F8NzWeevWvcyrrLB3zcneVjzYY9ZDSMAMboI+2zR1qYWFhwsyOFVzT7Jorn67vqxC0FRiw8YyG9P1ww==",
- "dev": true,
- "dependencies": {
- "@types/node": "^18.11.18"
- }
- },
- "node_modules/@types/ssh2/node_modules/@types/node": {
- "version": "18.19.31",
- "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.31.tgz",
- "integrity": "sha512-ArgCD39YpyyrtFKIqMDvjz79jto5fcI/SVUs2HwB+f0dAzq68yqOdyaSivLiLugSziTpNXLQrVb7RZFmdZzbhA==",
- "dev": true,
- "dependencies": {
- "undici-types": "~5.26.4"
- }
- },
- "node_modules/@types/ws": {
- "version": "8.5.10",
- "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.5.10.tgz",
- "integrity": "sha512-vmQSUcfalpIq0R9q7uTo2lXs6eGIpt9wtnLdMv9LVpIjCA/+ufZRozlVoVelIYixx1ugCBKDhn89vnsEGOCx9A==",
- "dev": true,
- "dependencies": {
- "@types/node": "*"
- }
- },
- "node_modules/@typescript-eslint/eslint-plugin": {
- "version": "7.8.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.8.0.tgz",
- "integrity": "sha512-gFTT+ezJmkwutUPmB0skOj3GZJtlEGnlssems4AjkVweUPGj7jRwwqg0Hhg7++kPGJqKtTYx+R05Ftww372aIg==",
- "dev": true,
- "dependencies": {
- "@eslint-community/regexpp": "^4.10.0",
- "@typescript-eslint/scope-manager": "7.8.0",
- "@typescript-eslint/type-utils": "7.8.0",
- "@typescript-eslint/utils": "7.8.0",
- "@typescript-eslint/visitor-keys": "7.8.0",
- "debug": "^4.3.4",
- "graphemer": "^1.4.0",
- "ignore": "^5.3.1",
- "natural-compare": "^1.4.0",
- "semver": "^7.6.0",
- "ts-api-utils": "^1.3.0"
- },
- "engines": {
- "node": "^18.18.0 || >=20.0.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- },
- "peerDependencies": {
- "@typescript-eslint/parser": "^7.0.0",
- "eslint": "^8.56.0"
- },
- "peerDependenciesMeta": {
- "typescript": {
- "optional": true
- }
- }
- },
- "node_modules/@typescript-eslint/parser": {
- "version": "7.8.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.8.0.tgz",
- "integrity": "sha512-KgKQly1pv0l4ltcftP59uQZCi4HUYswCLbTqVZEJu7uLX8CTLyswqMLqLN+2QFz4jCptqWVV4SB7vdxcH2+0kQ==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/scope-manager": "7.8.0",
- "@typescript-eslint/types": "7.8.0",
- "@typescript-eslint/typescript-estree": "7.8.0",
- "@typescript-eslint/visitor-keys": "7.8.0",
- "debug": "^4.3.4"
- },
- "engines": {
- "node": "^18.18.0 || >=20.0.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- },
- "peerDependencies": {
- "eslint": "^8.56.0"
- },
- "peerDependenciesMeta": {
- "typescript": {
- "optional": true
- }
- }
- },
- "node_modules/@typescript-eslint/scope-manager": {
- "version": "7.8.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.8.0.tgz",
- "integrity": "sha512-viEmZ1LmwsGcnr85gIq+FCYI7nO90DVbE37/ll51hjv9aG+YZMb4WDE2fyWpUR4O/UrhGRpYXK/XajcGTk2B8g==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/types": "7.8.0",
- "@typescript-eslint/visitor-keys": "7.8.0"
- },
- "engines": {
- "node": "^18.18.0 || >=20.0.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/@typescript-eslint/type-utils": {
- "version": "7.8.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.8.0.tgz",
- "integrity": "sha512-H70R3AefQDQpz9mGv13Uhi121FNMh+WEaRqcXTX09YEDky21km4dV1ZXJIp8QjXc4ZaVkXVdohvWDzbnbHDS+A==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/typescript-estree": "7.8.0",
- "@typescript-eslint/utils": "7.8.0",
- "debug": "^4.3.4",
- "ts-api-utils": "^1.3.0"
- },
- "engines": {
- "node": "^18.18.0 || >=20.0.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- },
- "peerDependencies": {
- "eslint": "^8.56.0"
- },
- "peerDependenciesMeta": {
- "typescript": {
- "optional": true
- }
- }
- },
- "node_modules/@typescript-eslint/types": {
- "version": "7.8.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.8.0.tgz",
- "integrity": "sha512-wf0peJ+ZGlcH+2ZS23aJbOv+ztjeeP8uQ9GgwMJGVLx/Nj9CJt17GWgWWoSmoRVKAX2X+7fzEnAjxdvK2gqCLw==",
- "dev": true,
- "engines": {
- "node": "^18.18.0 || >=20.0.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/@typescript-eslint/typescript-estree": {
- "version": "7.8.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.8.0.tgz",
- "integrity": "sha512-5pfUCOwK5yjPaJQNy44prjCwtr981dO8Qo9J9PwYXZ0MosgAbfEMB008dJ5sNo3+/BN6ytBPuSvXUg9SAqB0dg==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/types": "7.8.0",
- "@typescript-eslint/visitor-keys": "7.8.0",
- "debug": "^4.3.4",
- "globby": "^11.1.0",
- "is-glob": "^4.0.3",
- "minimatch": "^9.0.4",
- "semver": "^7.6.0",
- "ts-api-utils": "^1.3.0"
- },
- "engines": {
- "node": "^18.18.0 || >=20.0.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- },
- "peerDependenciesMeta": {
- "typescript": {
- "optional": true
- }
- }
- },
- "node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
- "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
- "dev": true,
- "dependencies": {
- "balanced-match": "^1.0.0"
- }
- },
- "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": {
- "version": "9.0.4",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz",
- "integrity": "sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==",
- "dev": true,
- "dependencies": {
- "brace-expansion": "^2.0.1"
- },
- "engines": {
- "node": ">=16 || 14 >=14.17"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
- "node_modules/@typescript-eslint/utils": {
- "version": "7.8.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.8.0.tgz",
- "integrity": "sha512-L0yFqOCflVqXxiZyXrDr80lnahQfSOfc9ELAAZ75sqicqp2i36kEZZGuUymHNFoYOqxRT05up760b4iGsl02nQ==",
- "dev": true,
- "dependencies": {
- "@eslint-community/eslint-utils": "^4.4.0",
- "@types/json-schema": "^7.0.15",
- "@types/semver": "^7.5.8",
- "@typescript-eslint/scope-manager": "7.8.0",
- "@typescript-eslint/types": "7.8.0",
- "@typescript-eslint/typescript-estree": "7.8.0",
- "semver": "^7.6.0"
- },
- "engines": {
- "node": "^18.18.0 || >=20.0.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- },
- "peerDependencies": {
- "eslint": "^8.56.0"
- }
- },
- "node_modules/@typescript-eslint/visitor-keys": {
- "version": "7.8.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.8.0.tgz",
- "integrity": "sha512-q4/gibTNBQNA0lGyYQCmWRS5D15n8rXh4QjK3KV+MBPlTYHpfBUT3D3PaPR/HeNiI9W6R7FvlkcGhNyAoP+caA==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/types": "7.8.0",
- "eslint-visitor-keys": "^3.4.3"
- },
- "engines": {
- "node": "^18.18.0 || >=20.0.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/@ungap/structured-clone": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz",
- "integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==",
- "dev": true
- },
- "node_modules/abbrev": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz",
- "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q=="
- },
- "node_modules/abort-controller": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz",
- "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==",
- "dependencies": {
- "event-target-shim": "^5.0.0"
- },
- "engines": {
- "node": ">=6.5"
- }
- },
- "node_modules/abstract-logging": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/abstract-logging/-/abstract-logging-2.0.1.tgz",
- "integrity": "sha512-2BjRTZxTPvheOvGbBslFSYOUkr+SjPtOnrLP33f+VIWLzezQpZcqVg7ja3L4dBXmzzgwT+a029jRx5PCi3JuiA=="
- },
- "node_modules/acorn": {
- "version": "8.11.3",
- "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.11.3.tgz",
- "integrity": "sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg==",
- "dev": true,
- "bin": {
- "acorn": "bin/acorn"
- },
- "engines": {
- "node": ">=0.4.0"
- }
- },
- "node_modules/acorn-jsx": {
- "version": "5.3.2",
- "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz",
- "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==",
- "dev": true,
- "peerDependencies": {
- "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0"
- }
- },
- "node_modules/agent-base": {
- "version": "6.0.2",
- "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz",
- "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==",
- "dependencies": {
- "debug": "4"
- },
- "engines": {
- "node": ">= 6.0.0"
- }
- },
- "node_modules/ajv": {
- "version": "8.12.0",
- "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz",
- "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==",
- "dependencies": {
- "fast-deep-equal": "^3.1.1",
- "json-schema-traverse": "^1.0.0",
- "require-from-string": "^2.0.2",
- "uri-js": "^4.2.2"
- },
- "funding": {
- "type": "github",
- "url": "https://github.com/sponsors/epoberezkin"
- }
- },
- "node_modules/ajv-formats": {
- "version": "2.1.1",
- "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-2.1.1.tgz",
- "integrity": "sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==",
- "dependencies": {
- "ajv": "^8.0.0"
- },
- "peerDependencies": {
- "ajv": "^8.0.0"
- },
- "peerDependenciesMeta": {
- "ajv": {
- "optional": true
- }
- }
- },
- "node_modules/ansi-regex": {
- "version": "5.0.1",
- "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
- "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/ansi-styles": {
- "version": "4.3.0",
- "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
- "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
- "dev": true,
- "dependencies": {
- "color-convert": "^2.0.1"
- },
- "engines": {
- "node": ">=8"
- },
- "funding": {
- "url": "https://github.com/chalk/ansi-styles?sponsor=1"
- }
- },
- "node_modules/anymatch": {
- "version": "3.1.3",
- "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz",
- "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==",
- "dev": true,
- "dependencies": {
- "normalize-path": "^3.0.0",
- "picomatch": "^2.0.4"
- },
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/aproba": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/aproba/-/aproba-2.0.0.tgz",
- "integrity": "sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ=="
- },
- "node_modules/archy": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/archy/-/archy-1.0.0.tgz",
- "integrity": "sha512-Xg+9RwCg/0p32teKdGMPTPnVXKD0w3DfHnFTficozsAgsvq2XenPJq/MYpzzQ/v8zrOyJn6Ds39VA4JIDwFfqw=="
- },
- "node_modules/are-we-there-yet": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz",
- "integrity": "sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==",
- "dependencies": {
- "delegates": "^1.0.0",
- "readable-stream": "^3.6.0"
- },
- "engines": {
- "node": ">=10"
- }
- },
- "node_modules/are-we-there-yet/node_modules/readable-stream": {
- "version": "3.6.2",
- "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz",
- "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==",
- "dependencies": {
- "inherits": "^2.0.3",
- "string_decoder": "^1.1.1",
- "util-deprecate": "^1.0.1"
- },
- "engines": {
- "node": ">= 6"
- }
- },
- "node_modules/argparse": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
- "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==",
- "dev": true
- },
- "node_modules/array-union": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz",
- "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==",
- "dev": true,
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/asn1": {
- "version": "0.2.6",
- "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz",
- "integrity": "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==",
- "dependencies": {
- "safer-buffer": "~2.1.0"
- }
- },
- "node_modules/atomic-sleep": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/atomic-sleep/-/atomic-sleep-1.0.0.tgz",
- "integrity": "sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ==",
- "engines": {
- "node": ">=8.0.0"
- }
- },
- "node_modules/avvio": {
- "version": "8.3.0",
- "resolved": "https://registry.npmjs.org/avvio/-/avvio-8.3.0.tgz",
- "integrity": "sha512-VBVH0jubFr9LdFASy/vNtm5giTrnbVquWBhT0fyizuNK2rQ7e7ONU2plZQWUNqtE1EmxFEb+kbSkFRkstiaS9Q==",
- "dependencies": {
- "@fastify/error": "^3.3.0",
- "archy": "^1.0.0",
- "debug": "^4.0.0",
- "fastq": "^1.17.1"
- }
- },
- "node_modules/balanced-match": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
- "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="
- },
- "node_modules/base64-js": {
- "version": "1.5.1",
- "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz",
- "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==",
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/feross"
- },
- {
- "type": "patreon",
- "url": "https://www.patreon.com/feross"
- },
- {
- "type": "consulting",
- "url": "https://feross.org/support"
- }
- ]
- },
- "node_modules/bcrypt": {
- "version": "5.1.1",
- "resolved": "https://registry.npmjs.org/bcrypt/-/bcrypt-5.1.1.tgz",
- "integrity": "sha512-AGBHOG5hPYZ5Xl9KXzU5iKq9516yEmvCKDg3ecP5kX2aB6UqTeXZxk2ELnDgDm6BQSMlLt9rDB4LoSMx0rYwww==",
- "hasInstallScript": true,
- "dependencies": {
- "@mapbox/node-pre-gyp": "^1.0.11",
- "node-addon-api": "^5.0.0"
- },
- "engines": {
- "node": ">= 10.0.0"
- }
- },
- "node_modules/bcrypt-pbkdf": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz",
- "integrity": "sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==",
- "dependencies": {
- "tweetnacl": "^0.14.3"
- }
- },
- "node_modules/binary-extensions": {
- "version": "2.2.0",
- "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz",
- "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==",
- "dev": true,
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/brace-expansion": {
- "version": "1.1.11",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
- "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
- "dependencies": {
- "balanced-match": "^1.0.0",
- "concat-map": "0.0.1"
- }
- },
- "node_modules/braces": {
- "version": "3.0.3",
- "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
- "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
- "dev": true,
- "dependencies": {
- "fill-range": "^7.1.1"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/buffer": {
- "version": "6.0.3",
- "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz",
- "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==",
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/feross"
- },
- {
- "type": "patreon",
- "url": "https://www.patreon.com/feross"
- },
- {
- "type": "consulting",
- "url": "https://feross.org/support"
- }
- ],
- "dependencies": {
- "base64-js": "^1.3.1",
- "ieee754": "^1.2.1"
- }
- },
- "node_modules/buildcheck": {
- "version": "0.0.6",
- "resolved": "https://registry.npmjs.org/buildcheck/-/buildcheck-0.0.6.tgz",
- "integrity": "sha512-8f9ZJCUXyT1M35Jx7MkBgmBMo3oHTTBIPLiY9xyL0pl3T5RwcPEY8cUHr5LBNfu/fk6c2T4DJZuVM/8ZZT2D2A==",
- "optional": true,
- "engines": {
- "node": ">=10.0.0"
- }
- },
- "node_modules/callsites": {
- "version": "3.1.0",
- "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz",
- "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==",
- "dev": true,
- "engines": {
- "node": ">=6"
- }
- },
- "node_modules/chalk": {
- "version": "4.1.2",
- "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
- "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==",
- "dev": true,
- "dependencies": {
- "ansi-styles": "^4.1.0",
- "supports-color": "^7.1.0"
- },
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/chalk/chalk?sponsor=1"
- }
- },
- "node_modules/chalk/node_modules/has-flag": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
- "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
- "dev": true,
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/chalk/node_modules/supports-color": {
- "version": "7.2.0",
- "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
- "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
- "dev": true,
- "dependencies": {
- "has-flag": "^4.0.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/chokidar": {
- "version": "3.5.3",
- "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz",
- "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==",
- "dev": true,
- "funding": [
- {
- "type": "individual",
- "url": "https://paulmillr.com/funding/"
- }
- ],
- "dependencies": {
- "anymatch": "~3.1.2",
- "braces": "~3.0.2",
- "glob-parent": "~5.1.2",
- "is-binary-path": "~2.1.0",
- "is-glob": "~4.0.1",
- "normalize-path": "~3.0.0",
- "readdirp": "~3.6.0"
- },
- "engines": {
- "node": ">= 8.10.0"
- },
- "optionalDependencies": {
- "fsevents": "~2.3.2"
- }
- },
- "node_modules/chownr": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz",
- "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==",
- "engines": {
- "node": ">=10"
- }
- },
- "node_modules/color-convert": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
- "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
- "dev": true,
- "dependencies": {
- "color-name": "~1.1.4"
- },
- "engines": {
- "node": ">=7.0.0"
- }
- },
- "node_modules/color-name": {
- "version": "1.1.4",
- "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
- "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
- "dev": true
- },
- "node_modules/color-support": {
- "version": "1.1.3",
- "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz",
- "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==",
- "bin": {
- "color-support": "bin.js"
- }
- },
- "node_modules/colorette": {
- "version": "2.0.20",
- "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz",
- "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==",
- "dev": true
- },
- "node_modules/concat-map": {
- "version": "0.0.1",
- "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
- "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg=="
- },
- "node_modules/console-control-strings": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz",
- "integrity": "sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ=="
- },
- "node_modules/cookie": {
- "version": "0.6.0",
- "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz",
- "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==",
- "engines": {
- "node": ">= 0.6"
- }
- },
- "node_modules/cpu-features": {
- "version": "0.0.9",
- "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.9.tgz",
- "integrity": "sha512-AKjgn2rP2yJyfbepsmLfiYcmtNn/2eUvocUyM/09yB0YDiz39HteK/5/T4Onf0pmdYDMgkBoGvRLvEguzyL7wQ==",
- "hasInstallScript": true,
- "optional": true,
- "dependencies": {
- "buildcheck": "~0.0.6",
- "nan": "^2.17.0"
- },
- "engines": {
- "node": ">=10.0.0"
- }
- },
- "node_modules/cross-spawn": {
- "version": "7.0.3",
- "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
- "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==",
- "dev": true,
- "dependencies": {
- "path-key": "^3.1.0",
- "shebang-command": "^2.0.0",
- "which": "^2.0.1"
- },
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/dateformat": {
- "version": "4.6.3",
- "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-4.6.3.tgz",
- "integrity": "sha512-2P0p0pFGzHS5EMnhdxQi7aJN+iMheud0UhG4dlE1DLAlvL8JHjJJTX/CSm4JXwV0Ka5nGk3zC5mcb5bUQUxxMA==",
- "dev": true,
- "engines": {
- "node": "*"
- }
- },
- "node_modules/debug": {
- "version": "4.3.4",
- "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
- "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==",
- "dependencies": {
- "ms": "2.1.2"
- },
- "engines": {
- "node": ">=6.0"
- },
- "peerDependenciesMeta": {
- "supports-color": {
- "optional": true
- }
- }
- },
- "node_modules/deep-is": {
- "version": "0.1.4",
- "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz",
- "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==",
- "dev": true
- },
- "node_modules/delegates": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz",
- "integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ=="
- },
- "node_modules/detect-libc": {
- "version": "2.0.3",
- "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.3.tgz",
- "integrity": "sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw==",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/dir-glob": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz",
- "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==",
- "dev": true,
- "dependencies": {
- "path-type": "^4.0.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/doctrine": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz",
- "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==",
- "dev": true,
- "dependencies": {
- "esutils": "^2.0.2"
- },
- "engines": {
- "node": ">=6.0.0"
- }
- },
- "node_modules/duplexify": {
- "version": "4.1.3",
- "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.3.tgz",
- "integrity": "sha512-M3BmBhwJRZsSx38lZyhE53Csddgzl5R7xGJNk7CVddZD6CcmwMCH8J+7AprIrQKH7TonKxaCjcv27Qmf+sQ+oA==",
- "dependencies": {
- "end-of-stream": "^1.4.1",
- "inherits": "^2.0.3",
- "readable-stream": "^3.1.1",
- "stream-shift": "^1.0.2"
- }
- },
- "node_modules/duplexify/node_modules/readable-stream": {
- "version": "3.6.2",
- "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz",
- "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==",
- "dependencies": {
- "inherits": "^2.0.3",
- "string_decoder": "^1.1.1",
- "util-deprecate": "^1.0.1"
- },
- "engines": {
- "node": ">= 6"
- }
- },
- "node_modules/emoji-regex": {
- "version": "8.0.0",
- "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
- "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="
- },
- "node_modules/end-of-stream": {
- "version": "1.4.4",
- "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz",
- "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==",
- "dependencies": {
- "once": "^1.4.0"
- }
- },
- "node_modules/escape-string-regexp": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz",
- "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==",
- "dev": true,
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/eslint": {
- "version": "8.57.0",
- "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.0.tgz",
- "integrity": "sha512-dZ6+mexnaTIbSBZWgou51U6OmzIhYM2VcNdtiTtI7qPNZm35Akpr0f6vtw3w1Kmn5PYo+tZVfh13WrhpS6oLqQ==",
- "dev": true,
- "dependencies": {
- "@eslint-community/eslint-utils": "^4.2.0",
- "@eslint-community/regexpp": "^4.6.1",
- "@eslint/eslintrc": "^2.1.4",
- "@eslint/js": "8.57.0",
- "@humanwhocodes/config-array": "^0.11.14",
- "@humanwhocodes/module-importer": "^1.0.1",
- "@nodelib/fs.walk": "^1.2.8",
- "@ungap/structured-clone": "^1.2.0",
- "ajv": "^6.12.4",
- "chalk": "^4.0.0",
- "cross-spawn": "^7.0.2",
- "debug": "^4.3.2",
- "doctrine": "^3.0.0",
- "escape-string-regexp": "^4.0.0",
- "eslint-scope": "^7.2.2",
- "eslint-visitor-keys": "^3.4.3",
- "espree": "^9.6.1",
- "esquery": "^1.4.2",
- "esutils": "^2.0.2",
- "fast-deep-equal": "^3.1.3",
- "file-entry-cache": "^6.0.1",
- "find-up": "^5.0.0",
- "glob-parent": "^6.0.2",
- "globals": "^13.19.0",
- "graphemer": "^1.4.0",
- "ignore": "^5.2.0",
- "imurmurhash": "^0.1.4",
- "is-glob": "^4.0.0",
- "is-path-inside": "^3.0.3",
- "js-yaml": "^4.1.0",
- "json-stable-stringify-without-jsonify": "^1.0.1",
- "levn": "^0.4.1",
- "lodash.merge": "^4.6.2",
- "minimatch": "^3.1.2",
- "natural-compare": "^1.4.0",
- "optionator": "^0.9.3",
- "strip-ansi": "^6.0.1",
- "text-table": "^0.2.0"
- },
- "bin": {
- "eslint": "bin/eslint.js"
- },
- "engines": {
- "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
- },
- "funding": {
- "url": "https://opencollective.com/eslint"
- }
- },
- "node_modules/eslint-scope": {
- "version": "7.2.2",
- "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz",
- "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==",
- "dev": true,
- "dependencies": {
- "esrecurse": "^4.3.0",
- "estraverse": "^5.2.0"
- },
- "engines": {
- "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
- },
- "funding": {
- "url": "https://opencollective.com/eslint"
- }
- },
- "node_modules/eslint-visitor-keys": {
- "version": "3.4.3",
- "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz",
- "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==",
- "dev": true,
- "engines": {
- "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
- },
- "funding": {
- "url": "https://opencollective.com/eslint"
- }
- },
- "node_modules/eslint/node_modules/@eslint/js": {
- "version": "8.57.0",
- "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.0.tgz",
- "integrity": "sha512-Ys+3g2TaW7gADOJzPt83SJtCDhMjndcDMFVQ/Tj9iA1BfJzFKD9mAUXT3OenpuPHbI6P/myECxRJrofUsDx/5g==",
- "dev": true,
- "engines": {
- "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
- }
- },
- "node_modules/eslint/node_modules/ajv": {
- "version": "6.12.6",
- "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
- "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
- "dev": true,
- "dependencies": {
- "fast-deep-equal": "^3.1.1",
- "fast-json-stable-stringify": "^2.0.0",
- "json-schema-traverse": "^0.4.1",
- "uri-js": "^4.2.2"
- },
- "funding": {
- "type": "github",
- "url": "https://github.com/sponsors/epoberezkin"
- }
- },
- "node_modules/eslint/node_modules/glob-parent": {
- "version": "6.0.2",
- "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz",
- "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==",
- "dev": true,
- "dependencies": {
- "is-glob": "^4.0.3"
- },
- "engines": {
- "node": ">=10.13.0"
- }
- },
- "node_modules/eslint/node_modules/globals": {
- "version": "13.24.0",
- "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz",
- "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==",
- "dev": true,
- "dependencies": {
- "type-fest": "^0.20.2"
- },
- "engines": {
- "node": ">=8"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/eslint/node_modules/json-schema-traverse": {
- "version": "0.4.1",
- "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
- "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==",
- "dev": true
- },
- "node_modules/espree": {
- "version": "9.6.1",
- "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz",
- "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==",
- "dev": true,
- "dependencies": {
- "acorn": "^8.9.0",
- "acorn-jsx": "^5.3.2",
- "eslint-visitor-keys": "^3.4.1"
- },
- "engines": {
- "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
- },
- "funding": {
- "url": "https://opencollective.com/eslint"
- }
- },
- "node_modules/esquery": {
- "version": "1.5.0",
- "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz",
- "integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==",
- "dev": true,
- "dependencies": {
- "estraverse": "^5.1.0"
- },
- "engines": {
- "node": ">=0.10"
- }
- },
- "node_modules/esrecurse": {
- "version": "4.3.0",
- "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz",
- "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==",
- "dev": true,
- "dependencies": {
- "estraverse": "^5.2.0"
- },
- "engines": {
- "node": ">=4.0"
- }
- },
- "node_modules/estraverse": {
- "version": "5.3.0",
- "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz",
- "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==",
- "dev": true,
- "engines": {
- "node": ">=4.0"
- }
- },
- "node_modules/esutils": {
- "version": "2.0.3",
- "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz",
- "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==",
- "dev": true,
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/event-target-shim": {
- "version": "5.0.1",
- "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz",
- "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==",
- "engines": {
- "node": ">=6"
- }
- },
- "node_modules/events": {
- "version": "3.3.0",
- "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz",
- "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==",
- "engines": {
- "node": ">=0.8.x"
- }
- },
- "node_modules/fast-content-type-parse": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/fast-content-type-parse/-/fast-content-type-parse-1.1.0.tgz",
- "integrity": "sha512-fBHHqSTFLVnR61C+gltJuE5GkVQMV0S2nqUO8TJ+5Z3qAKG8vAx4FKai1s5jq/inV1+sREynIWSuQ6HgoSXpDQ=="
- },
- "node_modules/fast-copy": {
- "version": "3.0.2",
- "resolved": "https://registry.npmjs.org/fast-copy/-/fast-copy-3.0.2.tgz",
- "integrity": "sha512-dl0O9Vhju8IrcLndv2eU4ldt1ftXMqqfgN4H1cpmGV7P6jeB9FwpN9a2c8DPGE1Ys88rNUJVYDHq73CGAGOPfQ==",
- "dev": true
- },
- "node_modules/fast-decode-uri-component": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/fast-decode-uri-component/-/fast-decode-uri-component-1.0.1.tgz",
- "integrity": "sha512-WKgKWg5eUxvRZGwW8FvfbaH7AXSh2cL+3j5fMGzUMCxWBJ3dV3a7Wz8y2f/uQ0e3B6WmodD3oS54jTQ9HVTIIg=="
- },
- "node_modules/fast-deep-equal": {
- "version": "3.1.3",
- "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
- "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="
- },
- "node_modules/fast-glob": {
- "version": "3.3.2",
- "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz",
- "integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==",
- "dev": true,
- "dependencies": {
- "@nodelib/fs.stat": "^2.0.2",
- "@nodelib/fs.walk": "^1.2.3",
- "glob-parent": "^5.1.2",
- "merge2": "^1.3.0",
- "micromatch": "^4.0.4"
- },
- "engines": {
- "node": ">=8.6.0"
- }
- },
- "node_modules/fast-json-stable-stringify": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz",
- "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==",
- "dev": true
- },
- "node_modules/fast-json-stringify": {
- "version": "5.14.1",
- "resolved": "https://registry.npmjs.org/fast-json-stringify/-/fast-json-stringify-5.14.1.tgz",
- "integrity": "sha512-J1Grbf0oSXV3lKsBf3itz1AvRk43qVrx3Ac10sNvi3LZaz1by4oDdYKFrJycPhS8+Gb7y8rgV/Jqw1UZVjyNvw==",
- "dependencies": {
- "@fastify/merge-json-schemas": "^0.1.0",
- "ajv": "^8.10.0",
- "ajv-formats": "^3.0.1",
- "fast-deep-equal": "^3.1.3",
- "fast-uri": "^2.1.0",
- "json-schema-ref-resolver": "^1.0.1",
- "rfdc": "^1.2.0"
- }
- },
- "node_modules/fast-json-stringify/node_modules/ajv-formats": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-3.0.1.tgz",
- "integrity": "sha512-8iUql50EUR+uUcdRQ3HDqa6EVyo3docL8g5WJ3FNcWmu62IbkGUue/pEyLBW8VGKKucTPgqeks4fIU1DA4yowQ==",
- "dependencies": {
- "ajv": "^8.0.0"
- },
- "peerDependencies": {
- "ajv": "^8.0.0"
- },
- "peerDependenciesMeta": {
- "ajv": {
- "optional": true
- }
- }
- },
- "node_modules/fast-levenshtein": {
- "version": "2.0.6",
- "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz",
- "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==",
- "dev": true
- },
- "node_modules/fast-querystring": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/fast-querystring/-/fast-querystring-1.1.2.tgz",
- "integrity": "sha512-g6KuKWmFXc0fID8WWH0jit4g0AGBoJhCkJMb1RmbsSEUNvQ+ZC8D6CUZ+GtF8nMzSPXnhiePyyqqipzNNEnHjg==",
- "dependencies": {
- "fast-decode-uri-component": "^1.0.1"
- }
- },
- "node_modules/fast-redact": {
- "version": "3.5.0",
- "resolved": "https://registry.npmjs.org/fast-redact/-/fast-redact-3.5.0.tgz",
- "integrity": "sha512-dwsoQlS7h9hMeYUq1W++23NDcBLV4KqONnITDV9DjfS3q1SgDGVrBdvvTLUotWtPSD7asWDV9/CmsZPy8Hf70A==",
- "engines": {
- "node": ">=6"
- }
- },
- "node_modules/fast-safe-stringify": {
- "version": "2.1.1",
- "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz",
- "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==",
- "dev": true
- },
- "node_modules/fast-uri": {
- "version": "2.3.0",
- "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-2.3.0.tgz",
- "integrity": "sha512-eel5UKGn369gGEWOqBShmFJWfq/xSJvsgDzgLYC845GneayWvXBf0lJCBn5qTABfewy1ZDPoaR5OZCP+kssfuw=="
- },
- "node_modules/fastify": {
- "version": "4.26.2",
- "resolved": "https://registry.npmjs.org/fastify/-/fastify-4.26.2.tgz",
- "integrity": "sha512-90pjTuPGrfVKtdpLeLzND5nyC4woXZN5VadiNQCicj/iJU4viNHKhsAnb7jmv1vu2IzkLXyBiCzdWuzeXgQ5Ug==",
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/fastify"
- },
- {
- "type": "opencollective",
- "url": "https://opencollective.com/fastify"
- }
- ],
- "dependencies": {
- "@fastify/ajv-compiler": "^3.5.0",
- "@fastify/error": "^3.4.0",
- "@fastify/fast-json-stringify-compiler": "^4.3.0",
- "abstract-logging": "^2.0.1",
- "avvio": "^8.3.0",
- "fast-content-type-parse": "^1.1.0",
- "fast-json-stringify": "^5.8.0",
- "find-my-way": "^8.0.0",
- "light-my-request": "^5.11.0",
- "pino": "^8.17.0",
- "process-warning": "^3.0.0",
- "proxy-addr": "^2.0.7",
- "rfdc": "^1.3.0",
- "secure-json-parse": "^2.7.0",
- "semver": "^7.5.4",
- "toad-cache": "^3.3.0"
- }
- },
- "node_modules/fastify-plugin": {
- "version": "4.5.1",
- "resolved": "https://registry.npmjs.org/fastify-plugin/-/fastify-plugin-4.5.1.tgz",
- "integrity": "sha512-stRHYGeuqpEZTL1Ef0Ovr2ltazUT9g844X5z/zEBFLG8RYlpDiOCIG+ATvYEp+/zmc7sN29mcIMp8gvYplYPIQ=="
- },
- "node_modules/fastq": {
- "version": "1.17.1",
- "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.17.1.tgz",
- "integrity": "sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==",
- "dependencies": {
- "reusify": "^1.0.4"
- }
- },
- "node_modules/file-entry-cache": {
- "version": "6.0.1",
- "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz",
- "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==",
- "dev": true,
- "dependencies": {
- "flat-cache": "^3.0.4"
- },
- "engines": {
- "node": "^10.12.0 || >=12.0.0"
- }
- },
- "node_modules/fill-range": {
- "version": "7.1.1",
- "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
- "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
- "dev": true,
- "dependencies": {
- "to-regex-range": "^5.0.1"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/find-my-way": {
- "version": "8.2.2",
- "resolved": "https://registry.npmjs.org/find-my-way/-/find-my-way-8.2.2.tgz",
- "integrity": "sha512-Dobi7gcTEq8yszimcfp/R7+owiT4WncAJ7VTTgFH1jYJ5GaG1FbhjwDG820hptN0QDFvzVY3RfCzdInvGPGzjA==",
- "dependencies": {
- "fast-deep-equal": "^3.1.3",
- "fast-querystring": "^1.0.0",
- "safe-regex2": "^3.1.0"
- },
- "engines": {
- "node": ">=14"
- }
- },
- "node_modules/find-up": {
- "version": "5.0.0",
- "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz",
- "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==",
- "dev": true,
- "dependencies": {
- "locate-path": "^6.0.0",
- "path-exists": "^4.0.0"
- },
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/flat-cache": {
- "version": "3.2.0",
- "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.2.0.tgz",
- "integrity": "sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==",
- "dev": true,
- "dependencies": {
- "flatted": "^3.2.9",
- "keyv": "^4.5.3",
- "rimraf": "^3.0.2"
- },
- "engines": {
- "node": "^10.12.0 || >=12.0.0"
- }
- },
- "node_modules/flatted": {
- "version": "3.3.1",
- "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.1.tgz",
- "integrity": "sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw==",
- "dev": true
- },
- "node_modules/forwarded": {
- "version": "0.2.0",
- "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz",
- "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==",
- "engines": {
- "node": ">= 0.6"
- }
- },
- "node_modules/fs-minipass": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz",
- "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==",
- "dependencies": {
- "minipass": "^3.0.0"
- },
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/fs-minipass/node_modules/minipass": {
- "version": "3.3.6",
- "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz",
- "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==",
- "dependencies": {
- "yallist": "^4.0.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/fs.realpath": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
- "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw=="
- },
- "node_modules/fsevents": {
- "version": "2.3.3",
- "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
- "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==",
- "dev": true,
- "hasInstallScript": true,
- "optional": true,
- "os": [
- "darwin"
- ],
- "engines": {
- "node": "^8.16.0 || ^10.6.0 || >=11.0.0"
- }
- },
- "node_modules/gauge": {
- "version": "3.0.2",
- "resolved": "https://registry.npmjs.org/gauge/-/gauge-3.0.2.tgz",
- "integrity": "sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==",
- "dependencies": {
- "aproba": "^1.0.3 || ^2.0.0",
- "color-support": "^1.1.2",
- "console-control-strings": "^1.0.0",
- "has-unicode": "^2.0.1",
- "object-assign": "^4.1.1",
- "signal-exit": "^3.0.0",
- "string-width": "^4.2.3",
- "strip-ansi": "^6.0.1",
- "wide-align": "^1.1.2"
- },
- "engines": {
- "node": ">=10"
- }
- },
- "node_modules/glob": {
- "version": "7.2.3",
- "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
- "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
- "dependencies": {
- "fs.realpath": "^1.0.0",
- "inflight": "^1.0.4",
- "inherits": "2",
- "minimatch": "^3.1.1",
- "once": "^1.3.0",
- "path-is-absolute": "^1.0.0"
- },
- "engines": {
- "node": "*"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
- "node_modules/glob-parent": {
- "version": "5.1.2",
- "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
- "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==",
- "dev": true,
- "dependencies": {
- "is-glob": "^4.0.1"
- },
- "engines": {
- "node": ">= 6"
- }
- },
- "node_modules/globals": {
- "version": "15.2.0",
- "resolved": "https://registry.npmjs.org/globals/-/globals-15.2.0.tgz",
- "integrity": "sha512-FQ5YwCHZM3nCmtb5FzEWwdUc9K5d3V/w9mzcz8iGD1gC/aOTHc6PouYu0kkKipNJqHAT7m51sqzQjEjIP+cK0A==",
- "dev": true,
- "engines": {
- "node": ">=18"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/globby": {
- "version": "11.1.0",
- "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz",
- "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==",
- "dev": true,
- "dependencies": {
- "array-union": "^2.1.0",
- "dir-glob": "^3.0.1",
- "fast-glob": "^3.2.9",
- "ignore": "^5.2.0",
- "merge2": "^1.4.1",
- "slash": "^3.0.0"
- },
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/graphemer": {
- "version": "1.4.0",
- "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz",
- "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==",
- "dev": true
- },
- "node_modules/has-flag": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz",
- "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==",
- "dev": true,
- "engines": {
- "node": ">=4"
- }
- },
- "node_modules/has-unicode": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz",
- "integrity": "sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ=="
- },
- "node_modules/help-me": {
- "version": "5.0.0",
- "resolved": "https://registry.npmjs.org/help-me/-/help-me-5.0.0.tgz",
- "integrity": "sha512-7xgomUX6ADmcYzFik0HzAxh/73YlKR9bmFzf51CZwR+b6YtzU2m0u49hQCqV6SvlqIqsaxovfwdvbnsw3b/zpg==",
- "dev": true
- },
- "node_modules/https-proxy-agent": {
- "version": "5.0.1",
- "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz",
- "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==",
- "dependencies": {
- "agent-base": "6",
- "debug": "4"
- },
- "engines": {
- "node": ">= 6"
- }
- },
- "node_modules/ieee754": {
- "version": "1.2.1",
- "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz",
- "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==",
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/feross"
- },
- {
- "type": "patreon",
- "url": "https://www.patreon.com/feross"
- },
- {
- "type": "consulting",
- "url": "https://feross.org/support"
- }
- ]
- },
- "node_modules/ignore": {
- "version": "5.3.1",
- "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.1.tgz",
- "integrity": "sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw==",
- "dev": true,
- "engines": {
- "node": ">= 4"
- }
- },
- "node_modules/ignore-by-default": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/ignore-by-default/-/ignore-by-default-1.0.1.tgz",
- "integrity": "sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA==",
- "dev": true
- },
- "node_modules/import-fresh": {
- "version": "3.3.0",
- "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz",
- "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==",
- "dev": true,
- "dependencies": {
- "parent-module": "^1.0.0",
- "resolve-from": "^4.0.0"
- },
- "engines": {
- "node": ">=6"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/imurmurhash": {
- "version": "0.1.4",
- "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz",
- "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==",
- "dev": true,
- "engines": {
- "node": ">=0.8.19"
- }
- },
- "node_modules/inflight": {
- "version": "1.0.6",
- "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
- "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==",
- "dependencies": {
- "once": "^1.3.0",
- "wrappy": "1"
- }
- },
- "node_modules/inherits": {
- "version": "2.0.4",
- "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
- "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
- },
- "node_modules/ipaddr.js": {
- "version": "1.9.1",
- "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz",
- "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==",
- "engines": {
- "node": ">= 0.10"
- }
- },
- "node_modules/is-binary-path": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz",
- "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==",
- "dev": true,
- "dependencies": {
- "binary-extensions": "^2.0.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/is-extglob": {
- "version": "2.1.1",
- "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
- "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==",
- "dev": true,
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/is-fullwidth-code-point": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
- "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/is-glob": {
- "version": "4.0.3",
- "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz",
- "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==",
- "dev": true,
- "dependencies": {
- "is-extglob": "^2.1.1"
- },
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/is-number": {
- "version": "7.0.0",
- "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
- "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
- "dev": true,
- "engines": {
- "node": ">=0.12.0"
- }
- },
- "node_modules/is-path-inside": {
- "version": "3.0.3",
- "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz",
- "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==",
- "dev": true,
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/is-stream": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz",
- "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==",
- "engines": {
- "node": ">=8"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/isexe": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
- "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==",
- "dev": true
- },
- "node_modules/joycon": {
- "version": "3.1.1",
- "resolved": "https://registry.npmjs.org/joycon/-/joycon-3.1.1.tgz",
- "integrity": "sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw==",
- "dev": true,
- "engines": {
- "node": ">=10"
- }
- },
- "node_modules/js-yaml": {
- "version": "4.1.0",
- "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
- "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
- "dev": true,
- "dependencies": {
- "argparse": "^2.0.1"
- },
- "bin": {
- "js-yaml": "bin/js-yaml.js"
- }
- },
- "node_modules/json-buffer": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz",
- "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==",
- "dev": true
- },
- "node_modules/json-schema-ref-resolver": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/json-schema-ref-resolver/-/json-schema-ref-resolver-1.0.1.tgz",
- "integrity": "sha512-EJAj1pgHc1hxF6vo2Z3s69fMjO1INq6eGHXZ8Z6wCQeldCuwxGK9Sxf4/cScGn3FZubCVUehfWtcDM/PLteCQw==",
- "dependencies": {
- "fast-deep-equal": "^3.1.3"
- }
- },
- "node_modules/json-schema-traverse": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz",
- "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug=="
- },
- "node_modules/json-stable-stringify-without-jsonify": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz",
- "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==",
- "dev": true
- },
- "node_modules/keyv": {
- "version": "4.5.4",
- "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz",
- "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==",
- "dev": true,
- "dependencies": {
- "json-buffer": "3.0.1"
- }
- },
- "node_modules/levn": {
- "version": "0.4.1",
- "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz",
- "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==",
- "dev": true,
- "dependencies": {
- "prelude-ls": "^1.2.1",
- "type-check": "~0.4.0"
- },
- "engines": {
- "node": ">= 0.8.0"
- }
- },
- "node_modules/light-my-request": {
- "version": "5.13.0",
- "resolved": "https://registry.npmjs.org/light-my-request/-/light-my-request-5.13.0.tgz",
- "integrity": "sha512-9IjUN9ZyCS9pTG+KqTDEQo68Sui2lHsYBrfMyVUTTZ3XhH8PMZq7xO94Kr+eP9dhi/kcKsx4N41p2IXEBil1pQ==",
- "dependencies": {
- "cookie": "^0.6.0",
- "process-warning": "^3.0.0",
- "set-cookie-parser": "^2.4.1"
- }
- },
- "node_modules/locate-path": {
- "version": "6.0.0",
- "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz",
- "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==",
- "dev": true,
- "dependencies": {
- "p-locate": "^5.0.0"
- },
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/lodash.merge": {
- "version": "4.6.2",
- "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz",
- "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==",
- "dev": true
- },
- "node_modules/make-dir": {
- "version": "3.1.0",
- "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz",
- "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==",
- "dependencies": {
- "semver": "^6.0.0"
- },
- "engines": {
- "node": ">=8"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/make-dir/node_modules/semver": {
- "version": "6.3.1",
- "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
- "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
- "bin": {
- "semver": "bin/semver.js"
- }
- },
- "node_modules/merge2": {
- "version": "1.4.1",
- "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz",
- "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==",
- "dev": true,
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/micromatch": {
- "version": "4.0.8",
- "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz",
- "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==",
- "dev": true,
- "dependencies": {
- "braces": "^3.0.3",
- "picomatch": "^2.3.1"
- },
- "engines": {
- "node": ">=8.6"
- }
- },
- "node_modules/minimatch": {
- "version": "3.1.2",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
- "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
- "dependencies": {
- "brace-expansion": "^1.1.7"
- },
- "engines": {
- "node": "*"
- }
- },
- "node_modules/minimist": {
- "version": "1.2.8",
- "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz",
- "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==",
- "dev": true,
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/minipass": {
- "version": "5.0.0",
- "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz",
- "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/minizlib": {
- "version": "2.1.2",
- "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz",
- "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==",
- "dependencies": {
- "minipass": "^3.0.0",
- "yallist": "^4.0.0"
- },
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/minizlib/node_modules/minipass": {
- "version": "3.3.6",
- "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz",
- "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==",
- "dependencies": {
- "yallist": "^4.0.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/mkdirp": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz",
- "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==",
- "bin": {
- "mkdirp": "bin/cmd.js"
- },
- "engines": {
- "node": ">=10"
- }
- },
- "node_modules/ms": {
- "version": "2.1.2",
- "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
- "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
- },
- "node_modules/nan": {
- "version": "2.19.0",
- "resolved": "https://registry.npmjs.org/nan/-/nan-2.19.0.tgz",
- "integrity": "sha512-nO1xXxfh/RWNxfd/XPfbIfFk5vgLsAxUR9y5O0cHMJu/AW9U95JLXqthYHjEp+8gQ5p96K9jUp8nbVOxCdRbtw==",
- "optional": true
- },
- "node_modules/natural-compare": {
- "version": "1.4.0",
- "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz",
- "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==",
- "dev": true
- },
- "node_modules/node-addon-api": {
- "version": "5.1.0",
- "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-5.1.0.tgz",
- "integrity": "sha512-eh0GgfEkpnoWDq+VY8OyvYhFEzBk6jIYbRKdIlyTiAXIVJ8PyBaKb0rp7oDtoddbdoHWhq8wwr+XZ81F1rpNdA=="
- },
- "node_modules/node-fetch": {
- "version": "2.7.0",
- "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz",
- "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==",
- "dependencies": {
- "whatwg-url": "^5.0.0"
- },
- "engines": {
- "node": "4.x || >=6.0.0"
- },
- "peerDependencies": {
- "encoding": "^0.1.0"
- },
- "peerDependenciesMeta": {
- "encoding": {
- "optional": true
- }
- }
- },
- "node_modules/node-ssh": {
- "version": "13.2.0",
- "resolved": "https://registry.npmjs.org/node-ssh/-/node-ssh-13.2.0.tgz",
- "integrity": "sha512-7vsKR2Bbs66th6IWCy/7SN4MSwlVt+G6QrHB631BjRUM8/LmvDugtYhi0uAmgvHS/+PVurfNBOmELf30rm0MZg==",
- "dependencies": {
- "is-stream": "^2.0.0",
- "make-dir": "^3.1.0",
- "sb-promise-queue": "^2.1.0",
- "sb-scandir": "^3.1.0",
- "shell-escape": "^0.2.0",
- "ssh2": "^1.14.0"
- },
- "engines": {
- "node": ">= 10"
- }
- },
- "node_modules/nodemon": {
- "version": "3.0.3",
- "resolved": "https://registry.npmjs.org/nodemon/-/nodemon-3.0.3.tgz",
- "integrity": "sha512-7jH/NXbFPxVaMwmBCC2B9F/V6X1VkEdNgx3iu9jji8WxWcvhMWkmhNWhI5077zknOnZnBzba9hZP6bCPJLSReQ==",
- "dev": true,
- "dependencies": {
- "chokidar": "^3.5.2",
- "debug": "^4",
- "ignore-by-default": "^1.0.1",
- "minimatch": "^3.1.2",
- "pstree.remy": "^1.1.8",
- "semver": "^7.5.3",
- "simple-update-notifier": "^2.0.0",
- "supports-color": "^5.5.0",
- "touch": "^3.1.0",
- "undefsafe": "^2.0.5"
- },
- "bin": {
- "nodemon": "bin/nodemon.js"
- },
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/nodemon"
- }
- },
- "node_modules/nopt": {
- "version": "1.0.10",
- "resolved": "https://registry.npmjs.org/nopt/-/nopt-1.0.10.tgz",
- "integrity": "sha512-NWmpvLSqUrgrAC9HCuxEvb+PSloHpqVu+FqcO4eeF2h5qYRhA7ev6KvelyQAKtegUbC6RypJnlEOhd8vloNKYg==",
- "dev": true,
- "dependencies": {
- "abbrev": "1"
- },
- "bin": {
- "nopt": "bin/nopt.js"
- },
- "engines": {
- "node": "*"
- }
- },
- "node_modules/normalize-path": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz",
- "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==",
- "dev": true,
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/npmlog": {
- "version": "5.0.1",
- "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-5.0.1.tgz",
- "integrity": "sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==",
- "dependencies": {
- "are-we-there-yet": "^2.0.0",
- "console-control-strings": "^1.1.0",
- "gauge": "^3.0.0",
- "set-blocking": "^2.0.0"
- }
- },
- "node_modules/object-assign": {
- "version": "4.1.1",
- "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
- "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==",
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/on-exit-leak-free": {
- "version": "2.1.2",
- "resolved": "https://registry.npmjs.org/on-exit-leak-free/-/on-exit-leak-free-2.1.2.tgz",
- "integrity": "sha512-0eJJY6hXLGf1udHwfNftBqH+g73EU4B504nZeKpz1sYRKafAghwxEJunB2O7rDZkL4PGfsMVnTXZ2EjibbqcsA==",
- "engines": {
- "node": ">=14.0.0"
- }
- },
- "node_modules/once": {
- "version": "1.4.0",
- "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
- "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==",
- "dependencies": {
- "wrappy": "1"
- }
- },
- "node_modules/optionator": {
- "version": "0.9.4",
- "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz",
- "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==",
- "dev": true,
- "dependencies": {
- "deep-is": "^0.1.3",
- "fast-levenshtein": "^2.0.6",
- "levn": "^0.4.1",
- "prelude-ls": "^1.2.1",
- "type-check": "^0.4.0",
- "word-wrap": "^1.2.5"
- },
- "engines": {
- "node": ">= 0.8.0"
- }
- },
- "node_modules/p-limit": {
- "version": "3.1.0",
- "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz",
- "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==",
- "dev": true,
- "dependencies": {
- "yocto-queue": "^0.1.0"
- },
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/p-locate": {
- "version": "5.0.0",
- "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz",
- "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==",
- "dev": true,
- "dependencies": {
- "p-limit": "^3.0.2"
- },
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/parent-module": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
- "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==",
- "dev": true,
- "dependencies": {
- "callsites": "^3.0.0"
- },
- "engines": {
- "node": ">=6"
- }
- },
- "node_modules/path-exists": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
- "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
- "dev": true,
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/path-is-absolute": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
- "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==",
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/path-key": {
- "version": "3.1.1",
- "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz",
- "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==",
- "dev": true,
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/path-type": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz",
- "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==",
- "dev": true,
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/picomatch": {
- "version": "2.3.1",
- "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
- "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
- "dev": true,
- "engines": {
- "node": ">=8.6"
- },
- "funding": {
- "url": "https://github.com/sponsors/jonschlinkert"
- }
- },
- "node_modules/pino": {
- "version": "8.20.0",
- "resolved": "https://registry.npmjs.org/pino/-/pino-8.20.0.tgz",
- "integrity": "sha512-uhIfMj5TVp+WynVASaVEJFTncTUe4dHBq6CWplu/vBgvGHhvBvQfxz+vcOrnnBQdORH3izaGEurLfNlq3YxdFQ==",
- "dependencies": {
- "atomic-sleep": "^1.0.0",
- "fast-redact": "^3.1.1",
- "on-exit-leak-free": "^2.1.0",
- "pino-abstract-transport": "^1.1.0",
- "pino-std-serializers": "^6.0.0",
- "process-warning": "^3.0.0",
- "quick-format-unescaped": "^4.0.3",
- "real-require": "^0.2.0",
- "safe-stable-stringify": "^2.3.1",
- "sonic-boom": "^3.7.0",
- "thread-stream": "^2.0.0"
- },
- "bin": {
- "pino": "bin.js"
- }
- },
- "node_modules/pino-abstract-transport": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/pino-abstract-transport/-/pino-abstract-transport-1.1.0.tgz",
- "integrity": "sha512-lsleG3/2a/JIWUtf9Q5gUNErBqwIu1tUKTT3dUzaf5DySw9ra1wcqKjJjLX1VTY64Wk1eEOYsVGSaGfCK85ekA==",
- "dependencies": {
- "readable-stream": "^4.0.0",
- "split2": "^4.0.0"
- }
- },
- "node_modules/pino-pretty": {
- "version": "11.0.0",
- "resolved": "https://registry.npmjs.org/pino-pretty/-/pino-pretty-11.0.0.tgz",
- "integrity": "sha512-YFJZqw59mHIY72wBnBs7XhLGG6qpJMa4pEQTRgEPEbjIYbng2LXEZZF1DoyDg9CfejEy8uZCyzpcBXXG0oOCwQ==",
- "dev": true,
- "dependencies": {
- "colorette": "^2.0.7",
- "dateformat": "^4.6.3",
- "fast-copy": "^3.0.0",
- "fast-safe-stringify": "^2.1.1",
- "help-me": "^5.0.0",
- "joycon": "^3.1.1",
- "minimist": "^1.2.6",
- "on-exit-leak-free": "^2.1.0",
- "pino-abstract-transport": "^1.0.0",
- "pump": "^3.0.0",
- "readable-stream": "^4.0.0",
- "secure-json-parse": "^2.4.0",
- "sonic-boom": "^3.0.0",
- "strip-json-comments": "^3.1.1"
- },
- "bin": {
- "pino-pretty": "bin.js"
- }
- },
- "node_modules/pino-std-serializers": {
- "version": "6.2.2",
- "resolved": "https://registry.npmjs.org/pino-std-serializers/-/pino-std-serializers-6.2.2.tgz",
- "integrity": "sha512-cHjPPsE+vhj/tnhCy/wiMh3M3z3h/j15zHQX+S9GkTBgqJuTuJzYJ4gUyACLhDaJ7kk9ba9iRDmbH2tJU03OiA=="
- },
- "node_modules/prelude-ls": {
- "version": "1.2.1",
- "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz",
- "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==",
- "dev": true,
- "engines": {
- "node": ">= 0.8.0"
- }
- },
- "node_modules/prettier": {
- "version": "3.2.5",
- "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.2.5.tgz",
- "integrity": "sha512-3/GWa9aOC0YeD7LUfvOG2NiDyhOWRvt1k+rcKhOuYnMY24iiCphgneUfJDyFXd6rZCAnuLBv6UeAULtrhT/F4A==",
- "dev": true,
- "bin": {
- "prettier": "bin/prettier.cjs"
- },
- "engines": {
- "node": ">=14"
- },
- "funding": {
- "url": "https://github.com/prettier/prettier?sponsor=1"
- }
- },
- "node_modules/prisma": {
- "version": "5.13.0",
- "resolved": "https://registry.npmjs.org/prisma/-/prisma-5.13.0.tgz",
- "integrity": "sha512-kGtcJaElNRAdAGsCNykFSZ7dBKpL14Cbs+VaQ8cECxQlRPDjBlMHNFYeYt0SKovAVy2Y65JXQwB3A5+zIQwnTg==",
- "devOptional": true,
- "hasInstallScript": true,
- "dependencies": {
- "@prisma/engines": "5.13.0"
- },
- "bin": {
- "prisma": "build/index.js"
- },
- "engines": {
- "node": ">=16.13"
- }
- },
- "node_modules/process": {
- "version": "0.11.10",
- "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz",
- "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==",
- "engines": {
- "node": ">= 0.6.0"
- }
- },
- "node_modules/process-warning": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/process-warning/-/process-warning-3.0.0.tgz",
- "integrity": "sha512-mqn0kFRl0EoqhnL0GQ0veqFHyIN1yig9RHh/InzORTUiZHFRAur+aMtRkELNwGs9aNwKS6tg/An4NYBPGwvtzQ=="
- },
- "node_modules/proxy-addr": {
- "version": "2.0.7",
- "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz",
- "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==",
- "dependencies": {
- "forwarded": "0.2.0",
- "ipaddr.js": "1.9.1"
- },
- "engines": {
- "node": ">= 0.10"
- }
- },
- "node_modules/pstree.remy": {
- "version": "1.1.8",
- "resolved": "https://registry.npmjs.org/pstree.remy/-/pstree.remy-1.1.8.tgz",
- "integrity": "sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==",
- "dev": true
- },
- "node_modules/pump": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz",
- "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==",
- "dev": true,
- "dependencies": {
- "end-of-stream": "^1.1.0",
- "once": "^1.3.1"
- }
- },
- "node_modules/punycode": {
- "version": "2.3.1",
- "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz",
- "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==",
- "engines": {
- "node": ">=6"
- }
- },
- "node_modules/queue-microtask": {
- "version": "1.2.3",
- "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
- "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==",
- "dev": true,
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/feross"
- },
- {
- "type": "patreon",
- "url": "https://www.patreon.com/feross"
- },
- {
- "type": "consulting",
- "url": "https://feross.org/support"
- }
- ]
- },
- "node_modules/quick-format-unescaped": {
- "version": "4.0.4",
- "resolved": "https://registry.npmjs.org/quick-format-unescaped/-/quick-format-unescaped-4.0.4.tgz",
- "integrity": "sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg=="
- },
- "node_modules/readable-stream": {
- "version": "4.5.2",
- "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.5.2.tgz",
- "integrity": "sha512-yjavECdqeZ3GLXNgRXgeQEdz9fvDDkNKyHnbHRFtOr7/LcfgBcmct7t/ET+HaCTqfh06OzoAxrkN/IfjJBVe+g==",
- "dependencies": {
- "abort-controller": "^3.0.0",
- "buffer": "^6.0.3",
- "events": "^3.3.0",
- "process": "^0.11.10",
- "string_decoder": "^1.3.0"
- },
- "engines": {
- "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
- }
- },
- "node_modules/readdirp": {
- "version": "3.6.0",
- "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz",
- "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==",
- "dev": true,
- "dependencies": {
- "picomatch": "^2.2.1"
- },
- "engines": {
- "node": ">=8.10.0"
- }
- },
- "node_modules/real-require": {
- "version": "0.2.0",
- "resolved": "https://registry.npmjs.org/real-require/-/real-require-0.2.0.tgz",
- "integrity": "sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg==",
- "engines": {
- "node": ">= 12.13.0"
- }
- },
- "node_modules/require-from-string": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz",
- "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==",
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/resolve-from": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz",
- "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==",
- "dev": true,
- "engines": {
- "node": ">=4"
- }
- },
- "node_modules/ret": {
- "version": "0.4.3",
- "resolved": "https://registry.npmjs.org/ret/-/ret-0.4.3.tgz",
- "integrity": "sha512-0f4Memo5QP7WQyUEAYUO3esD/XjOc3Zjjg5CPsAq1p8sIu0XPeMbHJemKA0BO7tV0X7+A0FoEpbmHXWxPyD3wQ==",
- "engines": {
- "node": ">=10"
- }
- },
- "node_modules/reusify": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz",
- "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==",
- "engines": {
- "iojs": ">=1.0.0",
- "node": ">=0.10.0"
- }
- },
- "node_modules/rfdc": {
- "version": "1.3.1",
- "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.3.1.tgz",
- "integrity": "sha512-r5a3l5HzYlIC68TpmYKlxWjmOP6wiPJ1vWv2HeLhNsRZMrCkxeqxiHlQ21oXmQ4F3SiryXBHhAD7JZqvOJjFmg=="
- },
- "node_modules/rimraf": {
- "version": "3.0.2",
- "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
- "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
- "dependencies": {
- "glob": "^7.1.3"
- },
- "bin": {
- "rimraf": "bin.js"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
- "node_modules/run-parallel": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz",
- "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==",
- "dev": true,
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/feross"
- },
- {
- "type": "patreon",
- "url": "https://www.patreon.com/feross"
- },
- {
- "type": "consulting",
- "url": "https://feross.org/support"
- }
- ],
- "dependencies": {
- "queue-microtask": "^1.2.2"
- }
- },
- "node_modules/safe-buffer": {
- "version": "5.2.1",
- "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
- "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==",
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/feross"
- },
- {
- "type": "patreon",
- "url": "https://www.patreon.com/feross"
- },
- {
- "type": "consulting",
- "url": "https://feross.org/support"
- }
- ]
- },
- "node_modules/safe-regex2": {
- "version": "3.1.0",
- "resolved": "https://registry.npmjs.org/safe-regex2/-/safe-regex2-3.1.0.tgz",
- "integrity": "sha512-RAAZAGbap2kBfbVhvmnTFv73NWLMvDGOITFYTZBAaY8eR+Ir4ef7Up/e7amo+y1+AH+3PtLkrt9mvcTsG9LXug==",
- "dependencies": {
- "ret": "~0.4.0"
- }
- },
- "node_modules/safe-stable-stringify": {
- "version": "2.4.3",
- "resolved": "https://registry.npmjs.org/safe-stable-stringify/-/safe-stable-stringify-2.4.3.tgz",
- "integrity": "sha512-e2bDA2WJT0wxseVd4lsDP4+3ONX6HpMXQa1ZhFQ7SU+GjvORCmShbCMltrtIDfkYhVHrOcPtj+KhmDBdPdZD1g==",
- "engines": {
- "node": ">=10"
- }
- },
- "node_modules/safer-buffer": {
- "version": "2.1.2",
- "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
- "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
- },
- "node_modules/sb-promise-queue": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/sb-promise-queue/-/sb-promise-queue-2.1.0.tgz",
- "integrity": "sha512-zwq4YuP1FQFkGx2Q7GIkZYZ6PqWpV+bg0nIO1sJhWOyGyhqbj0MsTvK6lCFo5TQwX5pZr6SCQ75e8PCDCuNvkg==",
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/sb-scandir": {
- "version": "3.1.0",
- "resolved": "https://registry.npmjs.org/sb-scandir/-/sb-scandir-3.1.0.tgz",
- "integrity": "sha512-70BVm2xz9jn94zSQdpvYrEG101/UV9TVGcfWr9T5iob3QhCK4lYXeculfBqPGFv3XTeKgx4dpWyYIDeZUqo4kg==",
- "dependencies": {
- "sb-promise-queue": "^2.1.0"
- },
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/secure-json-parse": {
- "version": "2.7.0",
- "resolved": "https://registry.npmjs.org/secure-json-parse/-/secure-json-parse-2.7.0.tgz",
- "integrity": "sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw=="
- },
- "node_modules/semver": {
- "version": "7.6.2",
- "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.2.tgz",
- "integrity": "sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==",
- "bin": {
- "semver": "bin/semver.js"
- },
- "engines": {
- "node": ">=10"
- }
- },
- "node_modules/set-blocking": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz",
- "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw=="
- },
- "node_modules/set-cookie-parser": {
- "version": "2.6.0",
- "resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.6.0.tgz",
- "integrity": "sha512-RVnVQxTXuerk653XfuliOxBP81Sf0+qfQE73LIYKcyMYHG94AuH0kgrQpRDuTZnSmjpysHmzxJXKNfa6PjFhyQ=="
- },
- "node_modules/shebang-command": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
- "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
- "dev": true,
- "dependencies": {
- "shebang-regex": "^3.0.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/shebang-regex": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz",
- "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==",
- "dev": true,
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/shell-escape": {
- "version": "0.2.0",
- "resolved": "https://registry.npmjs.org/shell-escape/-/shell-escape-0.2.0.tgz",
- "integrity": "sha512-uRRBT2MfEOyxuECseCZd28jC1AJ8hmqqneWQ4VWUTgCAFvb3wKU1jLqj6egC4Exrr88ogg3dp+zroH4wJuaXzw=="
- },
- "node_modules/signal-exit": {
- "version": "3.0.7",
- "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz",
- "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ=="
- },
- "node_modules/simple-update-notifier": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-2.0.0.tgz",
- "integrity": "sha512-a2B9Y0KlNXl9u/vsW6sTIu9vGEpfKu2wRV6l1H3XEas/0gUIzGzBoP/IouTcUQbm9JWZLH3COxyn03TYlFax6w==",
- "dev": true,
- "dependencies": {
- "semver": "^7.5.3"
- },
- "engines": {
- "node": ">=10"
- }
- },
- "node_modules/slash": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz",
- "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==",
- "dev": true,
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/sonic-boom": {
- "version": "3.8.1",
- "resolved": "https://registry.npmjs.org/sonic-boom/-/sonic-boom-3.8.1.tgz",
- "integrity": "sha512-y4Z8LCDBuum+PBP3lSV7RHrXscqksve/bi0as7mhwVnBW+/wUqKT/2Kb7um8yqcFy0duYbbPxzt89Zy2nOCaxg==",
- "dependencies": {
- "atomic-sleep": "^1.0.0"
- }
- },
- "node_modules/split2": {
- "version": "4.2.0",
- "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz",
- "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==",
- "engines": {
- "node": ">= 10.x"
- }
- },
- "node_modules/ssh2": {
- "version": "1.15.0",
- "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.15.0.tgz",
- "integrity": "sha512-C0PHgX4h6lBxYx7hcXwu3QWdh4tg6tZZsTfXcdvc5caW/EMxaB4H9dWsl7qk+F7LAW762hp8VbXOX7x4xUYvEw==",
- "hasInstallScript": true,
- "dependencies": {
- "asn1": "^0.2.6",
- "bcrypt-pbkdf": "^1.0.2"
- },
- "engines": {
- "node": ">=10.16.0"
- },
- "optionalDependencies": {
- "cpu-features": "~0.0.9",
- "nan": "^2.18.0"
- }
- },
- "node_modules/stream-shift": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.3.tgz",
- "integrity": "sha512-76ORR0DO1o1hlKwTbi/DM3EXWGf3ZJYO8cXX5RJwnul2DEg2oyoZyjLNoQM8WsvZiFKCRfC1O0J7iCvie3RZmQ=="
- },
- "node_modules/string_decoder": {
- "version": "1.3.0",
- "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
- "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==",
- "dependencies": {
- "safe-buffer": "~5.2.0"
- }
- },
- "node_modules/string-width": {
- "version": "4.2.3",
- "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
- "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
- "dependencies": {
- "emoji-regex": "^8.0.0",
- "is-fullwidth-code-point": "^3.0.0",
- "strip-ansi": "^6.0.1"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/strip-ansi": {
- "version": "6.0.1",
- "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
- "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
- "dependencies": {
- "ansi-regex": "^5.0.1"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/strip-json-comments": {
- "version": "3.1.1",
- "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz",
- "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==",
- "dev": true,
- "engines": {
- "node": ">=8"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/supports-color": {
- "version": "5.5.0",
- "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz",
- "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==",
- "dev": true,
- "dependencies": {
- "has-flag": "^3.0.0"
- },
- "engines": {
- "node": ">=4"
- }
- },
- "node_modules/tar": {
- "version": "6.2.1",
- "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz",
- "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==",
- "dependencies": {
- "chownr": "^2.0.0",
- "fs-minipass": "^2.0.0",
- "minipass": "^5.0.0",
- "minizlib": "^2.1.1",
- "mkdirp": "^1.0.3",
- "yallist": "^4.0.0"
- },
- "engines": {
- "node": ">=10"
- }
- },
- "node_modules/text-table": {
- "version": "0.2.0",
- "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz",
- "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==",
- "dev": true
- },
- "node_modules/thread-stream": {
- "version": "2.4.1",
- "resolved": "https://registry.npmjs.org/thread-stream/-/thread-stream-2.4.1.tgz",
- "integrity": "sha512-d/Ex2iWd1whipbT681JmTINKw0ZwOUBZm7+Gjs64DHuX34mmw8vJL2bFAaNacaW72zYiTJxSHi5abUuOi5nsfg==",
- "dependencies": {
- "real-require": "^0.2.0"
- }
- },
- "node_modules/to-regex-range": {
- "version": "5.0.1",
- "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
- "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
- "dev": true,
- "dependencies": {
- "is-number": "^7.0.0"
- },
- "engines": {
- "node": ">=8.0"
- }
- },
- "node_modules/toad-cache": {
- "version": "3.7.0",
- "resolved": "https://registry.npmjs.org/toad-cache/-/toad-cache-3.7.0.tgz",
- "integrity": "sha512-/m8M+2BJUpoJdgAHoG+baCwBT+tf2VraSfkBgl0Y00qIWt41DJ8R5B8nsEw0I58YwF5IZH6z24/2TobDKnqSWw==",
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/touch": {
- "version": "3.1.0",
- "resolved": "https://registry.npmjs.org/touch/-/touch-3.1.0.tgz",
- "integrity": "sha512-WBx8Uy5TLtOSRtIq+M03/sKDrXCLHxwDcquSP2c43Le03/9serjQBIztjRz6FkJez9D/hleyAXTBGLwwZUw9lA==",
- "dev": true,
- "dependencies": {
- "nopt": "~1.0.10"
- },
- "bin": {
- "nodetouch": "bin/nodetouch.js"
- }
- },
- "node_modules/tr46": {
- "version": "0.0.3",
- "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
- "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="
- },
- "node_modules/ts-api-utils": {
- "version": "1.3.0",
- "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.3.0.tgz",
- "integrity": "sha512-UQMIo7pb8WRomKR1/+MFVLTroIvDVtMX3K6OUir8ynLyzB8Jeriont2bTAtmNPa1ekAgN7YPDyf6V+ygrdU+eQ==",
- "dev": true,
- "engines": {
- "node": ">=16"
- },
- "peerDependencies": {
- "typescript": ">=4.2.0"
- }
- },
- "node_modules/tweetnacl": {
- "version": "0.14.5",
- "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz",
- "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA=="
- },
- "node_modules/type-check": {
- "version": "0.4.0",
- "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz",
- "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==",
- "dev": true,
- "dependencies": {
- "prelude-ls": "^1.2.1"
- },
- "engines": {
- "node": ">= 0.8.0"
- }
- },
- "node_modules/type-fest": {
- "version": "0.20.2",
- "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz",
- "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==",
- "dev": true,
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/typescript": {
- "version": "5.3.3",
- "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.3.3.tgz",
- "integrity": "sha512-pXWcraxM0uxAS+tN0AG/BF2TyqmHO014Z070UsJ+pFvYuRSq8KH8DmWpnbXe0pEPDHXZV3FcAbJkijJ5oNEnWw==",
- "dev": true,
- "bin": {
- "tsc": "bin/tsc",
- "tsserver": "bin/tsserver"
- },
- "engines": {
- "node": ">=14.17"
- }
- },
- "node_modules/typescript-eslint": {
- "version": "7.8.0",
- "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-7.8.0.tgz",
- "integrity": "sha512-sheFG+/D8N/L7gC3WT0Q8sB97Nm573Yfr+vZFzl/4nBdYcmviBPtwGSX9TJ7wpVg28ocerKVOt+k2eGmHzcgVA==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/eslint-plugin": "7.8.0",
- "@typescript-eslint/parser": "7.8.0",
- "@typescript-eslint/utils": "7.8.0"
- },
- "engines": {
- "node": "^18.18.0 || >=20.0.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- },
- "peerDependencies": {
- "eslint": "^8.56.0"
- },
- "peerDependenciesMeta": {
- "typescript": {
- "optional": true
- }
- }
- },
- "node_modules/undefsafe": {
- "version": "2.0.5",
- "resolved": "https://registry.npmjs.org/undefsafe/-/undefsafe-2.0.5.tgz",
- "integrity": "sha512-WxONCrssBM8TSPRqN5EmsjVrsv4A8X12J4ArBiiayv3DyyG3ZlIg6yysuuSYdZsVz3TKcTg2fd//Ujd4CHV1iA==",
- "dev": true
- },
- "node_modules/undici-types": {
- "version": "5.26.5",
- "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",
- "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==",
- "dev": true
- },
- "node_modules/uri-js": {
- "version": "4.4.1",
- "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz",
- "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==",
- "dependencies": {
- "punycode": "^2.1.0"
- }
- },
- "node_modules/util-deprecate": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
- "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="
- },
- "node_modules/webidl-conversions": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
- "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="
- },
- "node_modules/whatwg-url": {
- "version": "5.0.0",
- "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz",
- "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==",
- "dependencies": {
- "tr46": "~0.0.3",
- "webidl-conversions": "^3.0.0"
- }
- },
- "node_modules/which": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
- "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
- "dev": true,
- "dependencies": {
- "isexe": "^2.0.0"
- },
- "bin": {
- "node-which": "bin/node-which"
- },
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/wide-align": {
- "version": "1.1.5",
- "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz",
- "integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==",
- "dependencies": {
- "string-width": "^1.0.2 || 2 || 3 || 4"
- }
- },
- "node_modules/word-wrap": {
- "version": "1.2.5",
- "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz",
- "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==",
- "dev": true,
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/wrappy": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
- "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="
- },
- "node_modules/ws": {
- "version": "8.17.1",
- "resolved": "https://registry.npmjs.org/ws/-/ws-8.17.1.tgz",
- "integrity": "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==",
- "engines": {
- "node": ">=10.0.0"
- },
- "peerDependencies": {
- "bufferutil": "^4.0.1",
- "utf-8-validate": ">=5.0.2"
- },
- "peerDependenciesMeta": {
- "bufferutil": {
- "optional": true
- },
- "utf-8-validate": {
- "optional": true
- }
- }
- },
- "node_modules/yallist": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
- "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
- },
- "node_modules/yocto-queue": {
- "version": "0.1.0",
- "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",
- "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==",
- "dev": true,
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- }
- }
-}
diff --git a/api/package.json b/api/package.json
deleted file mode 100644
index 4a3ab5b..0000000
--- a/api/package.json
+++ /dev/null
@@ -1,38 +0,0 @@
-{
- "name": "nextnet",
- "version": "1.1.2",
- "description": "Yet another dashboard to manage portforwarding technologies",
- "main": "index.js",
- "type": "module",
- "scripts": {
- "test": "echo \"Error: no test specified\" && exit 1",
- "build": "tsc",
- "start": "cd out && node --enable-source-maps index.js",
- "dev": "nodemon --watch src --ext ts,js,mjs,json --exec \"tsc && cd out && node --enable-source-maps index.js\""
- },
- "keywords": [],
- "author": "greysoh",
- "license": "BSD-3-Clause",
- "devDependencies": {
- "@eslint/js": "^9.2.0",
- "@types/bcrypt": "^5.0.2",
- "@types/node": "^20.12.7",
- "@types/ssh2": "^1.15.0",
- "@types/ws": "^8.5.10",
- "eslint": "^8.57.0",
- "globals": "^15.2.0",
- "nodemon": "^3.0.3",
- "pino-pretty": "^11.0.0",
- "prettier": "^3.2.5",
- "prisma": "^5.13.0",
- "typescript": "^5.3.3",
- "typescript-eslint": "^7.8.0"
- },
- "dependencies": {
- "@fastify/websocket": "^10.0.1",
- "@prisma/client": "^5.13.0",
- "bcrypt": "^5.1.1",
- "fastify": "^4.26.2",
- "node-ssh": "^13.2.0"
- }
-}
diff --git a/api/prisma/migrations/20240421200334_init/migration.sql b/api/prisma/migrations/20240421200334_init/migration.sql
deleted file mode 100644
index 17e7104..0000000
--- a/api/prisma/migrations/20240421200334_init/migration.sql
+++ /dev/null
@@ -1,53 +0,0 @@
--- CreateTable
-CREATE TABLE "DesinationProvider" (
- "id" SERIAL NOT NULL,
- "name" TEXT NOT NULL,
- "description" TEXT,
- "backend" TEXT NOT NULL,
- "connectionDetails" TEXT NOT NULL,
-
- CONSTRAINT "DesinationProvider_pkey" PRIMARY KEY ("id")
-);
-
--- CreateTable
-CREATE TABLE "ForwardRule" (
- "id" SERIAL NOT NULL,
- "name" TEXT NOT NULL,
- "description" TEXT,
- "sourceIP" TEXT NOT NULL,
- "sourcePort" INTEGER NOT NULL,
- "destIP" TEXT NOT NULL,
- "destPort" INTEGER NOT NULL,
- "destProviderID" INTEGER NOT NULL,
- "enabled" BOOLEAN NOT NULL,
-
- CONSTRAINT "ForwardRule_pkey" PRIMARY KEY ("id")
-);
-
--- CreateTable
-CREATE TABLE "Permission" (
- "id" SERIAL NOT NULL,
- "permission" TEXT NOT NULL,
- "has" BOOLEAN NOT NULL,
- "userID" INTEGER NOT NULL,
-
- CONSTRAINT "Permission_pkey" PRIMARY KEY ("id")
-);
-
--- CreateTable
-CREATE TABLE "User" (
- "id" SERIAL NOT NULL,
- "email" TEXT NOT NULL,
- "name" TEXT NOT NULL,
- "password" TEXT NOT NULL,
- "rootToken" TEXT,
- "isRootServiceAccount" BOOLEAN,
-
- CONSTRAINT "User_pkey" PRIMARY KEY ("id")
-);
-
--- CreateIndex
-CREATE UNIQUE INDEX "User_email_key" ON "User"("email");
-
--- AddForeignKey
-ALTER TABLE "Permission" ADD CONSTRAINT "Permission_userID_fkey" FOREIGN KEY ("userID") REFERENCES "User"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
diff --git a/api/prisma/migrations/20240421210417_fix_remove_destip/migration.sql b/api/prisma/migrations/20240421210417_fix_remove_destip/migration.sql
deleted file mode 100644
index a673c64..0000000
--- a/api/prisma/migrations/20240421210417_fix_remove_destip/migration.sql
+++ /dev/null
@@ -1,8 +0,0 @@
-/*
- Warnings:
-
- - You are about to drop the column `destIP` on the `ForwardRule` table. All the data in the column will be lost.
-
-*/
--- AlterTable
-ALTER TABLE "ForwardRule" DROP COLUMN "destIP";
diff --git a/api/prisma/migrations/20240425125737_fix_adds_protocol_field/migration.sql b/api/prisma/migrations/20240425125737_fix_adds_protocol_field/migration.sql
deleted file mode 100644
index a0a108f..0000000
--- a/api/prisma/migrations/20240425125737_fix_adds_protocol_field/migration.sql
+++ /dev/null
@@ -1,8 +0,0 @@
-/*
- Warnings:
-
- - Added the required column `protocol` to the `ForwardRule` table without a default value. This is not possible if the table is not empty.
-
-*/
--- AlterTable
-ALTER TABLE "ForwardRule" ADD COLUMN "protocol" TEXT NOT NULL;
diff --git a/api/prisma/migrations/20240505233740_feature_adds_username_support/migration.sql b/api/prisma/migrations/20240505233740_feature_adds_username_support/migration.sql
deleted file mode 100644
index 5af7c52..0000000
--- a/api/prisma/migrations/20240505233740_feature_adds_username_support/migration.sql
+++ /dev/null
@@ -1,2 +0,0 @@
--- AlterTable
-ALTER TABLE "User" ADD COLUMN "username" TEXT;
diff --git a/api/prisma/migrations/migration_lock.toml b/api/prisma/migrations/migration_lock.toml
deleted file mode 100644
index fbffa92..0000000
--- a/api/prisma/migrations/migration_lock.toml
+++ /dev/null
@@ -1,3 +0,0 @@
-# Please do not edit this file manually
-# It should be added in your version-control system (i.e. Git)
-provider = "postgresql"
\ No newline at end of file
diff --git a/api/prisma/schema.prisma b/api/prisma/schema.prisma
deleted file mode 100644
index 486f3e3..0000000
--- a/api/prisma/schema.prisma
+++ /dev/null
@@ -1,54 +0,0 @@
-// This is your Prisma schema file,
-// learn more about it in the docs: https://pris.ly/d/prisma-schema
-
-generator client {
- provider = "prisma-client-js"
-}
-
-datasource db {
- provider = "postgresql"
- url = env("DATABASE_URL")
-}
-
-model DesinationProvider {
- id Int @id @default(autoincrement())
-
- name String
- description String?
- backend String
- connectionDetails String
-}
-
-model ForwardRule {
- id Int @id @default(autoincrement())
-
- name String
- description String?
- protocol String
- sourceIP String
- sourcePort Int
- destPort Int
- destProviderID Int
- enabled Boolean
-}
-
-model Permission {
- id Int @id @default(autoincrement())
-
- permission String
- has Boolean
- user User @relation(fields: [userID], references: [id])
- userID Int
-}
-
-model User {
- id Int @id @default(autoincrement())
-
- email String @unique
- username String? // NOT optional in the API, but just for backwards compat
- name String
- password String // Will be hashed using bcrypt
- rootToken String?
- isRootServiceAccount Boolean?
- permissions Permission[]
-}
\ No newline at end of file
diff --git a/api/routes/NextNet API/Backend/Create.bru b/api/routes/NextNet API/Backend/Create.bru
deleted file mode 100644
index 15f623f..0000000
--- a/api/routes/NextNet API/Backend/Create.bru
+++ /dev/null
@@ -1,28 +0,0 @@
-meta {
- name: Create
- type: http
- seq: 1
-}
-
-post {
- url: http://127.0.0.1:3000/api/v1/backends/create
- body: json
- auth: none
-}
-
-body:json {
- {
- "token": "9d99397be36747b9e6f1858f1efded4756ea5b479fd5c47a6388041eecb44b4958858c6fe15f23a9cf5e9d67f48443c65342e3a69bfde231114df4bb2ab457",
- "name": "Passyfire Reimpl",
- "description": "PassyFire never dies",
- "backend": "passyfire",
- "connectionDetails": {
- "ip": "127.0.0.1",
- "port": 22,
-
- "users": {
- "g"
- }
- }
- }
-}
diff --git a/api/routes/NextNet API/Backend/Lookup.bru b/api/routes/NextNet API/Backend/Lookup.bru
deleted file mode 100644
index 47ef0a1..0000000
--- a/api/routes/NextNet API/Backend/Lookup.bru
+++ /dev/null
@@ -1,17 +0,0 @@
-meta {
- name: Lookup
- type: http
- seq: 3
-}
-
-post {
- url: http://127.0.0.1:3000/api/v1/backends/lookup
- body: json
- auth: none
-}
-
-body:json {
- {
- "token": "7d69814cdada551dd22521ad97b23b22a106278826a2b4e87dd76246594b56f973894e8265437a5d520ed7258d7c856d0d294e89b1de1a98db7fa4a"
- }
-}
diff --git a/api/routes/NextNet API/Backend/Remove.bru b/api/routes/NextNet API/Backend/Remove.bru
deleted file mode 100644
index 3d8fdb6..0000000
--- a/api/routes/NextNet API/Backend/Remove.bru
+++ /dev/null
@@ -1,23 +0,0 @@
-meta {
- name: Remove
- type: http
- seq: 2
-}
-
-post {
- url: http://127.0.0.1:3000/api/v1/backends/create
- body: json
- auth: none
-}
-
-body:json {
- {
- "token": "f1b89cc337073476289ade17ffbe7a6419b4bd52aa7ede26114bffd76fa263b5cb1bcaf389462e1d9e7acb7f4b6a7c28152a9cc9af83e3ec862f1892b1",
- "name": "PortCopier Route",
- "description": "This is a test route for portcopier.",
- "backend": "PortCopier",
- "connectionDetails": {
- "funny": true
- }
- }
-}
diff --git a/api/routes/NextNet API/Forward/Create.bru b/api/routes/NextNet API/Forward/Create.bru
deleted file mode 100644
index 985fd9d..0000000
--- a/api/routes/NextNet API/Forward/Create.bru
+++ /dev/null
@@ -1,28 +0,0 @@
-meta {
- name: Create
- type: http
- seq: 1
-}
-
-post {
- url: http://127.0.0.1:3000/api/v1/forward/create
- body: json
- auth: none
-}
-
-body:json {
- {
- "token": "914abf2223f84375eed884671bfaefd7755d378af496b345f322214e75b51ed4465f11e26c944914c9b4fcc35c53250325fbc6530853ddfed8f72976d6fc5",
- "name": "Test Route",
- "description": "This is a test route for SSH",
-
- "protocol": "tcp",
-
- "sourceIP": "127.0.0.1",
- "sourcePort": "8000",
-
- "destinationPort": "9000",
-
- "providerID": "1"
- }
-}
diff --git a/api/routes/NextNet API/Forward/Get Inbound Connections.bru b/api/routes/NextNet API/Forward/Get Inbound Connections.bru
deleted file mode 100644
index b61a7c4..0000000
--- a/api/routes/NextNet API/Forward/Get Inbound Connections.bru
+++ /dev/null
@@ -1,18 +0,0 @@
-meta {
- name: Get Inbound Connections
- type: http
- seq: 6
-}
-
-post {
- url: http://127.0.0.1:3000/api/v1/forward/connections
- body: json
- auth: none
-}
-
-body:json {
- {
- "token": "914abf2223f84375eed884671bfaefd7755d378af496b345f322214e75b51ed4465f11e26c944914c9b4fcc35c53250325fbc6530853ddfed8f72976d6fc5",
- "id": "1"
- }
-}
diff --git a/api/routes/NextNet API/Forward/Lookup.bru b/api/routes/NextNet API/Forward/Lookup.bru
deleted file mode 100644
index a8d60b3..0000000
--- a/api/routes/NextNet API/Forward/Lookup.bru
+++ /dev/null
@@ -1,18 +0,0 @@
-meta {
- name: Lookup
- type: http
- seq: 3
-}
-
-post {
- url: http://127.0.0.1:3000/api/v1/forward/lookup
- body: json
- auth: none
-}
-
-body:json {
- {
- "token": "535c80825631c04b9add7a8682e06799d62ba57b5089b557f5bab2183fc9926b187b3b8d96da8ef16c67ec80f2917cf81bc21337f47728534f58ac9c4ed5f3fe",
- "protocol": "tcp"
- }
-}
diff --git a/api/routes/NextNet API/Forward/Remove.bru b/api/routes/NextNet API/Forward/Remove.bru
deleted file mode 100644
index 4d6b8e5..0000000
--- a/api/routes/NextNet API/Forward/Remove.bru
+++ /dev/null
@@ -1,26 +0,0 @@
-meta {
- name: Remove
- type: http
- seq: 2
-}
-
-post {
- url: http://127.0.0.1:3000/api/v1/forward/remove
- body: json
- auth: none
-}
-
-body:json {
- {
- "token": "f1b89cc337073476289ade17ffbe7a6419b4bd52aa7ede26114bffd76fa263b5cb1bcaf389462e1d9e7acb7f4b6a7c28152a9cc9af83e3ec862f1892b1",
- "name": "Test Route",
- "description": "This is a test route for portcopier.",
-
- "sourceIP": "127.0.0.1",
- "sourcePort": "8000",
-
- "destinationPort": "9000",
-
- "providerID": "1"
- }
-}
diff --git a/api/routes/NextNet API/Forward/Start.bru b/api/routes/NextNet API/Forward/Start.bru
deleted file mode 100644
index 65accf8..0000000
--- a/api/routes/NextNet API/Forward/Start.bru
+++ /dev/null
@@ -1,18 +0,0 @@
-meta {
- name: Start
- type: http
- seq: 4
-}
-
-post {
- url: http://127.0.0.1:3000/api/v1/forward/start
- body: json
- auth: none
-}
-
-body:json {
- {
- "token": "914abf2223f84375eed884671bfaefd7755d378af496b345f322214e75b51ed4465f11e26c944914c9b4fcc35c53250325fbc6530853ddfed8f72976d6fc5",
- "id": "1"
- }
-}
diff --git a/api/routes/NextNet API/Forward/Stop.bru b/api/routes/NextNet API/Forward/Stop.bru
deleted file mode 100644
index 896fc71..0000000
--- a/api/routes/NextNet API/Forward/Stop.bru
+++ /dev/null
@@ -1,18 +0,0 @@
-meta {
- name: Stop
- type: http
- seq: 5
-}
-
-post {
- url: http://127.0.0.1:3000/api/v1/forward/stop
- body: json
- auth: none
-}
-
-body:json {
- {
- "token": "914abf2223f84375eed884671bfaefd7755d378af496b345f322214e75b51ed4465f11e26c944914c9b4fcc35c53250325fbc6530853ddfed8f72976d6fc5",
- "id": "1"
- }
-}
diff --git a/api/routes/NextNet API/Users/Log In.bru b/api/routes/NextNet API/Users/Log In.bru
deleted file mode 100644
index 70eed07..0000000
--- a/api/routes/NextNet API/Users/Log In.bru
+++ /dev/null
@@ -1,18 +0,0 @@
-meta {
- name: Log In
- type: http
- seq: 2
-}
-
-post {
- url: http://127.0.0.1:3000/api/v1/users/login
- body: json
- auth: none
-}
-
-body:json {
- {
- "email": "me@greysoh.dev",
- "password": "password"
- }
-}
diff --git a/api/routes/NextNet API/Users/Lookup.bru b/api/routes/NextNet API/Users/Lookup.bru
deleted file mode 100644
index 352b777..0000000
--- a/api/routes/NextNet API/Users/Lookup.bru
+++ /dev/null
@@ -1,18 +0,0 @@
-meta {
- name: Lookup
- type: http
- seq: 4
-}
-
-post {
- url: http://127.0.0.1:3000/api/v1/users/lookup
- body: json
- auth: none
-}
-
-body:json {
- {
- "token": "5e2cb92a338a832d385790861312eb85d69f46f82317bfa984ac5e3517368ab5a827897b0f9775a9181b02fa3b9cffed7e59e5b3111d5bdc37f729156caf5f",
- "name": "Greyson Hofer"
- }
-}
diff --git a/api/routes/NextNet API/Users/Remove.bru b/api/routes/NextNet API/Users/Remove.bru
deleted file mode 100644
index 0bf5723..0000000
--- a/api/routes/NextNet API/Users/Remove.bru
+++ /dev/null
@@ -1,18 +0,0 @@
-meta {
- name: Remove
- type: http
- seq: 3
-}
-
-post {
- url: http://127.0.0.1:3000/api/v1/users/remove
- body: json
- auth: inherit
-}
-
-body:json {
- {
- "token": "5e2cb92a338a832d385790861312eb85d69f46f82317bfa984ac5e3517368ab5a827897b0f9775a9181b02fa3b9cffed7e59e5b3111d5bdc37f729156caf5f",
- "uid": "2"
- }
-}
diff --git a/api/routes/Passyfire Base Routes/Passyfire Base Routes/Get All Scopes.bru b/api/routes/Passyfire Base Routes/Passyfire Base Routes/Get All Scopes.bru
deleted file mode 100644
index 6ae13c6..0000000
--- a/api/routes/Passyfire Base Routes/Passyfire Base Routes/Get All Scopes.bru
+++ /dev/null
@@ -1,11 +0,0 @@
-meta {
- name: Get All Scopes
- type: http
- seq: 1
-}
-
-get {
- url: http://127.0.0.1:8080/api/v1/static/getScopes
- body: none
- auth: none
-}
diff --git a/api/routes/Passyfire Base Routes/Passyfire Base Routes/Get Tunnels.bru b/api/routes/Passyfire Base Routes/Passyfire Base Routes/Get Tunnels.bru
deleted file mode 100644
index a1746a1..0000000
--- a/api/routes/Passyfire Base Routes/Passyfire Base Routes/Get Tunnels.bru
+++ /dev/null
@@ -1,17 +0,0 @@
-meta {
- name: Get Tunnels
- type: http
- seq: 3
-}
-
-post {
- url: http://127.0.0.1:8080/api/v1/tunnels
- body: json
- auth: none
-}
-
-body:json {
- {
- "token": "641d968c3bfdf78f2df86cae106349c4c95a8dd73512ee34b296379b6cd908c87b078f1f674b43c9e3394c8b233840512d88efdecf47dc63be93276f56c"
- }
-}
diff --git a/api/routes/Passyfire Base Routes/Passyfire Base Routes/Log In.bru b/api/routes/Passyfire Base Routes/Passyfire Base Routes/Log In.bru
deleted file mode 100644
index 0ecdb33..0000000
--- a/api/routes/Passyfire Base Routes/Passyfire Base Routes/Log In.bru
+++ /dev/null
@@ -1,18 +0,0 @@
-meta {
- name: Log In
- type: http
- seq: 2
-}
-
-post {
- url: http://127.0.0.1:8080/api/v1/users/login
- body: json
- auth: none
-}
-
-body:json {
- {
- "username": "guest",
- "password": "guest"
- }
-}
diff --git a/api/routes/Passyfire Base Routes/Passyfire Base Routes/bruno.json b/api/routes/Passyfire Base Routes/Passyfire Base Routes/bruno.json
deleted file mode 100644
index 0fdaa19..0000000
--- a/api/routes/Passyfire Base Routes/Passyfire Base Routes/bruno.json
+++ /dev/null
@@ -1,9 +0,0 @@
-{
- "version": "1",
- "name": "Passyfire Base Routes",
- "type": "collection",
- "ignore": [
- "node_modules",
- ".git"
- ]
-}
\ No newline at end of file
diff --git a/api/src/backendimpl/base.ts b/api/src/backendimpl/base.ts
deleted file mode 100644
index 6a8d458..0000000
--- a/api/src/backendimpl/base.ts
+++ /dev/null
@@ -1,77 +0,0 @@
-// @eslint-ignore-file
-
-export type ParameterReturnedValue = {
- success: boolean;
- message?: string;
-};
-
-export type ForwardRule = {
- sourceIP: string;
- sourcePort: number;
- destPort: number;
-};
-
-export type ConnectedClient = {
- ip: string;
- port: number;
-
- connectionDetails: ForwardRule;
-};
-
-export class BackendBaseClass {
- state: "stopped" | "stopping" | "started" | "starting";
-
- clients?: ConnectedClient[]; // Not required to be implemented, but more consistency
- logs: string[];
-
- constructor(parameters: string) {
- this.logs = [];
- this.clients = [];
-
- this.state = "stopped";
- }
-
- addConnection(
- sourceIP: string,
- sourcePort: number,
- destPort: number,
- protocol: "tcp" | "udp",
- ): void {}
-
- removeConnection(
- sourceIP: string,
- sourcePort: number,
- destPort: number,
- protocol: "tcp" | "udp",
- ): void {}
-
- async start(): Promise<boolean> {
- return true;
- }
-
- async stop(): Promise<boolean> {
- return true;
- }
-
- getAllConnections(): ConnectedClient[] {
- if (this.clients == null) return [];
- return this.clients;
- }
-
- static checkParametersConnection(
- sourceIP: string,
- sourcePort: number,
- destPort: number,
- protocol: "tcp" | "udp",
- ): ParameterReturnedValue {
- return {
- success: true,
- };
- }
-
- static checkParametersBackendInstance(data: string): ParameterReturnedValue {
- return {
- success: true,
- };
- }
-}
diff --git a/api/src/backendimpl/index.ts b/api/src/backendimpl/index.ts
deleted file mode 100644
index 776abf8..0000000
--- a/api/src/backendimpl/index.ts
+++ /dev/null
@@ -1,13 +0,0 @@
-import { BackendBaseClass } from "./base.js";
-
-import { PassyFireBackendProvider } from "./passyfire-reimpl/index.js";
-import { SSHBackendProvider } from "./ssh.js";
-
-export const backendProviders: Record<string, typeof BackendBaseClass> = {
- ssh: SSHBackendProvider,
- passyfire: PassyFireBackendProvider,
-};
-
-if (process.env.NODE_ENV != "production") {
- backendProviders["dummy"] = BackendBaseClass;
-}
diff --git a/api/src/backendimpl/passyfire-reimpl/index.ts b/api/src/backendimpl/passyfire-reimpl/index.ts
deleted file mode 100644
index de454ed..0000000
--- a/api/src/backendimpl/passyfire-reimpl/index.ts
+++ /dev/null
@@ -1,231 +0,0 @@
-import fastifyWebsocket from "@fastify/websocket";
-
-import type { FastifyInstance } from "fastify";
-import Fastify from "fastify";
-
-import type {
- ForwardRule,
- ConnectedClient,
- ParameterReturnedValue,
- BackendBaseClass,
-} from "../base.js";
-
-import { generateRandomData } from "../../libs/generateRandom.js";
-import { requestHandler } from "./socket.js";
-import { route } from "./routes.js";
-
-type BackendProviderUser = {
- username: string;
- password: string;
-};
-
-export type ForwardRuleExt = ForwardRule & {
- protocol: "tcp" | "udp";
- userConfig: Record<string, string>;
-};
-
-export type ConnectedClientExt = ConnectedClient & {
- connectionDetails: ForwardRuleExt;
- username: string;
-};
-
-// Fight me (for better naming)
-type BackendParsedProviderString = {
- ip: string;
- port: number;
- publicPort?: number;
- isProxied?: boolean;
-
- users: BackendProviderUser[];
-};
-
-type LoggedInUser = {
- username: string;
- token: string;
-};
-
-function parseBackendProviderString(data: string): BackendParsedProviderString {
- try {
- JSON.parse(data);
- } catch (e) {
- throw new Error("Payload body is not JSON");
- }
-
- const jsonData = JSON.parse(data);
-
- if (typeof jsonData.ip != "string")
- throw new Error("IP field is not a string");
-
- if (typeof jsonData.port != "number") throw new Error("Port is not a number");
-
- if (
- typeof jsonData.publicPort != "undefined" &&
- typeof jsonData.publicPort != "number"
- )
- throw new Error("(optional field) Proxied port is not a number");
-
- if (
- typeof jsonData.isProxied != "undefined" &&
- typeof jsonData.isProxied != "boolean"
- )
- throw new Error("(optional field) 'Is proxied' is not a boolean");
-
- if (!Array.isArray(jsonData.users)) throw new Error("Users is not an array");
-
- for (const userIndex in jsonData.users) {
- const user = jsonData.users[userIndex];
-
- if (typeof user.username != "string")
- throw new Error("Username is not a string, in users array");
- if (typeof user.password != "string")
- throw new Error("Password is not a string, in users array");
- }
-
- return {
- ip: jsonData.ip,
- port: jsonData.port,
-
- publicPort: jsonData.publicPort,
- isProxied: jsonData.isProxied,
-
- users: jsonData.users,
- };
-}
-
-export class PassyFireBackendProvider implements BackendBaseClass {
- state: "stopped" | "stopping" | "started" | "starting";
-
- clients: ConnectedClientExt[];
- proxies: ForwardRuleExt[];
- users: LoggedInUser[];
- logs: string[];
-
- options: BackendParsedProviderString;
- fastify: FastifyInstance;
-
- constructor(parameters: string) {
- this.logs = [];
- this.clients = [];
- this.proxies = [];
-
- this.state = "stopped";
- this.options = parseBackendProviderString(parameters);
-
- this.users = [];
- }
-
- async start(): Promise<boolean> {
- this.state = "starting";
-
- this.fastify = Fastify({
- logger: true,
- trustProxy: this.options.isProxied,
- });
-
- await this.fastify.register(fastifyWebsocket);
- route(this);
-
- this.fastify.get("/", { websocket: true }, (ws, req) =>
- requestHandler(this, ws, req),
- );
-
- await this.fastify.listen({
- port: this.options.port,
- host: this.options.ip,
- });
-
- this.state = "started";
-
- return true;
- }
-
- async stop(): Promise<boolean> {
- await this.fastify.close();
-
- this.users.splice(0, this.users.length);
- this.proxies.splice(0, this.proxies.length);
- this.clients.splice(0, this.clients.length);
-
- return true;
- }
-
- addConnection(
- sourceIP: string,
- sourcePort: number,
- destPort: number,
- protocol: "tcp" | "udp",
- ): void {
- const proxy: ForwardRuleExt = {
- sourceIP,
- sourcePort,
- destPort,
- protocol,
-
- userConfig: {},
- };
-
- for (const user of this.options.users) {
- proxy.userConfig[user.username] = generateRandomData();
- }
-
- this.proxies.push(proxy);
- }
-
- removeConnection(
- sourceIP: string,
- sourcePort: number,
- destPort: number,
- protocol: "tcp" | "udp",
- ): void {
- const connectionCheck = PassyFireBackendProvider.checkParametersConnection(
- sourceIP,
- sourcePort,
- destPort,
- protocol,
- );
- if (!connectionCheck.success) throw new Error(connectionCheck.message);
-
- const foundProxyEntry = this.proxies.find(
- i =>
- i.sourceIP == sourceIP &&
- i.sourcePort == sourcePort &&
- i.destPort == destPort,
- );
- if (!foundProxyEntry) return;
-
- this.proxies.splice(this.proxies.indexOf(foundProxyEntry), 1);
- return;
- }
-
- getAllConnections(): ConnectedClient[] {
- if (this.clients == null) return [];
- return this.clients;
- }
-
- static checkParametersConnection(
- sourceIP: string,
- sourcePort: number,
- destPort: number,
- protocol: "tcp" | "udp",
- ): ParameterReturnedValue {
- return {
- success: true,
- };
- }
-
- static checkParametersBackendInstance(data: string): ParameterReturnedValue {
- try {
- parseBackendProviderString(data);
- // @ts-expect-error: We write the function, and we know we're returning an error
- } catch (e: Error) {
- return {
- success: false,
- message: e.toString(),
- };
- }
-
- return {
- success: true,
- };
- }
-}
diff --git a/api/src/backendimpl/passyfire-reimpl/routes.ts b/api/src/backendimpl/passyfire-reimpl/routes.ts
deleted file mode 100644
index d814e12..0000000
--- a/api/src/backendimpl/passyfire-reimpl/routes.ts
+++ /dev/null
@@ -1,158 +0,0 @@
-import { generateRandomData } from "../../libs/generateRandom.js";
-import type { PassyFireBackendProvider } from "./index.js";
-
-export function route(instance: PassyFireBackendProvider) {
- const { fastify } = instance;
-
- const proxiedPort: number = instance.options.publicPort ?? 443;
-
- const unsupportedSpoofedRoutes: string[] = [
- "/api/v1/tunnels/add",
- "/api/v1/tunnels/edit",
- "/api/v1/tunnels/remove",
-
- // TODO (greysoh): Should we implement these? We have these for internal reasons. We could expose these /shrug
- "/api/v1/tunnels/start",
- "/api/v1/tunnels/stop",
-
- // Same scenario for this API.
- "/api/v1/users",
- "/api/v1/users/add",
- "/api/v1/users/remove",
- "/api/v1/users/enable",
- "/api/v1/users/disable",
- ];
-
- fastify.get("/api/v1/static/getScopes", () => {
- return {
- success: true,
- data: {
- users: {
- add: true,
- remove: true,
- get: true,
- getPasswords: true,
- },
- routes: {
- add: true,
- remove: true,
- start: true,
- stop: true,
- get: true,
- getPasswords: true,
- },
- },
- };
- });
-
- for (const spoofedRoute of unsupportedSpoofedRoutes) {
- fastify.post(spoofedRoute, (req, res) => {
- return res.status(403).send({
- error: "Invalid scope(s)",
- });
- });
- }
-
- fastify.post(
- "/api/v1/users/login",
- {
- schema: {
- body: {
- type: "object",
- required: ["username", "password"],
-
- properties: {
- username: { type: "string" },
- password: { type: "string" },
- },
- },
- },
- },
- (req, res) => {
- // @ts-expect-error: Fastify routes schema parsing is trustworthy, so we can "assume" invalid types
- const body: {
- username: string;
- password: string;
- } = req.body;
-
- if (
- !instance.options.users.find(
- i => i.username == body.username && i.password == body.password,
- )
- ) {
- return res.status(403).send({
- error: "Invalid username/password.",
- });
- }
-
- const token = generateRandomData();
-
- instance.users.push({
- username: body.username,
- token,
- });
-
- return {
- success: true,
- data: {
- token,
- },
- };
- },
- );
-
- fastify.post(
- "/api/v1/tunnels",
- {
- schema: {
- body: {
- type: "object",
- required: ["token"],
- properties: {
- token: { type: "string" },
- },
- },
- },
- },
- async (req, res) => {
- // @ts-expect-error: Fastify routes schema parsing is trustworthy, so we can "assume" invalid types
- const body: {
- token: string;
- } = req.body;
-
- const userData = instance.users.find(user => user.token == body.token);
-
- if (!userData)
- return res.status(403).send({
- error: "Invalid token",
- });
-
- // const host = req.hostname.substring(0, req.hostname.indexOf(":"));
- const unparsedPort = req.hostname.substring(
- req.hostname.indexOf(":") + 1,
- );
-
- // @ts-expect-error: parseInt(...) can take a number just fine, at least in Node.JS
- const port = parseInt(unparsedPort == "" ? proxiedPort : unparsedPort);
-
- // This protocol is so confusing. I'm sorry.
- res.send({
- success: true,
- data: instance.proxies.map(proxy => ({
- proxyUrlSettings: {
- host: "sameAs", // Makes pfC work (this is by design apparently)
- port,
- protocol: proxy.protocol.toUpperCase(),
- },
-
- dest: `${proxy.sourceIP}:${proxy.destPort}`,
- name: `${proxy.protocol.toUpperCase()} on ::${proxy.sourcePort} -> ::${proxy.destPort}`,
-
- passwords: [proxy.userConfig[userData.username]],
-
- running: true,
- })),
- });
- },
- );
-}
diff --git a/api/src/backendimpl/passyfire-reimpl/socket.ts b/api/src/backendimpl/passyfire-reimpl/socket.ts
deleted file mode 100644
index ee9607a..0000000
--- a/api/src/backendimpl/passyfire-reimpl/socket.ts
+++ /dev/null
@@ -1,140 +0,0 @@
-import dgram from "node:dgram";
-import net from "node:net";
-
-import type { WebSocket } from "@fastify/websocket";
-import type { FastifyRequest } from "fastify";
-
-import type { ConnectedClientExt, PassyFireBackendProvider } from "./index.js";
-
-// This code sucks because this protocol sucks BUUUT it works, and I don't wanna reinvent
-// the gosh darn wheel for (almost) no reason
-
-function authenticateSocket(
- instance: PassyFireBackendProvider,
- ws: WebSocket,
- message: string,
- state: ConnectedClientExt,
-): boolean {
- if (!message.startsWith("Accept: ")) {
- ws.send("400 Bad Request");
- return false;
- }
-
- const type = message.substring(message.indexOf(":") + 1).trim();
-
- if (type == "IsPassedWS") {
- ws.send("AcceptResponse IsPassedWS: true");
- } else if (type.startsWith("Bearer")) {
- const token = type.substring(type.indexOf("Bearer") + 7);
-
- for (const proxy of instance.proxies) {
- for (const username of Object.keys(proxy.userConfig)) {
- const currentToken = proxy.userConfig[username];
-
- if (token == currentToken) {
- state.connectionDetails = proxy;
- state.username = username;
- }
- }
- }
-
- if (state.connectionDetails && state.username) {
- ws.send("AcceptResponse Bearer: true");
- return true;
- } else {
- ws.send("AcceptResponse Bearer: false");
- }
- }
-
- return false;
-}
-
-export function requestHandler(
- instance: PassyFireBackendProvider,
- ws: WebSocket,
- req: FastifyRequest,
-) {
- let state: "authentication" | "data" = "authentication";
- let socket: dgram.Socket | net.Socket | undefined;
-
- // @ts-expect-error: FIXME because this is a mess
- const connectedClient: ConnectedClientExt = {};
-
- ws.on("close", () => {
- instance.clients.splice(
- instance.clients.indexOf(connectedClient as ConnectedClientExt),
- 1,
- );
- });
-
- ws.on("message", (rawData: ArrayBuffer) => {
- if (state == "authentication") {
- const data = rawData.toString();
-
- if (authenticateSocket(instance, ws, data, connectedClient)) {
- ws.send("AcceptResponse Bearer: true");
-
- connectedClient.ip = req.ip;
- connectedClient.port = req.socket.remotePort ?? -1;
-
- instance.clients.push(connectedClient);
-
- if (connectedClient.connectionDetails.protocol == "tcp") {
- socket = new net.Socket();
-
- socket.connect(
- connectedClient.connectionDetails.sourcePort,
- connectedClient.connectionDetails.sourceIP,
- );
-
- socket.on("connect", () => {
- state = "data";
-
- ws.send("InitProxy: Attempting to connect");
- ws.send("InitProxy: Connected");
- });
-
- socket.on("data", data => {
- ws.send(data);
- });
- } else if (connectedClient.connectionDetails.protocol == "udp") {
- socket = dgram.createSocket("udp4");
- state = "data";
-
- ws.send("InitProxy: Attempting to connect");
- ws.send("InitProxy: Connected");
-
- socket.on("message", (data, rinfo) => {
- if (
- rinfo.address != connectedClient.connectionDetails.sourceIP ||
- rinfo.port != connectedClient.connectionDetails.sourcePort
- )
- return;
- ws.send(data);
- });
- }
- }
- } else if (state == "data") {
- if (socket instanceof dgram.Socket) {
- const array = new Uint8Array(rawData);
-
- socket.send(
- array,
- connectedClient.connectionDetails.sourcePort,
- connectedClient.connectionDetails.sourceIP,
- err => {
- if (err) throw err;
- },
- );
- } else if (socket instanceof net.Socket) {
- const array = new Uint8Array(rawData);
-
- socket.write(array);
- }
- } else {
- throw new Error(
- `Whooops, our WebSocket reached an unsupported state: '${state}'`,
- );
- }
- });
-}
diff --git a/api/src/backendimpl/ssh.ts b/api/src/backendimpl/ssh.ts
deleted file mode 100644
index 7866924..0000000
--- a/api/src/backendimpl/ssh.ts
+++ /dev/null
@@ -1,331 +0,0 @@
-import { NodeSSH } from "node-ssh";
-import { Socket } from "node:net";
-
-import type {
- BackendBaseClass,
- ForwardRule,
- ConnectedClient,
- ParameterReturnedValue,
-} from "./base.js";
-
-import {
- TcpConnectionDetails,
- AcceptConnection,
- ClientChannel,
- RejectConnection,
-} from "ssh2";
-
-type ForwardRuleExt = ForwardRule & {
- enabled: boolean;
-};
-
-// Fight me (for better naming)
-type BackendParsedProviderString = {
- ip: string;
- port: number;
-
- username: string;
- privateKey: string;
-
- listenOnIPs: string[];
-};
-
-function parseBackendProviderString(data: string): BackendParsedProviderString {
- try {
- JSON.parse(data);
- } catch (e) {
- throw new Error("Payload body is not JSON");
- }
-
- const jsonData = JSON.parse(data);
-
- if (typeof jsonData.ip != "string") {
- throw new Error("IP field is not a string");
- }
-
- if (typeof jsonData.port != "number") {
- throw new Error("Port is not a number");
- }
-
- if (typeof jsonData.username != "string") {
- throw new Error("Username is not a string");
- }
-
- if (typeof jsonData.privateKey != "string") {
- throw new Error("Private key is not a string");
- }
-
- let listenOnIPs: string[] = [];
-
- if (!Array.isArray(jsonData.listenOnIPs)) {
- listenOnIPs.push("0.0.0.0");
- } else {
- listenOnIPs = jsonData.listenOnIPs;
- }
-
- return {
- ip: jsonData.ip,
- port: jsonData.port,
-
- username: jsonData.username,
- privateKey: jsonData.privateKey,
-
- listenOnIPs,
- };
-}
-
-export class SSHBackendProvider implements BackendBaseClass {
- state: "stopped" | "stopping" | "started" | "starting";
-
- clients: ConnectedClient[];
- proxies: ForwardRuleExt[];
- logs: string[];
-
- sshInstance: NodeSSH;
- options: BackendParsedProviderString;
-
- constructor(parameters: string) {
- this.logs = [];
- this.proxies = [];
- this.clients = [];
-
- this.options = parseBackendProviderString(parameters);
-
- this.state = "stopped";
- }
-
- async start(): Promise<boolean> {
- this.state = "starting";
- this.logs.push("Starting SSHBackendProvider...");
-
- if (this.sshInstance) {
- this.sshInstance.dispose();
- }
-
- this.sshInstance = new NodeSSH();
-
- try {
- await this.sshInstance.connect({
- host: this.options.ip,
- port: this.options.port,
-
- username: this.options.username,
- privateKey: this.options.privateKey,
- });
- } catch (e) {
- this.logs.push(`Failed to start SSHBackendProvider! Error: '${e}'`);
- this.state = "stopped";
-
- // @ts-expect-error: We know that stuff will be initialized in order, so this will be safe
- this.sshInstance = null;
-
- return false;
- }
-
- if (this.sshInstance.connection) {
- this.sshInstance.connection.on("end", async () => {
- if (this.state != "started") return;
- this.logs.push("We disconnected from the SSH server. Restarting...");
-
- // Create a new array from the existing list of proxies, so we have a backup of the proxy list before
- // we wipe the list of all proxies and clients (as we're disconnected anyways)
- const proxies = Array.from(this.proxies);
-
- this.proxies.splice(0, this.proxies.length);
- this.clients.splice(0, this.clients.length);
-
- await this.start();
-
- if (this.state != "started") return;
-
- for (const proxy of proxies) {
- if (!proxy.enabled) continue;
-
- this.addConnection(
- proxy.sourceIP,
- proxy.sourcePort,
- proxy.destPort,
- "tcp",
- );
- }
- });
- }
-
- this.state = "started";
- this.logs.push("Successfully started SSHBackendProvider.");
-
- return true;
- }
-
- async stop(): Promise<boolean> {
- this.state = "stopping";
- this.logs.push("Stopping SSHBackendProvider...");
-
- this.proxies.splice(0, this.proxies.length);
-
- this.sshInstance.dispose();
-
- // @ts-expect-error: We know that stuff will be initialized in order, so this will be safe
- this.sshInstance = null;
-
- this.logs.push("Successfully stopped SSHBackendProvider.");
- this.state = "stopped";
-
- return true;
- }
-
- addConnection(
- sourceIP: string,
- sourcePort: number,
- destPort: number,
- protocol: "tcp" | "udp",
- ): void {
- const connectionCheck = SSHBackendProvider.checkParametersConnection(
- sourceIP,
- sourcePort,
- destPort,
- protocol,
- );
-
- if (!connectionCheck.success) throw new Error(connectionCheck.message);
-
- const foundProxyEntry = this.proxies.find(
- i =>
- i.sourceIP == sourceIP &&
- i.sourcePort == sourcePort &&
- i.destPort == destPort,
- );
-
- if (foundProxyEntry) return;
-
- const connCallback = (
- info: TcpConnectionDetails,
- accept: AcceptConnection<ClientChannel>,
- reject: RejectConnection,
- ) => {
- const foundProxyEntry = this.proxies.find(
- i =>
- i.sourceIP == sourceIP &&
- i.sourcePort == sourcePort &&
- i.destPort == destPort,
- );
-
- if (!foundProxyEntry || !foundProxyEntry.enabled) return reject();
-
- const client: ConnectedClient = {
- ip: info.srcIP,
- port: info.srcPort,
-
- connectionDetails: foundProxyEntry,
- };
-
- this.clients.push(client);
-
- const srcConn = new Socket();
-
- srcConn.connect({
- host: sourceIP,
- port: sourcePort,
- });
-
- // Why is this so confusing
- const destConn = accept();
-
- destConn.addListener("data", (chunk: Uint8Array) => {
- srcConn.write(chunk);
- });
-
- destConn.addListener("end", () => {
- this.clients.splice(this.clients.indexOf(client), 1);
- srcConn.end();
- });
-
- srcConn.on("data", data => {
- destConn.write(data);
- });
-
- srcConn.on("end", () => {
- this.clients.splice(this.clients.indexOf(client), 1);
- destConn.end();
- });
- };
-
- for (const ip of this.options.listenOnIPs) {
- this.sshInstance.forwardIn(ip, destPort, connCallback);
- }
-
- this.proxies.push({
- sourceIP,
- sourcePort,
- destPort,
-
- enabled: true,
- });
- }
-
- removeConnection(
- sourceIP: string,
- sourcePort: number,
- destPort: number,
- protocol: "tcp" | "udp",
- ): void {
- const connectionCheck = SSHBackendProvider.checkParametersConnection(
- sourceIP,
- sourcePort,
- destPort,
- protocol,
- );
-
- if (!connectionCheck.success) throw new Error(connectionCheck.message);
-
- const foundProxyEntry = this.proxies.find(
- i =>
- i.sourceIP == sourceIP &&
- i.sourcePort == sourcePort &&
- i.destPort == destPort,
- );
-
- if (!foundProxyEntry) return;
-
- foundProxyEntry.enabled = false;
- }
-
- getAllConnections(): ConnectedClient[] {
- return this.clients;
- }
-
- static checkParametersConnection(
- sourceIP: string,
- sourcePort: number,
- destPort: number,
- protocol: "tcp" | "udp",
- ): ParameterReturnedValue {
- if (protocol == "udp") {
- return {
- success: false,
- message:
- "SSH does not support UDP tunneling! Please use something like PortCopier instead (if it gets done)",
- };
- }
-
- return {
- success: true,
- };
- }
-
- static checkParametersBackendInstance(data: string): ParameterReturnedValue {
- try {
- parseBackendProviderString(data);
- // @ts-expect-error: We write the function, and we know we're returning an error
- } catch (e: Error) {
- return {
- success: false,
- message: e.toString(),
- };
- }
-
- return {
- success: true,
- };
- }
-}
diff --git a/api/src/index.ts b/api/src/index.ts
deleted file mode 100644
index a411f6a..0000000
--- a/api/src/index.ts
+++ /dev/null
@@ -1,140 +0,0 @@
-import process from "node:process";
-
-import { PrismaClient } from "@prisma/client";
-import Fastify from "fastify";
-
-import type {
- ServerOptions,
- SessionToken,
- RouteOptions,
-} from "./libs/types.js";
-
-import type { BackendBaseClass } from "./backendimpl/base.js";
-
-import { route as getPermissions } from "./routes/getPermissions.js";
-
-import { route as backendCreate } from "./routes/backends/create.js";
-import { route as backendRemove } from "./routes/backends/remove.js";
-import { route as backendLookup } from "./routes/backends/lookup.js";
-
-import { route as forwardConnections } from "./routes/forward/connections.js";
-import { route as forwardCreate } from "./routes/forward/create.js";
-import { route as forwardRemove } from "./routes/forward/remove.js";
-import { route as forwardLookup } from "./routes/forward/lookup.js";
-import { route as forwardStart } from "./routes/forward/start.js";
-import { route as forwardStop } from "./routes/forward/stop.js";
-
-import { route as userCreate } from "./routes/user/create.js";
-import { route as userRemove } from "./routes/user/remove.js";
-import { route as userLookup } from "./routes/user/lookup.js";
-import { route as userLogin } from "./routes/user/login.js";
-
-import { backendInit } from "./libs/backendInit.js";
-
-const prisma = new PrismaClient();
-
-const isSignupEnabled = Boolean(process.env.IS_SIGNUP_ENABLED);
-const unsafeAdminSignup = Boolean(process.env.UNSAFE_ADMIN_SIGNUP);
-
-const noUsersCheck = (await prisma.user.count()) == 0;
-
-if (unsafeAdminSignup) {
- console.error(
- "WARNING: You have admin sign up on! This means that anyone that signs up will have admin rights!",
- );
-}
-
-const serverOptions: ServerOptions = {
- isSignupEnabled: isSignupEnabled ? true : noUsersCheck,
- isSignupAsAdminEnabled: unsafeAdminSignup ? true : noUsersCheck,
-
- allowUnsafeGlobalTokens: process.env.NODE_ENV != "production",
-};
-
-const sessionTokens: Record<number, SessionToken[]> = {};
-const backends: Record<number, BackendBaseClass> = {};
-
-const loggerEnv = {
- development: {
- transport: {
- target: "pino-pretty",
- options: {
- translateTime: "HH:MM:ss Z",
- ignore: "pid,hostname,time",
- },
- },
- },
- production: true,
- test: false,
-};
-
-const fastify = Fastify({
- logger:
- process.env.NODE_ENV == "production"
- ? loggerEnv.production
- : loggerEnv.development,
- trustProxy: Boolean(process.env.IS_BEHIND_PROXY),
-});
-
-const routeOptions: RouteOptions = {
- fastify: fastify,
- prisma: prisma,
- tokens: sessionTokens,
- options: serverOptions,
-
- backends: backends,
-};
-
-fastify.log.info("Initializing forwarding rules...");
-
-const createdBackends = await prisma.desinationProvider.findMany();
-
-const logWrapper = (arg: string) => fastify.log.info(arg);
-const errorWrapper = (arg: string) => fastify.log.error(arg);
-
-for (const backend of createdBackends) {
- fastify.log.info(
- `Running init steps for ID '${backend.id}' (${backend.name})`,
- );
-
- const init = await backendInit(
- backend,
- backends,
- prisma,
- logWrapper,
- errorWrapper,
- );
-
- if (init) fastify.log.info("Init successful.");
-}
-
-fastify.log.info("Done.");
-
-getPermissions(routeOptions);
-
-backendCreate(routeOptions);
-backendRemove(routeOptions);
-backendLookup(routeOptions);
-
-forwardConnections(routeOptions);
-forwardCreate(routeOptions);
-forwardRemove(routeOptions);
-forwardLookup(routeOptions);
-forwardStart(routeOptions);
-forwardStop(routeOptions);
-
-userCreate(routeOptions);
-userRemove(routeOptions);
-userLookup(routeOptions);
-userLogin(routeOptions);
-
-// Run the server!
-try {
- await fastify.listen({
- port: 3000,
- host: process.env.NODE_ENV == "production" ? "0.0.0.0" : "127.0.0.1",
- });
-} catch (err) {
- fastify.log.error(err);
- process.exit(1);
-}
diff --git a/api/src/libs/backendInit.ts b/api/src/libs/backendInit.ts
deleted file mode 100644
index 36c8e0b..0000000
--- a/api/src/libs/backendInit.ts
+++ /dev/null
@@ -1,84 +0,0 @@
-import { format } from "node:util";
-
-import type { PrismaClient } from "@prisma/client";
-
-import { backendProviders } from "../backendimpl/index.js";
-import { BackendBaseClass } from "../backendimpl/base.js";
-
-type Backend = {
- id: number;
- name: string;
- description: string | null;
- backend: string;
- connectionDetails: string;
-};
-
-export async function backendInit(
- backend: Backend,
- backends: Record<number, BackendBaseClass>,
- prisma: PrismaClient,
- logger?: (arg: string) => void,
- errorOut?: (arg: string) => void,
-): Promise<boolean> {
- const log = (...args: string[]) =>
- logger ? logger(format(...args)) : console.log(...args);
-
- const error = (...args: string[]) =>
- errorOut ? errorOut(format(...args)) : log(...args);
-
- const ourProvider = backendProviders[backend.backend];
-
- if (!ourProvider) {
- error(" - Error: Invalid backend recieved!");
-
- // Prevent crashes when we don't receive a backend
- backends[backend.id] = new BackendBaseClass("");
-
- backends[backend.id].logs.push("** Failed To Create Backend **");
-
- backends[backend.id].logs.push(
- "Reason: Invalid backend recieved (couldn't find the backend to use!)",
- );
-
- return false;
- }
-
- log(" - Initializing backend...");
-
- backends[backend.id] = new ourProvider(backend.connectionDetails);
- const ourBackend = backends[backend.id];
-
- if (!(await ourBackend.start())) {
- error(" - Error initializing backend!");
- error(" - " + ourBackend.logs.join("\n - "));
-
- return false;
- }
-
- log(" - Initializing clients...");
-
- const clients = await prisma.forwardRule.findMany({
- where: {
- destProviderID: backend.id,
- enabled: true,
- },
- });
-
- for (const client of clients) {
- if (client.protocol != "tcp" && client.protocol != "udp") {
- error(
- ` - Error: Client with ID of '${client.id}' has an invalid protocol! (must be either TCP or UDP)`,
- );
- continue;
- }
-
- ourBackend.addConnection(
- client.sourceIP,
- client.sourcePort,
- client.destPort,
- client.protocol,
- );
- }
-
- return true;
-}
diff --git a/api/src/libs/generateRandom.ts b/api/src/libs/generateRandom.ts
deleted file mode 100644
index 987986e..0000000
--- a/api/src/libs/generateRandom.ts
+++ /dev/null
@@ -1,22 +0,0 @@
-function getRandomInt(min: number, max: number): number {
- const minCeiled = Math.ceil(min);
- const maxFloored = Math.floor(max);
- return Math.floor(Math.random() * (maxFloored - minCeiled) + minCeiled); // The maximum is exclusive and the minimum is inclusive
-}
-
-export function generateRandomData(length: number = 128): string {
- let newString = "";
-
- for (let i = 0; i < length; i += 2) {
- const randomNumber = getRandomInt(0, 255);
-
- if (randomNumber == 0) {
- i -= 2;
- continue;
- }
-
- newString += randomNumber.toString(16);
- }
-
- return newString;
-}
diff --git a/api/src/libs/permissions.ts b/api/src/libs/permissions.ts
deleted file mode 100644
index d0b0601..0000000
--- a/api/src/libs/permissions.ts
+++ /dev/null
@@ -1,110 +0,0 @@
-import type { PrismaClient } from "@prisma/client";
-import type { SessionToken } from "./types.js";
-
-export const permissionListDisabled: Record<string, boolean> = {
- "routes.add": false,
- "routes.remove": false,
- "routes.start": false,
- "routes.stop": false,
- "routes.edit": false,
- "routes.visible": false,
- "routes.visibleConn": false,
-
- "backends.add": false,
- "backends.remove": false,
- "backends.start": false,
- "backends.stop": false,
- "backends.edit": false,
- "backends.visible": false,
- "backends.secretVis": false,
-
- "permissions.see": false,
-
- "users.add": false,
- "users.remove": false,
- "users.lookup": false,
- "users.edit": false,
-};
-
-// FIXME: This solution fucking sucks.
-export const permissionListEnabled: Record<string, boolean> = JSON.parse(
- JSON.stringify(permissionListDisabled),
-);
-
-for (const index of Object.keys(permissionListEnabled)) {
- permissionListEnabled[index] = true;
-}
-
-export async function hasPermission(
- permissionList: string[],
- uid: number,
- prisma: PrismaClient,
-): Promise {
- for (const permission of permissionList) {
- const permissionNode = await prisma.permission.findFirst({
- where: {
- userID: uid,
- permission,
- },
- });
-
- if (!permissionNode || !permissionNode.has) return false;
- }
-
- return true;
-}
-
-export async function getUID(
- token: string,
- tokens: Record,
- prisma: PrismaClient,
-): Promise {
- let userID = -1;
-
- // Look up in our currently authenticated users
- for (const otherTokenKey of Object.keys(tokens)) {
- const otherTokenList = tokens[parseInt(otherTokenKey)];
-
- for (const otherTokenIndex in otherTokenList) {
- const otherToken = otherTokenList[otherTokenIndex];
-
- if (otherToken.token == token) {
- if (
- otherToken.expiresAt <
- otherToken.createdAt + (otherToken.createdAt - Date.now())
- ) {
- otherTokenList.splice(parseInt(otherTokenIndex), 1);
- continue;
- } else {
- userID = parseInt(otherTokenKey);
- }
- }
- }
- }
-
- // Fine, we'll look up for global tokens...
- // FIXME: Could this be more efficient? IDs are sequential in SQL I think
- if (userID == -1) {
- const allUsers = await prisma.user.findMany({
- where: {
- isRootServiceAccount: true,
- },
- });
-
- for (const user of allUsers) {
- if (user.rootToken == token) userID = user.id;
- }
- }
-
- return userID;
-}
-
-export async function hasPermissionByToken(
- permissionList: string[],
- token: string,
- tokens: Record,
- prisma: PrismaClient,
-): Promise {
- const userID = await getUID(token, tokens, prisma);
- return await hasPermission(permissionList, userID, prisma);
-}
diff --git a/api/src/libs/types.ts b/api/src/libs/types.ts
deleted file mode 100644
index 7638d76..0000000
--- a/api/src/libs/types.ts
+++ /dev/null
@@ -1,28 +0,0 @@
-import type { PrismaClient } from "@prisma/client";
-import type { FastifyInstance } from "fastify";
-
-import type { BackendBaseClass } from "../backendimpl/base.js";
-
-export type ServerOptions = {
- isSignupEnabled: boolean;
- isSignupAsAdminEnabled: boolean;
-
- allowUnsafeGlobalTokens: boolean;
-};
-
-// NOTE: Someone should probably use Redis for this, but this is fine...
-export type SessionToken = {
- createdAt: number;
- expiresAt: number; // Should be (createdAt + (30 minutes))
-
- token: string;
-};
-
-export type RouteOptions = {
- fastify: FastifyInstance;
- prisma: PrismaClient;
- tokens: Record;
-
- options: ServerOptions;
- backends: Record;
-};
diff --git a/api/src/routes/ROUTE_PLAN.md b/api/src/routes/ROUTE_PLAN.md
deleted file mode 100644
index 6b7c0f4..0000000
--- a/api/src/routes/ROUTE_PLAN.md
+++ /dev/null
@@ -1,17 +0,0 @@
-# Route Plan
-- [x] /api/v1/users/create
-- [x] /api/v1/users/login
-- [x] /api/v1/users/remove
-- [ ] /api/v1/users/modify
-- [x] /api/v1/users/lookup
-- [x] /api/v1/backends/create
-- [x] /api/v1/backends/remove
-- [ ] /api/v1/backends/modify
-- [x] /api/v1/backends/lookup
-- [x] /api/v1/routes/create
-- [x] /api/v1/routes/remove
-- [ ] /api/v1/routes/modify
-- [x] /api/v1/routes/lookup
-- [ ] /api/v1/routes/start
-- [ ] /api/v1/routes/stop
-- [x] /api/v1/getPermissions
\ No newline at end of file
diff --git a/api/src/routes/backends/create.ts b/api/src/routes/backends/create.ts
deleted file mode 100644
index 0801572..0000000
--- a/api/src/routes/backends/create.ts
+++ /dev/null
@@ -1,107 +0,0 @@
-import { hasPermissionByToken } from "../../libs/permissions.js";
-import type { RouteOptions } from "../../libs/types.js";
-
-import { backendProviders } from "../../backendimpl/index.js";
-import { backendInit } from "../../libs/backendInit.js";
-
-export function route(routeOptions: RouteOptions) {
- const { fastify, prisma, tokens, backends } = routeOptions;
-
- const logWrapper = (arg: string) => fastify.log.info(arg);
- const errorWrapper = (arg: string) => fastify.log.error(arg);
-
- function hasPermission(
- token: string,
- permissionList: string[],
- ): Promise {
- return hasPermissionByToken(permissionList, token, tokens, prisma);
- }
-
- /**
- * Creates a new backend to use
- */
- fastify.post(
- "/api/v1/backends/create",
- {
- schema: {
- body: {
- type: "object",
- required: ["token", "name", "backend", "connectionDetails"],
-
- properties: {
- token: { type: "string" },
- name: { type: "string" },
- description: { type: "string" },
- backend: { type: "string" },
- connectionDetails: { type: "string" },
- },
- },
- },
- },
- async (req, res) => {
- // @ts-expect-error: Fastify routes schema parsing is trustworthy, so we can "assume" invalid types
- const body: {
- token: string;
- name: string;
- description?: string;
- connectionDetails: string;
- backend: string;
- } = req.body;
-
- if (!(await hasPermission(body.token, ["backends.add"]))) {
- return res.status(403).send({
- error: "Unauthorized",
- });
- }
-
- if (!backendProviders[body.backend]) {
- return res.status(400).send({
- error: "Unsupported backend!",
- });
- }
-
- const connectionDetailsValidityCheck = backendProviders[
- body.backend
- ].checkParametersBackendInstance(body.connectionDetails);
-
- if (!connectionDetailsValidityCheck.success) {
- return res.status(400).send({
- error:
- connectionDetailsValidityCheck.message ??
- "Unknown error while attempting to parse connectionDetails (it's on your side)",
- });
- }
-
- const backend = await prisma.desinationProvider.create({
- data: {
- name: body.name,
- description: body.description,
-
- backend: body.backend,
- connectionDetails: body.connectionDetails,
- },
- });
-
- const init = await backendInit(
- backend,
- backends,
- prisma,
- logWrapper,
- errorWrapper,
- );
-
- if (!init) {
- // TODO: better error code
- return res.status(504).send({
- error: "Backend is created, but failed to initalize correctly",
- id: backend.id,
- });
- }
-
- return {
- success: true,
- id: backend.id,
- };
- },
- );
-}
diff --git a/api/src/routes/backends/lookup.ts b/api/src/routes/backends/lookup.ts
deleted file mode 100644
index a387ffc..0000000
--- a/api/src/routes/backends/lookup.ts
+++ /dev/null
@@ -1,84 +0,0 @@
-import { hasPermissionByToken } from "../../libs/permissions.js";
-import type { RouteOptions } from "../../libs/types.js";
-
-export function route(routeOptions: RouteOptions) {
- const { fastify, prisma, tokens, backends } = routeOptions;
-
- function hasPermission(
- token: string,
- permissionList: string[],
- ): Promise {
- return hasPermissionByToken(permissionList, token, tokens, prisma);
- }
-
- /**
- * Creates a new route to use
- */
- fastify.post(
- "/api/v1/backends/lookup",
- {
- schema: {
- body: {
- type: "object",
- required: ["token"],
-
- properties: {
- token: { type: "string" },
- id: { type: "number" },
- name: { type: "string" },
- description: { type: "string" },
- backend: { type: "string" },
- },
- },
- },
- },
- async (req, res) => {
- // @ts-expect-error: Fastify routes schema parsing is trustworthy, so we can "assume" invalid types
- const body: {
- token: string;
- id?: number;
- name?: string;
- description?: string;
- backend?: string;
- } = req.body;
-
- if (
- !(await hasPermission(body.token, [
- "backends.visible", // wtf?
- ]))
- ) {
- return res.status(403).send({
- error: "Unauthorized",
- });
- }
-
- const canSeeSecrets = await hasPermission(body.token, [
- "backends.secretVis",
- ]);
-
- const prismaBackends = await prisma.desinationProvider.findMany({
- where: {
- id: body.id,
- name: body.name,
- description: body.description,
- backend: body.backend,
- },
- });
-
- return {
- success: true,
- data: prismaBackends.map(i => ({
- id: i.id,
-
- name: i.name,
- description: i.description,
-
- backend: i.backend,
- connectionDetails: canSeeSecrets ? i.connectionDetails : "",
-
- logs: backends[i.id].logs,
- })),
- };
- },
- );
-}
diff --git a/api/src/routes/backends/remove.ts b/api/src/routes/backends/remove.ts
deleted file mode 100644
index 711286b..0000000
--- a/api/src/routes/backends/remove.ts
+++ /dev/null
@@ -1,71 +0,0 @@
-import { hasPermissionByToken } from "../../libs/permissions.js";
-import type { RouteOptions } from "../../libs/types.js";
-
-export function route(routeOptions: RouteOptions) {
- const { fastify, prisma, tokens, backends } = routeOptions;
-
- function hasPermission(
- token: string,
- permissionList: string[],
- ): Promise {
- return hasPermissionByToken(permissionList, token, tokens, prisma);
- }
-
- /**
- * Creates a new route to use
- */
- fastify.post(
- "/api/v1/backends/remove",
- {
- schema: {
- body: {
- type: "object",
- required: ["token", "id"],
-
- properties: {
- token: { type: "string" },
- id: { type: "number" },
- },
- },
- },
- },
- async (req, res) => {
- // @ts-expect-error: Fastify routes schema parsing is trustworthy, so we can "assume" invalid types
- const body: {
- token: string;
- id: number;
- } = req.body;
-
- if (!(await hasPermission(body.token, ["backends.remove"]))) {
- return res.status(403).send({
- error: "Unauthorized",
- });
- }
-
- if (!backends[body.id]) {
- return res.status(400).send({
- error: "Backend not found",
- });
- }
-
- // Unload the backend
- if (!(await backends[body.id].stop())) {
- return res.status(400).send({
- error: "Failed to stop backend! Please report this issue.",
- });
- }
-
- delete backends[body.id];
-
- await prisma.desinationProvider.delete({
- where: {
- id: body.id,
- },
- });
-
- return {
- success: true,
- };
- },
- );
-}
diff --git a/api/src/routes/forward/connections.ts b/api/src/routes/forward/connections.ts
deleted file mode 100644
index 6358ead..0000000
--- a/api/src/routes/forward/connections.ts
+++ /dev/null
@@ -1,72 +0,0 @@
-import { hasPermissionByToken } from "../../libs/permissions.js";
-import type { RouteOptions } from "../../libs/types.js";
-
-export function route(routeOptions: RouteOptions) {
- const { fastify, prisma, tokens, backends } = routeOptions;
-
- function hasPermission(
- token: string,
- permissionList: string[],
- ): Promise {
- return hasPermissionByToken(permissionList, token, tokens, prisma);
- }
-
- fastify.post(
- "/api/v1/forward/connections",
- {
- schema: {
- body: {
- type: "object",
- required: ["token", "id"],
-
- properties: {
- token: { type: "string" },
- id: { type: "number" },
- },
- },
- },
- },
- async (req, res) => {
- // @ts-expect-error: Fastify routes schema parsing is trustworthy, so we can "assume" invalid types
- const body: {
- token: string;
- id: number;
- } = req.body;
-
- if (!(await hasPermission(body.token, ["routes.visibleConn"]))) {
- return res.status(403).send({
- error: "Unauthorized",
- });
- }
-
- const forward = await prisma.forwardRule.findUnique({
- where: {
- id: body.id,
- },
- });
-
- if (!forward) {
- return res.status(400).send({
- error: "Could not find forward entry",
- });
- }
-
- if (!backends[forward.destProviderID]) {
- return res.status(400).send({
- error: "Backend not found",
- });
- }
-
- return {
- success: true,
- data: backends[forward.destProviderID].getAllConnections().filter(i => {
- return (
- i.connectionDetails.sourceIP == forward.sourceIP &&
- i.connectionDetails.sourcePort == forward.sourcePort &&
- i.connectionDetails.destPort == forward.destPort
- );
- }),
- };
- },
- );
-}
diff --git a/api/src/routes/forward/create.ts b/api/src/routes/forward/create.ts
deleted file mode 100644
index 2363416..0000000
--- a/api/src/routes/forward/create.ts
+++ /dev/null
@@ -1,119 +0,0 @@
-import { hasPermissionByToken } from "../../libs/permissions.js";
-import type { RouteOptions } from "../../libs/types.js";
-
-export function route(routeOptions: RouteOptions) {
- const { fastify, prisma, tokens } = routeOptions;
-
- function hasPermission(
- token: string,
- permissionList: string[],
- ): Promise {
- return hasPermissionByToken(permissionList, token, tokens, prisma);
- }
-
- /**
- * Creates a new route to use
- */
- fastify.post(
- "/api/v1/forward/create",
- {
- schema: {
- body: {
- type: "object",
- required: [
- "token",
- "name",
- "protocol",
- "sourceIP",
- "sourcePort",
- "destinationPort",
- "providerID",
- ],
-
- properties: {
- token: { type: "string" },
-
- name: { type: "string" },
- description: { type: "string" },
-
- protocol: { type: "string" },
-
- sourceIP: { type: "string" },
- sourcePort: { type: "number" },
-
- destinationPort: { type: "number" },
-
- providerID: { type: "number" },
- autoStart: { type: "boolean" },
- },
- },
- },
- },
- async (req, res) => {
- // @ts-expect-error: Fastify routes schema parsing is trustworthy, so we can "assume" invalid types
- const body: {
- token: string;
-
- name: string;
- description?: string;
-
- protocol: "tcp" | "udp";
-
- sourceIP: string;
- sourcePort: number;
-
- destinationPort: number;
-
- providerID: number;
-
- autoStart?: boolean;
- } = req.body;
-
- if (body.protocol != "tcp" && body.protocol != "udp") {
- return res.status(400).send({
- error: "Body protocol field must be either tcp or udp",
- });
- }
-
- if (!(await hasPermission(body.token, ["routes.add"]))) {
- return res.status(403).send({
- error: "Unauthorized",
- });
- }
-
- const lookupIDForDestProvider =
- await prisma.desinationProvider.findUnique({
- where: {
- id: body.providerID,
- },
- });
-
- if (!lookupIDForDestProvider)
- return res.status(400).send({
- error: "Could not find provider",
- });
-
- const forwardRule = await prisma.forwardRule.create({
- data: {
- name: body.name,
- description: body.description,
-
- protocol: body.protocol,
-
- sourceIP: body.sourceIP,
- sourcePort: body.sourcePort,
-
- destPort: body.destinationPort,
- destProviderID: body.providerID,
-
- enabled: Boolean(body.autoStart),
- },
- });
-
- return {
- success: true,
- id: forwardRule.id,
- };
- },
- );
-}
diff --git a/api/src/routes/forward/lookup.ts b/api/src/routes/forward/lookup.ts
deleted file mode 100644
index 3b87c36..0000000
--- a/api/src/routes/forward/lookup.ts
+++ /dev/null
@@ -1,113 +0,0 @@
-import { hasPermissionByToken } from "../../libs/permissions.js";
-import type { RouteOptions } from "../../libs/types.js";
-
-export function route(routeOptions: RouteOptions) {
- const { fastify, prisma, tokens } = routeOptions;
-
- function hasPermission(
- token: string,
- permissionList: string[],
- ): Promise {
- return hasPermissionByToken(permissionList, token, tokens, prisma);
- }
-
- /**
- * Creates a new route to use
- */
- fastify.post(
- "/api/v1/forward/lookup",
- {
- schema: {
- body: {
- type: "object",
- required: ["token"],
-
- properties: {
- token: { type: "string" },
- id: { type: "number" },
-
- name: { type: "string" },
- protocol: { type: "string" },
- description: { type: "string" },
-
- sourceIP: { type: "string" },
- sourcePort: { type: "number" },
- destPort: { type: "number" },
-
- providerID: { type: "number" },
- autoStart: { type: "boolean" },
- },
- },
- },
- },
- async (req, res) => {
- // @ts-expect-error: Fastify routes schema parsing is trustworthy, so we can "assume" invalid types
- const body: {
- token: string;
-
- id?: number;
- name?: string;
- description?: string;
-
- protocol?: "tcp" | "udp";
-
- sourceIP?: string;
- sourcePort?: number;
-
- destinationPort?: number;
-
- providerID?: number;
- autoStart?: boolean;
- } = req.body;
-
- if (body.protocol && body.protocol != "tcp" && body.protocol != "udp") {
- return res.status(400).send({
- error: "Protocol specified in body must be either 'tcp' or 'udp'",
- });
- }
-
- if (
- !(await hasPermission(body.token, [
- "routes.visible", // wtf?
- ]))
- ) {
- return res.status(403).send({
- error: "Unauthorized",
- });
- }
-
- const forwardRules = await prisma.forwardRule.findMany({
- where: {
- id: body.id,
- name: body.name,
- description: body.description,
-
- sourceIP: body.sourceIP,
- sourcePort: body.sourcePort,
-
- destPort: body.destinationPort,
-
- destProviderID: body.providerID,
- enabled: body.autoStart,
- },
- });
-
- return {
- success: true,
- data: forwardRules.map(i => ({
- id: i.id,
- name: i.name,
- description: i.description,
-
- sourceIP: i.sourceIP,
- sourcePort: i.sourcePort,
-
- destPort: i.destPort,
-
- providerID: i.destProviderID,
- autoStart: i.enabled, // TODO: Add enabled flag in here to see if we're running or not
- })),
- };
- },
- );
-}
diff --git a/api/src/routes/forward/remove.ts b/api/src/routes/forward/remove.ts
deleted file mode 100644
index 264b82a..0000000
--- a/api/src/routes/forward/remove.ts
+++ /dev/null
@@ -1,56 +0,0 @@
-import { hasPermissionByToken } from "../../libs/permissions.js";
-import type { RouteOptions } from "../../libs/types.js";
-
-export function route(routeOptions: RouteOptions) {
- const { fastify, prisma, tokens } = routeOptions;
-
- function hasPermission(
- token: string,
- permissionList: string[],
- ): Promise {
- return hasPermissionByToken(permissionList, token, tokens, prisma);
- }
-
- /**
- * Creates a new route to use
- */
- fastify.post(
- "/api/v1/forward/remove",
- {
- schema: {
- body: {
- type: "object",
- required: ["token", "id"],
-
- properties: {
- token: { type: "string" },
- id: { type: "number" },
- },
- },
- },
- },
- async (req, res) => {
- // @ts-expect-error: Fastify routes schema parsing is trustworthy, so we can "assume" invalid types
- const body: {
- token: string;
- id: number;
- } = req.body;
-
- if (!(await hasPermission(body.token, ["routes.remove"]))) {
- return res.status(403).send({
- error: "Unauthorized",
- });
- }
-
- await prisma.forwardRule.delete({
- where: {
- id: body.id,
- },
- });
-
- return {
- success: true,
- };
- },
- );
-}
diff --git a/api/src/routes/forward/start.ts b/api/src/routes/forward/start.ts
deleted file mode 100644
index fea55fa..0000000
--- a/api/src/routes/forward/start.ts
+++ /dev/null
@@ -1,76 +0,0 @@
-import { hasPermissionByToken } from "../../libs/permissions.js";
-import type { RouteOptions } from "../../libs/types.js";
-
-export function route(routeOptions: RouteOptions) {
- const { fastify, prisma, tokens, backends } = routeOptions;
-
- function hasPermission(
- token: string,
- permissionList: string[],
- ): Promise {
- return hasPermissionByToken(permissionList, token, tokens, prisma);
- }
-
- /**
- * Creates a new route to use
- */
- fastify.post(
- "/api/v1/forward/start",
- {
- schema: {
- body: {
- type: "object",
- required: ["token", "id"],
-
- properties: {
- token: { type: "string" },
- id: { type: "number" },
- },
- },
- },
- },
- async (req, res) => {
- // @ts-expect-error: Fastify routes schema parsing is trustworthy, so we can "assume" invalid types
- const body: {
- token: string;
- id: number;
- } = req.body;
-
- if (!(await hasPermission(body.token, ["routes.start"]))) {
- return res.status(403).send({
- error: "Unauthorized",
- });
- }
-
- const forward = await prisma.forwardRule.findUnique({
- where: {
- id: body.id,
- },
- });
-
- if (!forward)
- return res.status(400).send({
- error: "Could not find forward entry",
- });
-
- if (!backends[forward.destProviderID])
- return res.status(400).send({
- error: "Backend not found",
- });
-
- // @ts-expect-error: Other restrictions in place make it so that it MUST be either TCP or UDP
- const protocol: "tcp" | "udp" = forward.protocol;
-
- backends[forward.destProviderID].addConnection(
- forward.sourceIP,
- forward.sourcePort,
- forward.destPort,
- protocol,
- );
-
- return {
- success: true,
- };
- },
- );
-}
diff --git a/api/src/routes/forward/stop.ts b/api/src/routes/forward/stop.ts
deleted file mode 100644
index e933b0a..0000000
--- a/api/src/routes/forward/stop.ts
+++ /dev/null
@@ -1,76 +0,0 @@
-import { hasPermissionByToken } from "../../libs/permissions.js";
-import type { RouteOptions } from "../../libs/types.js";
-
-export function route(routeOptions: RouteOptions) {
- const { fastify, prisma, tokens, backends } = routeOptions;
-
- function hasPermission(
- token: string,
- permissionList: string[],
- ): Promise {
- return hasPermissionByToken(permissionList, token, tokens, prisma);
- }
-
- /**
- * Creates a new route to use
- */
- fastify.post(
- "/api/v1/forward/stop",
- {
- schema: {
- body: {
- type: "object",
- required: ["token", "id"],
-
- properties: {
- token: { type: "string" },
- id: { type: "number" },
- },
- },
- },
- },
- async (req, res) => {
- // @ts-expect-error: Fastify routes schema parsing is trustworthy, so we can "assume" invalid types
- const body: {
- token: string;
- id: number;
- } = req.body;
-
- if (!(await hasPermission(body.token, ["routes.stop"]))) {
- return res.status(403).send({
- error: "Unauthorized",
- });
- }
-
- const forward = await prisma.forwardRule.findUnique({
- where: {
- id: body.id,
- },
- });
-
- if (!forward)
- return res.status(400).send({
- error: "Could not find forward entry",
- });
-
- if (!backends[forward.destProviderID])
- return res.status(400).send({
- error: "Backend not found",
- });
-
- // @ts-expect-error: Other restrictions in place make it so that it MUST be either TCP or UDP
- const protocol: "tcp" | "udp" = forward.protocol;
-
- backends[forward.destProviderID].removeConnection(
- forward.sourceIP,
- forward.sourcePort,
- forward.destPort,
- protocol,
- );
-
- return {
- success: true,
- };
- },
- );
-}
diff --git a/api/src/routes/getPermissions.ts b/api/src/routes/getPermissions.ts
deleted file mode 100644
index e000085..0000000
--- a/api/src/routes/getPermissions.ts
+++ /dev/null
@@ -1,51 +0,0 @@
-import { hasPermission, getUID } from "../libs/permissions.js";
-import type { RouteOptions } from "../libs/types.js";
-
-export function route(routeOptions: RouteOptions) {
- const { fastify, prisma, tokens } = routeOptions;
-
- /**
- * Logs in to a user account.
- */
- fastify.post(
- "/api/v1/getPermissions",
- {
- schema: {
- body: {
- type: "object",
- required: ["token"],
-
- properties: {
- token: { type: "string" },
- },
- },
- },
- },
- async (req, res) => {
- // @ts-expect-error: Fastify routes schema parsing is trustworthy, so we can "assume" invalid types
- const body: {
- token: string;
- } = req.body;
-
- const uid = await getUID(body.token, tokens, prisma);
-
- if (!(await hasPermission(["permissions.see"], uid, prisma))) {
- return res.status(403).send({
- error: "Unauthorized",
- });
- }
-
- const permissionsRaw = await prisma.permission.findMany({
- where: {
- userID: uid,
- },
- });
-
- return {
- success: true,
- // Get the ones that we have, and transform them into just their name
- data: permissionsRaw.filter(i => i.has).map(i => i.permission),
- };
- },
- );
-}
diff --git a/api/src/routes/user/create.ts b/api/src/routes/user/create.ts
deleted file mode 100644
index 034faa1..0000000
--- a/api/src/routes/user/create.ts
+++ /dev/null
@@ -1,125 +0,0 @@
-import { hash } from "bcrypt";
-
-import { permissionListEnabled } from "../../libs/permissions.js";
-import { generateRandomData } from "../../libs/generateRandom.js";
-
-import type { RouteOptions } from "../../libs/types.js";
-
-export function route(routeOptions: RouteOptions) {
- const { fastify, prisma, tokens, options } = routeOptions;
-
- /**
- * Creates a new user account to use, only if it is enabled.
- */
- fastify.post(
- "/api/v1/users/create",
- {
- schema: {
- body: {
- type: "object",
- required: ["name", "email", "username", "password"],
-
- properties: {
- name: { type: "string" },
- username: { type: "string" },
- email: { type: "string" },
- password: { type: "string" },
- },
- },
- },
- },
- async (req, res) => {
- // @ts-expect-error: Fastify routes schema parsing is trustworthy, so we can "assume" invalid types
- const body: {
- name: string;
- email: string;
- password: string;
- username: string;
- } = req.body;
-
- if (!options.isSignupEnabled) {
- return res.status(403).send({
- error: "Signing up is not enabled at this time.",
- });
- }
-
- const userSearch = await prisma.user.findFirst({
- where: {
- email: body.email,
- },
- });
-
- if (userSearch) {
- return res.status(400).send({
- error: "User already exists",
- });
- }
-
- const saltedPassword: string = await hash(body.password, 15);
-
- const userData = {
- name: body.name,
- email: body.email,
- password: saltedPassword,
-
- username: body.username,
-
- permissions: {
- create: [] as {
- permission: string;
- has: boolean;
- }[],
- },
- };
-
- // TODO: There's probably a faster way to pull this off, but I'm lazy
- for (const permissionKey of Object.keys(permissionListEnabled)) {
- if (
- options.isSignupAsAdminEnabled ||
- permissionKey.startsWith("routes") ||
- permissionKey == "permissions.see"
- ) {
- userData.permissions.create.push({
- permission: permissionKey,
- has: permissionListEnabled[permissionKey],
- });
- }
- }
-
- if (options.allowUnsafeGlobalTokens) {
- // @ts-expect-error: Setting this correctly is a goddamn mess, but this is safe to an extent. It won't crash at least
- userData.rootToken = generateRandomData();
- // @ts-expect-error: Read above.
- userData.isRootServiceAccount = true;
- }
-
- const userCreateResults = await prisma.user.create({
- data: userData,
- });
-
- // FIXME(?): Redundant checks
- if (options.allowUnsafeGlobalTokens) {
- return {
- success: true,
- token: userCreateResults.rootToken,
- };
- } else {
- const generatedToken = generateRandomData();
-
- tokens[userCreateResults.id] = [];
-
- tokens[userCreateResults.id].push({
- createdAt: Date.now(),
- expiresAt: Date.now() + 30 * 60_000,
-
- token: generatedToken,
- });
-
- return {
- success: true,
- token: generatedToken,
- };
- }
- },
- );
-}
diff --git a/api/src/routes/user/login.ts b/api/src/routes/user/login.ts
deleted file mode 100644
index b44d880..0000000
--- a/api/src/routes/user/login.ts
+++ /dev/null
@@ -1,76 +0,0 @@
-import { compare } from "bcrypt";
-
-import { generateRandomData } from "../../libs/generateRandom.js";
-import type { RouteOptions } from "../../libs/types.js";
-
-export function route(routeOptions: RouteOptions) {
- const { fastify, prisma, tokens } = routeOptions;
-
- /**
- * Logs in to a user account.
- */
- fastify.post(
- "/api/v1/users/login",
- {
- schema: {
- body: {
- type: "object",
- required: ["password"],
-
- properties: {
- email: { type: "string" },
- username: { type: "string" },
- password: { type: "string" },
- },
- },
- },
- },
- async (req, res) => {
- // @ts-expect-error: Fastify routes schema parsing is trustworthy, so we can "assume" invalid types
- const body: {
- email?: string;
- username?: string;
- password: string;
- } = req.body;
-
- if (!body.email && !body.username)
- return res.status(400).send({
- error: "missing both email and username. please supply at least one.",
- });
-
- const userSearch = await prisma.user.findFirst({
- where: {
- email: body.email,
- username: body.username,
- },
- });
-
- if (!userSearch)
- return res.status(403).send({
- error: "Email or password is incorrect",
- });
-
- const passwordIsValid = await compare(body.password, userSearch.password);
-
- if (!passwordIsValid)
- return res.status(403).send({
- error: "Email or password is incorrect",
- });
-
- const token = generateRandomData();
- if (!tokens[userSearch.id]) tokens[userSearch.id] = [];
-
- tokens[userSearch.id].push({
- createdAt: Date.now(),
- expiresAt: Date.now() + 30 * 60_000,
-
- token,
- });
-
- return {
- success: true,
- token,
- };
- },
- );
-}
diff --git a/api/src/routes/user/lookup.ts b/api/src/routes/user/lookup.ts
deleted file mode 100644
index 8359231..0000000
--- a/api/src/routes/user/lookup.ts
+++ /dev/null
@@ -1,72 +0,0 @@
-import { hasPermissionByToken } from "../../libs/permissions.js";
-import type { RouteOptions } from "../../libs/types.js";
-
-export function route(routeOptions: RouteOptions) {
- const { fastify, prisma, tokens } = routeOptions;
-
- function hasPermission(
- token: string,
- permissionList: string[],
- ): Promise {
- return hasPermissionByToken(permissionList, token, tokens, prisma);
- }
-
- fastify.post(
- "/api/v1/users/lookup",
- {
- schema: {
- body: {
- type: "object",
- required: ["token"],
-
- properties: {
- token: { type: "string" },
- id: { type: "number" },
- name: { type: "string" },
- email: { type: "string" },
- username: { type: "string" },
- isServiceAccount: { type: "boolean" },
- },
- },
- },
- },
- async (req, res) => {
- // @ts-expect-error: Fastify routes schema parsing is trustworthy, so we can "assume" invalid types
- const body: {
- token: string;
- id?: number;
- name?: string;
- email?: string;
- username?: string;
- isServiceAccount?: boolean;
- } = req.body;
-
- if (!(await hasPermission(body.token, ["users.lookup"]))) {
- return res.status(403).send({
- error: "Unauthorized",
- });
- }
-
- const users = await prisma.user.findMany({
- where: {
- id: body.id,
- name: body.name,
- email: body.email,
- username: body.username,
- isRootServiceAccount: body.isServiceAccount,
- },
- });
-
- return {
- success: true,
- data: users.map(i => ({
- id: i.id,
- name: i.name,
- email: i.email,
- isServiceAccount: i.isRootServiceAccount,
- username: i.username,
- })),
- };
- },
- );
-}
diff --git a/api/src/routes/user/remove.ts b/api/src/routes/user/remove.ts
deleted file mode 100644
index f7159d9..0000000
--- a/api/src/routes/user/remove.ts
+++ /dev/null
@@ -1,62 +0,0 @@
-import { hasPermissionByToken } from "../../libs/permissions.js";
-import type { RouteOptions } from "../../libs/types.js";
-
-export function route(routeOptions: RouteOptions) {
- const { fastify, prisma, tokens } = routeOptions;
-
- function hasPermission(
- token: string,
- permissionList: string[],
- ): Promise {
- return hasPermissionByToken(permissionList, token, tokens, prisma);
- }
-
- /**
- * Creates a new backend to use
- */
- fastify.post(
- "/api/v1/users/remove",
- {
- schema: {
- body: {
- type: "object",
- required: ["token", "uid"],
-
- properties: {
- token: { type: "string" },
- uid: { type: "number" },
- },
- },
- },
- },
- async (req, res) => {
- // @ts-expect-error: Fastify routes schema parsing is trustworthy, so we can "assume" invalid types
- const body: {
- token: string;
- uid: number;
- } = req.body;
-
- if (!(await hasPermission(body.token, ["users.remove"]))) {
- return res.status(403).send({
- error: "Unauthorized",
- });
- }
-
- await prisma.permission.deleteMany({
- where: {
- userID: body.uid,
- },
- });
-
- await prisma.user.delete({
- where: {
- id: body.uid,
- },
- });
-
- return {
- success: true,
- };
- },
- );
-}
diff --git a/api/srcpatch.sh b/api/srcpatch.sh
deleted file mode 100755
index 319e30b..0000000
--- a/api/srcpatch.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-# !-- DO NOT USE THIS FOR DEVELOPMENT --!
-# This is only to source patch files in production deployments, if prisma isn't configured already.
-printf "//@ts-nocheck\n$(cat src/routes/backends/lookup.ts)" > src/routes/backends/lookup.ts
-printf "//@ts-nocheck\n$(cat src/routes/forward/lookup.ts)" > src/routes/forward/lookup.ts
-printf "//@ts-nocheck\n$(cat src/routes/user/lookup.ts)" > src/routes/user/lookup.ts
-printf "//@ts-nocheck\n$(cat src/routes/getPermissions.ts)" > src/routes/getPermissions.ts
diff --git a/api/tsconfig.json b/api/tsconfig.json
deleted file mode 100644
index d584b3b..0000000
--- a/api/tsconfig.json
+++ /dev/null
@@ -1,22 +0,0 @@
-{
- "compilerOptions": {
- "target": "es2020",
- "module": "es2022",
- "moduleResolution": "node",
-
- "outDir": "./out",
- "rootDir": "./src",
-
- "strict": true,
- "esModuleInterop": true,
- "sourceMap": true,
-
- "declaration": true,
- "declarationMap": true,
-
- "strictPropertyInitialization": false,
- },
-
- "include": ["src/**/*.ts"],
- "exclude": ["node_modules"]
-}
\ No newline at end of file
diff --git a/apiclient/apiclient.go b/apiclient/apiclient.go
new file mode 100644
index 0000000..8f23a80
--- /dev/null
+++ b/apiclient/apiclient.go
@@ -0,0 +1,21 @@
+package apiclient
+
+import "git.terah.dev/imterah/hermes/apiclient/users"
+
+type HermesAPIClient struct {
+ URL string
+}
+
+/// Users
+
+func (api *HermesAPIClient) UserGetRefreshToken(username *string, email *string, password string) (string, error) {
+ return users.GetRefreshToken(api.URL, username, email, password)
+}
+
+func (api *HermesAPIClient) UserGetJWTFromToken(refreshToken string) (string, error) {
+ return users.GetJWTFromToken(api.URL, refreshToken)
+}
+
+func (api *HermesAPIClient) UserCreate(fullName, username, email, password string, isBot bool) (string, error) {
+ return users.CreateUser(api.URL, fullName, username, email, password, isBot)
+}
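
The Go API client above currently wraps only the user endpoints. A minimal usage sketch, assuming a reachable Hermes instance (the URL and credentials below are placeholders, not part of this change):

```go
package main

import (
	"fmt"
	"log"

	"git.terah.dev/imterah/hermes/apiclient"
)

func main() {
	// Placeholder instance URL; point this at a real Hermes deployment.
	api := &apiclient.HermesAPIClient{URL: "https://hermes.example.com"}

	// Placeholder credentials. Either a username or an email can be supplied.
	username := "admin"
	refreshToken, err := api.UserGetRefreshToken(&username, nil, "hunter2")
	if err != nil {
		log.Fatalf("failed to log in: %s", err.Error())
	}

	// Exchange the long-lived refresh token for a short-lived JWT.
	jwt, err := api.UserGetJWTFromToken(refreshToken)
	if err != nil {
		log.Fatalf("failed to refresh: %s", err.Error())
	}

	fmt.Println("JWT:", jwt)
}
```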
diff --git a/apiclient/backendstructs/struct.go b/apiclient/backendstructs/struct.go
new file mode 100644
index 0000000..be4b757
--- /dev/null
+++ b/apiclient/backendstructs/struct.go
@@ -0,0 +1,102 @@
+package backendstructs
+
+type BackendCreationRequest struct {
+ Token string `validate:"required"`
+ Name string `validate:"required"`
+ Description *string `json:"description"`
+ Backend string `validate:"required"`
+ BackendParameters interface{} `json:"connectionDetails" validate:"required"`
+}
+
+type BackendLookupRequest struct {
+ Token string `validate:"required"`
+ BackendID *uint `json:"id"`
+ Name *string `json:"name"`
+ Description *string `json:"description"`
+ Backend *string `json:"backend"`
+}
+
+type BackendRemovalRequest struct {
+ Token string `validate:"required"`
+ BackendID uint `json:"id" validate:"required"`
+}
+
+type ConnectionsRequest struct {
+ Token string `validate:"required" json:"token"`
+ Id uint `validate:"required" json:"id"`
+}
+
+type ProxyCreationRequest struct {
+ Token string `validate:"required" json:"token"`
+ Name string `validate:"required" json:"name"`
+ Description *string `json:"description"`
+ Protocol string `validate:"required" json:"protocol"`
+ SourceIP string `validate:"required" json:"sourceIP"`
+ SourcePort uint16 `validate:"required" json:"sourcePort"`
+ DestinationPort uint16 `validate:"required" json:"destinationPort"`
+ ProviderID uint `validate:"required" json:"providerID"`
+ AutoStart *bool `json:"autoStart"`
+}
+
+type ProxyLookupRequest struct {
+ Token string `validate:"required" json:"token"`
+ Id *uint `json:"id"`
+ Name *string `json:"name"`
+ Description *string `json:"description"`
+ Protocol *string `json:"protocol"`
+ SourceIP *string `json:"sourceIP"`
+ SourcePort *uint16 `json:"sourcePort"`
+ DestinationPort *uint16 `json:"destPort"`
+ ProviderID *uint `json:"providerID"`
+ AutoStart *bool `json:"autoStart"`
+}
+
+type ProxyRemovalRequest struct {
+ Token string `validate:"required" json:"token"`
+ ID uint `validate:"required" json:"id"`
+}
+
+type ProxyStartRequest struct {
+ Token string `validate:"required" json:"token"`
+ ID uint `validate:"required" json:"id"`
+}
+
+type ProxyStopRequest struct {
+ Token string `validate:"required" json:"token"`
+ ID uint `validate:"required" json:"id"`
+}
+
+type UserCreationRequest struct {
+ Name string `json:"name" validate:"required"`
+ Email string `json:"email" validate:"required"`
+ Password string `json:"password" validate:"required"`
+ Username string `json:"username" validate:"required"`
+
+ ExistingUserToken string `json:"token"`
+ IsBot bool `json:"isBot"`
+}
+
+type UserLoginRequest struct {
+ Username *string `json:"username"`
+ Email *string `json:"email"`
+
+ Password string `json:"password" validate:"required"`
+}
+
+type UserLookupRequest struct {
+ Token string `validate:"required"`
+ UID *uint `json:"id"`
+ Name *string `json:"name"`
+ Email *string `json:"email"`
+ Username *string `json:"username"`
+ IsBot *bool `json:"isServiceAccount"`
+}
+
+type UserRefreshRequest struct {
+ Token string `json:"token" validate:"required"`
+}
+
+type UserRemovalRequest struct {
+ Token string `json:"token" validate:"required"`
+ UID *uint `json:"uid"`
+}
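
A small sketch of what one of these request structs serializes to on the wire (the values are illustrative only); pointer fields that are left unset are emitted as JSON null:

```go
package main

import (
	"encoding/json"
	"fmt"

	"git.terah.dev/imterah/hermes/apiclient/backendstructs"
)

func main() {
	// Illustrative value; Username is left nil and serializes as null.
	email := "user@example.com"

	req := backendstructs.UserLoginRequest{
		Email:    &email,
		Password: "hunter2", // illustrative value
	}

	body, err := json.Marshal(&req)
	if err != nil {
		panic(err)
	}

	// Prints: {"username":null,"email":"user@example.com","password":"hunter2"}
	fmt.Println(string(body))
}
```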
diff --git a/apiclient/users/auth.go b/apiclient/users/auth.go
new file mode 100644
index 0000000..91e7f67
--- /dev/null
+++ b/apiclient/users/auth.go
@@ -0,0 +1,99 @@
+package users
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "io"
+ "net/http"
+
+ "git.terah.dev/imterah/hermes/apiclient/backendstructs"
+)
+
+type refreshTokenResponse struct {
+ Success bool `json:"success"`
+ RefreshToken string `json:"refreshToken"`
+}
+
+type jwtTokenResponse struct {
+ Success bool `json:"success"`
+ JWT string `json:"token"`
+}
+
+func GetRefreshToken(url string, username, email *string, password string) (string, error) {
+ body, err := json.Marshal(&backendstructs.UserLoginRequest{
+ Username: username,
+ Email: email,
+ Password: password,
+ })
+
+ if err != nil {
+ return "", err
+ }
+
+ res, err := http.Post(fmt.Sprintf("%s/api/v1/users/login", url), "application/json", bytes.NewBuffer(body))
+
+ if err != nil {
+ return "", err
+ }
+
+ bodyContents, err := io.ReadAll(res.Body)
+
+ if err != nil {
+ return "", fmt.Errorf("failed to read response body: %s", err.Error())
+ }
+
+ response := &refreshTokenResponse{}
+
+ if err := json.Unmarshal(bodyContents, response); err != nil {
+ return "", err
+ }
+
+ if !response.Success {
+ return "", fmt.Errorf("failed to get refresh token")
+ }
+
+ if response.RefreshToken == "" {
+ return "", fmt.Errorf("refresh token is empty")
+ }
+
+ return response.RefreshToken, nil
+}
+
+func GetJWTFromToken(url, refreshToken string) (string, error) {
+ body, err := json.Marshal(&backendstructs.UserRefreshRequest{
+ Token: refreshToken,
+ })
+
+ if err != nil {
+ return "", err
+ }
+
+ res, err := http.Post(fmt.Sprintf("%s/api/v1/users/refresh", url), "application/json", bytes.NewBuffer(body))
+
+ if err != nil {
+ return "", err
+ }
+
+ bodyContents, err := io.ReadAll(res.Body)
+
+ if err != nil {
+ return "", fmt.Errorf("failed to read response body: %s", err.Error())
+ }
+
+ response := &jwtTokenResponse{}
+
+ if err := json.Unmarshal(bodyContents, response); err != nil {
+ return "", err
+ }
+
+ if !response.Success {
+ return "", fmt.Errorf("failed to get JWT token")
+ }
+
+ if response.JWT == "" {
+ return "", fmt.Errorf("JWT token is empty")
+ }
+
+ return response.JWT, nil
+}
diff --git a/apiclient/users/create.go b/apiclient/users/create.go
new file mode 100644
index 0000000..6e03c58
--- /dev/null
+++ b/apiclient/users/create.go
@@ -0,0 +1,63 @@
+package users
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "io"
+ "net/http"
+
+ "git.terah.dev/imterah/hermes/apiclient/backendstructs"
+)
+
+type createUserResponse struct {
+ Error string `json:"error"`
+ Success bool `json:"success"`
+ RefreshToken string `json:"refreshToken"`
+}
+
+func CreateUser(url, fullName, username, email, password string, isBot bool) (string, error) {
+ body, err := json.Marshal(&backendstructs.UserCreationRequest{
+ Username: username,
+ Name: fullName,
+ Email: email,
+ Password: password,
+ IsBot: isBot,
+ })
+
+ if err != nil {
+ return "", err
+ }
+
+ res, err := http.Post(fmt.Sprintf("%s/api/v1/users/create", url), "application/json", bytes.NewBuffer(body))
+
+ if err != nil {
+ return "", err
+ }
+
+ bodyContents, err := io.ReadAll(res.Body)
+
+ if err != nil {
+ return "", fmt.Errorf("failed to read response body: %s", err.Error())
+ }
+
+ response := &createUserResponse{}
+
+ if err := json.Unmarshal(bodyContents, response); err != nil {
+ return "", err
+ }
+
+ if response.Error != "" {
+ return "", fmt.Errorf("error from server: %s", response.Error)
+ }
+
+ if !response.Success {
+ return "", fmt.Errorf("failed to get refresh token")
+ }
+
+ if response.RefreshToken == "" {
+ return "", fmt.Errorf("refresh token is empty")
+ }
+
+ return response.RefreshToken, nil
+}
diff --git a/backend/api/backendruntime/core.go b/backend/api/backendruntime/core.go
new file mode 100644
index 0000000..eac5934
--- /dev/null
+++ b/backend/api/backendruntime/core.go
@@ -0,0 +1,15 @@
+package backendruntime
+
+import "os"
+
+var (
+ AvailableBackends []*Backend
+ RunningBackends map[uint]*Runtime
+ TempDir string
+ shouldLog bool
+)
+
+func init() {
+ RunningBackends = make(map[uint]*Runtime)
+ shouldLog = os.Getenv("HERMES_DEVELOPMENT_MODE") != "" || os.Getenv("HERMES_BACKEND_LOGGING_ENABLED") != "" || os.Getenv("HERMES_LOG_LEVEL") == "debug"
+}
diff --git a/backend/api/backendruntime/runtime.go b/backend/api/backendruntime/runtime.go
new file mode 100644
index 0000000..8d5ca7a
--- /dev/null
+++ b/backend/api/backendruntime/runtime.go
@@ -0,0 +1,396 @@
+package backendruntime
+
+import (
+ "context"
+ "fmt"
+ "net"
+ "os"
+ "os/exec"
+ "strings"
+ "sync"
+ "time"
+
+ "git.terah.dev/imterah/hermes/backend/backendlauncher"
+ "git.terah.dev/imterah/hermes/backend/commonbackend"
+ "github.com/charmbracelet/log"
+)
+
+// TODO TODO TODO(imterah):
+// This code is a mess. This NEEDS to be rearchitected and refactored to work better. Or at the very least, this code needs to be documented heavily.
+
+func handleCommand(command interface{}, sock net.Conn, rtcChan chan interface{}) error {
+ bytes, err := commonbackend.Marshal(command)
+
+ if err != nil {
+ log.Warnf("Failed to marshal message: %s", err.Error())
+ rtcChan <- fmt.Errorf("failed to marshal message: %s", err.Error())
+
+ return fmt.Errorf("failed to marshal message: %s", err.Error())
+ }
+
+ if _, err := sock.Write(bytes); err != nil {
+ log.Warnf("Failed to write message: %s", err.Error())
+ rtcChan <- fmt.Errorf("failed to write message: %s", err.Error())
+
+ return fmt.Errorf("failed to write message: %s", err.Error())
+ }
+
+ data, err := commonbackend.Unmarshal(sock)
+
+ if err != nil {
+ log.Warnf("Failed to unmarshal message: %s", err.Error())
+ rtcChan <- fmt.Errorf("failed to unmarshal message: %s", err.Error())
+
+ return fmt.Errorf("failed to unmarshal message: %s", err.Error())
+ }
+
+ rtcChan <- data
+
+ return nil
+}
+
+func (runtime *Runtime) goRoutineHandler() error {
+ log.Debug("Starting up backend runtime")
+ log.Debug("Running socket acquisition")
+
+ logLevel := os.Getenv("HERMES_LOG_LEVEL")
+
+ sockPath, sockListener, err := backendlauncher.GetUnixSocket(TempDir)
+
+ if err != nil {
+ return err
+ }
+
+ runtime.currentListener = sockListener
+
+ log.Debugf("Acquired unix socket at: %s", sockPath)
+
+ go func() {
+ log.Debug("Created new Goroutine for socket connection handling")
+
+ for {
+ log.Debug("Waiting for Unix socket connections...")
+ sock, err := runtime.currentListener.Accept()
+
+ if err != nil {
+ log.Warnf("Failed to accept Unix socket connection in a backend runtime instance: %s", err.Error())
+ return
+ }
+
+ log.Debug("Recieved connection. Attempting to figure out backend state...")
+
+ timeoutChannel := time.After(500 * time.Millisecond)
+
+ select {
+ case <-timeoutChannel:
+ log.Debug("Timeout reached. Assuming backend is running.")
+ case hasRestarted, ok := <-runtime.processRestartNotification:
+ if !ok {
+ log.Warnf("Failed to get the process restart notification state!")
+ }
+
+ if hasRestarted {
+ if runtime.OnCrashCallback == nil {
+ log.Warn("The backend has restarted for some reason, but we could not run the on crash callback as the callback is not set!")
+ } else {
+ log.Debug("We have restarted. Running the restart callback...")
+ runtime.OnCrashCallback(sock)
+ }
+
+ log.Debug("Clearing caches...")
+ runtime.cleanUpPendingCommandProcessingJobs()
+ runtime.messageBufferLock = sync.Mutex{}
+ } else {
+ log.Debug("We have not restarted.")
+ }
+ }
+
+ go func() {
+ log.Debug("Setting up Hermes keepalive Goroutine")
+ hasFailedBackendRunningCheckAlready := false
+
+ for {
+ if !runtime.isRuntimeRunning {
+ return
+ }
+
+ // Asking for the backend status seems to be a "good-enough" keepalive system. Plus, it provides useful telemetry.
+ // There isn't a ping command in the backend API, so we have to make do with what we have.
+ //
+ // To be safe here, we have to use the proper (yet annoying) facilities to prevent cross-talk, since we're in
+ // a goroutine, and can't talk directly. This actually has benefits, as the OuterLoop should exit on its own, if we
+ // encounter a critical error.
+ statusResponse, err := runtime.ProcessCommand(&commonbackend.BackendStatusRequest{})
+
+ if err != nil {
+ log.Warnf("Failed to get response for backend (in backend runtime keep alive): %s", err.Error())
+ log.Debugf("Attempting to close socket...")
+ err := sock.Close()
+
+ if err != nil {
+ log.Debugf("Failed to close socket: %s", err.Error())
+ }
+
+ continue
+ }
+
+ switch responseMessage := statusResponse.(type) {
+ case *commonbackend.BackendStatusResponse:
+ if !responseMessage.IsRunning {
+ if hasFailedBackendRunningCheckAlready {
+ if responseMessage.Message != "" {
+ log.Warnf("Backend (in backend keepalive) is up but not active: %s", responseMessage.Message)
+ } else {
+ log.Warnf("Backend (in backend keepalive) is up but not active")
+ }
+ }
+
+ hasFailedBackendRunningCheckAlready = true
+ }
+ default:
+ log.Errorf("Got illegal response type for backend (in backend keepalive): %T", responseMessage)
+ log.Debugf("Attempting to close socket...")
+ err := sock.Close()
+
+ if err != nil {
+ log.Debugf("Failed to close socket: %s", err.Error())
+ }
+ }
+
+ time.Sleep(5 * time.Second)
+ }
+ }()
+
+ OuterLoop:
+ for {
+ _ = <-runtime.startProcessingNotification
+ runtime.isRuntimeCurrentlyProcessing = true
+
+ for chanIndex, messageData := range runtime.messageBuffer {
+ if messageData == nil {
+ continue
+ }
+
+ err := handleCommand(messageData.Message, sock, messageData.Channel)
+
+ if err != nil {
+ log.Warnf("failed to handle command in backend runtime instance: %s", err.Error())
+
+ if strings.HasPrefix(err.Error(), "failed to write message") {
+ break OuterLoop
+ }
+ }
+
+ runtime.messageBuffer[chanIndex] = nil
+ }
+
+ runtime.isRuntimeCurrentlyProcessing = false
+ }
+
+ sock.Close()
+ }
+ }()
+
+ runtime.processRestartNotification <- false
+
+ for {
+ log.Debug("Starting process...")
+
+ ctx := context.Background()
+
+ runtime.currentProcess = exec.CommandContext(ctx, runtime.ProcessPath)
+ runtime.currentProcess.Env = append(runtime.currentProcess.Env, fmt.Sprintf("HERMES_API_SOCK=%s", sockPath), fmt.Sprintf("HERMES_LOG_LEVEL=%s", logLevel))
+
+ runtime.currentProcess.Stdout = runtime.logger
+ runtime.currentProcess.Stderr = runtime.logger
+
+ err := runtime.currentProcess.Run()
+
+ if err != nil {
+ if err, ok := err.(*exec.ExitError); ok {
+ if err.ExitCode() != -1 && err.ExitCode() != 0 {
+ log.Warnf("A backend process died with exit code '%d' and with error '%s'", err.ExitCode(), err.Error())
+ }
+ } else {
+ log.Warnf("A backend process died with error: %s", err.Error())
+ }
+ } else {
+ log.Debug("Process exited gracefully.")
+ }
+
+ if !runtime.isRuntimeRunning {
+ return nil
+ }
+
+ log.Debug("Sleeping 5 seconds, and then restarting process")
+ time.Sleep(5 * time.Second)
+
+ // NOTE(imterah): This could cause hangs if we're not careful. If the process dies so much that we can't keep up, it should deserve to be hung, really.
+ // There's probably a better way to do this, but this works.
+ //
+ // If this does turn out to be a problem, just increase the Goroutine buffer size.
+ runtime.processRestartNotification <- true
+
+ log.Debug("Sent off notification.")
+ }
+}
+
+func (runtime *Runtime) Start() error {
+ if runtime.isRuntimeRunning {
+ return fmt.Errorf("runtime already running")
+ }
+
+ runtime.messageBuffer = make([]*messageForBuf, 10)
+ runtime.messageBufferLock = sync.Mutex{}
+
+ runtime.startProcessingNotification = make(chan bool)
+ runtime.processRestartNotification = make(chan bool, 1)
+
+ runtime.logger = &writeLogger{
+ Runtime: runtime,
+ }
+
+ go func() {
+ err := runtime.goRoutineHandler()
+
+ if err != nil {
+ log.Errorf("Failed during execution of runtime: %s", err.Error())
+ }
+ }()
+
+ runtime.isRuntimeRunning = true
+ return nil
+}
+
+func (runtime *Runtime) Stop() error {
+ if !runtime.isRuntimeRunning {
+ return fmt.Errorf("runtime not running")
+ }
+
+ runtime.isRuntimeRunning = false
+
+ if runtime.currentProcess != nil && runtime.currentProcess.Cancel != nil {
+ err := runtime.currentProcess.Cancel()
+
+ if err != nil {
+ return fmt.Errorf("failed to stop process: %s", err.Error())
+ }
+ } else {
+ log.Warn("Failed to kill process (Stop recieved), currentProcess or currentProcess.Cancel is nil")
+ }
+
+ if runtime.currentListener != nil {
+ err := runtime.currentListener.Close()
+
+ if err != nil {
+ return fmt.Errorf("failed to stop listener: %s", err.Error())
+ }
+ } else {
+ log.Warn("Failed to kill listener, as the listener is nil")
+ }
+
+ return nil
+}
+
+func (runtime *Runtime) ProcessCommand(command interface{}) (interface{}, error) {
+ schedulingAttempts := 0
+ var commandChannel chan interface{}
+
+SchedulingLoop:
+ for {
+ if !runtime.isRuntimeRunning {
+ time.Sleep(10 * time.Millisecond)
+ }
+
+ if schedulingAttempts > 50 {
+ return nil, fmt.Errorf("failed to schedule message transmission after 50 tries (REPORT THIS ISSUE)")
+ }
+
+ runtime.messageBufferLock.Lock()
+
+ // Attempt to find spot in buffer to schedule message transmission
+ for i, message := range runtime.messageBuffer {
+ if message != nil {
+ continue
+ }
+
+ commandChannel = make(chan interface{})
+
+ runtime.messageBuffer[i] = &messageForBuf{
+ Channel: commandChannel,
+ Message: command,
+ }
+
+ runtime.messageBufferLock.Unlock()
+ break SchedulingLoop
+ }
+
+ runtime.messageBufferLock.Unlock()
+ time.Sleep(100 * time.Millisecond)
+
+ schedulingAttempts++
+ }
+
+ if !runtime.isRuntimeCurrentlyProcessing {
+ runtime.startProcessingNotification <- true
+ }
+
+ // Fetch response and close Channel
+ response, ok := <-commandChannel
+
+ if !ok {
+ return nil, fmt.Errorf("failed to read from command channel: recieved signal that is not OK")
+ }
+
+ close(commandChannel)
+
+ err, ok := response.(error)
+
+ if ok {
+ return nil, err
+ }
+
+ return response, nil
+}
+
+func (runtime *Runtime) cleanUpPendingCommandProcessingJobs() {
+ for messageIndex, message := range runtime.messageBuffer {
+ if message == nil {
+ continue
+ }
+
+ timeoutChannel := time.After(100 * time.Millisecond)
+
+ select {
+ case <-timeoutChannel:
+ log.Warn("Message channel is likely running (timed out reading from it without an error)")
+ close(message.Channel)
+ case _, ok := <-message.Channel:
+ if ok {
+ log.Warn("Message channel is running, but should be stopped (since message is NOT nil!)")
+ close(message.Channel)
+ }
+ }
+
+ runtime.messageBuffer[messageIndex] = nil
+ }
+}
+
+func NewBackend(path string) *Runtime {
+ return &Runtime{
+ ProcessPath: path,
+ }
+}
+
+func Init(backends []*Backend) error {
+ var err error
+ TempDir, err = os.MkdirTemp("", "hermes-sockets-")
+
+ if err != nil {
+ return err
+ }
+
+ AvailableBackends = backends
+
+ return nil
+}
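
A rough sketch of how this runtime appears to be driven, using only the functions and commonbackend messages introduced in this patch (the backend binary path and backend name are placeholders):

```go
package main

import (
	"github.com/charmbracelet/log"

	"git.terah.dev/imterah/hermes/backend/api/backendruntime"
	"git.terah.dev/imterah/hermes/backend/commonbackend"
)

func main() {
	// Init creates the temporary directory used for the Unix sockets.
	if err := backendruntime.Init([]*backendruntime.Backend{
		{Name: "dummy", Path: "/opt/hermes/backends/dummy"}, // placeholder backend binary
	}); err != nil {
		log.Fatalf("failed to initialize backend runtime: %s", err.Error())
	}

	runtime := backendruntime.NewBackend("/opt/hermes/backends/dummy") // placeholder path

	if err := runtime.Start(); err != nil {
		log.Fatalf("failed to start runtime: %s", err.Error())
	}

	// Commands are scheduled through the message buffer and answered over the socket.
	response, err := runtime.ProcessCommand(&commonbackend.BackendStatusRequest{})
	if err != nil {
		log.Fatalf("failed to query backend status: %s", err.Error())
	}

	if status, ok := response.(*commonbackend.BackendStatusResponse); ok {
		log.Infof("backend running: %t", status.IsRunning)
	}

	if err := runtime.Stop(); err != nil {
		log.Warnf("failed to stop runtime: %s", err.Error())
	}
}
```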
diff --git a/backend/api/backendruntime/struct.go b/backend/api/backendruntime/struct.go
new file mode 100644
index 0000000..cd4b3b8
--- /dev/null
+++ b/backend/api/backendruntime/struct.go
@@ -0,0 +1,61 @@
+package backendruntime
+
+import (
+ "net"
+ "os/exec"
+ "strings"
+ "sync"
+
+ "github.com/charmbracelet/log"
+)
+
+type Backend struct {
+ Name string `validate:"required"`
+ Path string `validate:"required"`
+}
+
+type messageForBuf struct {
+ Channel chan interface{}
+ // TODO(imterah): could this be refactored to just be a []byte instead? Look into this
+ Message interface{}
+}
+
+type Runtime struct {
+ isRuntimeRunning bool
+ isRuntimeCurrentlyProcessing bool
+ startProcessingNotification chan bool
+ logger *writeLogger
+ currentProcess *exec.Cmd
+ currentListener net.Listener
+ processRestartNotification chan bool
+
+ messageBufferLock sync.Mutex
+ messageBuffer []*messageForBuf
+
+ ProcessPath string
+ Logs []string
+
+ OnCrashCallback func(sock net.Conn)
+}
+
+type writeLogger struct {
+ Runtime *Runtime
+}
+
+func (writer writeLogger) Write(p []byte) (n int, err error) {
+ logSplit := strings.Split(string(p), "\n")
+
+ if shouldLog {
+ for _, logLine := range logSplit {
+ if logLine == "" {
+ continue
+ }
+
+ log.Debug("spawned backend logs: " + logLine)
+ }
+ }
+
+ writer.Runtime.Logs = append(writer.Runtime.Logs, logSplit...)
+
+ return len(p), err
+}
diff --git a/backend/api/controllers/v1/backends/create.go b/backend/api/controllers/v1/backends/create.go
new file mode 100644
index 0000000..314dc3e
--- /dev/null
+++ b/backend/api/controllers/v1/backends/create.go
@@ -0,0 +1,270 @@
+package backends
+
+import (
+ "encoding/base64"
+ "encoding/json"
+ "fmt"
+ "net/http"
+
+ "git.terah.dev/imterah/hermes/backend/api/backendruntime"
+ "git.terah.dev/imterah/hermes/backend/api/db"
+ "git.terah.dev/imterah/hermes/backend/api/permissions"
+ "git.terah.dev/imterah/hermes/backend/api/state"
+ "git.terah.dev/imterah/hermes/backend/commonbackend"
+ "github.com/charmbracelet/log"
+ "github.com/gin-gonic/gin"
+)
+
+type BackendCreationRequest struct {
+ Token string `validate:"required"`
+ Name string `validate:"required"`
+ Description *string
+ Backend string `validate:"required"`
+ BackendParameters interface{} `json:"connectionDetails" validate:"required"`
+}
+
+func SetupCreateBackend(state *state.State) {
+ state.Engine.POST("/api/v1/backends/create", func(c *gin.Context) {
+ var req BackendCreationRequest
+
+ if err := c.BindJSON(&req); err != nil {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": fmt.Sprintf("Failed to parse body: %s", err.Error()),
+ })
+
+ return
+ }
+
+ if err := state.Validator.Struct(&req); err != nil {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": fmt.Sprintf("Failed to validate body: %s", err.Error()),
+ })
+
+ return
+ }
+
+ user, err := state.JWT.GetUserFromJWT(req.Token)
+
+ if err != nil {
+ if err.Error() == "token is expired" || err.Error() == "user does not exist" {
+ c.JSON(http.StatusForbidden, gin.H{
+ "error": err.Error(),
+ })
+
+ return
+ } else {
+ log.Warnf("Failed to get user from the provided JWT token: %s", err.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to parse token",
+ })
+
+ return
+ }
+ }
+
+ if !permissions.UserHasPermission(user, "backends.add") {
+ c.JSON(http.StatusForbidden, gin.H{
+ "error": "Missing permissions",
+ })
+
+ return
+ }
+
+ var backendParameters []byte
+
+ switch parameters := req.BackendParameters.(type) {
+ case string:
+ backendParameters = []byte(parameters)
+ case map[string]interface{}:
+ backendParameters, err = json.Marshal(parameters)
+
+ if err != nil {
+ log.Warnf("Failed to marshal JSON recieved as BackendParameters: %s", err.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to prepare parameters",
+ })
+
+ return
+ }
+ default:
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": fmt.Sprintf("Invalid type for connectionDetails (recieved %T)", parameters),
+ })
+
+ return
+ }
+
+ var backendRuntimeFilePath string
+
+ for _, runtime := range backendruntime.AvailableBackends {
+ if runtime.Name == req.Backend {
+ backendRuntimeFilePath = runtime.Path
+ }
+ }
+
+ if backendRuntimeFilePath == "" {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": "Unsupported backend recieved",
+ })
+
+ return
+ }
+
+ backend := backendruntime.NewBackend(backendRuntimeFilePath)
+ err = backend.Start()
+
+ if err != nil {
+ log.Warnf("Failed to start backend: %s", err.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to start backend",
+ })
+
+ return
+ }
+
+ backendParamCheckResponse, err := backend.ProcessCommand(&commonbackend.CheckServerParameters{
+ Arguments: backendParameters,
+ })
+
+ if err != nil {
+ log.Warnf("Failed to get response for backend: %s", err.Error())
+
+ err = backend.Stop()
+
+ if err != nil {
+ log.Warnf("Failed to stop backend: %s", err.Error())
+ }
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to get status response from backend",
+ })
+
+ return
+ }
+
+ switch responseMessage := backendParamCheckResponse.(type) {
+ case *commonbackend.CheckParametersResponse:
+ if responseMessage.InResponseTo != "checkServerParameters" {
+ log.Errorf("Got illegal response to CheckServerParameters: %s", responseMessage.InResponseTo)
+
+ err = backend.Stop()
+
+ if err != nil {
+ log.Warnf("Failed to stop backend: %s", err.Error())
+ }
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to get status response from backend",
+ })
+
+ return
+ }
+
+ if !responseMessage.IsValid {
+ err = backend.Stop()
+
+ if err != nil {
+ log.Warnf("Failed to stop backend: %s", err.Error())
+ }
+
+ var errorMessage string
+
+ if responseMessage.Message == "" {
+ errorMessage = "Unkown error while trying to parse connectionDetails"
+ } else {
+ errorMessage = fmt.Sprintf("Invalid backend parameters: %s", responseMessage.Message)
+ }
+
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": errorMessage,
+ })
+
+ return
+ }
+ default:
+ log.Warnf("Got illegal response type for backend: %T", responseMessage)
+ }
+
+ log.Info("Passed backend checks successfully")
+
+ backendInDatabase := &db.Backend{
+ UserID: user.ID,
+ Name: req.Name,
+ Description: req.Description,
+ Backend: req.Backend,
+ BackendParameters: base64.StdEncoding.EncodeToString(backendParameters),
+ }
+
+ if result := state.DB.DB.Create(&backendInDatabase); result.Error != nil {
+ log.Warnf("Failed to create backend: %s", result.Error.Error())
+
+ err = backend.Stop()
+
+ if err != nil {
+ log.Warnf("Failed to stop backend: %s", err.Error())
+ }
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to add backend into database",
+ })
+
+ return
+ }
+
+ backendStartResponse, err := backend.ProcessCommand(&commonbackend.Start{
+ Arguments: backendParameters,
+ })
+
+ if err != nil {
+ log.Warnf("Failed to get response for backend: %s", err.Error())
+
+ err = backend.Stop()
+
+ if err != nil {
+ log.Warnf("Failed to stop backend: %s", err.Error())
+ }
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to get status response from backend",
+ })
+
+ return
+ }
+
+ switch responseMessage := backendStartResponse.(type) {
+ case *commonbackend.BackendStatusResponse:
+ if !responseMessage.IsRunning {
+ err = backend.Stop()
+
+ if err != nil {
+ log.Warnf("Failed to stop backend: %s", err.Error())
+ }
+
+ var errorMessage string
+
+ if responseMessage.Message == "" {
+ errorMessage = "Unknown error while trying to start the backend"
+ } else {
+ errorMessage = fmt.Sprintf("Failed to start backend: %s", responseMessage.Message)
+ }
+
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": errorMessage,
+ })
+
+ return
+ }
+ default:
+ log.Warnf("Got illegal response type for backend: %T", responseMessage)
+ }
+
+ backendruntime.RunningBackends[backendInDatabase.ID] = backend
+
+ c.JSON(http.StatusOK, gin.H{
+ "success": true,
+ })
+ })
+}
diff --git a/backend/api/controllers/v1/backends/lookup.go b/backend/api/controllers/v1/backends/lookup.go
new file mode 100644
index 0000000..6cbb386
--- /dev/null
+++ b/backend/api/controllers/v1/backends/lookup.go
@@ -0,0 +1,164 @@
+package backends
+
+import (
+ "encoding/base64"
+ "fmt"
+ "net/http"
+ "strings"
+
+ "git.terah.dev/imterah/hermes/backend/api/backendruntime"
+ "git.terah.dev/imterah/hermes/backend/api/db"
+ "git.terah.dev/imterah/hermes/backend/api/permissions"
+ "git.terah.dev/imterah/hermes/backend/api/state"
+ "github.com/charmbracelet/log"
+ "github.com/gin-gonic/gin"
+)
+
+type BackendLookupRequest struct {
+ Token string `validate:"required"`
+ BackendID *uint `json:"id"`
+ Name *string
+ Description *string
+ Backend *string
+}
+
+type SanitizedBackend struct {
+ Name string `json:"name"`
+ BackendID uint `json:"id"`
+ OwnerID uint `json:"ownerID"`
+ Description *string `json:"description,omitempty"`
+ Backend string `json:"backend"`
+ BackendParameters *string `json:"connectionDetails,omitempty"`
+ Logs []string `json:"logs"`
+}
+
+type LookupResponse struct {
+ Success bool `json:"success"`
+ Data []*SanitizedBackend `json:"data"`
+}
+
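+ // SetupLookupBackend registers POST /api/v1/backends/lookup. It requires the
+ // "backends.visible" permission and returns backends matching the optional
+ // filters, including their runtime logs. Connection details are only exposed to
+ // the backend's owner or to users holding "backends.secretVis".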
+func SetupLookupBackend(state *state.State) {
+ state.Engine.POST("/api/v1/backends/lookup", func(c *gin.Context) {
+ var req BackendLookupRequest
+
+ if err := c.BindJSON(&req); err != nil {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": fmt.Sprintf("Failed to parse body: %s", err.Error()),
+ })
+
+ return
+ }
+
+ if err := state.Validator.Struct(&req); err != nil {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": fmt.Sprintf("Failed to validate body: %s", err.Error()),
+ })
+
+ return
+ }
+
+ user, err := state.JWT.GetUserFromJWT(req.Token)
+
+ if err != nil {
+ if err.Error() == "token is expired" || err.Error() == "user does not exist" {
+ c.JSON(http.StatusForbidden, gin.H{
+ "error": err.Error(),
+ })
+
+ return
+ } else {
+ log.Warnf("Failed to get user from the provided JWT token: %s", err.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to parse token",
+ })
+
+ return
+ }
+ }
+
+ if !permissions.UserHasPermission(user, "backends.visible") {
+ c.JSON(http.StatusForbidden, gin.H{
+ "error": "Missing permissions",
+ })
+
+ return
+ }
+
+ backends := []db.Backend{}
+ queryString := []string{}
+ queryParameters := []interface{}{}
+
+ if req.BackendID != nil {
+ queryString = append(queryString, "id = ?")
+ queryParameters = append(queryParameters, req.BackendID)
+ }
+
+ if req.Name != nil {
+ queryString = append(queryString, "name = ?")
+ queryParameters = append(queryParameters, req.Name)
+ }
+
+ if req.Description != nil {
+ queryString = append(queryString, "description = ?")
+ queryParameters = append(queryParameters, req.Description)
+ }
+
+ if req.Backend != nil {
+ queryString = append(queryString, "backend = ?")
+ queryParameters = append(queryParameters, req.Backend)
+ }
+
+ if err := state.DB.DB.Where(strings.Join(queryString, " AND "), queryParameters...).Find(&backends).Error; err != nil {
+ log.Warnf("Failed to get backends: %s", err.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to get backends",
+ })
+
+ return
+ }
+
+ sanitizedBackends := make([]*SanitizedBackend, len(backends))
+ hasSecretVisibility := permissions.UserHasPermission(user, "backends.secretVis")
+
+ for backendIndex, backend := range backends {
+ foundBackend, ok := backendruntime.RunningBackends[backend.ID]
+
+ if !ok {
+ log.Warnf("Failed to get backend #%d controller", backend.ID)
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to get backends",
+ })
+
+ return
+ }
+
+ sanitizedBackends[backendIndex] = &SanitizedBackend{
+ BackendID: backend.ID,
+ OwnerID: backend.UserID,
+ Name: backend.Name,
+ Description: backend.Description,
+ Backend: backend.Backend,
+ Logs: foundBackend.Logs,
+ }
+
+ if backend.UserID == user.ID || hasSecretVisibility {
+ backendParametersBytes, err := base64.StdEncoding.DecodeString(backend.BackendParameters)
+
+ if err != nil {
+ log.Warnf("Failed to decode base64 backend parameters: %s", err.Error())
+ }
+
+ backendParameters := string(backendParametersBytes)
+ sanitizedBackends[backendIndex].BackendParameters = &backendParameters
+ }
+ }
+
+ c.JSON(http.StatusOK, &LookupResponse{
+ Success: true,
+ Data: sanitizedBackends,
+ })
+ })
+}
diff --git a/backend/api/controllers/v1/backends/remove.go b/backend/api/controllers/v1/backends/remove.go
new file mode 100644
index 0000000..338ccbd
--- /dev/null
+++ b/backend/api/controllers/v1/backends/remove.go
@@ -0,0 +1,124 @@
+package backends
+
+import (
+ "fmt"
+ "net/http"
+
+ "git.terah.dev/imterah/hermes/backend/api/backendruntime"
+ "git.terah.dev/imterah/hermes/backend/api/db"
+ "git.terah.dev/imterah/hermes/backend/api/permissions"
+ "git.terah.dev/imterah/hermes/backend/api/state"
+ "github.com/charmbracelet/log"
+ "github.com/gin-gonic/gin"
+)
+
+type BackendRemovalRequest struct {
+ Token string `validate:"required"`
+ BackendID uint `json:"id" validate:"required"`
+}
+
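+ // SetupRemoveBackend registers POST /api/v1/backends/remove. It requires the
+ // "backends.remove" permission, deletes the backend from the database, and stops
+ // the associated backend runtime if one is currently running.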
+func SetupRemoveBackend(state *state.State) {
+ state.Engine.POST("/api/v1/backends/remove", func(c *gin.Context) {
+ var req BackendRemovalRequest
+
+ if err := c.BindJSON(&req); err != nil {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": fmt.Sprintf("Failed to parse body: %s", err.Error()),
+ })
+
+ return
+ }
+
+ if err := state.Validator.Struct(&req); err != nil {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": fmt.Sprintf("Failed to validate body: %s", err.Error()),
+ })
+
+ return
+ }
+
+ user, err := state.JWT.GetUserFromJWT(req.Token)
+
+ if err != nil {
+ if err.Error() == "token is expired" || err.Error() == "user does not exist" {
+ c.JSON(http.StatusForbidden, gin.H{
+ "error": err.Error(),
+ })
+
+ return
+ } else {
+ log.Warnf("Failed to get user from the provided JWT token: %s", err.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to parse token",
+ })
+
+ return
+ }
+ }
+
+ if !permissions.UserHasPermission(user, "backends.remove") {
+ c.JSON(http.StatusForbidden, gin.H{
+ "error": "Missing permissions",
+ })
+
+ return
+ }
+
+ var backend *db.Backend
+ backendRequest := state.DB.DB.Where("id = ?", req.BackendID).Find(&backend)
+
+ if backendRequest.Error != nil {
+ log.Warnf("failed to find if backend exists or not: %s", backendRequest.Error.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to find if backend exists",
+ })
+
+ return
+ }
+
+ backendExists := backendRequest.RowsAffected > 0
+
+ if !backendExists {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": "Backend doesn't exist",
+ })
+
+ return
+ }
+
+ if err := state.DB.DB.Delete(backend).Error; err != nil {
+ log.Warnf("failed to delete backend: %s", err.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to delete backend",
+ })
+
+ return
+ }
+
+ backendInstance, ok := backendruntime.RunningBackends[req.BackendID]
+
+ if ok {
+ err = backendInstance.Stop()
+
+ if err != nil {
+ log.Warnf("Failed to stop backend: %s", err.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Backend deleted, but failed to stop",
+ })
+
+ delete(backendruntime.RunningBackends, req.BackendID)
+ return
+ }
+
+ delete(backendruntime.RunningBackends, req.BackendID)
+ }
+
+ c.JSON(http.StatusOK, gin.H{
+ "success": true,
+ })
+ })
+}
diff --git a/backend/api/controllers/v1/proxies/connections.go b/backend/api/controllers/v1/proxies/connections.go
new file mode 100644
index 0000000..7ea42fb
--- /dev/null
+++ b/backend/api/controllers/v1/proxies/connections.go
@@ -0,0 +1,165 @@
+package proxies
+
+import (
+ "fmt"
+ "net/http"
+
+ "git.terah.dev/imterah/hermes/backend/api/backendruntime"
+ "git.terah.dev/imterah/hermes/backend/api/db"
+ "git.terah.dev/imterah/hermes/backend/api/permissions"
+ "git.terah.dev/imterah/hermes/backend/api/state"
+ "git.terah.dev/imterah/hermes/backend/commonbackend"
+ "github.com/charmbracelet/log"
+ "github.com/gin-gonic/gin"
+)
+
+type ConnectionsRequest struct {
+ Token string `validate:"required" json:"token"`
+ Id uint `validate:"required" json:"id"`
+}
+
+type ConnectionDetailsForConnection struct {
+ SourceIP string `json:"sourceIP"`
+ SourcePort uint16 `json:"sourcePort"`
+ DestPort uint16 `json:"destPort"`
+}
+
+type SanitizedConnection struct {
+ ClientIP string `json:"ip"`
+ Port uint16 `json:"port"`
+
+ ConnectionDetails *ConnectionDetailsForConnection `json:"connectionDetails"`
+}
+
+type ConnectionsResponse struct {
+ Success bool `json:"success"`
+ Data []*SanitizedConnection `json:"data"`
+}
+
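+ // SetupGetConnections registers POST /api/v1/forward/connections. It requires the
+ // "routes.visibleConn" permission, queries the backend runtime that owns the
+ // forward rule for its active connections, and returns the ones belonging to
+ // that rule.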
+func SetupGetConnections(state *state.State) {
+ state.Engine.POST("/api/v1/forward/connections", func(c *gin.Context) {
+ var req ConnectionsRequest
+
+ if err := c.BindJSON(&req); err != nil {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": fmt.Sprintf("Failed to parse body: %s", err.Error()),
+ })
+
+ return
+ }
+
+ if err := state.Validator.Struct(&req); err != nil {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": fmt.Sprintf("Failed to validate body: %s", err.Error()),
+ })
+
+ return
+ }
+
+ user, err := state.JWT.GetUserFromJWT(req.Token)
+
+ if err != nil {
+ if err.Error() == "token is expired" || err.Error() == "user does not exist" {
+ c.JSON(http.StatusForbidden, gin.H{
+ "error": err.Error(),
+ })
+
+ return
+ } else {
+ log.Warnf("Failed to get user from the provided JWT token: %s", err.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to parse token",
+ })
+
+ return
+ }
+ }
+
+ if !permissions.UserHasPermission(user, "routes.visibleConn") {
+ c.JSON(http.StatusForbidden, gin.H{
+ "error": "Missing permissions",
+ })
+
+ return
+ }
+
+ var proxy db.Proxy
+ proxyRequest := state.DB.DB.Where("id = ?", req.Id).Find(&proxy)
+
+ if proxyRequest.Error != nil {
+ log.Warnf("failed to find proxy: %s", proxyRequest.Error.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to find forward entry",
+ })
+
+ return
+ }
+
+ proxyExists := proxyRequest.RowsAffected > 0
+
+ if !proxyExists {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": "No forward entry found",
+ })
+
+ return
+ }
+
+ backendRuntime, ok := backendruntime.RunningBackends[proxy.BackendID]
+
+ if !ok {
+ log.Warnf("Couldn't fetch backend runtime from backend ID #%d", proxy.BackendID)
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Couldn't fetch backend runtime",
+ })
+
+ return
+ }
+
+ backendResponse, err := backendRuntime.ProcessCommand(&commonbackend.ProxyConnectionsRequest{})
+
+ if err != nil {
+ log.Warnf("Failed to get response for backend: %s", err.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to get status response from backend",
+ })
+
+ return
+ }
+
+ switch responseMessage := backendResponse.(type) {
+ case *commonbackend.ProxyConnectionsResponse:
+ sanitizedConnections := []*SanitizedConnection{}
+
+ for _, connection := range responseMessage.Connections {
+ if connection.SourceIP == proxy.SourceIP && connection.SourcePort == proxy.SourcePort {
+ sanitizedConnections = append(sanitizedConnections, &SanitizedConnection{
+ ClientIP: connection.ClientIP,
+ Port: connection.ClientPort,
+
+ ConnectionDetails: &ConnectionDetailsForConnection{
+ SourceIP: proxy.SourceIP,
+ SourcePort: proxy.SourcePort,
+ DestPort: proxy.DestinationPort,
+ },
+ })
+ }
+ }
+
+ c.JSON(http.StatusOK, &ConnectionsResponse{
+ Success: true,
+ Data: sanitizedConnections,
+ })
+ default:
+ log.Warnf("Got illegal response type for backend: %T", responseMessage)
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Got illegal response type",
+ })
+ }
+ })
+}
diff --git a/backend/api/controllers/v1/proxies/create.go b/backend/api/controllers/v1/proxies/create.go
new file mode 100644
index 0000000..d790c49
--- /dev/null
+++ b/backend/api/controllers/v1/proxies/create.go
@@ -0,0 +1,177 @@
+package proxies
+
+import (
+ "fmt"
+ "net/http"
+
+ "git.terah.dev/imterah/hermes/backend/api/backendruntime"
+ "git.terah.dev/imterah/hermes/backend/api/db"
+ "git.terah.dev/imterah/hermes/backend/api/permissions"
+ "git.terah.dev/imterah/hermes/backend/api/state"
+ "git.terah.dev/imterah/hermes/backend/commonbackend"
+ "github.com/charmbracelet/log"
+ "github.com/gin-gonic/gin"
+)
+
+type ProxyCreationRequest struct {
+ Token string `validate:"required" json:"token"`
+ Name string `validate:"required" json:"name"`
+ Description *string `json:"description"`
+ Protocol string `validate:"required" json:"protocol"`
+ SourceIP string `validate:"required" json:"sourceIP"`
+ SourcePort uint16 `validate:"required" json:"sourcePort"`
+ DestinationPort uint16 `validate:"required" json:"destinationPort"`
+ ProviderID uint `validate:"required" json:"providerID"`
+ AutoStart *bool `json:"autoStart"`
+}
+
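+ // SetupCreateProxy registers POST /api/v1/forward/create. It requires the
+ // "routes.add" permission, stores the forward rule, and, when autoStart is set,
+ // immediately asks the owning backend runtime to start proxying.
+ // Illustrative request body (field names follow the JSON tags above):
+ // {"token": "<jwt>", "name": "web", "protocol": "tcp", "sourceIP": "192.0.2.1",
+ // "sourcePort": 8080, "destinationPort": 80, "providerID": 1, "autoStart": true}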
+func SetupCreateProxy(state *state.State) {
+ state.Engine.POST("/api/v1/forward/create", func(c *gin.Context) {
+ var req ProxyCreationRequest
+
+ if err := c.BindJSON(&req); err != nil {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": fmt.Sprintf("Failed to parse body: %s", err.Error()),
+ })
+
+ return
+ }
+
+ if err := state.Validator.Struct(&req); err != nil {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": fmt.Sprintf("Failed to validate body: %s", err.Error()),
+ })
+
+ return
+ }
+
+ user, err := state.JWT.GetUserFromJWT(req.Token)
+
+ if err != nil {
+ if err.Error() == "token is expired" || err.Error() == "user does not exist" {
+ c.JSON(http.StatusForbidden, gin.H{
+ "error": err.Error(),
+ })
+
+ return
+ } else {
+ log.Warnf("Failed to get user from the provided JWT token: %s", err.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to parse token",
+ })
+
+ return
+ }
+ }
+
+ if !permissions.UserHasPermission(user, "routes.add") {
+ c.JSON(http.StatusForbidden, gin.H{
+ "error": "Missing permissions",
+ })
+
+ return
+ }
+
+ if req.Protocol != "tcp" && req.Protocol != "udp" {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": "Protocol must be either 'tcp' or 'udp'",
+ })
+
+ return
+ }
+
+ var backend db.Backend
+ backendRequest := state.DB.DB.Where("id = ?", req.ProviderID).Find(&backend)
+
+ if backendRequest.Error != nil {
+ log.Warnf("failed to find if backend exists or not: %s", backendRequest.Error.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to find if backend exists",
+ })
+
+ return
+ }
+
+ backendExists := backendRequest.RowsAffected > 0
+
+ if !backendExists {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": "Could not find backend",
+ })
+
+ return
+ }
+
+ autoStart := false
+
+ if req.AutoStart != nil {
+ autoStart = *req.AutoStart
+ }
+
+ proxy := &db.Proxy{
+ UserID: user.ID,
+ BackendID: req.ProviderID,
+ Name: req.Name,
+ Description: req.Description,
+ Protocol: req.Protocol,
+ SourceIP: req.SourceIP,
+ SourcePort: req.SourcePort,
+ DestinationPort: req.DestinationPort,
+ AutoStart: autoStart,
+ }
+
+ if result := state.DB.DB.Create(proxy); result.Error != nil {
+ log.Warnf("failed to create proxy: %s", result.Error.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to add forward rule to database",
+ })
+
+ return
+ }
+
+ if autoStart {
+ backend, ok := backendruntime.RunningBackends[proxy.BackendID]
+
+ if !ok {
+ log.Warnf("Couldn't fetch backend runtime from backend ID #%d", proxy.BackendID)
+
+ c.JSON(http.StatusOK, gin.H{
+ "success": true,
+ "id": proxy.ID,
+ })
+
+ return
+ }
+
+ backendResponse, err := backend.ProcessCommand(&commonbackend.AddProxy{
+ SourceIP: proxy.SourceIP,
+ SourcePort: proxy.SourcePort,
+ DestPort: proxy.DestinationPort,
+ Protocol: proxy.Protocol,
+ })
+
+ if err != nil {
+ log.Warnf("Failed to get response for backend #%d: %s", proxy.BackendID, err.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "failed to get response from backend",
+ })
+
+ return
+ }
+
+ switch responseMessage := backendResponse.(type) {
+ case *commonbackend.ProxyStatusResponse:
+ if !responseMessage.IsActive {
+ log.Warnf("Failed to start proxy for backend #%d", proxy.BackendID)
+ }
+ default:
+ log.Errorf("Got illegal response type for backend #%d: %T", proxy.BackendID, responseMessage)
+ }
+ }
+
+ c.JSON(http.StatusOK, gin.H{
+ "success": true,
+ "id": proxy.ID,
+ })
+ })
+}
diff --git a/backend/api/controllers/v1/proxies/lookup.go b/backend/api/controllers/v1/proxies/lookup.go
new file mode 100644
index 0000000..bf2c3ea
--- /dev/null
+++ b/backend/api/controllers/v1/proxies/lookup.go
@@ -0,0 +1,184 @@
+package proxies
+
+import (
+ "fmt"
+ "net/http"
+ "strings"
+
+ "git.terah.dev/imterah/hermes/backend/api/db"
+ "git.terah.dev/imterah/hermes/backend/api/permissions"
+ "git.terah.dev/imterah/hermes/backend/api/state"
+ "github.com/charmbracelet/log"
+ "github.com/gin-gonic/gin"
+)
+
+type ProxyLookupRequest struct {
+ Token string `validate:"required" json:"token"`
+ Id *uint `json:"id"`
+ Name *string `json:"name"`
+ Description *string `json:"description"`
+ Protocol *string `json:"protocol"`
+ SourceIP *string `json:"sourceIP"`
+ SourcePort *uint16 `json:"sourcePort"`
+ DestinationPort *uint16 `json:"destPort"`
+ ProviderID *uint `json:"providerID"`
+ AutoStart *bool `json:"autoStart"`
+}
+
+type SanitizedProxy struct {
+ Id uint `json:"id"`
+ Name string `json:"name"`
+ Description *string `json:"description,omitempty"`
+ Protocol string `json:"protocol"`
+ SourceIP string `json:"sourceIP"`
+ SourcePort uint16 `json:"sourcePort"`
+ DestinationPort uint16 `json:"destPort"`
+ ProviderID uint `json:"providerID"`
+ AutoStart bool `json:"autoStart"`
+}
+
+type ProxyLookupResponse struct {
+ Success bool `json:"success"`
+ Data []*SanitizedProxy `json:"data"`
+}
+
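+ // SetupLookupProxy registers POST /api/v1/forward/lookup. It requires the
+ // "routes.visible" permission and returns the forward rules matching the optional
+ // filters, which are combined with AND.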
+func SetupLookupProxy(state *state.State) {
+ state.Engine.POST("/api/v1/forward/lookup", func(c *gin.Context) {
+ var req ProxyLookupRequest
+
+ if err := c.BindJSON(&req); err != nil {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": fmt.Sprintf("Failed to parse body: %s", err.Error()),
+ })
+
+ return
+ }
+
+ if err := state.Validator.Struct(&req); err != nil {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": fmt.Sprintf("Failed to validate body: %s", err.Error()),
+ })
+
+ return
+ }
+
+ user, err := state.JWT.GetUserFromJWT(req.Token)
+
+ if err != nil {
+ if err.Error() == "token is expired" || err.Error() == "user does not exist" {
+ c.JSON(http.StatusForbidden, gin.H{
+ "error": err.Error(),
+ })
+
+ return
+ } else {
+ log.Warnf("Failed to get user from the provided JWT token: %s", err.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to parse token",
+ })
+
+ return
+ }
+ }
+
+ if !permissions.UserHasPermission(user, "routes.visible") {
+ c.JSON(http.StatusForbidden, gin.H{
+ "error": "Missing permissions",
+ })
+
+ return
+ }
+
+ if req.Protocol != nil {
+ if *req.Protocol != "tcp" && *req.Protocol != "udp" {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": "Protocol specified in body must either be 'tcp' or 'udp'",
+ })
+
+ return
+ }
+ }
+
+ proxies := []db.Proxy{}
+
+ queryString := []string{}
+ queryParameters := []interface{}{}
+
+ if req.Id != nil {
+ queryString = append(queryString, "id = ?")
+ queryParameters = append(queryParameters, req.Id)
+ }
+
+ if req.Name != nil {
+ queryString = append(queryString, "name = ?")
+ queryParameters = append(queryParameters, req.Name)
+ }
+
+ if req.Description != nil {
+ queryString = append(queryString, "description = ?")
+ queryParameters = append(queryParameters, req.Description)
+ }
+
+ if req.SourceIP != nil {
+ queryString = append(queryString, "source_ip = ?")
+ queryParameters = append(queryParameters, req.SourceIP)
+ }
+
+ if req.SourcePort != nil {
+ queryString = append(queryString, "source_port = ?")
+ queryParameters = append(queryParameters, req.SourcePort)
+ }
+
+ if req.DestinationPort != nil {
+ queryString = append(queryString, "destination_port = ?")
+ queryParameters = append(queryParameters, req.DestinationPort)
+ }
+
+ if req.ProviderID != nil {
+ queryString = append(queryString, "backend_id = ?")
+ queryParameters = append(queryParameters, req.ProviderID)
+ }
+
+ if req.AutoStart != nil {
+ queryString = append(queryString, "auto_start = ?")
+ queryParameters = append(queryParameters, req.AutoStart)
+ }
+
+ if req.Protocol != nil {
+ queryString = append(queryString, "protocol = ?")
+ queryParameters = append(queryParameters, req.Protocol)
+ }
+
+ if err := state.DB.DB.Where(strings.Join(queryString, " AND "), queryParameters...).Find(&proxies).Error; err != nil {
+ log.Warnf("failed to get proxies: %s", err.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to get proxies",
+ })
+
+ return
+ }
+
+ sanitizedProxies := make([]*SanitizedProxy, len(proxies))
+
+ for proxyIndex, proxy := range proxies {
+ sanitizedProxies[proxyIndex] = &SanitizedProxy{
+ Id: proxy.ID,
+ Name: proxy.Name,
+ Description: proxy.Description,
+ Protocol: proxy.Protocol,
+ SourceIP: proxy.SourceIP,
+ SourcePort: proxy.SourcePort,
+ DestinationPort: proxy.DestinationPort,
+ ProviderID: proxy.BackendID,
+ AutoStart: proxy.AutoStart,
+ }
+ }
+
+ c.JSON(http.StatusOK, &ProxyLookupResponse{
+ Success: true,
+ Data: sanitizedProxies,
+ })
+ })
+}
diff --git a/backend/api/controllers/v1/proxies/remove.go b/backend/api/controllers/v1/proxies/remove.go
new file mode 100644
index 0000000..304c5c7
--- /dev/null
+++ b/backend/api/controllers/v1/proxies/remove.go
@@ -0,0 +1,150 @@
+package proxies
+
+import (
+ "fmt"
+ "net/http"
+
+ "git.terah.dev/imterah/hermes/backend/api/backendruntime"
+ "git.terah.dev/imterah/hermes/backend/api/db"
+ "git.terah.dev/imterah/hermes/backend/api/permissions"
+ "git.terah.dev/imterah/hermes/backend/api/state"
+ "git.terah.dev/imterah/hermes/backend/commonbackend"
+ "github.com/charmbracelet/log"
+ "github.com/gin-gonic/gin"
+)
+
+type ProxyRemovalRequest struct {
+ Token string `validate:"required" json:"token"`
+ ID uint `validate:"required" json:"id"`
+}
+
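+ // SetupRemoveProxy registers POST /api/v1/forward/remove. It requires the
+ // "routes.remove" permission, deletes the forward rule from the database, and
+ // then asks the owning backend runtime to stop proxying it.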
+func SetupRemoveProxy(state *state.State) {
+ state.Engine.POST("/api/v1/forward/remove", func(c *gin.Context) {
+ var req ProxyRemovalRequest
+
+ if err := c.BindJSON(&req); err != nil {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": fmt.Sprintf("Failed to parse body: %s", err.Error()),
+ })
+
+ return
+ }
+
+ if err := state.Validator.Struct(&req); err != nil {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": fmt.Sprintf("Failed to validate body: %s", err.Error()),
+ })
+
+ return
+ }
+
+ user, err := state.JWT.GetUserFromJWT(req.Token)
+
+ if err != nil {
+ if err.Error() == "token is expired" || err.Error() == "user does not exist" {
+ c.JSON(http.StatusForbidden, gin.H{
+ "error": err.Error(),
+ })
+
+ return
+ } else {
+ log.Warnf("Failed to get user from the provided JWT token: %s", err.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to parse token",
+ })
+
+ return
+ }
+ }
+
+ if !permissions.UserHasPermission(user, "routes.remove") {
+ c.JSON(http.StatusForbidden, gin.H{
+ "error": "Missing permissions",
+ })
+
+ return
+ }
+
+ var proxy *db.Proxy
+ proxyRequest := state.DB.DB.Where("id = ?", req.ID).Find(&proxy)
+
+ if proxyRequest.Error != nil {
+ log.Warnf("failed to find if proxy exists or not: %s", proxyRequest.Error.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to find if forward rule exists",
+ })
+
+ return
+ }
+
+ proxyExists := proxyRequest.RowsAffected > 0
+
+ if !proxyExists {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": "Forward rule doesn't exist",
+ })
+
+ return
+ }
+
+ if err := state.DB.DB.Delete(proxy).Error; err != nil {
+ log.Warnf("failed to delete proxy: %s", err.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to delete forward rule",
+ })
+
+ return
+ }
+
+ backend, ok := backendruntime.RunningBackends[proxy.BackendID]
+
+ if !ok {
+ log.Warnf("Couldn't fetch backend runtime from backend ID #%d", proxy.BackendID)
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Couldn't fetch backend runtime",
+ })
+
+ return
+ }
+
+ backendResponse, err := backend.ProcessCommand(&commonbackend.RemoveProxy{
+ SourceIP: proxy.SourceIP,
+ SourcePort: proxy.SourcePort,
+ DestPort: proxy.DestinationPort,
+ Protocol: proxy.Protocol,
+ })
+
+ if err != nil {
+ log.Warnf("Failed to get response for backend #%d: %s", proxy.BackendID, err.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to get response from backend. Proxy was still successfully deleted",
+ })
+
+ return
+ }
+
+ switch responseMessage := backendResponse.(type) {
+ case *commonbackend.ProxyStatusResponse:
+ if responseMessage.IsActive {
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to stop proxy. Proxy was still successfully deleted",
+ })
+ } else {
+ c.JSON(http.StatusOK, gin.H{
+ "success": true,
+ })
+ }
+ default:
+ log.Errorf("Got illegal response type for backend #%d: %T", proxy.BackendID, responseMessage)
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Got invalid response from backend. Proxy was still successfully deleted",
+ })
+ }
+ })
+}
diff --git a/backend/api/controllers/v1/proxies/start.go b/backend/api/controllers/v1/proxies/start.go
new file mode 100644
index 0000000..1680ddf
--- /dev/null
+++ b/backend/api/controllers/v1/proxies/start.go
@@ -0,0 +1,136 @@
+package proxies
+
+import (
+ "fmt"
+ "net/http"
+
+ "git.terah.dev/imterah/hermes/backend/api/backendruntime"
+ "git.terah.dev/imterah/hermes/backend/api/db"
+ "git.terah.dev/imterah/hermes/backend/api/permissions"
+ "git.terah.dev/imterah/hermes/backend/api/state"
+ "git.terah.dev/imterah/hermes/backend/commonbackend"
+ "github.com/charmbracelet/log"
+ "github.com/gin-gonic/gin"
+)
+
+type ProxyStartRequest struct {
+ Token string `validate:"required" json:"token"`
+ ID uint `validate:"required" json:"id"`
+}
+
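+ // SetupStartProxy registers POST /api/v1/forward/start. It requires the
+ // "routes.start" permission and asks the owning backend runtime to begin
+ // proxying the stored forward rule.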
+func SetupStartProxy(state *state.State) {
+ state.Engine.POST("/api/v1/forward/start", func(c *gin.Context) {
+ var req ProxyStartRequest
+
+ if err := c.BindJSON(&req); err != nil {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": fmt.Sprintf("Failed to parse body: %s", err.Error()),
+ })
+
+ return
+ }
+
+ if err := state.Validator.Struct(&req); err != nil {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": fmt.Sprintf("Failed to validate body: %s", err.Error()),
+ })
+
+ return
+ }
+
+ user, err := state.JWT.GetUserFromJWT(req.Token)
+
+ if err != nil {
+ if err.Error() == "token is expired" || err.Error() == "user does not exist" {
+ c.JSON(http.StatusForbidden, gin.H{
+ "error": err.Error(),
+ })
+
+ return
+ } else {
+ log.Warnf("Failed to get user from the provided JWT token: %s", err.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to parse token",
+ })
+
+ return
+ }
+ }
+
+ if !permissions.UserHasPermission(user, "routes.start") {
+ c.JSON(http.StatusForbidden, gin.H{
+ "error": "Missing permissions",
+ })
+
+ return
+ }
+
+ var proxy *db.Proxy
+ proxyRequest := state.DB.DB.Where("id = ?", req.ID).Find(&proxy)
+
+ if proxyRequest.Error != nil {
+ log.Warnf("failed to find if proxy exists or not: %s", proxyRequest.Error.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to find if forward rule exists",
+ })
+
+ return
+ }
+
+ proxyExists := proxyRequest.RowsAffected > 0
+
+ if !proxyExists {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": "Forward rule doesn't exist",
+ })
+
+ return
+ }
+
+ backend, ok := backendruntime.RunningBackends[proxy.BackendID]
+
+ if !ok {
+ log.Warnf("Couldn't fetch backend runtime from backend ID #%d", proxy.BackendID)
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Couldn't fetch backend runtime",
+ })
+
+ return
+ }
+
+ backendResponse, err := backend.ProcessCommand(&commonbackend.AddProxy{
+ SourceIP: proxy.SourceIP,
+ SourcePort: proxy.SourcePort,
+ DestPort: proxy.DestinationPort,
+ Protocol: proxy.Protocol,
+ })
+
+ if err != nil {
+ log.Warnf("Failed to get response for backend #%d: %s", proxy.BackendID, err.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "failed to get response from backend",
+ })
+
+ return
+ }
+
+ switch responseMessage := backendResponse.(type) {
+ case *commonbackend.ProxyStatusResponse:
+ if !responseMessage.IsActive {
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "failed to start proxy",
+ })
+ } else {
+ c.JSON(http.StatusOK, gin.H{
+ "success": true,
+ })
+ }
+ default:
+ log.Errorf("Got illegal response type for backend #%d: %T", proxy.BackendID, responseMessage)
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Got invalid response from backend. Proxy was likely still successfully started",
+ })
+ }
+ })
+}
diff --git a/backend/api/controllers/v1/proxies/stop.go b/backend/api/controllers/v1/proxies/stop.go
new file mode 100644
index 0000000..27d63ce
--- /dev/null
+++ b/backend/api/controllers/v1/proxies/stop.go
@@ -0,0 +1,136 @@
+package proxies
+
+import (
+ "fmt"
+ "net/http"
+
+ "git.terah.dev/imterah/hermes/backend/api/backendruntime"
+ "git.terah.dev/imterah/hermes/backend/api/db"
+ "git.terah.dev/imterah/hermes/backend/api/permissions"
+ "git.terah.dev/imterah/hermes/backend/api/state"
+ "git.terah.dev/imterah/hermes/backend/commonbackend"
+ "github.com/charmbracelet/log"
+ "github.com/gin-gonic/gin"
+)
+
+type ProxyStopRequest struct {
+ Token string `validate:"required" json:"token"`
+ ID uint `validate:"required" json:"id"`
+}
+
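+ // SetupStopProxy registers POST /api/v1/forward/stop. It requires the
+ // "routes.stop" permission and asks the owning backend runtime to stop
+ // proxying the stored forward rule.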
+func SetupStopProxy(state *state.State) {
+ state.Engine.POST("/api/v1/forward/stop", func(c *gin.Context) {
+ var req ProxyStopRequest
+
+ if err := c.BindJSON(&req); err != nil {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": fmt.Sprintf("Failed to parse body: %s", err.Error()),
+ })
+
+ return
+ }
+
+ if err := state.Validator.Struct(&req); err != nil {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": fmt.Sprintf("Failed to validate body: %s", err.Error()),
+ })
+
+ return
+ }
+
+ user, err := state.JWT.GetUserFromJWT(req.Token)
+
+ if err != nil {
+ if err.Error() == "token is expired" || err.Error() == "user does not exist" {
+ c.JSON(http.StatusForbidden, gin.H{
+ "error": err.Error(),
+ })
+
+ return
+ } else {
+ log.Warnf("Failed to get user from the provided JWT token: %s", err.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to parse token",
+ })
+
+ return
+ }
+ }
+
+ if !permissions.UserHasPermission(user, "routes.stop") {
+ c.JSON(http.StatusForbidden, gin.H{
+ "error": "Missing permissions",
+ })
+
+ return
+ }
+
+ var proxy *db.Proxy
+ proxyRequest := state.DB.DB.Where("id = ?", req.ID).Find(&proxy)
+
+ if proxyRequest.Error != nil {
+ log.Warnf("failed to find if proxy exists or not: %s", proxyRequest.Error.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to find if forward rule exists",
+ })
+
+ return
+ }
+
+ proxyExists := proxyRequest.RowsAffected > 0
+
+ if !proxyExists {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": "Forward rule doesn't exist",
+ })
+
+ return
+ }
+
+ backend, ok := backendruntime.RunningBackends[proxy.BackendID]
+
+ if !ok {
+ log.Warnf("Couldn't fetch backend runtime from backend ID #%d", proxy.BackendID)
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Couldn't fetch backend runtime",
+ })
+
+ return
+ }
+
+ backendResponse, err := backend.ProcessCommand(&commonbackend.RemoveProxy{
+ SourceIP: proxy.SourceIP,
+ SourcePort: proxy.SourcePort,
+ DestPort: proxy.DestinationPort,
+ Protocol: proxy.Protocol,
+ })
+
+ if err != nil {
+ log.Warnf("Failed to get response for backend #%d: %s", proxy.BackendID, err.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "failed to get response from backend",
+ })
+
+ return
+ }
+
+ switch responseMessage := backendResponse.(type) {
+ case *commonbackend.ProxyStatusResponse:
+ if responseMessage.IsActive {
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "failed to stop proxy",
+ })
+ } else {
+ c.JSON(http.StatusOK, gin.H{
+ "success": true,
+ })
+ }
+ default:
+ log.Errorf("Got illegal response type for backend #%d: %T", proxy.BackendID, responseMessage)
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Got invalid response from backend. Proxy was likely still successfully stopped",
+ })
+ }
+ })
+}
diff --git a/backend/api/controllers/v1/users/core.go b/backend/api/controllers/v1/users/core.go
new file mode 100644
index 0000000..c08a3bf
--- /dev/null
+++ b/backend/api/controllers/v1/users/core.go
@@ -0,0 +1,15 @@
+package users
+
+import "os"
+
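+ // Signup behavior is configured through environment variables, read once in init():
+ // HERMES_SIGNUP_ENABLED enables public signup, HERMES_UNSAFE_ADMIN_SIGNUP_ENABLED
+ // additionally grants every permission node to new accounts, and
+ // HERMES_FORCE_DISABLE_REFRESH_TOKEN_EXPIRY creates refresh tokens that never expire.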
+var (
+ signupEnabled bool
+ unsafeSignup bool
+ forceNoExpiryTokens bool
+)
+
+func init() {
+ signupEnabled = os.Getenv("HERMES_SIGNUP_ENABLED") != ""
+ unsafeSignup = os.Getenv("HERMES_UNSAFE_ADMIN_SIGNUP_ENABLED") != ""
+ forceNoExpiryTokens = os.Getenv("HERMES_FORCE_DISABLE_REFRESH_TOKEN_EXPIRY") != ""
+}
diff --git a/backend/api/controllers/v1/users/create.go b/backend/api/controllers/v1/users/create.go
new file mode 100644
index 0000000..f39aa95
--- /dev/null
+++ b/backend/api/controllers/v1/users/create.go
@@ -0,0 +1,160 @@
+package users
+
+import (
+ "crypto/rand"
+ "encoding/base64"
+ "fmt"
+ "net/http"
+ "strings"
+
+ "git.terah.dev/imterah/hermes/backend/api/db"
+ permissionHelper "git.terah.dev/imterah/hermes/backend/api/permissions"
+ "git.terah.dev/imterah/hermes/backend/api/state"
+ "github.com/charmbracelet/log"
+ "github.com/gin-gonic/gin"
+ "golang.org/x/crypto/bcrypt"
+)
+
+type UserCreationRequest struct {
+ Name string `validate:"required"`
+ Email string `validate:"required"`
+ Password string `validate:"required"`
+ Username string `validate:"required"`
+ IsBot bool
+}
+
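+ // SetupCreateUser registers POST /api/v1/users/create. Signup must be enabled via
+ // the environment variables above. On success it returns a freshly generated JWT
+ // along with the refresh token stored for the new account.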
+func SetupCreateUser(state *state.State) {
+ state.Engine.POST("/api/v1/users/create", func(c *gin.Context) {
+ if !signupEnabled && !unsafeSignup {
+ c.JSON(http.StatusForbidden, gin.H{
+ "error": "Signing up is not enabled at this time.",
+ })
+
+ return
+ }
+
+ var req UserCreationRequest
+
+ if err := c.BindJSON(&req); err != nil {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": fmt.Sprintf("Failed to parse body: %s", err.Error()),
+ })
+
+ return
+ }
+
+ if err := state.Validator.Struct(&req); err != nil {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": fmt.Sprintf("Failed to validate body: %s", err.Error()),
+ })
+
+ return
+ }
+
+ var user *db.User
+ userRequest := state.DB.DB.Where("email = ? OR username = ?", req.Email, req.Username).Find(&user)
+
+ if userRequest.Error != nil {
+ log.Warnf("failed to find if user exists or not: %s", userRequest.Error.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to find if user exists",
+ })
+
+ return
+ }
+
+ userExists := userRequest.RowsAffected > 0
+
+ if userExists {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": "User already exists",
+ })
+
+ return
+ }
+
+ passwordHashed, err := bcrypt.GenerateFromPassword([]byte(req.Password), bcrypt.DefaultCost)
+
+ if err != nil {
+ log.Warnf("Failed to generate password for client upon signup: %s", err.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to generate password hash",
+ })
+
+ return
+ }
+
+ permissions := []db.Permission{}
+
+ for _, permission := range permissionHelper.DefaultPermissionNodes {
+ permissionEnabledState := false
+
+ if unsafeSignup || strings.HasPrefix(permission, "routes.") || permission == "permissions.see" {
+ permissionEnabledState = true
+ }
+
+ permissions = append(permissions, db.Permission{
+ PermissionNode: permission,
+ HasPermission: permissionEnabledState,
+ })
+ }
+
+ tokenRandomData := make([]byte, 80)
+
+ if _, err := rand.Read(tokenRandomData); err != nil {
+ log.Warnf("Failed to read random data to use as token: %s", err.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to generate refresh token",
+ })
+
+ return
+ }
+
+ user = &db.User{
+ Email: req.Email,
+ Username: req.Username,
+ Name: req.Name,
+ IsBot: &req.IsBot,
+ Password: base64.StdEncoding.EncodeToString(passwordHashed),
+ Permissions: permissions,
+ Tokens: []db.Token{
+ {
+ Token: base64.StdEncoding.EncodeToString(tokenRandomData),
+ DisableExpiry: forceNoExpiryTokens,
+ CreationIPAddr: c.ClientIP(),
+ },
+ },
+ }
+
+ if result := state.DB.DB.Create(&user); result.Error != nil {
+ log.Warnf("Failed to create user: %s", result.Error.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to add user into database",
+ })
+
+ return
+ }
+
+ jwt, err := state.JWT.Generate(user.ID)
+
+ if err != nil {
+ log.Warnf("Failed to generate JWT: %s", err.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to generate refresh token",
+ })
+
+ return
+ }
+
+ c.JSON(http.StatusOK, gin.H{
+ "success": true,
+ "token": jwt,
+ "refreshToken": base64.StdEncoding.EncodeToString(tokenRandomData),
+ })
+ })
+}
diff --git a/backend/api/controllers/v1/users/login.go b/backend/api/controllers/v1/users/login.go
new file mode 100644
index 0000000..ea4f2f3
--- /dev/null
+++ b/backend/api/controllers/v1/users/login.go
@@ -0,0 +1,158 @@
+package users
+
+import (
+ "crypto/rand"
+ "encoding/base64"
+ "fmt"
+ "net/http"
+
+ "git.terah.dev/imterah/hermes/backend/api/db"
+ "git.terah.dev/imterah/hermes/backend/api/state"
+ "github.com/charmbracelet/log"
+ "github.com/gin-gonic/gin"
+ "golang.org/x/crypto/bcrypt"
+)
+
+type UserLoginRequest struct {
+ Username *string
+ Email *string
+
+ Password string `validate:"required"`
+}
+
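+ // SetupLoginUser registers POST /api/v1/users/login. Either an email or a username
+ // must be supplied alongside the password; on success a new refresh token is stored
+ // and returned together with a freshly generated JWT.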
+func SetupLoginUser(state *state.State) {
+ state.Engine.POST("/api/v1/users/login", func(c *gin.Context) {
+ var req UserLoginRequest
+
+ if err := c.BindJSON(&req); err != nil {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": fmt.Sprintf("Failed to parse body: %s", err.Error()),
+ })
+
+ return
+ }
+
+ if err := state.Validator.Struct(&req); err != nil {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": fmt.Sprintf("Failed to validate body: %s", err.Error()),
+ })
+
+ return
+ }
+
+ if req.Email == nil && req.Username == nil {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": "Missing both email and username in body",
+ })
+
+ return
+ }
+
+ userFindRequestArguments := make([]interface{}, 1)
+ userFindRequest := ""
+
+ if req.Email != nil {
+ userFindRequestArguments[0] = &req.Email
+ userFindRequest += "email = ?"
+ } else if req.Username != nil {
+ userFindRequestArguments[0] = &req.Username
+ userFindRequest += "username = ?"
+ }
+
+ var user *db.User
+ userRequest := state.DB.DB.Where(userFindRequest, userFindRequestArguments...).Find(&user)
+
+ if userRequest.Error != nil {
+ log.Warnf("failed to find if user exists or not: %s", userRequest.Error.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to find if user exists",
+ })
+
+ return
+ }
+
+ userExists := userRequest.RowsAffected > 0
+
+ if !userExists {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": "User not found",
+ })
+
+ return
+ }
+
+ decodedPassword := make([]byte, base64.StdEncoding.DecodedLen(len(user.Password)))
+ _, err := base64.StdEncoding.Decode(decodedPassword, []byte(user.Password))
+
+ if err != nil {
+ log.Warnf("failed to decode password in database: %s", err.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to parse database result for password",
+ })
+
+ return
+ }
+
+ err = bcrypt.CompareHashAndPassword(decodedPassword, []byte(req.Password))
+
+ if err != nil {
+ c.JSON(http.StatusForbidden, gin.H{
+ "error": "Invalid password",
+ })
+
+ return
+ }
+
+ tokenRandomData := make([]byte, 80)
+
+ if _, err := rand.Read(tokenRandomData); err != nil {
+ log.Warnf("Failed to read random data to use as token: %s", err.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to generate refresh token",
+ })
+
+ return
+ }
+
+ token := &db.Token{
+ UserID: user.ID,
+
+ Token: base64.StdEncoding.EncodeToString(tokenRandomData),
+ DisableExpiry: forceNoExpiryTokens,
+ CreationIPAddr: c.ClientIP(),
+ }
+
+ if result := state.DB.DB.Create(&token); result.Error != nil {
+ log.Warnf("Failed to create refresh token: %s", result.Error.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to add refresh token into database",
+ })
+
+ return
+ }
+
+ jwt, err := state.JWT.Generate(user.ID)
+
+ if err != nil {
+ log.Warnf("Failed to generate JWT: %s", err.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to generate refresh token",
+ })
+
+ return
+ }
+
+ c.JSON(http.StatusOK, gin.H{
+ "success": true,
+ "token": jwt,
+ "refreshToken": base64.StdEncoding.EncodeToString(tokenRandomData),
+ })
+ })
+}
diff --git a/backend/api/controllers/v1/users/lookup.go b/backend/api/controllers/v1/users/lookup.go
new file mode 100644
index 0000000..f5c14fc
--- /dev/null
+++ b/backend/api/controllers/v1/users/lookup.go
@@ -0,0 +1,137 @@
+package users
+
+import (
+ "fmt"
+ "net/http"
+ "strings"
+
+ "git.terah.dev/imterah/hermes/backend/api/db"
+ "git.terah.dev/imterah/hermes/backend/api/permissions"
+ "git.terah.dev/imterah/hermes/backend/api/state"
+ "github.com/charmbracelet/log"
+ "github.com/gin-gonic/gin"
+)
+
+type UserLookupRequest struct {
+ Token string `validate:"required"`
+ UID *uint `json:"id"`
+ Name *string `json:"name"`
+ Email *string `json:"email"`
+ Username *string `json:"username"`
+ IsBot *bool `json:"isServiceAccount"`
+}
+
+type SanitizedUsers struct {
+ UID uint `json:"id"`
+ Name string `json:"name"`
+ Email string `json:"email"`
+ Username string `json:"username"`
+ IsBot bool `json:"isServiceAccount"`
+}
+
+type LookupResponse struct {
+ Success bool `json:"success"`
+ Data []*SanitizedUsers `json:"data"`
+}
+
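+ // SetupLookupUser registers POST /api/v1/users/lookup. Users without the
+ // "users.lookup" permission can only look up their own account; with it, the
+ // optional filters are applied across all users.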
+func SetupLookupUser(state *state.State) {
+ state.Engine.POST("/api/v1/users/lookup", func(c *gin.Context) {
+ var req UserLookupRequest
+
+ if err := c.BindJSON(&req); err != nil {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": fmt.Sprintf("Failed to parse body: %s", err.Error()),
+ })
+
+ return
+ }
+
+ if err := state.Validator.Struct(&req); err != nil {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": fmt.Sprintf("Failed to validate body: %s", err.Error()),
+ })
+
+ return
+ }
+
+ user, err := state.JWT.GetUserFromJWT(req.Token)
+
+ if err != nil {
+ if err.Error() == "token is expired" || err.Error() == "user does not exist" {
+ c.JSON(http.StatusForbidden, gin.H{
+ "error": err.Error(),
+ })
+
+ return
+ } else {
+ log.Warnf("Failed to get user from the provided JWT token: %s", err.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to parse token",
+ })
+
+ return
+ }
+ }
+
+ users := []db.User{}
+ queryString := []string{}
+ queryParameters := []interface{}{}
+
+ if !permissions.UserHasPermission(user, "users.lookup") {
+ queryString = append(queryString, "id = ?")
+ queryParameters = append(queryParameters, user.ID)
+ } else if req.UID != nil {
+ queryString = append(queryString, "id = ?")
+ queryParameters = append(queryParameters, req.UID)
+ }
+
+ if req.Name != nil {
+ queryString = append(queryString, "name = ?")
+ queryParameters = append(queryParameters, req.Name)
+ }
+
+ if req.Email != nil {
+ queryString = append(queryString, "email = ?")
+ queryParameters = append(queryParameters, req.Email)
+ }
+
+ if req.Username != nil {
+ queryString = append(queryString, "username = ?")
+ queryParameters = append(queryParameters, req.Username)
+ }
+
+ if req.IsBot != nil {
+ queryString = append(queryString, "is_bot = ?")
+ queryParameters = append(queryParameters, req.IsBot)
+ }
+
+ if err := state.DB.DB.Where(strings.Join(queryString, " AND "), queryParameters...).Find(&users).Error; err != nil {
+ log.Warnf("Failed to get users: %s", err.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to get users",
+ })
+
+ return
+ }
+
+ sanitizedUsers := make([]*SanitizedUsers, len(users))
+
+ for userIndex, user := range users {
+ isBot := false
+
+ if user.IsBot != nil {
+ isBot = *user.IsBot
+ }
+
+ sanitizedUsers[userIndex] = &SanitizedUsers{
+ UID: user.ID,
+ Name: user.Name,
+ Email: user.Email,
+ Username: user.Username,
+ IsBot: isBot,
+ }
+ }
+
+ c.JSON(http.StatusOK, &LookupResponse{
+ Success: true,
+ Data: sanitizedUsers,
+ })
+ })
+}
diff --git a/backend/api/controllers/v1/users/refresh.go b/backend/api/controllers/v1/users/refresh.go
new file mode 100644
index 0000000..ce9fbaf
--- /dev/null
+++ b/backend/api/controllers/v1/users/refresh.go
@@ -0,0 +1,118 @@
+package users
+
+import (
+ "fmt"
+ "net/http"
+ "time"
+
+ "git.terah.dev/imterah/hermes/backend/api/db"
+ "git.terah.dev/imterah/hermes/backend/api/state"
+ "github.com/charmbracelet/log"
+ "github.com/gin-gonic/gin"
+)
+
+type UserRefreshRequest struct {
+ Token string `validate:"required"`
+}
+
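+ // SetupRefreshUserToken registers POST /api/v1/users/refresh. It exchanges a stored
+ // refresh token for a new short-lived JWT, deleting the refresh token if it has expired.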
+func SetupRefreshUserToken(state *state.State) {
+ state.Engine.POST("/api/v1/users/refresh", func(c *gin.Context) {
+ var req UserRefreshRequest
+
+ if err := c.BindJSON(&req); err != nil {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": fmt.Sprintf("Failed to parse body: %s", err.Error()),
+ })
+
+ return
+ }
+
+ if err := state.Validator.Struct(&req); err != nil {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": fmt.Sprintf("Failed to validate body: %s", err.Error()),
+ })
+
+ return
+ }
+
+ var tokenInDatabase *db.Token
+ tokenRequest := state.DB.DB.Where("token = ?", req.Token).Find(&tokenInDatabase)
+
+ if tokenRequest.Error != nil {
+ log.Warnf("failed to find if token exists or not: %s", tokenRequest.Error.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to find if token exists",
+ })
+
+ return
+ }
+
+ tokenExists := tokenRequest.RowsAffected > 0
+
+ if !tokenExists {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": "Token not found",
+ })
+
+ return
+ }
+
+ // Skip the expiry checks entirely if expiry is disabled for this token.
+ // Otherwise, treat the token as expired if the client IP differs from the IP it was
+ // created from, or if more than 7 days have passed since it was created.
+ if !tokenInDatabase.DisableExpiry && (c.ClientIP() != tokenInDatabase.CreationIPAddr || time.Now().After(tokenInDatabase.CreatedAt.Add((24*7)*time.Hour))) {
+ c.JSON(http.StatusForbidden, gin.H{
+ "error": "Token has expired",
+ })
+
+ tx := state.DB.DB.Delete(tokenInDatabase)
+
+ if tx.Error != nil {
+ log.Warnf("Failed to delete expired token from database: %s", tx.Error.Error())
+ }
+
+ return
+ }
+
+ // Get the user to check if the user exists before doing anything
+ var user *db.User
+ userRequest := state.DB.DB.Where("id = ?", tokenInDatabase.UserID).Find(&user)
+
+ if userRequest.Error != nil {
+ log.Warnf("failed to find the user that owns the token: %s", userRequest.Error.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to find user",
+ })
+
+ return
+ }
+
+ userExists := userRequest.RowsAffected > 0
+
+ if !userExists {
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "User not found",
+ })
+
+ return
+ }
+
+ jwt, err := state.JWT.Generate(user.ID)
+
+ if err != nil {
+ log.Warnf("Failed to generate JWT: %s", err.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to generate refresh token",
+ })
+
+ return
+ }
+
+ c.JSON(http.StatusOK, gin.H{
+ "success": true,
+ "token": jwt,
+ })
+ })
+}
diff --git a/backend/api/controllers/v1/users/remove.go b/backend/api/controllers/v1/users/remove.go
new file mode 100644
index 0000000..59e460c
--- /dev/null
+++ b/backend/api/controllers/v1/users/remove.go
@@ -0,0 +1,106 @@
+package users
+
+import (
+ "fmt"
+ "net/http"
+
+ "git.terah.dev/imterah/hermes/backend/api/db"
+ "git.terah.dev/imterah/hermes/backend/api/permissions"
+ "git.terah.dev/imterah/hermes/backend/api/state"
+ "github.com/charmbracelet/log"
+ "github.com/gin-gonic/gin"
+)
+
+type UserRemovalRequest struct {
+ Token string `validate:"required"`
+ UID *uint `json:"uid"`
+}
+
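+ // SetupRemoveUser registers POST /api/v1/users/remove. By default it deletes the
+ // requesting user's own account; removing a different account requires the
+ // "users.remove" permission.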
+func SetupRemoveUser(state *state.State) {
+ state.Engine.POST("/api/v1/users/remove", func(c *gin.Context) {
+ var req UserRemovalRequest
+
+ if err := c.BindJSON(&req); err != nil {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": fmt.Sprintf("Failed to parse body: %s", err.Error()),
+ })
+
+ return
+ }
+
+ if err := state.Validator.Struct(&req); err != nil {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": fmt.Sprintf("Failed to validate body: %s", err.Error()),
+ })
+
+ return
+ }
+
+ user, err := state.JWT.GetUserFromJWT(req.Token)
+
+ if err != nil {
+ if err.Error() == "token is expired" || err.Error() == "user does not exist" {
+ c.JSON(http.StatusForbidden, gin.H{
+ "error": err.Error(),
+ })
+
+ return
+ } else {
+ log.Warnf("Failed to get user from the provided JWT token: %s", err.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to parse token",
+ })
+
+ return
+ }
+ }
+
+ uid := user.ID
+
+ if req.UID != nil {
+ uid = *req.UID
+
+ if uid != user.ID && !permissions.UserHasPermission(user, "users.remove") {
+ c.JSON(http.StatusForbidden, gin.H{
+ "error": "Missing permissions",
+ })
+
+ return
+ }
+ }
+
+ // Make sure the user exists first if we have a custom UserID
+
+ if uid != user.ID {
+ var customUser *db.User
+ userRequest := state.DB.DB.Where("id = ?", uid).Find(&customUser)
+
+ if userRequest.Error != nil {
+ log.Warnf("failed to find if user exists or not: %s", userRequest.Error.Error())
+
+ c.JSON(http.StatusInternalServerError, gin.H{
+ "error": "Failed to find if user exists",
+ })
+
+ return
+ }
+
+ userExists := userRequest.RowsAffected > 0
+
+ if !userExists {
+ c.JSON(http.StatusBadRequest, gin.H{
+ "error": "User doesn't exist",
+ })
+
+ return
+ }
+ }
+
+ state.DB.DB.Select("Tokens", "Permissions", "OwnedProxies", "OwnedBackends").Where("id = ?", uid).Delete(user)
+
+ c.JSON(http.StatusOK, gin.H{
+ "success": true,
+ })
+ })
+}
diff --git a/backend/api/db/db.go b/backend/api/db/db.go
new file mode 100644
index 0000000..295bff8
--- /dev/null
+++ b/backend/api/db/db.go
@@ -0,0 +1,77 @@
+package db
+
+import (
+ "fmt"
+
+ "gorm.io/driver/postgres"
+ "gorm.io/driver/sqlite"
+ "gorm.io/gorm"
+)
+
+type DB struct {
+ DB *gorm.DB
+}
+
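+ // New opens a database handle for the given backend ("sqlite" or "postgresql").
+ // For sqlite, params is the path to the database file; for postgresql, it is the DSN.
+ // Illustrative usage: db.New("sqlite", "./hermes.db")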
+func New(backend, params string) (*DB, error) {
+ var err error
+
+ dialector, err := initDialector(backend, params)
+
+ if err != nil {
+ return nil, fmt.Errorf("failed to initialize physical database: %s", err)
+ }
+
+ database, err := gorm.Open(dialector)
+
+ if err != nil {
+ return nil, fmt.Errorf("failed to open database: %s", err)
+ }
+
+ return &DB{DB: database}, nil
+}
+
+func (db *DB) DoMigrations() error {
+ if err := db.DB.AutoMigrate(&Proxy{}); err != nil {
+ return err
+ }
+
+ if err := db.DB.AutoMigrate(&Backend{}); err != nil {
+ return err
+ }
+
+ if err := db.DB.AutoMigrate(&Permission{}); err != nil {
+ return err
+ }
+
+ if err := db.DB.AutoMigrate(&Token{}); err != nil {
+ return err
+ }
+
+ if err := db.DB.AutoMigrate(&User{}); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+func initDialector(backend, params string) (gorm.Dialector, error) {
+ switch backend {
+ case "sqlite":
+ if params == "" {
+ return nil, fmt.Errorf("sqlite database file not specified")
+ }
+
+ return sqlite.Open(params), nil
+ case "postgresql":
+ if params == "" {
+ return nil, fmt.Errorf("postgres DSN not specified")
+ }
+
+ return postgres.Open(params), nil
+ case "":
+ return nil, fmt.Errorf("no database backend specified in environment variables")
+ default:
+ return nil, fmt.Errorf("unknown database backend specified: %s", backend)
+ }
+}
diff --git a/backend/api/db/models.go b/backend/api/db/models.go
new file mode 100644
index 0000000..290cd6e
--- /dev/null
+++ b/backend/api/db/models.go
@@ -0,0 +1,66 @@
+package db
+
+import (
+ "gorm.io/gorm"
+)
+
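+ // Backend is a stored backend definition. BackendParameters holds the
+ // backend-specific configuration as a base64-encoded string.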
+type Backend struct {
+ gorm.Model
+
+ UserID uint
+
+ Name string
+ Description *string
+ Backend string
+ BackendParameters string
+
+ Proxies []Proxy
+}
+
+type Proxy struct {
+ gorm.Model
+
+ BackendID uint
+ UserID uint
+
+ Name string
+ Description *string
+ Protocol string
+ SourceIP string
+ SourcePort uint16
+ DestinationPort uint16
+ AutoStart bool
+}
+
+type Permission struct {
+ gorm.Model
+
+ PermissionNode string
+ HasPermission bool
+ UserID uint
+}
+
+type Token struct {
+ gorm.Model
+
+ UserID uint
+
+ Token string
+ DisableExpiry bool
+ CreationIPAddr string
+}
+
+type User struct {
+ gorm.Model
+
+ Email string `gorm:"unique"`
+ Username string `gorm:"unique"`
+ Name string
+ Password string
+ IsBot *bool
+
+ Permissions []Permission
+ OwnedProxies []Proxy
+ OwnedBackends []Backend
+ Tokens []Token
+}
diff --git a/backend/api/jwt/jwt.go b/backend/api/jwt/jwt.go
new file mode 100644
index 0000000..40e011b
--- /dev/null
+++ b/backend/api/jwt/jwt.go
@@ -0,0 +1,107 @@
+package jwt
+
+import (
+ "errors"
+ "fmt"
+ "strconv"
+ "time"
+
+ "git.terah.dev/imterah/hermes/backend/api/db"
+ "github.com/golang-jwt/jwt/v5"
+)
+
+var (
+ DevelopmentModeTimings = time.Duration(60*24) * time.Minute
+ NormalModeTimings = time.Duration(3) * time.Minute
+)
+
+type JWTCore struct {
+ Key []byte
+ Database *db.DB
+ TimeMultiplier time.Duration
+}
+
+func New(key []byte, database *db.DB, timeMultiplier time.Duration) *JWTCore {
+ jwtCore := &JWTCore{
+ Key: key,
+ Database: database,
+ TimeMultiplier: timeMultiplier,
+ }
+
+ return jwtCore
+}
+
+func (jwtCore *JWTCore) Parse(tokenString string, options ...jwt.ParserOption) (*jwt.Token, error) {
+ return jwt.Parse(tokenString, jwtCore.jwtKeyCallback, options...)
+}
+
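+ // GetUserFromJWT parses and validates the token, reads the user ID from the
+ // audience claim, and loads the matching user with its permissions preloaded.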
+func (jwtCore *JWTCore) GetUserFromJWT(token string) (*db.User, error) {
+ if jwtCore.Database == nil {
+ return nil, fmt.Errorf("database is not initialized")
+ }
+
+ parsedJWT, err := jwtCore.Parse(token)
+
+ if err != nil {
+ if errors.Is(err, jwt.ErrTokenExpired) {
+ return nil, fmt.Errorf("token is expired")
+ } else {
+ return nil, err
+ }
+ }
+
+ audience, err := parsedJWT.Claims.GetAudience()
+
+ if err != nil {
+ return nil, err
+ }
+
+ if len(audience) < 1 {
+ return nil, fmt.Errorf("audience is too small")
+ }
+
+ uid, err := strconv.Atoi(audience[0])
+
+ if err != nil {
+ return nil, err
+ }
+
+ user := &db.User{}
+ userRequest := jwtCore.Database.DB.Preload("Permissions").Where("id = ?", uint(uid)).Find(&user)
+
+ if userRequest.Error != nil {
+ return user, fmt.Errorf("failed to find if user exists or not: %s", userRequest.Error.Error())
+ }
+
+ userExists := userRequest.RowsAffected > 0
+
+ if !userExists {
+ return user, fmt.Errorf("user does not exist")
+ }
+
+ return user, nil
+}
+
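+ // Generate signs a new JWT for the given user ID, valid for the configured
+ // TimeMultiplier duration.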
+func (jwtCore *JWTCore) Generate(uid uint) (string, error) {
+ currentJWTTime := jwt.NewNumericDate(time.Now())
+
+ token := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.RegisteredClaims{
+ ExpiresAt: jwt.NewNumericDate(time.Now().Add(jwtCore.TimeMultiplier)),
+ IssuedAt: currentJWTTime,
+ NotBefore: currentJWTTime,
+ // Convert the user ID to a string and set it as the audience claim's only value (each token belongs to exactly one user)
+ Audience: []string{strconv.Itoa(int(uid))},
+ })
+
+ signedToken, err := token.SignedString(jwtCore.Key)
+
+ if err != nil {
+ return "", err
+ }
+
+ return signedToken, nil
+}
+
+func (jwtCore *JWTCore) jwtKeyCallback(*jwt.Token) (any, error) {
+ return jwtCore.Key, nil
+}
diff --git a/backend/api/main.go b/backend/api/main.go
new file mode 100644
index 0000000..1438af8
--- /dev/null
+++ b/backend/api/main.go
@@ -0,0 +1,423 @@
+package main
+
+import (
+ "encoding/base64"
+ "encoding/json"
+ "fmt"
+ "net"
+ "os"
+ "path"
+ "path/filepath"
+ "strings"
+ "time"
+
+ "git.terah.dev/imterah/hermes/backend/api/backendruntime"
+ "git.terah.dev/imterah/hermes/backend/api/controllers/v1/backends"
+ "git.terah.dev/imterah/hermes/backend/api/controllers/v1/proxies"
+ "git.terah.dev/imterah/hermes/backend/api/controllers/v1/users"
+ "git.terah.dev/imterah/hermes/backend/api/db"
+ "git.terah.dev/imterah/hermes/backend/api/jwt"
+ "git.terah.dev/imterah/hermes/backend/api/state"
+ "git.terah.dev/imterah/hermes/backend/commonbackend"
+ "github.com/charmbracelet/log"
+ "github.com/gin-gonic/gin"
+ "github.com/urfave/cli/v2"
+)
+
+func apiEntrypoint(cCtx *cli.Context) error {
+ developmentMode := false
+
+ if os.Getenv("HERMES_DEVELOPMENT_MODE") != "" {
+ log.Warn("You have development mode enabled. This may weaken security.")
+ developmentMode = true
+ }
+
+ log.Info("Hermes is initializing...")
+ log.Debug("Initializing database and opening it...")
+
+ databaseBackendName := os.Getenv("HERMES_DATABASE_BACKEND")
+ var databaseBackendParams string
+
+ if databaseBackendName == "sqlite" {
+ databaseBackendParams = os.Getenv("HERMES_SQLITE_FILEPATH")
+
+ if databaseBackendParams == "" {
+ log.Fatal("HERMES_SQLITE_FILEPATH is not set")
+ }
+ } else if databaseBackendName == "postgresql" {
+ databaseBackendParams = os.Getenv("HERMES_POSTGRES_DSN")
+
+ if databaseBackendParams == "" {
+ log.Fatal("HERMES_POSTGRES_DSN is not set")
+ }
+ } else {
+ log.Fatalf("Unsupported database backend: %s", databaseBackendName)
+ }
+
+ dbInstance, err := db.New(databaseBackendName, databaseBackendParams)
+
+ if err != nil {
+ log.Fatalf("Failed to initialize database: %s", err)
+ }
+
+ log.Debug("Running database migrations...")
+
+ if err := dbInstance.DoMigrations(); err != nil {
+ return fmt.Errorf("Failed to run database migrations: %s", err)
+ }
+
+ log.Debug("Initializing the JWT subsystem...")
+
+ jwtDataString := os.Getenv("HERMES_JWT_SECRET")
+ var jwtKey []byte
+ var jwtValidityTimeDuration time.Duration
+
+ if jwtDataString == "" {
+ log.Fatalf("HERMES_JWT_SECRET is not set")
+ }
+
+ if os.Getenv("HERMES_JWT_BASE64_ENCODED") != "" {
+ jwtKey, err = base64.StdEncoding.DecodeString(jwtDataString)
+
+ if err != nil {
+ log.Fatalf("Failed to decode base64 JWT: %s", err.Error())
+ }
+ } else {
+ jwtKey = []byte(jwtDataString)
+ }
+
+ if developmentMode {
+ jwtValidityTimeDuration = jwt.DevelopmentModeTimings
+ } else {
+ jwtValidityTimeDuration = jwt.NormalModeTimings
+ }
+
+ jwtInstance := jwt.New(jwtKey, dbInstance, jwtValidityTimeDuration)
+
+ log.Debug("Initializing the backend subsystem...")
+
+ backendMetadataPath := cCtx.String("backends-path")
+ backendMetadata, err := os.ReadFile(backendMetadataPath)
+
+ if err != nil {
+ return fmt.Errorf("Failed to read backends: %s", err.Error())
+ }
+
+ availableBackends := []*backendruntime.Backend{}
+ err = json.Unmarshal(backendMetadata, &availableBackends)
+
+ if err != nil {
+ return fmt.Errorf("Failed to parse backends: %s", err.Error())
+ }
+
+ for _, backend := range availableBackends {
+ backend.Path = path.Join(filepath.Dir(backendMetadataPath), backend.Path)
+ }
+
+ backendruntime.Init(availableBackends)
+
+ log.Debug("Enumerating backends...")
+
+ backendList := []db.Backend{}
+
+ if err := dbInstance.DB.Find(&backendList).Error; err != nil {
+ return fmt.Errorf("Failed to enumerate backends: %s", err.Error())
+ }
+
+ for _, backend := range backendList {
+ log.Infof("Starting up backend #%d: %s", backend.ID, backend.Name)
+
+ var backendRuntimeFilePath string
+
+ for _, runtime := range backendruntime.AvailableBackends {
+ if runtime.Name == backend.Backend {
+ backendRuntimeFilePath = runtime.Path
+ }
+ }
+
+ if backendRuntimeFilePath == "" {
+ log.Errorf("Unsupported backend received for ID %d: %s", backend.ID, backend.Backend)
+ continue
+ }
+
+ backendInstance := backendruntime.NewBackend(backendRuntimeFilePath)
+
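+ // When the backend runtime reconnects after a crash, re-send the Start command over the new socket and re-add any auto-starting proxies.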
+ backendInstance.OnCrashCallback = func(conn net.Conn) {
+ backendParameters, err := base64.StdEncoding.DecodeString(backend.BackendParameters)
+
+ if err != nil {
+ log.Errorf("Failed to decode backend parameters for backend #%d: %s", backend.ID, err.Error())
+ return
+ }
+
+ marshalledStartCommand, err := commonbackend.Marshal(&commonbackend.Start{
+ Arguments: backendParameters,
+ })
+
+ if err != nil {
+ log.Errorf("Failed to marshal start command for backend #%d: %s", backend.ID, err.Error())
+ return
+ }
+
+ if _, err := conn.Write(marshalledStartCommand); err != nil {
+ log.Errorf("Failed to send start command for backend #%d: %s", backend.ID, err.Error())
+ return
+ }
+
+ backendResponse, err := commonbackend.Unmarshal(conn)
+
+ if err != nil {
+ log.Errorf("Failed to get start command response for backend #%d: %s", backend.ID, err.Error())
+ return
+ }
+
+ switch responseMessage := backendResponse.(type) {
+ case *commonbackend.BackendStatusResponse:
+ if !responseMessage.IsRunning {
+ log.Errorf("Failed to start backend #%d: %s", backend.ID, responseMessage.Message)
+ return
+ }
+
+ log.Infof("Backend #%d has been reinitialized successfully", backend.ID)
+ }
+
+ log.Warnf("Backend #%d has reinitialized! Starting up auto-starting proxies...", backend.ID)
+
+ autoStartProxies := []db.Proxy{}
+
+ if err := dbInstance.DB.Where("backend_id = ? AND auto_start = true", backend.ID).Find(&autoStartProxies).Error; err != nil {
+ log.Errorf("Failed to query proxies to autostart: %s", err.Error())
+ return
+ }
+
+ for _, proxy := range autoStartProxies {
+ log.Infof("Starting up route #%d for backend #%d: %s", proxy.ID, backend.ID, proxy.Name)
+
+ marshalledCommand, err := commonbackend.Marshal(&commonbackend.AddProxy{
+ SourceIP: proxy.SourceIP,
+ SourcePort: proxy.SourcePort,
+ DestPort: proxy.DestinationPort,
+ Protocol: proxy.Protocol,
+ })
+
+ if err != nil {
+ log.Errorf("Failed to marshal proxy adding request for backend #%d and route #%d: %s", proxy.BackendID, proxy.ID, err.Error())
+ continue
+ }
+
+ if _, err := conn.Write(marshalledCommand); err != nil {
+ log.Errorf("Failed to send proxy adding request for backend #%d and route #%d: %s", proxy.BackendID, proxy.ID, err.Error())
+ continue
+ }
+
+ backendResponse, err := commonbackend.Unmarshal(conn)
+
+ if err != nil {
+ log.Errorf("Failed to get response for backend #%d and route #%d: %s", proxy.BackendID, proxy.ID, err.Error())
+ continue
+ }
+
+ switch responseMessage := backendResponse.(type) {
+ case *commonbackend.ProxyStatusResponse:
+ if !responseMessage.IsActive {
+ log.Warnf("Failed to start proxy for backend #%d and route #%d", proxy.BackendID, proxy.ID)
+ }
+ default:
+ log.Errorf("Got illegal response type for backend #%d and proxy #%d: %T", proxy.BackendID, proxy.ID, responseMessage)
+ continue
+ }
+ }
+ }
+
+ err = backendInstance.Start()
+
+ if err != nil {
+ log.Errorf("Failed to start backend #%d: %s", backend.ID, err.Error())
+ continue
+ }
+
+ backendParameters, err := base64.StdEncoding.DecodeString(backend.BackendParameters)
+
+ if err != nil {
+ log.Errorf("Failed to decode backend parameters for backend #%d: %s", backend.ID, err.Error())
+ continue
+ }
+
+ backendStartResponse, err := backendInstance.ProcessCommand(&commonbackend.Start{
+ Arguments: backendParameters,
+ })
+
+ if err != nil {
+ log.Warnf("Failed to get response for backend #%d: %s", backend.ID, err.Error())
+
+ err = backendInstance.Stop()
+
+ if err != nil {
+ log.Warnf("Failed to stop backend: %s", err.Error())
+ }
+
+ continue
+ }
+
+ switch responseMessage := backendStartResponse.(type) {
+ case *commonbackend.BackendStatusResponse:
+ if !responseMessage.IsRunning {
+ err = backendInstance.Stop()
+
+ if err != nil {
+ log.Warnf("Failed to stop backend: %s", err.Error())
+ }
+
+ if responseMessage.Message == "" {
+ log.Errorf("Unknown error while trying to start backend #%d", backend.ID)
+ } else {
+ log.Errorf("Failed to start backend: %s", responseMessage.Message)
+ }
+
+ continue
+ }
+ default:
+ log.Errorf("Got illegal response type for backend #%d: %T", backend.ID, responseMessage)
+ continue
+ }
+
+ backendruntime.RunningBackends[backend.ID] = backendInstance
+
+ log.Infof("Successfully initialized backend #%d", backend.ID)
+
+ autoStartProxies := []db.Proxy{}
+
+ if err := dbInstance.DB.Where("backend_id = ? AND auto_start = true", backend.ID).Find(&autoStartProxies).Error; err != nil {
+ log.Errorf("Failed to query proxies to autostart: %s", err.Error())
+ continue
+ }
+
+ for _, proxy := range autoStartProxies {
+ log.Infof("Starting up route #%d for backend #%d: %s", proxy.ID, backend.ID, proxy.Name)
+
+ backendResponse, err := backendInstance.ProcessCommand(&commonbackend.AddProxy{
+ SourceIP: proxy.SourceIP,
+ SourcePort: proxy.SourcePort,
+ DestPort: proxy.DestinationPort,
+ Protocol: proxy.Protocol,
+ })
+
+ if err != nil {
+ log.Errorf("Failed to get response for backend #%d and route #%d: %s", proxy.BackendID, proxy.ID, err.Error())
+ continue
+ }
+
+ switch responseMessage := backendResponse.(type) {
+ case *commonbackend.ProxyStatusResponse:
+ if !responseMessage.IsActive {
+ log.Warnf("Failed to start proxy for backend #%d and route #%d", proxy.BackendID, proxy.ID)
+ }
+ default:
+ log.Errorf("Got illegal response type for backend #%d and proxy #%d: %T", proxy.BackendID, proxy.ID, responseMessage)
+ continue
+ }
+ }
+
+ log.Infof("Successfully started backend #%d", backend.ID)
+ }
+
+ log.Debug("Initializing API...")
+
+ if !developmentMode {
+ gin.SetMode(gin.ReleaseMode)
+ }
+
+ engine := gin.Default()
+
+ listeningAddress := os.Getenv("HERMES_LISTENING_ADDRESS")
+
+ if listeningAddress == "" {
+ if developmentMode {
+ listeningAddress = "localhost:8000"
+ } else {
+ listeningAddress = "0.0.0.0:8000"
+ }
+ }
+
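+ // HERMES_TRUSTED_HTTP_PROXIES is a comma-separated list of proxy addresses that Gin should trust when resolving the client IP from forwarding headers.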
+ trustedProxiesString := os.Getenv("HERMES_TRUSTED_HTTP_PROXIES")
+
+ if trustedProxiesString != "" {
+ trustedProxies := strings.Split(trustedProxiesString, ",")
+
+ engine.ForwardedByClientIP = true
+ engine.SetTrustedProxies(trustedProxies)
+ } else {
+ engine.ForwardedByClientIP = false
+ engine.SetTrustedProxies(nil)
+ }
+
+ state := state.New(dbInstance, jwtInstance, engine)
+
+ // Initialize routes
+ users.SetupCreateUser(state)
+ users.SetupLoginUser(state)
+ users.SetupRefreshUserToken(state)
+ users.SetupRemoveUser(state)
+ users.SetupLookupUser(state)
+
+ backends.SetupCreateBackend(state)
+ backends.SetupRemoveBackend(state)
+ backends.SetupLookupBackend(state)
+
+ proxies.SetupCreateProxy(state)
+ proxies.SetupRemoveProxy(state)
+ proxies.SetupLookupProxy(state)
+ proxies.SetupStartProxy(state)
+ proxies.SetupStopProxy(state)
+ proxies.SetupGetConnections(state)
+
+ log.Infof("Listening on '%s'", listeningAddress)
+ err = engine.Run(listeningAddress)
+
+ if err != nil {
+ return fmt.Errorf("Error running web server: %s", err.Error())
+ }
+
+ return nil
+}
+
+func main() {
+ logLevel := os.Getenv("HERMES_LOG_LEVEL")
+
+ if logLevel != "" {
+ switch logLevel {
+ case "debug":
+ log.SetLevel(log.DebugLevel)
+
+ case "info":
+ log.SetLevel(log.InfoLevel)
+
+ case "warn":
+ log.SetLevel(log.WarnLevel)
+
+ case "error":
+ log.SetLevel(log.ErrorLevel)
+
+ case "fatal":
+ log.SetLevel(log.FatalLevel)
+ }
+ }
+
+ app := &cli.App{
+ Name: "hermes",
+ Usage: "port forwarding across boundaries",
+ Flags: []cli.Flag{
+ &cli.StringFlag{
+ Name: "backends-path",
+ Aliases: []string{"b"},
+ Usage: "path to the backend manifest file",
+ Required: true,
+ },
+ },
+ Action: apiEntrypoint,
+ }
+
+ if err := app.Run(os.Args); err != nil {
+ log.Fatal(err)
+ }
+}
diff --git a/backend/api/permissions/permission_nodes.go b/backend/api/permissions/permission_nodes.go
new file mode 100644
index 0000000..f13e80a
--- /dev/null
+++ b/backend/api/permissions/permission_nodes.go
@@ -0,0 +1,38 @@
+package permissions
+
+import "git.terah.dev/imterah/hermes/backend/api/db"
+
+var DefaultPermissionNodes = []string{
+ "routes.add",
+ "routes.remove",
+ "routes.start",
+ "routes.stop",
+ "routes.edit",
+ "routes.visible",
+ "routes.visibleConn",
+
+ "backends.add",
+ "backends.remove",
+ "backends.start",
+ "backends.stop",
+ "backends.edit",
+ "backends.visible",
+ "backends.secretVis",
+
+ "permissions.see",
+
+ "users.add",
+ "users.remove",
+ "users.lookup",
+ "users.edit",
+}
+
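+// UserHasPermission reports whether the user has been granted the given permission node.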
+func UserHasPermission(user *db.User, node string) bool {
+ for _, permission := range user.Permissions {
+ if permission.PermissionNode == node && permission.HasPermission {
+ return true
+ }
+ }
+
+ return false
+}
diff --git a/backend/api/state/state.go b/backend/api/state/state.go
new file mode 100644
index 0000000..754ff56
--- /dev/null
+++ b/backend/api/state/state.go
@@ -0,0 +1,24 @@
+package state
+
+import (
+ "git.terah.dev/imterah/hermes/backend/api/db"
+ "git.terah.dev/imterah/hermes/backend/api/jwt"
+ "github.com/gin-gonic/gin"
+ "github.com/go-playground/validator/v10"
+)
+
+type State struct {
+ DB *db.DB
+ JWT *jwt.JWTCore
+ Engine *gin.Engine
+ Validator *validator.Validate
+}
+
+func New(db *db.DB, jwt *jwt.JWTCore, engine *gin.Engine) *State {
+ return &State{
+ DB: db,
+ JWT: jwt,
+ Engine: engine,
+ Validator: validator.New(),
+ }
+}
diff --git a/backend/backendlauncher/backendlauncher.go b/backend/backendlauncher/backendlauncher.go
new file mode 100644
index 0000000..3543014
--- /dev/null
+++ b/backend/backendlauncher/backendlauncher.go
@@ -0,0 +1,18 @@
+package backendlauncher
+
+import (
+ "fmt"
+ "math/rand/v2"
+ "net"
+)
+
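+// GetUnixSocket creates a Unix domain socket with a randomly named file inside folder and returns the socket path along with its listener.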
+func GetUnixSocket(folder string) (string, net.Listener, error) {
+ socketPath := fmt.Sprintf("%s/sock-%d.sock", folder, rand.Uint())
+ listener, err := net.Listen("unix", socketPath)
+
+ if err != nil {
+ return "", nil, err
+ }
+
+ return socketPath, listener, nil
+}
diff --git a/backend/backends.dev.json b/backend/backends.dev.json
new file mode 100644
index 0000000..f314c69
--- /dev/null
+++ b/backend/backends.dev.json
@@ -0,0 +1,14 @@
+[
+ {
+ "name": "ssh",
+ "path": "./sshbackend/sshbackend"
+ },
+ {
+ "name": "sshapp",
+ "path": "./sshappbackend/local-code/sshappbackend"
+ },
+ {
+ "name": "dummy",
+ "path": "./dummybackend/dummybackend"
+ }
+]
diff --git a/backend/backends.prod.json b/backend/backends.prod.json
new file mode 100644
index 0000000..0ccfedc
--- /dev/null
+++ b/backend/backends.prod.json
@@ -0,0 +1,10 @@
+[
+ {
+ "name": "ssh",
+ "path": "./sshbackend"
+ },
+ {
+ "name": "sshapp",
+ "path": "./sshappbackend"
+ }
+]
diff --git a/backend/backendutil/application.go b/backend/backendutil/application.go
new file mode 100644
index 0000000..afa3147
--- /dev/null
+++ b/backend/backendutil/application.go
@@ -0,0 +1,248 @@
+package backendutil
+
+import (
+ "net"
+ "os"
+
+ "git.terah.dev/imterah/hermes/backend/commonbackend"
+ "github.com/charmbracelet/log"
+)
+
+type BackendApplicationHelper struct {
+ Backend BackendInterface
+ SocketPath string
+
+ socket net.Conn
+}
+
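+// Start connects to the Unix socket at SocketPath and then loops forever, unmarshalling commands from the API and writing back a marshalled response for each one.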
+func (helper *BackendApplicationHelper) Start() error {
+ log.Debug("BackendApplicationHelper is starting")
+ err := ConfigureProfiling()
+
+ if err != nil {
+ return err
+ }
+
+ log.Debug("Currently waiting for Unix socket connection...")
+
+ helper.socket, err = net.Dial("unix", helper.SocketPath)
+
+ if err != nil {
+ return err
+ }
+
+ log.Debug("Successfully connected")
+
+ for {
+ commandRaw, err := commonbackend.Unmarshal(helper.socket)
+
+ if err != nil {
+ return err
+ }
+
+ switch command := commandRaw.(type) {
+ case *commonbackend.Start:
+ ok, err := helper.Backend.StartBackend(command.Arguments)
+
+ var (
+ message string
+ statusCode int
+ )
+
+ if err != nil {
+ message = err.Error()
+ statusCode = commonbackend.StatusFailure
+ } else {
+ statusCode = commonbackend.StatusSuccess
+ }
+
+ response := &commonbackend.BackendStatusResponse{
+ IsRunning: ok,
+ StatusCode: statusCode,
+ Message: message,
+ }
+
+ responseMarshalled, err := commonbackend.Marshal(response)
+
+ if err != nil {
+ log.Errorf("failed to marshal response: %s", err.Error())
+ continue
+ }
+
+ helper.socket.Write(responseMarshalled)
+ case *commonbackend.BackendStatusRequest:
+ ok, err := helper.Backend.GetBackendStatus()
+
+ var (
+ message string
+ statusCode int
+ )
+
+ if err != nil {
+ message = err.Error()
+ statusCode = commonbackend.StatusFailure
+ } else {
+ statusCode = commonbackend.StatusSuccess
+ }
+
+ response := &commonbackend.BackendStatusResponse{
+ IsRunning: ok,
+ StatusCode: statusCode,
+ Message: message,
+ }
+
+ responseMarshalled, err := commonbackend.Marshal(response)
+
+ if err != nil {
+ log.Errorf("failed to marshal response: %s", err.Error())
+ continue
+ }
+
+ helper.socket.Write(responseMarshalled)
+ case *commonbackend.Stop:
+ ok, err := helper.Backend.StopBackend()
+
+ var (
+ message string
+ statusCode int
+ )
+
+ if err != nil {
+ message = err.Error()
+ statusCode = commonbackend.StatusFailure
+ } else {
+ statusCode = commonbackend.StatusSuccess
+ }
+
+ response := &commonbackend.BackendStatusResponse{
+ IsRunning: !ok,
+ StatusCode: statusCode,
+ Message: message,
+ }
+
+ responseMarshalled, err := commonbackend.Marshal(response)
+
+ if err != nil {
+ log.Errorf("failed to marshal response: %s", err.Error())
+ continue
+ }
+
+ helper.socket.Write(responseMarshalled)
+ case *commonbackend.AddProxy:
+ ok, err := helper.Backend.StartProxy(command)
+ var hasAnyFailed bool
+
+ if err != nil {
+ log.Warnf("failed to add proxy (%s:%d -> remote:%d): %s", command.SourceIP, command.SourcePort, command.DestPort, err.Error())
+ hasAnyFailed = true
+ } else if !ok {
+ log.Warnf("failed to add proxy (%s:%d -> remote:%d): StartProxy returned a failure state", command.SourceIP, command.SourcePort, command.DestPort)
+ hasAnyFailed = true
+ }
+
+ response := &commonbackend.ProxyStatusResponse{
+ SourceIP: command.SourceIP,
+ SourcePort: command.SourcePort,
+ DestPort: command.DestPort,
+ Protocol: command.Protocol,
+ IsActive: !hasAnyFailed,
+ }
+
+ responseMarshalled, err := commonbackend.Marshal(response)
+
+ if err != nil {
+ log.Errorf("failed to marshal response: %s", err.Error())
+ continue
+ }
+
+ helper.socket.Write(responseMarshalled)
+ case *commonbackend.RemoveProxy:
+ ok, err := helper.Backend.StopProxy(command)
+ var hasAnyFailed bool
+
+ if err != nil {
+ log.Warnf("failed to remove proxy (%s:%d -> remote:%d): %s", command.SourceIP, command.SourcePort, command.DestPort, err.Error())
+ hasAnyFailed = true
+ } else if !ok {
+ log.Warnf("failed to remove proxy (%s:%d -> remote:%d): StopProxy returned a failure state", command.SourceIP, command.SourcePort, command.DestPort)
+ hasAnyFailed = true
+ }
+
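+ // Note: for removals, a failed removal means the proxy is still active, so IsActive mirrors the failure state.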
+ response := &commonbackend.ProxyStatusResponse{
+ SourceIP: command.SourceIP,
+ SourcePort: command.SourcePort,
+ DestPort: command.DestPort,
+ Protocol: command.Protocol,
+ IsActive: hasAnyFailed,
+ }
+
+ responseMarshalled, err := commonbackend.Marshal(response)
+
+ if err != nil {
+ log.Errorf("failed to marshal response: %s", err.Error())
+ continue
+ }
+
+ helper.socket.Write(responseMarshalled)
+ case *commonbackend.ProxyConnectionsRequest:
+ connections := helper.Backend.GetAllClientConnections()
+
+ serverParams := &commonbackend.ProxyConnectionsResponse{
+ Connections: connections,
+ }
+
+ byteData, err := commonbackend.Marshal(serverParams)
+
+ if err != nil {
+ return err
+ }
+
+ if _, err = helper.socket.Write(byteData); err != nil {
+ return err
+ }
+ case *commonbackend.CheckClientParameters:
+ resp := helper.Backend.CheckParametersForConnections(command)
+ resp.InResponseTo = "checkClientParameters"
+
+ byteData, err := commonbackend.Marshal(resp)
+
+ if err != nil {
+ return err
+ }
+
+ if _, err = helper.socket.Write(byteData); err != nil {
+ return err
+ }
+ case *commonbackend.CheckServerParameters:
+ resp := helper.Backend.CheckParametersForBackend(command.Arguments)
+ resp.InResponseTo = "checkServerParameters"
+
+ byteData, err := commonbackend.Marshal(resp)
+
+ if err != nil {
+ return err
+ }
+
+ if _, err = helper.socket.Write(byteData); err != nil {
+ return err
+ }
+ default:
+ log.Warnf("Unsupported command received: %T", command)
+ }
+ }
+}
+
+func NewHelper(backend BackendInterface) *BackendApplicationHelper {
+ socketPath, ok := os.LookupEnv("HERMES_API_SOCK")
+
+ if !ok {
+ log.Warn("HERMES_API_SOCK is not defined! This will cause issues unless the backend overrides it manually")
+ }
+
+ helper := &BackendApplicationHelper{
+ Backend: backend,
+ SocketPath: socketPath,
+ }
+
+ return helper
+}
diff --git a/backend/backendutil/profiling_disabled.go b/backend/backendutil/profiling_disabled.go
new file mode 100644
index 0000000..8538407
--- /dev/null
+++ b/backend/backendutil/profiling_disabled.go
@@ -0,0 +1,9 @@
+//go:build !debug
+
+package backendutil
+
+var endProfileFunc func()
+
+func ConfigureProfiling() error {
+ return nil
+}
diff --git a/backend/backendutil/profiling_enabled.go b/backend/backendutil/profiling_enabled.go
new file mode 100644
index 0000000..6fcb189
--- /dev/null
+++ b/backend/backendutil/profiling_enabled.go
@@ -0,0 +1,91 @@
+//go:build debug
+
+package backendutil
+
+import (
+ "errors"
+ "fmt"
+ "os"
+ "os/signal"
+ "runtime/pprof"
+ "syscall"
+ "time"
+
+ "github.com/charmbracelet/log"
+ "golang.org/x/exp/rand"
+)
+
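+// ConfigureProfiling checks /tmp/hermes.backendlauncher.profilebackends and, if it contains "cpu" or "mem", starts a background goroutine that writes the corresponding profile when the process is interrupted.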
+func ConfigureProfiling() error {
+ profilingMode, err := os.ReadFile("/tmp/hermes.backendlauncher.profilebackends")
+
+ if err != nil {
+ if !errors.Is(err, os.ErrNotExist) {
+ log.Warnf("Failed to read profiling mode file: %s", err.Error())
+ }
+ return nil
+ }
+
+ switch string(profilingMode) {
+ case "cpu":
+ log.Debug("Starting CPU profiling as a background task")
+ go doCPUProfiling()
+ case "mem":
+ log.Debug("Starting memory profiling as a background task")
+ go doMemoryProfiling()
+ default:
+ log.Warnf("Unknown profiling mode: %s", string(profilingMode))
+ return nil
+ }
+
+ return nil
+}
+
+func doCPUProfiling() {
+ // (imterah) WTF? why isn't this being seeded on its own? according to Go docs, this should be seeded automatically...
+ rand.Seed(uint64(time.Now().UnixNano()))
+
+ profileFileName := fmt.Sprintf("/tmp/hermes.backendlauncher.cpu.prof.%d", rand.Int())
+ profileFile, err := os.Create(profileFileName)
+
+ if err != nil {
+ log.Fatalf("Failed to create CPU profiling file: %s", err.Error())
+ }
+
+ log.Debugf("Writing CPU usage profile to '%s'. Will capture when Ctrl+C/SIGTERM is received.", profileFileName)
+ pprof.StartCPUProfile(profileFile)
+
+ exitNotification := make(chan os.Signal, 1)
+ signal.Notify(exitNotification, os.Interrupt, syscall.SIGTERM)
+ <-exitNotification
+
+ log.Debug("Received SIGTERM. Cleaning up and exiting...")
+
+ pprof.StopCPUProfile()
+ profileFile.Close()
+
+ log.Debug("Exiting...")
+ os.Exit(0)
+}
+
+func doMemoryProfiling() {
+ // (imterah) WTF? why isn't this being seeded on its own? according to Go docs, this should be seeded automatically...
+ rand.Seed(uint64(time.Now().UnixNano()))
+
+ profileFileName := fmt.Sprintf("/tmp/hermes.backendlauncher.mem.prof.%d", rand.Int())
+ profileFile, err := os.Create(profileFileName)
+
+ if err != nil {
+ log.Fatalf("Failed to create memory profiling file: %s", err.Error())
+ }
+
+ log.Debugf("Writing memory profile to '%s'. Will capture when Ctrl+C/SIGTERM is received.", profileFileName)
+
+ exitNotification := make(chan os.Signal, 1)
+ signal.Notify(exitNotification, os.Interrupt, syscall.SIGTERM)
+ <-exitNotification
+
+ log.Debug("Received SIGTERM. Cleaning up and exiting...")
+
+ pprof.WriteHeapProfile(profileFile)
+ profileFile.Close()
+
+ log.Debug("Exiting...")
+ os.Exit(0)
+}
diff --git a/backend/backendutil/structure.go b/backend/backendutil/structure.go
new file mode 100644
index 0000000..0eb7116
--- /dev/null
+++ b/backend/backendutil/structure.go
@@ -0,0 +1,14 @@
+package backendutil
+
+import "git.terah.dev/imterah/hermes/backend/commonbackend"
+
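+// BackendInterface must be implemented by every backend binary. The BackendApplicationHelper decodes commands coming from the API and dispatches them to these methods.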
+type BackendInterface interface {
+ StartBackend(arguments []byte) (bool, error)
+ StopBackend() (bool, error)
+ GetBackendStatus() (bool, error)
+ StartProxy(command *commonbackend.AddProxy) (bool, error)
+ StopProxy(command *commonbackend.RemoveProxy) (bool, error)
+ GetAllClientConnections() []*commonbackend.ProxyClientConnection
+ CheckParametersForConnections(clientParameters *commonbackend.CheckClientParameters) *commonbackend.CheckParametersResponse
+ CheckParametersForBackend(arguments []byte) *commonbackend.CheckParametersResponse
+}
diff --git a/backend/build.sh b/backend/build.sh
new file mode 100755
index 0000000..cee4440
--- /dev/null
+++ b/backend/build.sh
@@ -0,0 +1,43 @@
+#!/usr/bin/env bash
+pushd sshbackend > /dev/null
+echo "building sshbackend"
+go build -ldflags="-s -w" -trimpath .
+popd > /dev/null
+
+pushd dummybackend > /dev/null
+echo "building dummybackend"
+go build -ldflags="-s -w" -trimpath .
+popd > /dev/null
+
+pushd externalbackendlauncher > /dev/null
+echo "building externalbackendlauncher"
+go build -ldflags="-s -w" -trimpath .
+popd > /dev/null
+
+if [ ! -d "sshappbackend/local-code/remote-bin" ]; then
+ mkdir "sshappbackend/local-code/remote-bin"
+fi
+
+pushd sshappbackend/remote-code > /dev/null
+echo "building sshappbackend/remote-code"
+# Disable dynamic linking by disabling CGo.
+# The remote binaries need to run on as many targets as possible, so we keep them fully static.
+echo " - building for arm64"
+CGO_ENABLED=0 GOOS=linux GOARCH=arm64 go build -ldflags="-s -w" -trimpath -o ../local-code/remote-bin/rt-arm64 .
+echo " - building for arm"
+CGO_ENABLED=0 GOOS=linux GOARCH=arm go build -ldflags="-s -w" -trimpath -o ../local-code/remote-bin/rt-arm .
+echo " - building for amd64"
+CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -ldflags="-s -w" -trimpath -o ../local-code/remote-bin/rt-amd64 .
+echo " - building for i386"
+CGO_ENABLED=0 GOOS=linux GOARCH=386 go build -ldflags="-s -w" -trimpath -o ../local-code/remote-bin/rt-386 .
+popd > /dev/null
+
+pushd sshappbackend/local-code > /dev/null
+echo "building sshappbackend/local-code"
+go build -ldflags="-s -w" -trimpath -o sshappbackend .
+popd > /dev/null
+
+pushd api > /dev/null
+echo "building api"
+go build -ldflags="-s -w" -trimpath .
+popd > /dev/null
diff --git a/backend/commonbackend/constants.go b/backend/commonbackend/constants.go
new file mode 100644
index 0000000..6d5362b
--- /dev/null
+++ b/backend/commonbackend/constants.go
@@ -0,0 +1,132 @@
+package commonbackend
+
+type Start struct {
+ Arguments []byte
+}
+
+type Stop struct {
+}
+
+type AddProxy struct {
+ SourceIP string
+ SourcePort uint16
+ DestPort uint16
+ Protocol string // Will be either 'tcp' or 'udp'
+}
+
+type RemoveProxy struct {
+ SourceIP string
+ SourcePort uint16
+ DestPort uint16
+ Protocol string // Will be either 'tcp' or 'udp'
+}
+
+type ProxyStatusRequest struct {
+ SourceIP string
+ SourcePort uint16
+ DestPort uint16
+ Protocol string // Will be either 'tcp' or 'udp'
+}
+
+type ProxyStatusResponse struct {
+ SourceIP string
+ SourcePort uint16
+ DestPort uint16
+ Protocol string // Will be either 'tcp' or 'udp'
+ IsActive bool
+}
+
+type ProxyInstance struct {
+ SourceIP string
+ SourcePort uint16
+ DestPort uint16
+ Protocol string // Will be either 'tcp' or 'udp'
+}
+
+type ProxyInstanceResponse struct {
+ Proxies []*ProxyInstance // List of connections
+}
+
+type ProxyInstanceRequest struct {
+}
+
+type BackendStatusResponse struct {
+ IsRunning bool // True if running, false if not running
+ StatusCode int // Either the 'StatusSuccess' or 'StatusFailure' constant
+ Message string // String message from the client (ex. failed to dial TCP)
+}
+
+type BackendStatusRequest struct {
+}
+
+type ProxyConnectionsRequest struct {
+}
+
+// Client's connection to a specific proxy
+type ProxyClientConnection struct {
+ SourceIP string
+ SourcePort uint16
+ DestPort uint16
+ ClientIP string
+ ClientPort uint16
+}
+
+type ProxyConnectionsResponse struct {
+ Connections []*ProxyClientConnection // List of connections
+}
+
+type CheckClientParameters struct {
+ SourceIP string
+ SourcePort uint16
+ DestPort uint16
+ Protocol string // Will be either 'tcp' or 'udp'
+}
+
+type CheckServerParameters struct {
+ Arguments []byte
+}
+
+// Sent as a response to either CheckClientParameters or CheckServerParameters
+type CheckParametersResponse struct {
+ InResponseTo string // Will be either 'checkClientParameters' or 'checkServerParameters'
+ IsValid bool // If true, valid, and if false, invalid
+ Message string // String message from the client (ex. failed to unmarshal JSON: x is not defined)
+}
+
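+// Message type IDs. Each marshalled message begins with one of these bytes, so reordering them changes the wire protocol.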
+const (
+ StartID = iota
+ StopID
+ AddProxyID
+ RemoveProxyID
+ ProxyConnectionsResponseID
+ CheckClientParametersID
+ CheckServerParametersID
+ CheckParametersResponseID
+ ProxyConnectionsRequestID
+ BackendStatusResponseID
+ BackendStatusRequestID
+ ProxyStatusRequestID
+ ProxyStatusResponseID
+ ProxyInstanceResponseID
+ ProxyInstanceRequestID
+)
+
+const (
+ TCP = iota
+ UDP
+)
+
+const (
+ StatusSuccess = iota
+ StatusFailure
+)
+
+const (
+ // IP versions
+ IPv4 = 4
+ IPv6 = 6
+
+ // TODO: the net package already defines these sizes (net.IPv4len and net.IPv6len). We should switch to those
+ IPv4Size = 4
+ IPv6Size = 16
+)
diff --git a/backend/commonbackend/marshal.go b/backend/commonbackend/marshal.go
new file mode 100644
index 0000000..7203ee3
--- /dev/null
+++ b/backend/commonbackend/marshal.go
@@ -0,0 +1,417 @@
+package commonbackend
+
+import (
+ "encoding/binary"
+ "fmt"
+ "net"
+)
+
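+// marshalIndividualConnectionStruct encodes a connection as: [1 byte source IP version][source IP][2 bytes source port][2 bytes destination port][1 byte client IP version][client IP][2 bytes client port].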
+func marshalIndividualConnectionStruct(conn *ProxyClientConnection) []byte {
+ sourceIPOriginal := net.ParseIP(conn.SourceIP)
+ clientIPOriginal := net.ParseIP(conn.ClientIP)
+
+ var serverIPVer uint8
+ var sourceIP []byte
+
+ if sourceIPOriginal.To4() == nil {
+ serverIPVer = IPv6
+ sourceIP = sourceIPOriginal.To16()
+ } else {
+ serverIPVer = IPv4
+ sourceIP = sourceIPOriginal.To4()
+ }
+
+ var clientIPVer uint8
+ var clientIP []byte
+
+ if clientIPOriginal.To4() == nil {
+ clientIPVer = IPv6
+ clientIP = clientIPOriginal.To16()
+ } else {
+ clientIPVer = IPv4
+ clientIP = clientIPOriginal.To4()
+ }
+
+ connectionBlock := make([]byte, 8+len(sourceIP)+len(clientIP))
+
+ connectionBlock[0] = serverIPVer
+ copy(connectionBlock[1:len(sourceIP)+1], sourceIP)
+
+ binary.BigEndian.PutUint16(connectionBlock[1+len(sourceIP):3+len(sourceIP)], conn.SourcePort)
+ binary.BigEndian.PutUint16(connectionBlock[3+len(sourceIP):5+len(sourceIP)], conn.DestPort)
+
+ connectionBlock[5+len(sourceIP)] = clientIPVer
+ copy(connectionBlock[6+len(sourceIP):6+len(sourceIP)+len(clientIP)], clientIP)
+ binary.BigEndian.PutUint16(connectionBlock[6+len(sourceIP)+len(clientIP):8+len(sourceIP)+len(clientIP)], conn.ClientPort)
+
+ return connectionBlock
+}
+
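+// marshalIndividualProxyStruct encodes a proxy as: [1 byte IP version][source IP][2 bytes source port][2 bytes destination port][1 byte protocol].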
+func marshalIndividualProxyStruct(conn *ProxyInstance) ([]byte, error) {
+ sourceIPOriginal := net.ParseIP(conn.SourceIP)
+
+ var sourceIPVer uint8
+ var sourceIP []byte
+
+ if sourceIPOriginal.To4() == nil {
+ sourceIPVer = IPv6
+ sourceIP = sourceIPOriginal.To16()
+ } else {
+ sourceIPVer = IPv4
+ sourceIP = sourceIPOriginal.To4()
+ }
+
+ proxyBlock := make([]byte, 6+len(sourceIP))
+
+ proxyBlock[0] = sourceIPVer
+ copy(proxyBlock[1:len(sourceIP)+1], sourceIP)
+
+ binary.BigEndian.PutUint16(proxyBlock[1+len(sourceIP):3+len(sourceIP)], conn.SourcePort)
+ binary.BigEndian.PutUint16(proxyBlock[3+len(sourceIP):5+len(sourceIP)], conn.DestPort)
+
+ var protocolVersion uint8
+
+ if conn.Protocol == "tcp" {
+ protocolVersion = TCP
+ } else if conn.Protocol == "udp" {
+ protocolVersion = UDP
+ } else {
+ return proxyBlock, fmt.Errorf("invalid protocol received")
+ }
+
+ proxyBlock[5+len(sourceIP)] = protocolVersion
+
+ return proxyBlock, nil
+}
+
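+// Marshal converts a command struct into its wire representation. The first byte is always the command's message ID, followed by a fixed layout that depends on the command type.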
+func Marshal(command interface{}) ([]byte, error) {
+ switch command := command.(type) {
+ case *Start:
+ startCommandBytes := make([]byte, 1+2+len(command.Arguments))
+ startCommandBytes[0] = StartID
+ binary.BigEndian.PutUint16(startCommandBytes[1:3], uint16(len(command.Arguments)))
+ copy(startCommandBytes[3:], command.Arguments)
+
+ return startCommandBytes, nil
+ case *Stop:
+ return []byte{StopID}, nil
+ case *AddProxy:
+ sourceIP := net.ParseIP(command.SourceIP)
+
+ var ipVer uint8
+ var ipBytes []byte
+
+ if sourceIP.To4() == nil {
+ ipBytes = sourceIP.To16()
+ ipVer = IPv6
+ } else {
+ ipBytes = sourceIP.To4()
+ ipVer = IPv4
+ }
+
+ addConnectionBytes := make([]byte, 1+1+len(ipBytes)+2+2+1)
+
+ addConnectionBytes[0] = AddProxyID
+ addConnectionBytes[1] = ipVer
+
+ copy(addConnectionBytes[2:2+len(ipBytes)], ipBytes)
+
+ binary.BigEndian.PutUint16(addConnectionBytes[2+len(ipBytes):4+len(ipBytes)], command.SourcePort)
+ binary.BigEndian.PutUint16(addConnectionBytes[4+len(ipBytes):6+len(ipBytes)], command.DestPort)
+
+ var protocol uint8
+
+ if command.Protocol == "tcp" {
+ protocol = TCP
+ } else if command.Protocol == "udp" {
+ protocol = UDP
+ } else {
+ return nil, fmt.Errorf("invalid protocol")
+ }
+
+ addConnectionBytes[6+len(ipBytes)] = protocol
+
+ return addConnectionBytes, nil
+ case *RemoveProxy:
+ sourceIP := net.ParseIP(command.SourceIP)
+
+ var ipVer uint8
+ var ipBytes []byte
+
+ if sourceIP.To4() == nil {
+ ipBytes = sourceIP.To16()
+ ipVer = IPv6
+ } else {
+ ipBytes = sourceIP.To4()
+ ipVer = IPv4
+ }
+
+ removeConnectionBytes := make([]byte, 1+1+len(ipBytes)+2+2+1)
+
+ removeConnectionBytes[0] = RemoveProxyID
+ removeConnectionBytes[1] = ipVer
+ copy(removeConnectionBytes[2:2+len(ipBytes)], ipBytes)
+ binary.BigEndian.PutUint16(removeConnectionBytes[2+len(ipBytes):4+len(ipBytes)], command.SourcePort)
+ binary.BigEndian.PutUint16(removeConnectionBytes[4+len(ipBytes):6+len(ipBytes)], command.DestPort)
+
+ var protocol uint8
+
+ if command.Protocol == "tcp" {
+ protocol = TCP
+ } else if command.Protocol == "udp" {
+ protocol = UDP
+ } else {
+ return nil, fmt.Errorf("invalid protocol")
+ }
+
+ removeConnectionBytes[6+len(ipBytes)] = protocol
+
+ return removeConnectionBytes, nil
+ case *ProxyConnectionsResponse:
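+ // Connection entries are delimited by '\r'; the final delimiter is replaced with '\n' to terminate the list.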
+ connectionsArray := make([][]byte, len(command.Connections))
+ totalSize := 0
+
+ for connIndex, conn := range command.Connections {
+ connectionsArray[connIndex] = marshalIndividualConnectionStruct(conn)
+ totalSize += len(connectionsArray[connIndex]) + 1
+ }
+
+ if totalSize == 0 {
+ totalSize = 1
+ }
+
+ connectionCommandArray := make([]byte, totalSize+1)
+ connectionCommandArray[0] = ProxyConnectionsResponseID
+
+ currentPosition := 1
+
+ for _, connection := range connectionsArray {
+ copy(connectionCommandArray[currentPosition:currentPosition+len(connection)], connection)
+ connectionCommandArray[currentPosition+len(connection)] = '\r'
+ currentPosition += len(connection) + 1
+ }
+
+ connectionCommandArray[totalSize] = '\n'
+ return connectionCommandArray, nil
+ case *CheckClientParameters:
+ sourceIP := net.ParseIP(command.SourceIP)
+
+ var ipVer uint8
+ var ipBytes []byte
+
+ if sourceIP.To4() == nil {
+ ipBytes = sourceIP.To16()
+ ipVer = IPv6
+ } else {
+ ipBytes = sourceIP.To4()
+ ipVer = IPv4
+ }
+
+ checkClientBytes := make([]byte, 1+1+len(ipBytes)+2+2+1)
+
+ checkClientBytes[0] = CheckClientParametersID
+ checkClientBytes[1] = ipVer
+ copy(checkClientBytes[2:2+len(ipBytes)], ipBytes)
+ binary.BigEndian.PutUint16(checkClientBytes[2+len(ipBytes):4+len(ipBytes)], command.SourcePort)
+ binary.BigEndian.PutUint16(checkClientBytes[4+len(ipBytes):6+len(ipBytes)], command.DestPort)
+
+ var protocol uint8
+
+ if command.Protocol == "tcp" {
+ protocol = TCP
+ } else if command.Protocol == "udp" {
+ protocol = UDP
+ } else {
+ return nil, fmt.Errorf("invalid protocol")
+ }
+
+ checkClientBytes[6+len(ipBytes)] = protocol
+
+ return checkClientBytes, nil
+ case *CheckServerParameters:
+ serverCommandBytes := make([]byte, 1+2+len(command.Arguments))
+ serverCommandBytes[0] = CheckServerParametersID
+ binary.BigEndian.PutUint16(serverCommandBytes[1:3], uint16(len(command.Arguments)))
+ copy(serverCommandBytes[3:], command.Arguments)
+
+ return serverCommandBytes, nil
+ case *CheckParametersResponse:
+ var checkMethod uint8
+
+ if command.InResponseTo == "checkClientParameters" {
+ checkMethod = CheckClientParametersID
+ } else if command.InResponseTo == "checkServerParameters" {
+ checkMethod = CheckServerParametersID
+ } else {
+ return nil, fmt.Errorf("invalid mode received (must be either checkClientParameters or checkServerParameters)")
+ }
+
+ var isValid uint8
+
+ if command.IsValid {
+ isValid = 1
+ }
+
+ checkResponseBytes := make([]byte, 3+2+len(command.Message))
+ checkResponseBytes[0] = CheckParametersResponseID
+ checkResponseBytes[1] = checkMethod
+ checkResponseBytes[2] = isValid
+
+ binary.BigEndian.PutUint16(checkResponseBytes[3:5], uint16(len(command.Message)))
+
+ if len(command.Message) != 0 {
+ copy(checkResponseBytes[5:], []byte(command.Message))
+ }
+
+ return checkResponseBytes, nil
+ case *BackendStatusResponse:
+ var isRunning uint8
+
+ if command.IsRunning {
+ isRunning = 1
+ } else {
+ isRunning = 0
+ }
+
+ statusResponseBytes := make([]byte, 3+2+len(command.Message))
+ statusResponseBytes[0] = BackendStatusResponseID
+ statusResponseBytes[1] = isRunning
+ statusResponseBytes[2] = byte(command.StatusCode)
+
+ binary.BigEndian.PutUint16(statusResponseBytes[3:5], uint16(len(command.Message)))
+
+ if len(command.Message) != 0 {
+ copy(statusResponseBytes[5:], []byte(command.Message))
+ }
+
+ return statusResponseBytes, nil
+ case *BackendStatusRequest:
+ statusRequestBytes := make([]byte, 1)
+ statusRequestBytes[0] = BackendStatusRequestID
+
+ return statusRequestBytes, nil
+ case *ProxyStatusRequest:
+ sourceIP := net.ParseIP(command.SourceIP)
+
+ var ipVer uint8
+ var ipBytes []byte
+
+ if sourceIP.To4() == nil {
+ ipBytes = sourceIP.To16()
+ ipVer = IPv6
+ } else {
+ ipBytes = sourceIP.To4()
+ ipVer = IPv4
+ }
+
+ commandBytes := make([]byte, 1+1+len(ipBytes)+2+2+1)
+
+ commandBytes[0] = ProxyStatusRequestID
+ commandBytes[1] = ipVer
+
+ copy(commandBytes[2:2+len(ipBytes)], ipBytes)
+
+ binary.BigEndian.PutUint16(commandBytes[2+len(ipBytes):4+len(ipBytes)], command.SourcePort)
+ binary.BigEndian.PutUint16(commandBytes[4+len(ipBytes):6+len(ipBytes)], command.DestPort)
+
+ var protocol uint8
+
+ if command.Protocol == "tcp" {
+ protocol = TCP
+ } else if command.Protocol == "udp" {
+ protocol = UDP
+ } else {
+ return nil, fmt.Errorf("invalid protocol")
+ }
+
+ commandBytes[6+len(ipBytes)] = protocol
+
+ return commandBytes, nil
+ case *ProxyStatusResponse:
+ sourceIP := net.ParseIP(command.SourceIP)
+
+ var ipVer uint8
+ var ipBytes []byte
+
+ if sourceIP.To4() == nil {
+ ipBytes = sourceIP.To16()
+ ipVer = IPv6
+ } else {
+ ipBytes = sourceIP.To4()
+ ipVer = IPv4
+ }
+
+ commandBytes := make([]byte, 1+1+len(ipBytes)+2+2+1+1)
+
+ commandBytes[0] = ProxyStatusResponseID
+ commandBytes[1] = ipVer
+
+ copy(commandBytes[2:2+len(ipBytes)], ipBytes)
+
+ binary.BigEndian.PutUint16(commandBytes[2+len(ipBytes):4+len(ipBytes)], command.SourcePort)
+ binary.BigEndian.PutUint16(commandBytes[4+len(ipBytes):6+len(ipBytes)], command.DestPort)
+
+ var protocol uint8
+
+ if command.Protocol == "tcp" {
+ protocol = TCP
+ } else if command.Protocol == "udp" {
+ protocol = UDP
+ } else {
+ return nil, fmt.Errorf("invalid protocol")
+ }
+
+ commandBytes[6+len(ipBytes)] = protocol
+
+ var isActive uint8
+
+ if command.IsActive {
+ isActive = 1
+ } else {
+ isActive = 0
+ }
+
+ commandBytes[7+len(ipBytes)] = isActive
+
+ return commandBytes, nil
+ case *ProxyInstanceResponse:
+ proxyArray := make([][]byte, len(command.Proxies))
+ totalSize := 0
+
+ for proxyIndex, proxy := range command.Proxies {
+ var err error
+ proxyArray[proxyIndex], err = marshalIndividualProxyStruct(proxy)
+
+ if err != nil {
+ return nil, err
+ }
+
+ totalSize += len(proxyArray[proxyIndex]) + 1
+ }
+
+ if totalSize == 0 {
+ totalSize = 1
+ }
+
+ connectionCommandArray := make([]byte, totalSize+1)
+ connectionCommandArray[0] = ProxyInstanceResponseID
+
+ currentPosition := 1
+
+ for _, connection := range proxyArray {
+ copy(connectionCommandArray[currentPosition:currentPosition+len(connection)], connection)
+ connectionCommandArray[currentPosition+len(connection)] = '\r'
+ currentPosition += len(connection) + 1
+ }
+
+ connectionCommandArray[totalSize] = '\n'
+
+ return connectionCommandArray, nil
+ case *ProxyInstanceRequest:
+ return []byte{ProxyInstanceRequestID}, nil
+ case *ProxyConnectionsRequest:
+ return []byte{ProxyConnectionsRequestID}, nil
+ }
+
+ return nil, fmt.Errorf("couldn't match command type")
+}
diff --git a/backend/commonbackend/marshalling_test.go b/backend/commonbackend/marshalling_test.go
new file mode 100644
index 0000000..c2b6375
--- /dev/null
+++ b/backend/commonbackend/marshalling_test.go
@@ -0,0 +1,666 @@
+package commonbackend
+
+import (
+ "bytes"
+ "log"
+ "os"
+ "testing"
+)
+
+var logLevel = os.Getenv("HERMES_LOG_LEVEL")
+
+func TestStart(t *testing.T) {
+ commandInput := &Start{
+ Arguments: []byte("Hello from automated testing"),
+ }
+
+ commandMarshalled, err := Marshal(commandInput)
+
+ if logLevel == "debug" {
+ log.Printf("Generated array contents: %v", commandMarshalled)
+ }
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ buf := bytes.NewBuffer(commandMarshalled)
+ commandUnmarshalledRaw, err := Unmarshal(buf)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ commandUnmarshalled, ok := commandUnmarshalledRaw.(*Start)
+
+ if !ok {
+ t.Fatal("failed typecast")
+ }
+
+ if !bytes.Equal(commandInput.Arguments, commandUnmarshalled.Arguments) {
+ t.Fatalf("Arguments are not equal (orig: '%s', unmsh: '%s')", string(commandInput.Arguments), string(commandUnmarshalled.Arguments))
+ }
+}
+
+func TestStop(t *testing.T) {
+ commandInput := &Stop{}
+
+ commandMarshalled, err := Marshal(commandInput)
+
+ if logLevel == "debug" {
+ log.Printf("Generated array contents: %v", commandMarshalled)
+ }
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ buf := bytes.NewBuffer(commandMarshalled)
+ commandUnmarshalledRaw, err := Unmarshal(buf)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ _, ok := commandUnmarshalledRaw.(*Stop)
+
+ if !ok {
+ t.Fatal("failed typecast")
+ }
+}
+
+func TestAddConnection(t *testing.T) {
+ commandInput := &AddProxy{
+ SourceIP: "192.168.0.139",
+ SourcePort: 19132,
+ DestPort: 19132,
+ Protocol: "tcp",
+ }
+
+ commandMarshalled, err := Marshal(commandInput)
+
+ if logLevel == "debug" {
+ log.Printf("Generated array contents: %v", commandMarshalled)
+ }
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ buf := bytes.NewBuffer(commandMarshalled)
+ commandUnmarshalledRaw, err := Unmarshal(buf)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ commandUnmarshalled, ok := commandUnmarshalledRaw.(*AddProxy)
+
+ if !ok {
+ t.Fatal("failed typecast")
+ }
+
+ if commandInput.SourceIP != commandUnmarshalled.SourceIP {
+ t.Fail()
+ log.Printf("SourceIP's are not equal (orig: %s, unmsh: %s)", commandInput.SourceIP, commandUnmarshalled.SourceIP)
+ }
+
+ if commandInput.SourcePort != commandUnmarshalled.SourcePort {
+ t.Fail()
+ log.Printf("SourcePort's are not equal (orig: %d, unmsh: %d)", commandInput.SourcePort, commandUnmarshalled.SourcePort)
+ }
+
+ if commandInput.DestPort != commandUnmarshalled.DestPort {
+ t.Fail()
+ log.Printf("DestPort's are not equal (orig: %d, unmsh: %d)", commandInput.DestPort, commandUnmarshalled.DestPort)
+ }
+
+ if commandInput.Protocol != commandUnmarshalled.Protocol {
+ t.Fail()
+ log.Printf("Protocols are not equal (orig: %s, unmsh: %s)", commandInput.Protocol, commandUnmarshalled.Protocol)
+ }
+}
+
+func TestRemoveConnection(t *testing.T) {
+ commandInput := &RemoveProxy{
+ SourceIP: "192.168.0.139",
+ SourcePort: 19132,
+ DestPort: 19132,
+ Protocol: "tcp",
+ }
+
+ commandMarshalled, err := Marshal(commandInput)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ if logLevel == "debug" {
+ log.Printf("Generated array contents: %v", commandMarshalled)
+ }
+
+ buf := bytes.NewBuffer(commandMarshalled)
+ commandUnmarshalledRaw, err := Unmarshal(buf)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ commandUnmarshalled, ok := commandUnmarshalledRaw.(*RemoveProxy)
+
+ if !ok {
+ t.Fatal("failed typecast")
+ }
+
+ if commandInput.SourceIP != commandUnmarshalled.SourceIP {
+ t.Fail()
+ log.Printf("SourceIP's are not equal (orig: %s, unmsh: %s)", commandInput.SourceIP, commandUnmarshalled.SourceIP)
+ }
+
+ if commandInput.SourcePort != commandUnmarshalled.SourcePort {
+ t.Fail()
+ log.Printf("SourcePort's are not equal (orig: %d, unmsh: %d)", commandInput.SourcePort, commandUnmarshalled.SourcePort)
+ }
+
+ if commandInput.DestPort != commandUnmarshalled.DestPort {
+ t.Fail()
+ log.Printf("DestPort's are not equal (orig: %d, unmsh: %d)", commandInput.DestPort, commandUnmarshalled.DestPort)
+ }
+
+ if commandInput.Protocol != commandUnmarshalled.Protocol {
+ t.Fail()
+ log.Printf("Protocols are not equal (orig: %s, unmsh: %s)", commandInput.Protocol, commandUnmarshalled.Protocol)
+ }
+}
+
+func TestGetAllConnections(t *testing.T) {
+ commandInput := &ProxyConnectionsResponse{
+ Connections: []*ProxyClientConnection{
+ {
+ SourceIP: "127.0.0.1",
+ SourcePort: 19132,
+ DestPort: 19132,
+ ClientIP: "127.0.0.1",
+ ClientPort: 12321,
+ },
+ {
+ SourceIP: "127.0.0.1",
+ SourcePort: 19132,
+ DestPort: 19132,
+ ClientIP: "192.168.0.168",
+ ClientPort: 23457,
+ },
+ {
+ SourceIP: "127.0.0.1",
+ SourcePort: 19132,
+ DestPort: 19132,
+ ClientIP: "68.42.203.47",
+ ClientPort: 38721,
+ },
+ },
+ }
+
+ commandMarshalled, err := Marshal(commandInput)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ if logLevel == "debug" {
+ log.Printf("Generated array contents: %v", commandMarshalled)
+ }
+
+ buf := bytes.NewBuffer(commandMarshalled)
+ commandUnmarshalledRaw, err := Unmarshal(buf)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ commandUnmarshalled, ok := commandUnmarshalledRaw.(*ProxyConnectionsResponse)
+
+ if !ok {
+ t.Fatal("failed typecast")
+ }
+
+ for commandIndex, originalConnection := range commandInput.Connections {
+ remoteConnection := commandUnmarshalled.Connections[commandIndex]
+
+ if originalConnection.SourceIP != remoteConnection.SourceIP {
+ t.Fail()
+ log.Printf("(in #%d) SourceIP's are not equal (orig: %s, unmsh: %s)", commandIndex, originalConnection.SourceIP, remoteConnection.SourceIP)
+ }
+
+ if originalConnection.SourcePort != remoteConnection.SourcePort {
+ t.Fail()
+ log.Printf("(in #%d) SourcePort's are not equal (orig: %d, unmsh: %d)", commandIndex, originalConnection.SourcePort, remoteConnection.SourcePort)
+ }
+
+ if originalConnection.DestPort != remoteConnection.DestPort {
+ t.Fail()
+ log.Printf("(in #%d) DestPort's are not equal (orig: %d, unmsh: %d)", commandIndex, originalConnection.DestPort, remoteConnection.DestPort)
+ }
+
+ if originalConnection.ClientIP != remoteConnection.ClientIP {
+ t.Fail()
+ log.Printf("(in #%d) ClientIP's are not equal (orig: %s, unmsh: %s)", commandIndex, originalConnection.ClientIP, remoteConnection.ClientIP)
+ }
+
+ if originalConnection.ClientPort != remoteConnection.ClientPort {
+ t.Fail()
+ log.Printf("(in #%d) ClientPort's are not equal (orig: %d, unmsh: %d)", commandIndex, originalConnection.ClientPort, remoteConnection.ClientPort)
+ }
+ }
+}
+
+func TestCheckClientParameters(t *testing.T) {
+ commandInput := &CheckClientParameters{
+ SourceIP: "192.168.0.139",
+ SourcePort: 19132,
+ DestPort: 19132,
+ Protocol: "tcp",
+ }
+
+ commandMarshalled, err := Marshal(commandInput)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ if logLevel == "debug" {
+ log.Printf("Generated array contents: %v", commandMarshalled)
+ }
+
+ buf := bytes.NewBuffer(commandMarshalled)
+ commandUnmarshalledRaw, err := Unmarshal(buf)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ commandUnmarshalled, ok := commandUnmarshalledRaw.(*CheckClientParameters)
+
+ if !ok {
+ t.Fatal("failed typecast")
+ }
+
+ if commandInput.SourceIP != commandUnmarshalled.SourceIP {
+ t.Fail()
+ log.Printf("SourceIP's are not equal (orig: %s, unmsh: %s)", commandInput.SourceIP, commandUnmarshalled.SourceIP)
+ }
+
+ if commandInput.SourcePort != commandUnmarshalled.SourcePort {
+ t.Fail()
+ log.Printf("SourcePort's are not equal (orig: %d, unmsh: %d)", commandInput.SourcePort, commandUnmarshalled.SourcePort)
+ }
+
+ if commandInput.DestPort != commandUnmarshalled.DestPort {
+ t.Fail()
+ log.Printf("DestPort's are not equal (orig: %d, unmsh: %d)", commandInput.DestPort, commandUnmarshalled.DestPort)
+ }
+
+ if commandInput.Protocol != commandUnmarshalled.Protocol {
+ t.Fail()
+ log.Printf("Protocols are not equal (orig: %s, unmsh: %s)", commandInput.Protocol, commandUnmarshalled.Protocol)
+ }
+}
+
+func TestCheckServerParameters(t *testing.T) {
+ commandInput := &CheckServerParameters{
+ Arguments: []byte("Hello from automated testing"),
+ }
+
+ commandMarshalled, err := Marshal(commandInput)
+
+ if logLevel == "debug" {
+ log.Printf("Generated array contents: %v", commandMarshalled)
+ }
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ buf := bytes.NewBuffer(commandMarshalled)
+ commandUnmarshalledRaw, err := Unmarshal(buf)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ commandUnmarshalled, ok := commandUnmarshalledRaw.(*CheckServerParameters)
+
+ if !ok {
+ t.Fatal("failed typecast")
+ }
+
+ if !bytes.Equal(commandInput.Arguments, commandUnmarshalled.Arguments) {
+ t.Fatalf("Arguments are not equal (orig: '%s', unmsh: '%s')", string(commandInput.Arguments), string(commandUnmarshalled.Arguments))
+ }
+}
+
+func TestCheckParametersResponse(t *testing.T) {
+ commandInput := &CheckParametersResponse{
+ InResponseTo: "checkClientParameters",
+ IsValid: true,
+ Message: "Hello from automated testing",
+ }
+
+ commandMarshalled, err := Marshal(commandInput)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ if logLevel == "debug" {
+ log.Printf("Generated array contents: %v", commandMarshalled)
+ }
+
+ buf := bytes.NewBuffer(commandMarshalled)
+ commandUnmarshalledRaw, err := Unmarshal(buf)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ commandUnmarshalled, ok := commandUnmarshalledRaw.(*CheckParametersResponse)
+
+ if !ok {
+ t.Fatal("failed typecast")
+ }
+
+ if commandInput.InResponseTo != commandUnmarshalled.InResponseTo {
+ t.Fail()
+ log.Printf("InResponseTo's are not equal (orig: %s, unmsh: %s)", commandInput.InResponseTo, commandUnmarshalled.InResponseTo)
+ }
+
+ if commandInput.IsValid != commandUnmarshalled.IsValid {
+ t.Fail()
+ log.Printf("IsValid's are not equal (orig: %t, unmsh: %t)", commandInput.IsValid, commandUnmarshalled.IsValid)
+ }
+
+ if commandInput.Message != commandUnmarshalled.Message {
+ t.Fail()
+ log.Printf("Messages are not equal (orig: %s, unmsh: %s)", commandInput.Message, commandUnmarshalled.Message)
+ }
+}
+
+func TestBackendStatusRequest(t *testing.T) {
+ commandInput := &BackendStatusRequest{}
+ commandMarshalled, err := Marshal(commandInput)
+
+ if logLevel == "debug" {
+ log.Printf("Generated array contents: %v", commandMarshalled)
+ }
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ buf := bytes.NewBuffer(commandMarshalled)
+ commandUnmarshalledRaw, err := Unmarshal(buf)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ _, ok := commandUnmarshalledRaw.(*BackendStatusRequest)
+
+ if !ok {
+ t.Fatal("failed typecast")
+ }
+}
+
+func TestBackendStatusResponse(t *testing.T) {
+ commandInput := &BackendStatusResponse{
+ IsRunning: true,
+ StatusCode: StatusFailure,
+ Message: "Hello from automated testing",
+ }
+
+ commandMarshalled, err := Marshal(commandInput)
+
+ if logLevel == "debug" {
+ log.Printf("Generated array contents: %v", commandMarshalled)
+ }
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ buf := bytes.NewBuffer(commandMarshalled)
+ commandUnmarshalledRaw, err := Unmarshal(buf)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ commandUnmarshalled, ok := commandUnmarshalledRaw.(*BackendStatusResponse)
+
+ if !ok {
+ t.Fatal("failed typecast")
+ }
+
+ if commandInput.IsRunning != commandUnmarshalled.IsRunning {
+ t.Fail()
+ log.Printf("IsRunning's are not equal (orig: %t, unmsh: %t)", commandInput.IsRunning, commandUnmarshalled.IsRunning)
+ }
+
+ if commandInput.StatusCode != commandUnmarshalled.StatusCode {
+ t.Fail()
+ log.Printf("StatusCodes are not equal (orig: %d, unmsh: %d)", commandInput.StatusCode, commandUnmarshalled.StatusCode)
+ }
+
+ if commandInput.Message != commandUnmarshalled.Message {
+ t.Fail()
+ log.Printf("Messages are not equal (orig: %s, unmsh: %s)", commandInput.Message, commandUnmarshalled.Message)
+ }
+}
+
+func TestProxyStatusRequest(t *testing.T) {
+ commandInput := &ProxyStatusRequest{
+ SourceIP: "192.168.0.139",
+ SourcePort: 19132,
+ DestPort: 19132,
+ Protocol: "tcp",
+ }
+
+ commandMarshalled, err := Marshal(commandInput)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ if logLevel == "debug" {
+ log.Printf("Generated array contents: %v", commandMarshalled)
+ }
+
+ buf := bytes.NewBuffer(commandMarshalled)
+ commandUnmarshalledRaw, err := Unmarshal(buf)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ commandUnmarshalled, ok := commandUnmarshalledRaw.(*ProxyStatusRequest)
+
+ if !ok {
+ t.Fatal("failed typecast")
+ }
+
+ if commandInput.SourceIP != commandUnmarshalled.SourceIP {
+ t.Fail()
+ log.Printf("SourceIP's are not equal (orig: %s, unmsh: %s)", commandInput.SourceIP, commandUnmarshalled.SourceIP)
+ }
+
+ if commandInput.SourcePort != commandUnmarshalled.SourcePort {
+ t.Fail()
+ log.Printf("SourcePort's are not equal (orig: %d, unmsh: %d)", commandInput.SourcePort, commandUnmarshalled.SourcePort)
+ }
+
+ if commandInput.DestPort != commandUnmarshalled.DestPort {
+ t.Fail()
+ log.Printf("DestPort's are not equal (orig: %d, unmsh: %d)", commandInput.DestPort, commandUnmarshalled.DestPort)
+ }
+
+ if commandInput.Protocol != commandUnmarshalled.Protocol {
+ t.Fail()
+ log.Printf("Protocols are not equal (orig: %s, unmsh: %s)", commandInput.Protocol, commandUnmarshalled.Protocol)
+ }
+}
+
+func TestProxyStatusResponse(t *testing.T) {
+ commandInput := &ProxyStatusResponse{
+ SourceIP: "192.168.0.139",
+ SourcePort: 19132,
+ DestPort: 19132,
+ Protocol: "tcp",
+ IsActive: true,
+ }
+
+ commandMarshalled, err := Marshal(commandInput)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ if logLevel == "debug" {
+ log.Printf("Generated array contents: %v", commandMarshalled)
+ }
+
+ buf := bytes.NewBuffer(commandMarshalled)
+ commandUnmarshalledRaw, err := Unmarshal(buf)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ commandUnmarshalled, ok := commandUnmarshalledRaw.(*ProxyStatusResponse)
+
+ if !ok {
+ t.Fatal("failed typecast")
+ }
+
+ if commandInput.SourceIP != commandUnmarshalled.SourceIP {
+ t.Fail()
+ log.Printf("SourceIP's are not equal (orig: %s, unmsh: %s)", commandInput.SourceIP, commandUnmarshalled.SourceIP)
+ }
+
+ if commandInput.SourcePort != commandUnmarshalled.SourcePort {
+ t.Fail()
+ log.Printf("SourcePort's are not equal (orig: %d, unmsh: %d)", commandInput.SourcePort, commandUnmarshalled.SourcePort)
+ }
+
+ if commandInput.DestPort != commandUnmarshalled.DestPort {
+ t.Fail()
+ log.Printf("DestPort's are not equal (orig: %d, unmsh: %d)", commandInput.DestPort, commandUnmarshalled.DestPort)
+ }
+
+ if commandInput.Protocol != commandUnmarshalled.Protocol {
+ t.Fail()
+ log.Printf("Protocols are not equal (orig: %s, unmsh: %s)", commandInput.Protocol, commandUnmarshalled.Protocol)
+ }
+
+ if commandInput.IsActive != commandUnmarshalled.IsActive {
+ t.Fail()
+ log.Printf("IsActive's are not equal (orig: %t, unmsh: %t)", commandInput.IsActive, commandUnmarshalled.IsActive)
+ }
+}
+
+func TestProxyConnectionRequest(t *testing.T) {
+ commandInput := &ProxyInstanceRequest{}
+
+ commandMarshalled, err := Marshal(commandInput)
+
+ if logLevel == "debug" {
+ log.Printf("Generated array contents: %v", commandMarshalled)
+ }
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ buf := bytes.NewBuffer(commandMarshalled)
+ commandUnmarshalledRaw, err := Unmarshal(buf)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ _, ok := commandUnmarshalledRaw.(*ProxyInstanceRequest)
+
+ if !ok {
+ t.Fatal("failed typecast")
+ }
+}
+
+func TestProxyConnectionResponse(t *testing.T) {
+ commandInput := &ProxyInstanceResponse{
+ Proxies: []*ProxyInstance{
+ {
+ SourceIP: "192.168.0.168",
+ SourcePort: 25565,
+ DestPort: 25565,
+ Protocol: "tcp",
+ },
+ {
+ SourceIP: "127.0.0.1",
+ SourcePort: 19132,
+ DestPort: 19132,
+ Protocol: "udp",
+ },
+ {
+ SourceIP: "68.42.203.47",
+ SourcePort: 22,
+ DestPort: 2222,
+ Protocol: "tcp",
+ },
+ },
+ }
+
+ commandMarshalled, err := Marshal(commandInput)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ if logLevel == "debug" {
+ log.Printf("Generated array contents: %v", commandMarshalled)
+ }
+
+ buf := bytes.NewBuffer(commandMarshalled)
+ commandUnmarshalledRaw, err := Unmarshal(buf)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ commandUnmarshalled, ok := commandUnmarshalledRaw.(*ProxyInstanceResponse)
+
+ if !ok {
+ t.Fatal("failed typecast")
+ }
+
+ for proxyIndex, originalProxy := range commandInput.Proxies {
+ remoteProxy := commandUnmarshalled.Proxies[proxyIndex]
+
+ if originalProxy.SourceIP != remoteProxy.SourceIP {
+ t.Fail()
+ log.Printf("(in #%d) SourceIP's are not equal (orig: %s, unmsh: %s)", proxyIndex, originalProxy.SourceIP, remoteProxy.SourceIP)
+ }
+
+ if originalProxy.SourcePort != remoteProxy.SourcePort {
+ t.Fail()
+ log.Printf("(in #%d) SourcePort's are not equal (orig: %d, unmsh: %d)", proxyIndex, originalProxy.SourcePort, remoteProxy.SourcePort)
+ }
+
+ if originalProxy.DestPort != remoteProxy.DestPort {
+ t.Fail()
+ log.Printf("(in #%d) DestPort's are not equal (orig: %d, unmsh: %d)", proxyIndex, originalProxy.DestPort, remoteProxy.DestPort)
+ }
+
+ if originalProxy.Protocol != remoteProxy.Protocol {
+ t.Fail()
+			log.Printf("(in #%d) Protocols are not equal (orig: %s, unmsh: %s)", proxyIndex, originalProxy.Protocol, remoteProxy.Protocol)
+ }
+ }
+}
diff --git a/backend/commonbackend/unmarshal.go b/backend/commonbackend/unmarshal.go
new file mode 100644
index 0000000..6bb5af4
--- /dev/null
+++ b/backend/commonbackend/unmarshal.go
@@ -0,0 +1,646 @@
+package commonbackend
+
+import (
+ "encoding/binary"
+ "fmt"
+ "io"
+ "net"
+)
+
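+// unmarshalIndividualConnectionStruct reads a single ProxyClientConnection record from the
+// stream: a 1-byte server IP version, the server IP, big-endian source and destination ports,
+// then the client IP version, client IP, and client port.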
+func unmarshalIndividualConnectionStruct(conn io.Reader) (*ProxyClientConnection, error) {
+ serverIPVersion := make([]byte, 1)
+
+ if _, err := conn.Read(serverIPVersion); err != nil {
+ return nil, fmt.Errorf("couldn't read server IP version")
+ }
+
+ var serverIPSize uint8
+
+ if serverIPVersion[0] == 4 {
+ serverIPSize = IPv4Size
+ } else if serverIPVersion[0] == 6 {
+ serverIPSize = IPv6Size
+ } else if serverIPVersion[0] == '\n' {
+ return nil, fmt.Errorf("no data found")
+ } else {
+		return nil, fmt.Errorf("invalid server IP version received")
+ }
+
+ serverIP := make(net.IP, serverIPSize)
+
+ if _, err := conn.Read(serverIP); err != nil {
+ return nil, fmt.Errorf("couldn't read server IP")
+ }
+
+ sourcePort := make([]byte, 2)
+
+ if _, err := conn.Read(sourcePort); err != nil {
+ return nil, fmt.Errorf("couldn't read source port")
+ }
+
+ destinationPort := make([]byte, 2)
+
+ if _, err := conn.Read(destinationPort); err != nil {
+		return nil, fmt.Errorf("couldn't read destination port")
+ }
+
+ clientIPVersion := make([]byte, 1)
+
+ if _, err := conn.Read(clientIPVersion); err != nil {
+		return nil, fmt.Errorf("couldn't read client IP version")
+ }
+
+ var clientIPSize uint8
+
+ if clientIPVersion[0] == 4 {
+ clientIPSize = IPv4Size
+ } else if clientIPVersion[0] == 6 {
+ clientIPSize = IPv6Size
+ } else {
+		return nil, fmt.Errorf("invalid client IP version received")
+ }
+
+ clientIP := make(net.IP, clientIPSize)
+
+ if _, err := conn.Read(clientIP); err != nil {
+		return nil, fmt.Errorf("couldn't read client IP")
+ }
+
+ clientPort := make([]byte, 2)
+
+ if _, err := conn.Read(clientPort); err != nil {
+		return nil, fmt.Errorf("couldn't read client port")
+ }
+
+ return &ProxyClientConnection{
+ SourceIP: serverIP.String(),
+ SourcePort: binary.BigEndian.Uint16(sourcePort),
+ DestPort: binary.BigEndian.Uint16(destinationPort),
+ ClientIP: clientIP.String(),
+ ClientPort: binary.BigEndian.Uint16(clientPort),
+ }, nil
+}
+
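+// unmarshalIndividualProxyStruct reads a single ProxyInstance record from the stream:
+// a 1-byte IP version, the source IP, big-endian source and destination ports, and a
+// 1-byte protocol identifier (TCP or UDP).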
+func unmarshalIndividualProxyStruct(conn io.Reader) (*ProxyInstance, error) {
+ ipVersion := make([]byte, 1)
+
+ if _, err := conn.Read(ipVersion); err != nil {
+ return nil, fmt.Errorf("couldn't read ip version")
+ }
+
+ var ipSize uint8
+
+ if ipVersion[0] == 4 {
+ ipSize = IPv4Size
+ } else if ipVersion[0] == 6 {
+ ipSize = IPv6Size
+ } else if ipVersion[0] == '\n' {
+ return nil, fmt.Errorf("no data found")
+ } else {
+		return nil, fmt.Errorf("invalid IP version received")
+ }
+
+ ip := make(net.IP, ipSize)
+
+ if _, err := conn.Read(ip); err != nil {
+ return nil, fmt.Errorf("couldn't read source IP")
+ }
+
+ sourcePort := make([]byte, 2)
+
+ if _, err := conn.Read(sourcePort); err != nil {
+ return nil, fmt.Errorf("couldn't read source port")
+ }
+
+ destPort := make([]byte, 2)
+
+ if _, err := conn.Read(destPort); err != nil {
+ return nil, fmt.Errorf("couldn't read destination port")
+ }
+
+ protocolBytes := make([]byte, 1)
+
+ if _, err := conn.Read(protocolBytes); err != nil {
+ return nil, fmt.Errorf("couldn't read protocol")
+ }
+
+ var protocol string
+
+ if protocolBytes[0] == TCP {
+ protocol = "tcp"
+ } else if protocolBytes[0] == UDP {
+ protocol = "udp"
+ } else {
+ return nil, fmt.Errorf("invalid protocol")
+ }
+
+ return &ProxyInstance{
+ SourceIP: ip.String(),
+ SourcePort: binary.BigEndian.Uint16(sourcePort),
+ DestPort: binary.BigEndian.Uint16(destPort),
+ Protocol: protocol,
+ }, nil
+}
+
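+// Unmarshal reads one marshalled command from conn, dispatches on its 1-byte command ID,
+// and returns the corresponding struct. List-style responses are read until a '\n'
+// terminator, with '\r' separating individual entries.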
+func Unmarshal(conn io.Reader) (interface{}, error) {
+ commandType := make([]byte, 1)
+
+ if _, err := conn.Read(commandType); err != nil {
+ return nil, fmt.Errorf("couldn't read command")
+ }
+
+ switch commandType[0] {
+ case StartID:
+ argumentsLength := make([]byte, 2)
+
+ if _, err := conn.Read(argumentsLength); err != nil {
+ return nil, fmt.Errorf("couldn't read argument length")
+ }
+
+ arguments := make([]byte, binary.BigEndian.Uint16(argumentsLength))
+
+ if _, err := conn.Read(arguments); err != nil {
+ return nil, fmt.Errorf("couldn't read arguments")
+ }
+
+ return &Start{
+ Arguments: arguments,
+ }, nil
+ case StopID:
+ return &Stop{}, nil
+ case AddProxyID:
+ ipVersion := make([]byte, 1)
+
+ if _, err := conn.Read(ipVersion); err != nil {
+ return nil, fmt.Errorf("couldn't read ip version")
+ }
+
+ var ipSize uint8
+
+ if ipVersion[0] == 4 {
+ ipSize = IPv4Size
+ } else if ipVersion[0] == 6 {
+ ipSize = IPv6Size
+ } else {
+			return nil, fmt.Errorf("invalid IP version received")
+ }
+
+ ip := make(net.IP, ipSize)
+
+ if _, err := conn.Read(ip); err != nil {
+ return nil, fmt.Errorf("couldn't read source IP")
+ }
+
+ sourcePort := make([]byte, 2)
+
+ if _, err := conn.Read(sourcePort); err != nil {
+ return nil, fmt.Errorf("couldn't read source port")
+ }
+
+ destPort := make([]byte, 2)
+
+ if _, err := conn.Read(destPort); err != nil {
+ return nil, fmt.Errorf("couldn't read destination port")
+ }
+
+ protocolBytes := make([]byte, 1)
+
+ if _, err := conn.Read(protocolBytes); err != nil {
+ return nil, fmt.Errorf("couldn't read protocol")
+ }
+
+ var protocol string
+
+ if protocolBytes[0] == TCP {
+ protocol = "tcp"
+ } else if protocolBytes[0] == UDP {
+ protocol = "udp"
+ } else {
+ return nil, fmt.Errorf("invalid protocol")
+ }
+
+ return &AddProxy{
+ SourceIP: ip.String(),
+ SourcePort: binary.BigEndian.Uint16(sourcePort),
+ DestPort: binary.BigEndian.Uint16(destPort),
+ Protocol: protocol,
+ }, nil
+ case RemoveProxyID:
+ ipVersion := make([]byte, 1)
+
+ if _, err := conn.Read(ipVersion); err != nil {
+ return nil, fmt.Errorf("couldn't read ip version")
+ }
+
+ var ipSize uint8
+
+ if ipVersion[0] == 4 {
+ ipSize = IPv4Size
+ } else if ipVersion[0] == 6 {
+ ipSize = IPv6Size
+ } else {
+			return nil, fmt.Errorf("invalid IP version received")
+ }
+
+ ip := make(net.IP, ipSize)
+
+ if _, err := conn.Read(ip); err != nil {
+ return nil, fmt.Errorf("couldn't read source IP")
+ }
+
+ sourcePort := make([]byte, 2)
+
+ if _, err := conn.Read(sourcePort); err != nil {
+ return nil, fmt.Errorf("couldn't read source port")
+ }
+
+ destPort := make([]byte, 2)
+
+ if _, err := conn.Read(destPort); err != nil {
+ return nil, fmt.Errorf("couldn't read destination port")
+ }
+
+ protocolBytes := make([]byte, 1)
+
+ if _, err := conn.Read(protocolBytes); err != nil {
+ return nil, fmt.Errorf("couldn't read protocol")
+ }
+
+ var protocol string
+
+ if protocolBytes[0] == TCP {
+ protocol = "tcp"
+ } else if protocolBytes[0] == UDP {
+ protocol = "udp"
+ } else {
+ return nil, fmt.Errorf("invalid protocol")
+ }
+
+ return &RemoveProxy{
+ SourceIP: ip.String(),
+ SourcePort: binary.BigEndian.Uint16(sourcePort),
+ DestPort: binary.BigEndian.Uint16(destPort),
+ Protocol: protocol,
+ }, nil
+ case ProxyConnectionsResponseID:
+ connections := []*ProxyClientConnection{}
+ delimiter := make([]byte, 1)
+ var errorReturn error
+
+ // Infinite loop because we don't know the length
+ for {
+ connection, err := unmarshalIndividualConnectionStruct(conn)
+
+ if err != nil {
+ if err.Error() == "no data found" {
+ break
+ }
+
+ return nil, err
+ }
+
+ connections = append(connections, connection)
+
+ if _, err := conn.Read(delimiter); err != nil {
+ return nil, fmt.Errorf("couldn't read delimiter")
+ }
+
+ if delimiter[0] == '\r' {
+ continue
+ } else if delimiter[0] == '\n' {
+ break
+ } else {
+				// This shouldn't happen; break out and return an error
+				errorReturn = fmt.Errorf("invalid delimiter received while processing stream")
+ break
+ }
+ }
+
+ return &ProxyConnectionsResponse{
+ Connections: connections,
+ }, errorReturn
+ case CheckClientParametersID:
+ ipVersion := make([]byte, 1)
+
+ if _, err := conn.Read(ipVersion); err != nil {
+ return nil, fmt.Errorf("couldn't read ip version")
+ }
+
+ var ipSize uint8
+
+ if ipVersion[0] == 4 {
+ ipSize = IPv4Size
+ } else if ipVersion[0] == 6 {
+ ipSize = IPv6Size
+ } else {
+			return nil, fmt.Errorf("invalid IP version received")
+ }
+
+ ip := make(net.IP, ipSize)
+
+ if _, err := conn.Read(ip); err != nil {
+ return nil, fmt.Errorf("couldn't read source IP")
+ }
+
+ sourcePort := make([]byte, 2)
+
+ if _, err := conn.Read(sourcePort); err != nil {
+ return nil, fmt.Errorf("couldn't read source port")
+ }
+
+ destPort := make([]byte, 2)
+
+ if _, err := conn.Read(destPort); err != nil {
+ return nil, fmt.Errorf("couldn't read destination port")
+ }
+
+ protocolBytes := make([]byte, 1)
+
+ if _, err := conn.Read(protocolBytes); err != nil {
+ return nil, fmt.Errorf("couldn't read protocol")
+ }
+
+ var protocol string
+
+ if protocolBytes[0] == TCP {
+ protocol = "tcp"
+ } else if protocolBytes[0] == UDP {
+ protocol = "udp"
+ } else {
+ return nil, fmt.Errorf("invalid protocol")
+ }
+
+ return &CheckClientParameters{
+ SourceIP: ip.String(),
+ SourcePort: binary.BigEndian.Uint16(sourcePort),
+ DestPort: binary.BigEndian.Uint16(destPort),
+ Protocol: protocol,
+ }, nil
+ case CheckServerParametersID:
+ argumentsLength := make([]byte, 2)
+
+ if _, err := conn.Read(argumentsLength); err != nil {
+ return nil, fmt.Errorf("couldn't read argument length")
+ }
+
+ arguments := make([]byte, binary.BigEndian.Uint16(argumentsLength))
+
+ if _, err := conn.Read(arguments); err != nil {
+ return nil, fmt.Errorf("couldn't read arguments")
+ }
+
+ return &CheckServerParameters{
+ Arguments: arguments,
+ }, nil
+ case CheckParametersResponseID:
+ checkMethodByte := make([]byte, 1)
+
+ if _, err := conn.Read(checkMethodByte); err != nil {
+ return nil, fmt.Errorf("couldn't read check method byte")
+ }
+
+ var checkMethod string
+
+ if checkMethodByte[0] == CheckClientParametersID {
+ checkMethod = "checkClientParameters"
+ } else if checkMethodByte[0] == CheckServerParametersID {
+ checkMethod = "checkServerParameters"
+ } else {
+			return nil, fmt.Errorf("invalid check method received")
+ }
+
+ isValid := make([]byte, 1)
+
+ if _, err := conn.Read(isValid); err != nil {
+ return nil, fmt.Errorf("couldn't read isValid byte")
+ }
+
+ messageLengthBytes := make([]byte, 2)
+
+ if _, err := conn.Read(messageLengthBytes); err != nil {
+ return nil, fmt.Errorf("couldn't read message length")
+ }
+
+ messageLength := binary.BigEndian.Uint16(messageLengthBytes)
+ var message string
+
+ if messageLength != 0 {
+ messageBytes := make([]byte, messageLength)
+
+ if _, err := conn.Read(messageBytes); err != nil {
+ return nil, fmt.Errorf("couldn't read message")
+ }
+
+ message = string(messageBytes)
+ }
+
+ return &CheckParametersResponse{
+ InResponseTo: checkMethod,
+ IsValid: isValid[0] == 1,
+ Message: message,
+ }, nil
+ case BackendStatusResponseID:
+ isRunning := make([]byte, 1)
+
+ if _, err := conn.Read(isRunning); err != nil {
+ return nil, fmt.Errorf("couldn't read isRunning field")
+ }
+
+ statusCode := make([]byte, 1)
+
+ if _, err := conn.Read(statusCode); err != nil {
+ return nil, fmt.Errorf("couldn't read status code field")
+ }
+
+ messageLengthBytes := make([]byte, 2)
+
+ if _, err := conn.Read(messageLengthBytes); err != nil {
+ return nil, fmt.Errorf("couldn't read message length")
+ }
+
+ messageLength := binary.BigEndian.Uint16(messageLengthBytes)
+ var message string
+
+ if messageLength != 0 {
+ messageBytes := make([]byte, messageLength)
+
+ if _, err := conn.Read(messageBytes); err != nil {
+ return nil, fmt.Errorf("couldn't read message")
+ }
+
+ message = string(messageBytes)
+ }
+
+ return &BackendStatusResponse{
+ IsRunning: isRunning[0] == 1,
+ StatusCode: int(statusCode[0]),
+ Message: message,
+ }, nil
+ case BackendStatusRequestID:
+ return &BackendStatusRequest{}, nil
+ case ProxyStatusRequestID:
+ ipVersion := make([]byte, 1)
+
+ if _, err := conn.Read(ipVersion); err != nil {
+ return nil, fmt.Errorf("couldn't read ip version")
+ }
+
+ var ipSize uint8
+
+ if ipVersion[0] == 4 {
+ ipSize = IPv4Size
+ } else if ipVersion[0] == 6 {
+ ipSize = IPv6Size
+ } else {
+			return nil, fmt.Errorf("invalid IP version received")
+ }
+
+ ip := make(net.IP, ipSize)
+
+ if _, err := conn.Read(ip); err != nil {
+ return nil, fmt.Errorf("couldn't read source IP")
+ }
+
+ sourcePort := make([]byte, 2)
+
+ if _, err := conn.Read(sourcePort); err != nil {
+ return nil, fmt.Errorf("couldn't read source port")
+ }
+
+ destPort := make([]byte, 2)
+
+ if _, err := conn.Read(destPort); err != nil {
+ return nil, fmt.Errorf("couldn't read destination port")
+ }
+
+ protocolBytes := make([]byte, 1)
+
+ if _, err := conn.Read(protocolBytes); err != nil {
+ return nil, fmt.Errorf("couldn't read protocol")
+ }
+
+ var protocol string
+
+ if protocolBytes[0] == TCP {
+ protocol = "tcp"
+ } else if protocolBytes[0] == UDP {
+ protocol = "udp"
+ } else {
+ return nil, fmt.Errorf("invalid protocol")
+ }
+
+ return &ProxyStatusRequest{
+ SourceIP: ip.String(),
+ SourcePort: binary.BigEndian.Uint16(sourcePort),
+ DestPort: binary.BigEndian.Uint16(destPort),
+ Protocol: protocol,
+ }, nil
+ case ProxyStatusResponseID:
+ ipVersion := make([]byte, 1)
+
+ if _, err := conn.Read(ipVersion); err != nil {
+ return nil, fmt.Errorf("couldn't read ip version")
+ }
+
+ var ipSize uint8
+
+ if ipVersion[0] == 4 {
+ ipSize = IPv4Size
+ } else if ipVersion[0] == 6 {
+ ipSize = IPv6Size
+ } else {
+			return nil, fmt.Errorf("invalid IP version received")
+ }
+
+ ip := make(net.IP, ipSize)
+
+ if _, err := conn.Read(ip); err != nil {
+ return nil, fmt.Errorf("couldn't read source IP")
+ }
+
+ sourcePort := make([]byte, 2)
+
+ if _, err := conn.Read(sourcePort); err != nil {
+ return nil, fmt.Errorf("couldn't read source port")
+ }
+
+ destPort := make([]byte, 2)
+
+ if _, err := conn.Read(destPort); err != nil {
+ return nil, fmt.Errorf("couldn't read destination port")
+ }
+
+ protocolBytes := make([]byte, 1)
+
+ if _, err := conn.Read(protocolBytes); err != nil {
+ return nil, fmt.Errorf("couldn't read protocol")
+ }
+
+ var protocol string
+
+ if protocolBytes[0] == TCP {
+ protocol = "tcp"
+ } else if protocolBytes[0] == UDP {
+ protocol = "udp"
+ } else {
+ return nil, fmt.Errorf("invalid protocol")
+ }
+
+ isActive := make([]byte, 1)
+
+ if _, err := conn.Read(isActive); err != nil {
+ return nil, fmt.Errorf("couldn't read isActive field")
+ }
+
+ return &ProxyStatusResponse{
+ SourceIP: ip.String(),
+ SourcePort: binary.BigEndian.Uint16(sourcePort),
+ DestPort: binary.BigEndian.Uint16(destPort),
+ Protocol: protocol,
+ IsActive: isActive[0] == 1,
+ }, nil
+ case ProxyInstanceRequestID:
+ return &ProxyInstanceRequest{}, nil
+ case ProxyInstanceResponseID:
+ proxies := []*ProxyInstance{}
+ delimiter := make([]byte, 1)
+ var errorReturn error
+
+ // Infinite loop because we don't know the length
+ for {
+ proxy, err := unmarshalIndividualProxyStruct(conn)
+
+ if err != nil {
+ if err.Error() == "no data found" {
+ break
+ }
+
+ return nil, err
+ }
+
+ proxies = append(proxies, proxy)
+
+ if _, err := conn.Read(delimiter); err != nil {
+ return nil, fmt.Errorf("couldn't read delimiter")
+ }
+
+ if delimiter[0] == '\r' {
+ continue
+ } else if delimiter[0] == '\n' {
+ break
+ } else {
+				// This shouldn't happen; break out and return an error
+				errorReturn = fmt.Errorf("invalid delimiter received while processing stream")
+ break
+ }
+ }
+
+ return &ProxyInstanceResponse{
+ Proxies: proxies,
+ }, errorReturn
+ case ProxyConnectionsRequestID:
+ return &ProxyConnectionsRequest{}, nil
+ }
+
+ return nil, fmt.Errorf("couldn't match command ID")
+}
diff --git a/backend/dev.env b/backend/dev.env
new file mode 100644
index 0000000..8d127a3
--- /dev/null
+++ b/backend/dev.env
@@ -0,0 +1,8 @@
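+# Development-only defaults for the Hermes backend; the JWT secret and flags below are not production-safe.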
+HERMES_DATABASE_BACKEND=sqlite
+HERMES_SQLITE_FILEPATH=../.tmp/sqlite.db
+HERMES_JWT_SECRET=thisisnotproductionreadydonotusethisinproduction
+HERMES_LOG_LEVEL=debug
+HERMES_DEVELOPMENT_MODE=true
+HERMES_SIGNUP_ENABLED=true
+HERMES_UNSAFE_ADMIN_SIGNUP_ENABLED=true
+HERMES_FORCE_DISABLE_REFRESH_TOKEN_EXPIRY=true
diff --git a/backend/dummybackend/main.go b/backend/dummybackend/main.go
new file mode 100644
index 0000000..f28615c
--- /dev/null
+++ b/backend/dummybackend/main.go
@@ -0,0 +1,87 @@
+package main
+
+import (
+ "os"
+
+ "git.terah.dev/imterah/hermes/backend/backendutil"
+ "git.terah.dev/imterah/hermes/backend/commonbackend"
+ "github.com/charmbracelet/log"
+)
+
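+// DummyBackend is a no-op backend implementation: every lifecycle and proxy operation
+// reports success without doing any real work, and every parameter check passes.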
+type DummyBackend struct {
+}
+
+func (backend *DummyBackend) StartBackend(arguments []byte) (bool, error) {
+ return true, nil
+}
+
+func (backend *DummyBackend) StopBackend() (bool, error) {
+ return true, nil
+}
+
+func (backend *DummyBackend) GetBackendStatus() (bool, error) {
+ return true, nil
+}
+
+func (backend *DummyBackend) StartProxy(command *commonbackend.AddProxy) (bool, error) {
+ return true, nil
+}
+
+func (backend *DummyBackend) StopProxy(command *commonbackend.RemoveProxy) (bool, error) {
+ return true, nil
+}
+
+func (backend *DummyBackend) GetAllClientConnections() []*commonbackend.ProxyClientConnection {
+ return []*commonbackend.ProxyClientConnection{}
+}
+
+func (backend *DummyBackend) CheckParametersForConnections(clientParameters *commonbackend.CheckClientParameters) *commonbackend.CheckParametersResponse {
+ // You don't have to specify Type and InReplyTo. Those will be handled for you.
+ // Message is optional.
+ return &commonbackend.CheckParametersResponse{
+ IsValid: true,
+ Message: "Valid!",
+ }
+}
+
+func (backend *DummyBackend) CheckParametersForBackend(arguments []byte) *commonbackend.CheckParametersResponse {
+ // You don't have to specify Type and InReplyTo. Those will be handled for you.
+ // Message is optional.
+ return &commonbackend.CheckParametersResponse{
+ IsValid: true,
+ Message: "Valid!",
+ }
+}
+
+func main() {
+	// When using logging, you should use charmbracelet/log, since that's what everything else in this project uses. - imterah
+ logLevel := os.Getenv("HERMES_LOG_LEVEL")
+
+ if logLevel != "" {
+ switch logLevel {
+ case "debug":
+ log.SetLevel(log.DebugLevel)
+
+ case "info":
+ log.SetLevel(log.InfoLevel)
+
+ case "warn":
+ log.SetLevel(log.WarnLevel)
+
+ case "error":
+ log.SetLevel(log.ErrorLevel)
+
+ case "fatal":
+ log.SetLevel(log.FatalLevel)
+ }
+ }
+
+ backend := &DummyBackend{}
+
+ application := backendutil.NewHelper(backend)
+ err := application.Start()
+
+ if err != nil {
+ log.Fatalf("failed execution in application: %s", err.Error())
+ }
+}
diff --git a/backend/externalbackendlauncher/main.go b/backend/externalbackendlauncher/main.go
new file mode 100644
index 0000000..c196866
--- /dev/null
+++ b/backend/externalbackendlauncher/main.go
@@ -0,0 +1,308 @@
+package main
+
+import (
+ "encoding/json"
+ "fmt"
+ "os"
+ "os/exec"
+ "strings"
+ "time"
+
+ "git.terah.dev/imterah/hermes/backend/backendlauncher"
+ "git.terah.dev/imterah/hermes/backend/commonbackend"
+ "github.com/charmbracelet/log"
+ "github.com/urfave/cli/v2"
+)
+
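+// ProxyInstance describes a single proxy to set up, as read from the JSON file passed via --proxies.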
+type ProxyInstance struct {
+ SourceIP string `json:"sourceIP"`
+ SourcePort uint16 `json:"sourcePort"`
+ DestPort uint16 `json:"destPort"`
+ Protocol string `json:"protocol"`
+}
+
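+// WriteLogger adapts the backend process's stdout/stderr into log output, emitting one
+// log line per non-empty line written.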
+type WriteLogger struct{}
+
+func (writer WriteLogger) Write(p []byte) (n int, err error) {
+ logSplit := strings.Split(string(p), "\n")
+
+ for _, line := range logSplit {
+ if line == "" {
+ continue
+ }
+
+ log.Infof("application: %s", line)
+ }
+
+ return len(p), err
+}
+
+var (
+ tempDir string
+ logLevel string
+)
+
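+// entrypoint wires everything together: it reads the backend parameters and optional proxy
+// list, opens a Unix socket for the backend to connect back to, sends Start and AddProxy
+// commands over each accepted connection, and keeps the backend process itself running,
+// restarting it five seconds after it exits.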
+func entrypoint(cCtx *cli.Context) error {
+ executablePath := cCtx.Args().Get(0)
+
+ if executablePath == "" {
+ return fmt.Errorf("executable file is not set")
+ }
+
+ executableParamsPath := cCtx.String("params-path")
+
+	if executableParamsPath == "" {
+		return fmt.Errorf("executable parameters file is not set")
+ }
+
+ proxyFilePath := cCtx.String("proxies")
+ proxies := []ProxyInstance{}
+
+ if proxyFilePath != "" {
+ proxyFile, err := os.ReadFile(proxyFilePath)
+
+ if err != nil {
+ return fmt.Errorf("failed to read proxy file: %s", err.Error())
+ }
+
+ err = json.Unmarshal(proxyFile, &proxies)
+
+ if err != nil {
+ return fmt.Errorf("failed to parse proxy file: %s", err.Error())
+ }
+ }
+
+ log.Debugf("discovered %d proxies.", len(proxies))
+
+ backendParameters, err := os.ReadFile(executableParamsPath)
+
+ if err != nil {
+ return fmt.Errorf("could not read backend parameters: %s", err.Error())
+ }
+
+ _, err = os.Stat(executablePath)
+
+ if err != nil {
+ return fmt.Errorf("failed to get backend executable information: %s", err.Error())
+ }
+
+ log.Debug("running socket acquisition")
+
+ sockPath, sockListener, err := backendlauncher.GetUnixSocket(tempDir)
+
+ if err != nil {
+ return fmt.Errorf("failed to acquire unix socket: %s", err.Error())
+ }
+
+ log.Debugf("acquisition was successful: %s", sockPath)
+
+ go func() {
+ log.Debug("entering execution loop (in auxiliary goroutine)...")
+
+ for {
+ log.Info("waiting for Unix socket connections...")
+ sock, err := sockListener.Accept()
+			log.Info("received connection. initializing...")
+
+ if err != nil {
+ log.Warnf("failed to accept socket connection: %s", err.Error())
+ continue
+ }
+
+ defer sock.Close()
+
+ startCommand := &commonbackend.Start{
+ Arguments: backendParameters,
+ }
+
+ startMarshalledCommand, err := commonbackend.Marshal(startCommand)
+
+ if err != nil {
+ log.Errorf("failed to generate start command: %s", err.Error())
+ continue
+ }
+
+ if _, err = sock.Write(startMarshalledCommand); err != nil {
+ log.Errorf("failed to write to socket: %s", err.Error())
+ continue
+ }
+
+ commandRaw, err := commonbackend.Unmarshal(sock)
+
+ if err != nil {
+ log.Errorf("failed to read from/unmarshal from socket: %s", err.Error())
+ continue
+ }
+
+ command, ok := commandRaw.(*commonbackend.BackendStatusResponse)
+
+ if !ok {
+ log.Error("failed to typecast response")
+ continue
+ }
+
+ if !command.IsRunning {
+ var status string
+
+ if command.StatusCode == commonbackend.StatusSuccess {
+ status = "Success"
+ } else {
+ status = "Failure"
+ }
+
+ log.Errorf("failed to start backend (status: %s): %s", status, command.Message)
+ continue
+ }
+
+ log.Info("successfully started backend.")
+
+ hasAnyFailed := false
+
+ for _, proxy := range proxies {
+ log.Infof("initializing proxy %s:%d -> remote:%d", proxy.SourceIP, proxy.SourcePort, proxy.DestPort)
+
+ proxyAddCommand := &commonbackend.AddProxy{
+ SourceIP: proxy.SourceIP,
+ SourcePort: proxy.SourcePort,
+ DestPort: proxy.DestPort,
+ Protocol: proxy.Protocol,
+ }
+
+ marshalledProxyCommand, err := commonbackend.Marshal(proxyAddCommand)
+
+ if err != nil {
+					log.Errorf("failed to generate proxy add command: %s", err.Error())
+ hasAnyFailed = true
+ continue
+ }
+
+ if _, err = sock.Write(marshalledProxyCommand); err != nil {
+ log.Errorf("failed to write to socket: %s", err.Error())
+ hasAnyFailed = true
+ continue
+ }
+
+ commandRaw, err := commonbackend.Unmarshal(sock)
+
+ if err != nil {
+ log.Errorf("failed to read from/unmarshal from socket: %s", err.Error())
+ hasAnyFailed = true
+ continue
+ }
+
+ command, ok := commandRaw.(*commonbackend.ProxyStatusResponse)
+
+ if !ok {
+ log.Error("failed to typecast response")
+ hasAnyFailed = true
+ continue
+ }
+
+ if !command.IsActive {
+ log.Error("failed to activate: isActive is false in response to AddProxy{} call")
+ hasAnyFailed = true
+ continue
+ }
+
+ log.Infof("successfully initialized proxy %s:%d -> remote:%d", proxy.SourceIP, proxy.SourcePort, proxy.DestPort)
+ }
+
+ if hasAnyFailed {
+ log.Error("failed to initialize all proxies (read logs above)")
+ } else {
+ log.Info("successfully initialized all proxies")
+ }
+
+ log.Debug("entering infinite keepalive loop...")
+
+			// Block forever without busy-spinning.
+			select {}
+ }
+ }()
+
+ log.Debug("entering execution loop (in main goroutine)...")
+
+ stdout := WriteLogger{}
+ stderr := WriteLogger{}
+
+ for {
+ log.Info("starting process...")
+ // TODO: can we reuse cmd?
+
+ cmd := exec.Command(executablePath)
+ cmd.Env = append(cmd.Env, fmt.Sprintf("HERMES_API_SOCK=%s", sockPath), fmt.Sprintf("HERMES_LOG_LEVEL=%s", logLevel))
+
+ cmd.Stdout = stdout
+ cmd.Stderr = stderr
+
+ err := cmd.Run()
+
+ if err != nil {
+ if err, ok := err.(*exec.ExitError); ok {
+ log.Warnf("backend died with exit code '%d' and with error '%s'", err.ExitCode(), err.Error())
+ } else {
+ log.Warnf("backend died with error: %s", err.Error())
+ }
+ } else {
+ log.Info("process exited gracefully.")
+ }
+
+ log.Info("sleeping 5 seconds, and then restarting process")
+ time.Sleep(5 * time.Second)
+ }
+}
+
+func main() {
+ logLevel = os.Getenv("HERMES_LOG_LEVEL")
+
+ if logLevel == "" {
+ logLevel = "fatal"
+ }
+
+ switch logLevel {
+ case "debug":
+ log.SetLevel(log.DebugLevel)
+
+ case "info":
+ log.SetLevel(log.InfoLevel)
+
+ case "warn":
+ log.SetLevel(log.WarnLevel)
+
+ case "error":
+ log.SetLevel(log.ErrorLevel)
+
+ case "fatal":
+ log.SetLevel(log.FatalLevel)
+ }
+
+ var err error
+ tempDir, err = os.MkdirTemp("", "hermes-sockets-")
+
+ if err != nil {
+ log.Fatalf("failed to create sockets directory: %s", err.Error())
+ }
+
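+	// Example invocation (paths are illustrative):
+	//   externalbackendlauncher --params-path backend-params.json --proxies proxies.json ./my-backend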
+ app := &cli.App{
+ Name: "externalbackendlauncher",
+ Usage: "for development purposes only -- external backend launcher for Hermes",
+ Action: entrypoint,
+ Flags: []cli.Flag{
+ &cli.StringFlag{
+ Name: "params-path",
+ Aliases: []string{"params", "pp"},
+ Usage: "file containing the parameters that are sent to the backend",
+ Required: true,
+ },
+ &cli.StringFlag{
+ Name: "proxies",
+ Aliases: []string{"p"},
+ Usage: "file that contains the list of proxies to setup in JSON format",
+ },
+ },
+ }
+
+ if err := app.Run(os.Args); err != nil {
+ log.Fatal(err)
+ }
+}
diff --git a/backend/sshappbackend/datacommands/constants.go b/backend/sshappbackend/datacommands/constants.go
new file mode 100644
index 0000000..6385e98
--- /dev/null
+++ b/backend/sshappbackend/datacommands/constants.go
@@ -0,0 +1,90 @@
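+// Package datacommands defines the binary wire format used by the sshappbackend:
+// big-endian messages identified by a 1-byte command ID.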
+package datacommands
+
+// DO NOT USE
+type ProxyStatusRequest struct {
+ ProxyID uint16
+}
+
+type ProxyStatusResponse struct {
+ ProxyID uint16
+ IsActive bool
+}
+
+type RemoveProxy struct {
+ ProxyID uint16
+}
+
+type ProxyInstanceResponse struct {
+ Proxies []uint16
+}
+
+type ProxyConnectionsRequest struct {
+ ProxyID uint16
+}
+
+type ProxyConnectionsResponse struct {
+ Connections []uint16
+}
+
+type TCPConnectionOpened struct {
+ ProxyID uint16
+ ConnectionID uint16
+}
+
+type TCPConnectionClosed struct {
+ ProxyID uint16
+ ConnectionID uint16
+}
+
+type TCPProxyData struct {
+ ProxyID uint16
+ ConnectionID uint16
+ DataLength uint16
+}
+
+type UDPProxyData struct {
+ ProxyID uint16
+ ClientIP string
+ ClientPort uint16
+ DataLength uint16
+}
+
+type ProxyInformationRequest struct {
+ ProxyID uint16
+}
+
+type ProxyInformationResponse struct {
+ Exists bool
+ SourceIP string
+ SourcePort uint16
+ DestPort uint16
+ Protocol string // Will be either 'tcp' or 'udp'
+}
+
+type ProxyConnectionInformationRequest struct {
+ ProxyID uint16
+ ConnectionID uint16
+}
+
+type ProxyConnectionInformationResponse struct {
+ Exists bool
+ ClientIP string
+ ClientPort uint16
+}
+
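+// Command IDs for the data protocol. They start at iota + 100, presumably to keep them
+// distinct from the lower-numbered commonbackend command IDs.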
+const (
+ ProxyStatusRequestID = iota + 100
+ ProxyStatusResponseID
+ RemoveProxyID
+ ProxyInstanceResponseID
+ ProxyConnectionsRequestID
+ ProxyConnectionsResponseID
+ TCPConnectionOpenedID
+ TCPConnectionClosedID
+ TCPProxyDataID
+ UDPProxyDataID
+ ProxyInformationRequestID
+ ProxyInformationResponseID
+ ProxyConnectionInformationRequestID
+ ProxyConnectionInformationResponseID
+)
diff --git a/backend/sshappbackend/datacommands/marshal.go b/backend/sshappbackend/datacommands/marshal.go
new file mode 100644
index 0000000..a2c13bf
--- /dev/null
+++ b/backend/sshappbackend/datacommands/marshal.go
@@ -0,0 +1,323 @@
+package datacommands
+
+import (
+ "encoding/binary"
+ "fmt"
+ "net"
+)
+
+// IP size and protocol constants used by the marshaller and unmarshaller.
+const (
+ IPv4Size = 4
+ IPv6Size = 16
+
+ TCP = 1
+ UDP = 2
+)
+
+// Marshal takes a command (pointer to one of our structs) and converts it to a byte slice.
+func Marshal(command interface{}) ([]byte, error) {
+ switch cmd := command.(type) {
+ // ProxyStatusRequest: 1 byte for the command ID + 2 bytes for the ProxyID.
+ case *ProxyStatusRequest:
+ buf := make([]byte, 1+2)
+
+ buf[0] = ProxyStatusRequestID
+ binary.BigEndian.PutUint16(buf[1:], cmd.ProxyID)
+
+ return buf, nil
+
+ // ProxyStatusResponse: 1 byte for the command ID, 2 bytes for ProxyID, and 1 byte for IsActive.
+ case *ProxyStatusResponse:
+ buf := make([]byte, 1+2+1)
+
+ buf[0] = ProxyStatusResponseID
+ binary.BigEndian.PutUint16(buf[1:], cmd.ProxyID)
+
+ if cmd.IsActive {
+ buf[3] = 1
+ } else {
+ buf[3] = 0
+ }
+
+ return buf, nil
+
+ // RemoveProxy: 1 byte for the command ID + 2 bytes for the ProxyID.
+ case *RemoveProxy:
+ buf := make([]byte, 1+2)
+
+ buf[0] = RemoveProxyID
+ binary.BigEndian.PutUint16(buf[1:], cmd.ProxyID)
+
+ return buf, nil
+
+ // ProxyConnectionsRequest: 1 byte for the command ID + 2 bytes for the ProxyID.
+ case *ProxyConnectionsRequest:
+ buf := make([]byte, 1+2)
+
+ buf[0] = ProxyConnectionsRequestID
+ binary.BigEndian.PutUint16(buf[1:], cmd.ProxyID)
+
+ return buf, nil
+
+ // ProxyConnectionsResponse: 1 byte for the command ID + 2 bytes length of the Connections + 2 bytes for each
+ // number in the Connection array.
+ case *ProxyConnectionsResponse:
+ buf := make([]byte, 1+((len(cmd.Connections)+1)*2))
+
+ buf[0] = ProxyConnectionsResponseID
+ binary.BigEndian.PutUint16(buf[1:], uint16(len(cmd.Connections)))
+
+ for connectionIndex, connection := range cmd.Connections {
+ binary.BigEndian.PutUint16(buf[3+(connectionIndex*2):], connection)
+ }
+
+ return buf, nil
+
+	// ProxyInstanceResponse: 1 byte for the command ID + 2 bytes length of the Proxies + 2 bytes for each
+	// number in the Proxies array.
+ case *ProxyInstanceResponse:
+ buf := make([]byte, 1+((len(cmd.Proxies)+1)*2))
+
+ buf[0] = ProxyInstanceResponseID
+ binary.BigEndian.PutUint16(buf[1:], uint16(len(cmd.Proxies)))
+
+		for proxyIndex, proxy := range cmd.Proxies {
+			binary.BigEndian.PutUint16(buf[3+(proxyIndex*2):], proxy)
+ }
+
+ return buf, nil
+
+ // TCPConnectionOpened: 1 byte for the command ID + 2 bytes ProxyID + 2 bytes ConnectionID.
+ case *TCPConnectionOpened:
+ buf := make([]byte, 1+2+2)
+
+ buf[0] = TCPConnectionOpenedID
+ binary.BigEndian.PutUint16(buf[1:], cmd.ProxyID)
+ binary.BigEndian.PutUint16(buf[3:], cmd.ConnectionID)
+
+ return buf, nil
+
+ // TCPConnectionClosed: 1 byte for the command ID + 2 bytes ProxyID + 2 bytes ConnectionID.
+ case *TCPConnectionClosed:
+ buf := make([]byte, 1+2+2)
+
+ buf[0] = TCPConnectionClosedID
+ binary.BigEndian.PutUint16(buf[1:], cmd.ProxyID)
+ binary.BigEndian.PutUint16(buf[3:], cmd.ConnectionID)
+
+ return buf, nil
+
+ // TCPProxyData: 1 byte ID + 2 bytes ProxyID + 2 bytes ConnectionID + 2 bytes DataLength.
+ case *TCPProxyData:
+ buf := make([]byte, 1+2+2+2)
+
+ buf[0] = TCPProxyDataID
+ binary.BigEndian.PutUint16(buf[1:], cmd.ProxyID)
+ binary.BigEndian.PutUint16(buf[3:], cmd.ConnectionID)
+ binary.BigEndian.PutUint16(buf[5:], cmd.DataLength)
+
+ return buf, nil
+
+	// UDPProxyData:
+	// Format: 1 byte ID + 2 bytes ProxyID +
+	// 1 byte IP version + IP bytes + 2 bytes ClientPort + 2 bytes DataLength.
+ case *UDPProxyData:
+ ip := net.ParseIP(cmd.ClientIP)
+ if ip == nil {
+ return nil, fmt.Errorf("invalid client IP: %v", cmd.ClientIP)
+ }
+
+ var ipVer uint8
+ var ipBytes []byte
+
+ if ip4 := ip.To4(); ip4 != nil {
+ ipBytes = ip4
+ ipVer = 4
+ } else if ip16 := ip.To16(); ip16 != nil {
+ ipBytes = ip16
+ ipVer = 6
+ } else {
+ return nil, fmt.Errorf("unable to detect IP version for: %v", cmd.ClientIP)
+ }
+
+ totalSize := 1 + // id
+ 2 + // ProxyID
+ 1 + // IP version
+ len(ipBytes) + // client IP bytes
+ 2 + // ClientPort
+ 2 // DataLength
+
+ buf := make([]byte, totalSize)
+ offset := 0
+ buf[offset] = UDPProxyDataID
+ offset++
+
+ binary.BigEndian.PutUint16(buf[offset:], cmd.ProxyID)
+ offset += 2
+
+ buf[offset] = ipVer
+ offset++
+
+ copy(buf[offset:], ipBytes)
+ offset += len(ipBytes)
+
+ binary.BigEndian.PutUint16(buf[offset:], cmd.ClientPort)
+ offset += 2
+
+ binary.BigEndian.PutUint16(buf[offset:], cmd.DataLength)
+
+ return buf, nil
+
+ // ProxyInformationRequest: 1 byte ID + 2 bytes ProxyID.
+ case *ProxyInformationRequest:
+ buf := make([]byte, 1+2)
+ buf[0] = ProxyInformationRequestID
+ binary.BigEndian.PutUint16(buf[1:], cmd.ProxyID)
+ return buf, nil
+
+	// ProxyInformationResponse:
+	// Format: 1 byte ID + 1 byte Exists + (if Exists:)
+	// 1 byte IP version + IP bytes + 2 bytes SourcePort + 2 bytes DestPort + 1 byte Protocol.
+	// The remaining fields are only written when Exists is true.
+ case *ProxyInformationResponse:
+ if !cmd.Exists {
+ buf := make([]byte, 1+1)
+ buf[0] = ProxyInformationResponseID
+ buf[1] = 0 /* false */
+
+ return buf, nil
+ }
+
+ ip := net.ParseIP(cmd.SourceIP)
+
+ if ip == nil {
+ return nil, fmt.Errorf("invalid source IP: %v", cmd.SourceIP)
+ }
+
+ var ipVer uint8
+ var ipBytes []byte
+
+ if ip4 := ip.To4(); ip4 != nil {
+ ipBytes = ip4
+ ipVer = 4
+ } else if ip16 := ip.To16(); ip16 != nil {
+ ipBytes = ip16
+ ipVer = 6
+ } else {
+ return nil, fmt.Errorf("unable to detect IP version for: %v", cmd.SourceIP)
+ }
+
+ totalSize := 1 + // id
+ 1 + // Exists flag
+ 1 + // IP version
+ len(ipBytes) +
+ 2 + // SourcePort
+ 2 + // DestPort
+ 1 // Protocol
+
+ buf := make([]byte, totalSize)
+
+ offset := 0
+ buf[offset] = ProxyInformationResponseID
+ offset++
+
+ // We already handle this above
+ buf[offset] = 1 /* true */
+ offset++
+
+ buf[offset] = ipVer
+ offset++
+
+ copy(buf[offset:], ipBytes)
+ offset += len(ipBytes)
+
+ binary.BigEndian.PutUint16(buf[offset:], cmd.SourcePort)
+ offset += 2
+
+ binary.BigEndian.PutUint16(buf[offset:], cmd.DestPort)
+ offset += 2
+
+ // Encode protocol as 1 byte.
+ switch cmd.Protocol {
+ case "tcp":
+ buf[offset] = TCP
+ case "udp":
+ buf[offset] = UDP
+ default:
+ return nil, fmt.Errorf("invalid protocol: %v", cmd.Protocol)
+ }
+
+ // offset++ (not needed since we are at the end)
+ return buf, nil
+
+ // ProxyConnectionInformationRequest: 1 byte ID + 2 bytes ProxyID + 2 bytes ConnectionID.
+ case *ProxyConnectionInformationRequest:
+ buf := make([]byte, 1+2+2)
+
+ buf[0] = ProxyConnectionInformationRequestID
+ binary.BigEndian.PutUint16(buf[1:], cmd.ProxyID)
+ binary.BigEndian.PutUint16(buf[3:], cmd.ConnectionID)
+
+ return buf, nil
+
+ // ProxyConnectionInformationResponse:
+ // Format: 1 byte ID + 1 byte Exists + (if exists:)
+ // 1 byte IP version + IP bytes + 2 bytes ClientPort.
+ // This marshaller only writes the rest of the data if Exists.
+ case *ProxyConnectionInformationResponse:
+ if !cmd.Exists {
+ buf := make([]byte, 1+1)
+ buf[0] = ProxyConnectionInformationResponseID
+ buf[1] = 0 /* false */
+
+ return buf, nil
+ }
+
+ ip := net.ParseIP(cmd.ClientIP)
+
+ if ip == nil {
+ return nil, fmt.Errorf("invalid client IP: %v", cmd.ClientIP)
+ }
+
+ var ipVer uint8
+ var ipBytes []byte
+ if ip4 := ip.To4(); ip4 != nil {
+ ipBytes = ip4
+ ipVer = 4
+ } else if ip16 := ip.To16(); ip16 != nil {
+ ipBytes = ip16
+ ipVer = 6
+ } else {
+ return nil, fmt.Errorf("unable to detect IP version for: %v", cmd.ClientIP)
+ }
+
+ totalSize := 1 + // id
+ 1 + // Exists flag
+ 1 + // IP version
+ len(ipBytes) +
+ 2 // ClientPort
+
+ buf := make([]byte, totalSize)
+ offset := 0
+ buf[offset] = ProxyConnectionInformationResponseID
+ offset++
+
+ // We already handle this above
+ buf[offset] = 1 /* true */
+ offset++
+
+ buf[offset] = ipVer
+ offset++
+
+ copy(buf[offset:], ipBytes)
+ offset += len(ipBytes)
+
+ binary.BigEndian.PutUint16(buf[offset:], cmd.ClientPort)
+
+ return buf, nil
+
+ default:
+ return nil, fmt.Errorf("unsupported command type")
+ }
+}
diff --git a/backend/sshappbackend/datacommands/marshalling_test.go b/backend/sshappbackend/datacommands/marshalling_test.go
new file mode 100644
index 0000000..5b2e5ab
--- /dev/null
+++ b/backend/sshappbackend/datacommands/marshalling_test.go
@@ -0,0 +1,652 @@
+package datacommands
+
+import (
+ "bytes"
+ "log"
+ "os"
+ "testing"
+)
+
+var logLevel = os.Getenv("HERMES_LOG_LEVEL")
+
+func TestProxyStatusRequest(t *testing.T) {
+ commandInput := &ProxyStatusRequest{
+ ProxyID: 19132,
+ }
+
+ commandMarshalled, err := Marshal(commandInput)
+
+ if logLevel == "debug" {
+ log.Printf("Generated array contents: %v", commandMarshalled)
+ }
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ buf := bytes.NewBuffer(commandMarshalled)
+ commandUnmarshalledRaw, err := Unmarshal(buf)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ commandUnmarshalled, ok := commandUnmarshalledRaw.(*ProxyStatusRequest)
+
+ if !ok {
+ t.Fatal("failed typecast")
+ }
+
+ if commandInput.ProxyID != commandUnmarshalled.ProxyID {
+ t.Fail()
+ log.Printf("ProxyID's are not equal (orig: '%d', unmsh: '%d')", commandInput.ProxyID, commandUnmarshalled.ProxyID)
+ }
+}
+
+func TestProxyStatusResponse(t *testing.T) {
+ commandInput := &ProxyStatusResponse{
+ ProxyID: 19132,
+ IsActive: true,
+ }
+
+ commandMarshalled, err := Marshal(commandInput)
+
+ if logLevel == "debug" {
+ log.Printf("Generated array contents: %v", commandMarshalled)
+ }
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ buf := bytes.NewBuffer(commandMarshalled)
+ commandUnmarshalledRaw, err := Unmarshal(buf)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ commandUnmarshalled, ok := commandUnmarshalledRaw.(*ProxyStatusResponse)
+
+ if !ok {
+ t.Fatal("failed typecast")
+ }
+
+ if commandInput.ProxyID != commandUnmarshalled.ProxyID {
+ t.Fail()
+ log.Printf("ProxyID's are not equal (orig: '%d', unmsh: '%d')", commandInput.ProxyID, commandUnmarshalled.ProxyID)
+ }
+
+ if commandInput.IsActive != commandUnmarshalled.IsActive {
+ t.Fail()
+ log.Printf("IsActive's are not equal (orig: '%t', unmsh: '%t')", commandInput.IsActive, commandUnmarshalled.IsActive)
+ }
+}
+
+func TestRemoveProxy(t *testing.T) {
+ commandInput := &RemoveProxy{
+ ProxyID: 19132,
+ }
+
+ commandMarshalled, err := Marshal(commandInput)
+
+ if logLevel == "debug" {
+ log.Printf("Generated array contents: %v", commandMarshalled)
+ }
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ buf := bytes.NewBuffer(commandMarshalled)
+ commandUnmarshalledRaw, err := Unmarshal(buf)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ commandUnmarshalled, ok := commandUnmarshalledRaw.(*RemoveProxy)
+
+ if !ok {
+ t.Fatal("failed typecast")
+ }
+
+ if commandInput.ProxyID != commandUnmarshalled.ProxyID {
+ t.Fail()
+ log.Printf("ProxyID's are not equal (orig: '%d', unmsh: '%d')", commandInput.ProxyID, commandUnmarshalled.ProxyID)
+ }
+}
+
+func TestProxyConnectionsRequest(t *testing.T) {
+ commandInput := &ProxyConnectionsRequest{
+ ProxyID: 19132,
+ }
+
+ commandMarshalled, err := Marshal(commandInput)
+
+ if logLevel == "debug" {
+ log.Printf("Generated array contents: %v", commandMarshalled)
+ }
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ buf := bytes.NewBuffer(commandMarshalled)
+ commandUnmarshalledRaw, err := Unmarshal(buf)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ commandUnmarshalled, ok := commandUnmarshalledRaw.(*ProxyConnectionsRequest)
+
+ if !ok {
+ t.Fatal("failed typecast")
+ }
+
+ if commandInput.ProxyID != commandUnmarshalled.ProxyID {
+ t.Fail()
+ log.Printf("ProxyID's are not equal (orig: '%d', unmsh: '%d')", commandInput.ProxyID, commandUnmarshalled.ProxyID)
+ }
+}
+
+func TestProxyConnectionsResponse(t *testing.T) {
+ commandInput := &ProxyConnectionsResponse{
+ Connections: []uint16{12831, 9455, 64219, 12, 32},
+ }
+
+ commandMarshalled, err := Marshal(commandInput)
+
+ if logLevel == "debug" {
+ log.Printf("Generated array contents: %v", commandMarshalled)
+ }
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ buf := bytes.NewBuffer(commandMarshalled)
+ commandUnmarshalledRaw, err := Unmarshal(buf)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ commandUnmarshalled, ok := commandUnmarshalledRaw.(*ProxyConnectionsResponse)
+
+ if !ok {
+ t.Fatal("failed typecast")
+ }
+
+ for connectionIndex, originalConnection := range commandInput.Connections {
+ remoteConnection := commandUnmarshalled.Connections[connectionIndex]
+
+ if originalConnection != remoteConnection {
+ t.Fail()
+			log.Printf("(in #%d) Connection IDs are not equal (orig: %d, unmsh: %d)", connectionIndex, originalConnection, remoteConnection)
+ }
+ }
+}
+
+func TestProxyInstanceResponse(t *testing.T) {
+ commandInput := &ProxyInstanceResponse{
+ Proxies: []uint16{12831, 9455, 64219, 12, 32},
+ }
+
+ commandMarshalled, err := Marshal(commandInput)
+
+ if logLevel == "debug" {
+ log.Printf("Generated array contents: %v", commandMarshalled)
+ }
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ buf := bytes.NewBuffer(commandMarshalled)
+ commandUnmarshalledRaw, err := Unmarshal(buf)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ commandUnmarshalled, ok := commandUnmarshalledRaw.(*ProxyInstanceResponse)
+
+ if !ok {
+ t.Fatal("failed typecast")
+ }
+
+ for proxyIndex, originalProxy := range commandInput.Proxies {
+ remoteProxy := commandUnmarshalled.Proxies[proxyIndex]
+
+ if originalProxy != remoteProxy {
+ t.Fail()
+ log.Printf("(in #%d) Proxy IDs are not equal (orig: %d, unmsh: %d)", proxyIndex, originalProxy, remoteProxy)
+ }
+ }
+}
+
+func TestTCPConnectionOpened(t *testing.T) {
+ commandInput := &TCPConnectionOpened{
+ ProxyID: 19132,
+ ConnectionID: 25565,
+ }
+
+ commandMarshalled, err := Marshal(commandInput)
+
+ if logLevel == "debug" {
+ log.Printf("Generated array contents: %v", commandMarshalled)
+ }
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ buf := bytes.NewBuffer(commandMarshalled)
+ commandUnmarshalledRaw, err := Unmarshal(buf)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ commandUnmarshalled, ok := commandUnmarshalledRaw.(*TCPConnectionOpened)
+
+ if !ok {
+ t.Fatal("failed typecast")
+ }
+
+ if commandInput.ProxyID != commandUnmarshalled.ProxyID {
+ t.Fail()
+ log.Printf("ProxyID's are not equal (orig: '%d', unmsh: '%d')", commandInput.ProxyID, commandUnmarshalled.ProxyID)
+ }
+
+ if commandInput.ConnectionID != commandUnmarshalled.ConnectionID {
+ t.Fail()
+ log.Printf("ConnectionID's are not equal (orig: '%d', unmsh: '%d')", commandInput.ConnectionID, commandUnmarshalled.ConnectionID)
+ }
+}
+
+func TestTCPConnectionClosed(t *testing.T) {
+ commandInput := &TCPConnectionClosed{
+ ProxyID: 19132,
+ ConnectionID: 25565,
+ }
+
+ commandMarshalled, err := Marshal(commandInput)
+
+ if logLevel == "debug" {
+ log.Printf("Generated array contents: %v", commandMarshalled)
+ }
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ buf := bytes.NewBuffer(commandMarshalled)
+ commandUnmarshalledRaw, err := Unmarshal(buf)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ commandUnmarshalled, ok := commandUnmarshalledRaw.(*TCPConnectionClosed)
+
+ if !ok {
+ t.Fatal("failed typecast")
+ }
+
+ if commandInput.ProxyID != commandUnmarshalled.ProxyID {
+ t.Fail()
+ log.Printf("ProxyID's are not equal (orig: '%d', unmsh: '%d')", commandInput.ProxyID, commandUnmarshalled.ProxyID)
+ }
+
+ if commandInput.ConnectionID != commandUnmarshalled.ConnectionID {
+ t.Fail()
+ log.Printf("ConnectionID's are not equal (orig: '%d', unmsh: '%d')", commandInput.ConnectionID, commandUnmarshalled.ConnectionID)
+ }
+}
+
+func TestTCPProxyData(t *testing.T) {
+ commandInput := &TCPProxyData{
+ ProxyID: 19132,
+ ConnectionID: 25565,
+ DataLength: 1234,
+ }
+
+ commandMarshalled, err := Marshal(commandInput)
+
+ if logLevel == "debug" {
+ log.Printf("Generated array contents: %v", commandMarshalled)
+ }
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ buf := bytes.NewBuffer(commandMarshalled)
+ commandUnmarshalledRaw, err := Unmarshal(buf)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ commandUnmarshalled, ok := commandUnmarshalledRaw.(*TCPProxyData)
+
+ if !ok {
+ t.Fatal("failed typecast")
+ }
+
+ if commandInput.ProxyID != commandUnmarshalled.ProxyID {
+ t.Fail()
+ log.Printf("ProxyID's are not equal (orig: '%d', unmsh: '%d')", commandInput.ProxyID, commandUnmarshalled.ProxyID)
+ }
+
+ if commandInput.ConnectionID != commandUnmarshalled.ConnectionID {
+ t.Fail()
+ log.Printf("ConnectionID's are not equal (orig: '%d', unmsh: '%d')", commandInput.ConnectionID, commandUnmarshalled.ConnectionID)
+ }
+
+ if commandInput.DataLength != commandUnmarshalled.DataLength {
+ t.Fail()
+ log.Printf("DataLength's are not equal (orig: '%d', unmsh: '%d')", commandInput.DataLength, commandUnmarshalled.DataLength)
+ }
+}
+
+func TestUDPProxyData(t *testing.T) {
+ commandInput := &UDPProxyData{
+ ProxyID: 19132,
+ ClientIP: "68.51.23.54",
+ ClientPort: 28173,
+ DataLength: 1234,
+ }
+
+ commandMarshalled, err := Marshal(commandInput)
+
+ if logLevel == "debug" {
+ log.Printf("Generated array contents: %v", commandMarshalled)
+ }
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ buf := bytes.NewBuffer(commandMarshalled)
+ commandUnmarshalledRaw, err := Unmarshal(buf)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ commandUnmarshalled, ok := commandUnmarshalledRaw.(*UDPProxyData)
+
+ if !ok {
+ t.Fatal("failed typecast")
+ }
+
+ if commandInput.ProxyID != commandUnmarshalled.ProxyID {
+ t.Fail()
+ log.Printf("ProxyID's are not equal (orig: '%d', unmsh: '%d')", commandInput.ProxyID, commandUnmarshalled.ProxyID)
+ }
+
+ if commandInput.ClientIP != commandUnmarshalled.ClientIP {
+ t.Fail()
+ log.Printf("ClientIP's are not equal (orig: '%s', unmsh: '%s')", commandInput.ClientIP, commandUnmarshalled.ClientIP)
+ }
+
+ if commandInput.ClientPort != commandUnmarshalled.ClientPort {
+ t.Fail()
+ log.Printf("ClientPort's are not equal (orig: '%d', unmsh: '%d')", commandInput.ClientPort, commandUnmarshalled.ClientPort)
+ }
+
+ if commandInput.DataLength != commandUnmarshalled.DataLength {
+ t.Fail()
+ log.Printf("DataLength's are not equal (orig: '%d', unmsh: '%d')", commandInput.DataLength, commandUnmarshalled.DataLength)
+ }
+}
+
+func TestProxyInformationRequest(t *testing.T) {
+ commandInput := &ProxyInformationRequest{
+ ProxyID: 19132,
+ }
+
+ commandMarshalled, err := Marshal(commandInput)
+
+ if logLevel == "debug" {
+ log.Printf("Generated array contents: %v", commandMarshalled)
+ }
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ buf := bytes.NewBuffer(commandMarshalled)
+ commandUnmarshalledRaw, err := Unmarshal(buf)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ commandUnmarshalled, ok := commandUnmarshalledRaw.(*ProxyInformationRequest)
+
+ if !ok {
+ t.Fatal("failed typecast")
+ }
+
+ if commandInput.ProxyID != commandUnmarshalled.ProxyID {
+ t.Fail()
+ log.Printf("ProxyID's are not equal (orig: '%d', unmsh: '%d')", commandInput.ProxyID, commandUnmarshalled.ProxyID)
+ }
+}
+
+func TestProxyInformationResponseExists(t *testing.T) {
+ commandInput := &ProxyInformationResponse{
+ Exists: true,
+ SourceIP: "192.168.0.139",
+ SourcePort: 19132,
+ DestPort: 19132,
+ Protocol: "tcp",
+ }
+
+ commandMarshalled, err := Marshal(commandInput)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ if logLevel == "debug" {
+ log.Printf("Generated array contents: %v", commandMarshalled)
+ }
+
+ buf := bytes.NewBuffer(commandMarshalled)
+ commandUnmarshalledRaw, err := Unmarshal(buf)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ commandUnmarshalled, ok := commandUnmarshalledRaw.(*ProxyInformationResponse)
+
+ if !ok {
+ t.Fatal("failed typecast")
+ }
+
+ if commandInput.Exists != commandUnmarshalled.Exists {
+ t.Fail()
+ log.Printf("Exists's are not equal (orig: '%t', unmsh: '%t')", commandInput.Exists, commandUnmarshalled.Exists)
+ }
+
+ if commandInput.SourceIP != commandUnmarshalled.SourceIP {
+ t.Fail()
+ log.Printf("SourceIP's are not equal (orig: %s, unmsh: %s)", commandInput.SourceIP, commandUnmarshalled.SourceIP)
+ }
+
+ if commandInput.SourcePort != commandUnmarshalled.SourcePort {
+ t.Fail()
+ log.Printf("SourcePort's are not equal (orig: %d, unmsh: %d)", commandInput.SourcePort, commandUnmarshalled.SourcePort)
+ }
+
+ if commandInput.DestPort != commandUnmarshalled.DestPort {
+ t.Fail()
+ log.Printf("DestPort's are not equal (orig: %d, unmsh: %d)", commandInput.DestPort, commandUnmarshalled.DestPort)
+ }
+
+ if commandInput.Protocol != commandUnmarshalled.Protocol {
+ t.Fail()
+ log.Printf("Protocols are not equal (orig: %s, unmsh: %s)", commandInput.Protocol, commandUnmarshalled.Protocol)
+ }
+}
+
+func TestProxyInformationResponseNoExist(t *testing.T) {
+ commandInput := &ProxyInformationResponse{
+ Exists: false,
+ }
+
+ commandMarshalled, err := Marshal(commandInput)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ if logLevel == "debug" {
+ log.Printf("Generated array contents: %v", commandMarshalled)
+ }
+
+ buf := bytes.NewBuffer(commandMarshalled)
+ commandUnmarshalledRaw, err := Unmarshal(buf)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ commandUnmarshalled, ok := commandUnmarshalledRaw.(*ProxyInformationResponse)
+
+ if !ok {
+ t.Fatal("failed typecast")
+ }
+
+ if commandInput.Exists != commandUnmarshalled.Exists {
+ t.Fail()
+ log.Printf("Exists's are not equal (orig: '%t', unmsh: '%t')", commandInput.Exists, commandUnmarshalled.Exists)
+ }
+}
+
+func TestProxyConnectionInformationRequest(t *testing.T) {
+ commandInput := &ProxyConnectionInformationRequest{
+ ProxyID: 19132,
+ ConnectionID: 25565,
+ }
+
+ commandMarshalled, err := Marshal(commandInput)
+
+ if logLevel == "debug" {
+ log.Printf("Generated array contents: %v", commandMarshalled)
+ }
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ buf := bytes.NewBuffer(commandMarshalled)
+ commandUnmarshalledRaw, err := Unmarshal(buf)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ commandUnmarshalled, ok := commandUnmarshalledRaw.(*ProxyConnectionInformationRequest)
+
+ if !ok {
+ t.Fatal("failed typecast")
+ }
+
+ if commandInput.ProxyID != commandUnmarshalled.ProxyID {
+ t.Fail()
+ log.Printf("ProxyID's are not equal (orig: '%d', unmsh: '%d')", commandInput.ProxyID, commandUnmarshalled.ProxyID)
+ }
+
+ if commandInput.ConnectionID != commandUnmarshalled.ConnectionID {
+ t.Fail()
+ log.Printf("ConnectionID's are not equal (orig: '%d', unmsh: '%d')", commandInput.ConnectionID, commandUnmarshalled.ConnectionID)
+ }
+}
+
+func TestProxyConnectionInformationResponseExists(t *testing.T) {
+ commandInput := &ProxyConnectionInformationResponse{
+ Exists: true,
+ ClientIP: "192.168.0.139",
+ ClientPort: 19132,
+ }
+
+ commandMarshalled, err := Marshal(commandInput)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ if logLevel == "debug" {
+ log.Printf("Generated array contents: %v", commandMarshalled)
+ }
+
+ buf := bytes.NewBuffer(commandMarshalled)
+ commandUnmarshalledRaw, err := Unmarshal(buf)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ commandUnmarshalled, ok := commandUnmarshalledRaw.(*ProxyConnectionInformationResponse)
+
+ if !ok {
+ t.Fatal("failed typecast")
+ }
+
+ if commandInput.Exists != commandUnmarshalled.Exists {
+ t.Fail()
+ log.Printf("Exists's are not equal (orig: '%t', unmsh: '%t')", commandInput.Exists, commandUnmarshalled.Exists)
+ }
+
+ if commandInput.ClientIP != commandUnmarshalled.ClientIP {
+ t.Fail()
+		log.Printf("ClientIP's are not equal (orig: %s, unmsh: %s)", commandInput.ClientIP, commandUnmarshalled.ClientIP)
+ }
+
+ if commandInput.ClientPort != commandUnmarshalled.ClientPort {
+ t.Fail()
+ log.Printf("ClientPort's are not equal (orig: %d, unmsh: %d)", commandInput.ClientPort, commandUnmarshalled.ClientPort)
+ }
+}
+
+func TestProxyConnectionInformationResponseNoExists(t *testing.T) {
+ commandInput := &ProxyConnectionInformationResponse{
+ Exists: false,
+ }
+
+ commandMarshalled, err := Marshal(commandInput)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ if logLevel == "debug" {
+ log.Printf("Generated array contents: %v", commandMarshalled)
+ }
+
+ buf := bytes.NewBuffer(commandMarshalled)
+ commandUnmarshalledRaw, err := Unmarshal(buf)
+
+ if err != nil {
+ t.Fatal(err.Error())
+ }
+
+ commandUnmarshalled, ok := commandUnmarshalledRaw.(*ProxyConnectionInformationResponse)
+
+ if !ok {
+ t.Fatal("failed typecast")
+ }
+
+ if commandInput.Exists != commandUnmarshalled.Exists {
+ t.Fail()
+ log.Printf("Exists's are not equal (orig: '%t', unmsh: '%t')", commandInput.Exists, commandUnmarshalled.Exists)
+ }
+}
diff --git a/backend/sshappbackend/datacommands/unmarshal.go b/backend/sshappbackend/datacommands/unmarshal.go
new file mode 100644
index 0000000..d9d0523
--- /dev/null
+++ b/backend/sshappbackend/datacommands/unmarshal.go
@@ -0,0 +1,422 @@
+package datacommands
+
+import (
+ "encoding/binary"
+ "fmt"
+ "io"
+ "net"
+)
+
+// Unmarshal reads a single command from the provided reader and returns
+// the unmarshalled struct, or an error.
+func Unmarshal(conn io.Reader) (interface{}, error) {
+ // Every command starts with a 1-byte command ID.
+ header := make([]byte, 1)
+
+ if _, err := io.ReadFull(conn, header); err != nil {
+ return nil, fmt.Errorf("couldn't read command ID: %w", err)
+ }
+
+ cmdID := header[0]
+ switch cmdID {
+ // ProxyStatusRequest: 1 byte ID + 2 bytes ProxyID.
+ case ProxyStatusRequestID:
+ buf := make([]byte, 2)
+
+ if _, err := io.ReadFull(conn, buf); err != nil {
+ return nil, fmt.Errorf("couldn't read ProxyStatusRequest ProxyID: %w", err)
+ }
+
+ proxyID := binary.BigEndian.Uint16(buf)
+
+ return &ProxyStatusRequest{
+ ProxyID: proxyID,
+ }, nil
+
+ // ProxyStatusResponse: 1 byte ID + 2 bytes ProxyID + 1 byte IsActive.
+ case ProxyStatusResponseID:
+ buf := make([]byte, 2)
+
+ if _, err := io.ReadFull(conn, buf); err != nil {
+ return nil, fmt.Errorf("couldn't read ProxyStatusResponse ProxyID: %w", err)
+ }
+
+ proxyID := binary.BigEndian.Uint16(buf)
+ boolBuf := make([]byte, 1)
+
+ if _, err := io.ReadFull(conn, boolBuf); err != nil {
+ return nil, fmt.Errorf("couldn't read ProxyStatusResponse IsActive: %w", err)
+ }
+
+ isActive := boolBuf[0] != 0
+
+ return &ProxyStatusResponse{
+ ProxyID: proxyID,
+ IsActive: isActive,
+ }, nil
+
+ // RemoveProxy: 1 byte ID + 2 bytes ProxyID.
+ case RemoveProxyID:
+ buf := make([]byte, 2)
+
+ if _, err := io.ReadFull(conn, buf); err != nil {
+ return nil, fmt.Errorf("couldn't read RemoveProxy ProxyID: %w", err)
+ }
+
+ proxyID := binary.BigEndian.Uint16(buf)
+
+ return &RemoveProxy{
+ ProxyID: proxyID,
+ }, nil
+
+ // ProxyConnectionsRequest: 1 byte ID + 2 bytes ProxyID.
+ case ProxyConnectionsRequestID:
+ buf := make([]byte, 2)
+
+ if _, err := io.ReadFull(conn, buf); err != nil {
+ return nil, fmt.Errorf("couldn't read ProxyConnectionsRequest ProxyID: %w", err)
+ }
+
+ proxyID := binary.BigEndian.Uint16(buf)
+
+ return &ProxyConnectionsRequest{
+ ProxyID: proxyID,
+ }, nil
+
+ // ProxyConnectionsResponse: 1 byte ID + 2 bytes Connections length + 2 bytes for each Connection in Connections.
+ case ProxyConnectionsResponseID:
+ buf := make([]byte, 2)
+
+ if _, err := io.ReadFull(conn, buf); err != nil {
+ return nil, fmt.Errorf("couldn't read ProxyConnectionsResponse length: %w", err)
+ }
+
+ length := binary.BigEndian.Uint16(buf)
+ connections := make([]uint16, length)
+
+ var failedDuringReading error
+
+ for connectionIndex := range connections {
+ if _, err := io.ReadFull(conn, buf); err != nil {
+				failedDuringReading = fmt.Errorf("couldn't read ProxyConnectionsResponse entry at position %d: %w", connectionIndex, err)
+ break
+ }
+
+ connections[connectionIndex] = binary.BigEndian.Uint16(buf)
+ }
+
+ return &ProxyConnectionsResponse{
+ Connections: connections,
+ }, failedDuringReading
+
+ // ProxyInstanceResponse: 1 byte ID + 2 bytes Proxies length + 2 bytes for each Proxy in Proxies.
+ case ProxyInstanceResponseID:
+ buf := make([]byte, 2)
+
+ if _, err := io.ReadFull(conn, buf); err != nil {
+			return nil, fmt.Errorf("couldn't read ProxyInstanceResponse length: %w", err)
+ }
+
+ length := binary.BigEndian.Uint16(buf)
+ proxies := make([]uint16, length)
+
+ var failedDuringReading error
+
+		for proxyIndex := range proxies {
+			if _, err := io.ReadFull(conn, buf); err != nil {
+				failedDuringReading = fmt.Errorf("couldn't read ProxyInstanceResponse entry at position %d: %w", proxyIndex, err)
+				break
+			}
+
+			proxies[proxyIndex] = binary.BigEndian.Uint16(buf)
+ }
+
+ return &ProxyInstanceResponse{
+ Proxies: proxies,
+ }, failedDuringReading
+
+ // TCPConnectionOpened: 1 byte ID + 2 bytes ProxyID + 2 bytes ConnectionID.
+ case TCPConnectionOpenedID:
+ buf := make([]byte, 2+2)
+
+ if _, err := io.ReadFull(conn, buf); err != nil {
+ return nil, fmt.Errorf("couldn't read TCPConnectionOpened fields: %w", err)
+ }
+
+ proxyID := binary.BigEndian.Uint16(buf[0:2])
+ connectionID := binary.BigEndian.Uint16(buf[2:4])
+
+ return &TCPConnectionOpened{
+ ProxyID: proxyID,
+ ConnectionID: connectionID,
+ }, nil
+
+ // TCPConnectionClosed: 1 byte ID + 2 bytes ProxyID + 2 bytes ConnectionID.
+ case TCPConnectionClosedID:
+ buf := make([]byte, 2+2)
+
+ if _, err := io.ReadFull(conn, buf); err != nil {
+ return nil, fmt.Errorf("couldn't read TCPConnectionClosed fields: %w", err)
+ }
+
+ proxyID := binary.BigEndian.Uint16(buf[0:2])
+ connectionID := binary.BigEndian.Uint16(buf[2:4])
+
+ return &TCPConnectionClosed{
+ ProxyID: proxyID,
+ ConnectionID: connectionID,
+ }, nil
+
+ // TCPProxyData: 1 byte ID + 2 bytes ProxyID + 2 bytes ConnectionID + 2 bytes DataLength.
+ case TCPProxyDataID:
+ buf := make([]byte, 2+2+2)
+
+ if _, err := io.ReadFull(conn, buf); err != nil {
+ return nil, fmt.Errorf("couldn't read TCPProxyData fields: %w", err)
+ }
+
+ proxyID := binary.BigEndian.Uint16(buf[0:2])
+ connectionID := binary.BigEndian.Uint16(buf[2:4])
+ dataLength := binary.BigEndian.Uint16(buf[4:6])
+
+ return &TCPProxyData{
+ ProxyID: proxyID,
+ ConnectionID: connectionID,
+ DataLength: dataLength,
+ }, nil
+
+ // UDPProxyData:
+	// Format: 1 byte ID + 2 bytes ProxyID +
+	// 1 byte IP version + IP bytes + 2 bytes ClientPort + 2 bytes DataLength.
+ case UDPProxyDataID:
+		// Read ProxyID.
+ buf := make([]byte, 2)
+
+ if _, err := io.ReadFull(conn, buf); err != nil {
+ return nil, fmt.Errorf("couldn't read UDPProxyData ProxyID/ConnectionID: %w", err)
+ }
+
+ proxyID := binary.BigEndian.Uint16(buf)
+
+ // Read IP version.
+ ipVerBuf := make([]byte, 1)
+
+ if _, err := io.ReadFull(conn, ipVerBuf); err != nil {
+ return nil, fmt.Errorf("couldn't read UDPProxyData IP version: %w", err)
+ }
+
+ var ipSize int
+
+ if ipVerBuf[0] == 4 {
+ ipSize = IPv4Size
+ } else if ipVerBuf[0] == 6 {
+ ipSize = IPv6Size
+ } else {
+ return nil, fmt.Errorf("invalid IP version received: %v", ipVerBuf[0])
+ }
+
+ // Read the IP bytes.
+ ipBytes := make([]byte, ipSize)
+ if _, err := io.ReadFull(conn, ipBytes); err != nil {
+ return nil, fmt.Errorf("couldn't read UDPProxyData IP bytes: %w", err)
+ }
+ clientIP := net.IP(ipBytes).String()
+
+ // Read ClientPort.
+ portBuf := make([]byte, 2)
+
+ if _, err := io.ReadFull(conn, portBuf); err != nil {
+ return nil, fmt.Errorf("couldn't read UDPProxyData ClientPort: %w", err)
+ }
+
+ clientPort := binary.BigEndian.Uint16(portBuf)
+
+ // Read DataLength.
+ dataLengthBuf := make([]byte, 2)
+
+ if _, err := io.ReadFull(conn, dataLengthBuf); err != nil {
+ return nil, fmt.Errorf("couldn't read UDPProxyData DataLength: %w", err)
+ }
+
+ dataLength := binary.BigEndian.Uint16(dataLengthBuf)
+
+ return &UDPProxyData{
+ ProxyID: proxyID,
+ ClientIP: clientIP,
+ ClientPort: clientPort,
+ DataLength: dataLength,
+ }, nil
+
+ // ProxyInformationRequest: 1 byte ID + 2 bytes ProxyID.
+ case ProxyInformationRequestID:
+ buf := make([]byte, 2)
+
+ if _, err := io.ReadFull(conn, buf); err != nil {
+ return nil, fmt.Errorf("couldn't read ProxyInformationRequest ProxyID: %w", err)
+ }
+
+ proxyID := binary.BigEndian.Uint16(buf)
+
+ return &ProxyInformationRequest{
+ ProxyID: proxyID,
+ }, nil
+
+ // ProxyInformationResponse:
+ // Format: 1 byte ID + 1 byte Exists +
+ // 1 byte IP version + IP bytes + 2 bytes SourcePort + 2 bytes DestPort + 1 byte Protocol.
+ case ProxyInformationResponseID:
+ // Read Exists flag.
+ boolBuf := make([]byte, 1)
+
+ if _, err := io.ReadFull(conn, boolBuf); err != nil {
+ return nil, fmt.Errorf("couldn't read ProxyInformationResponse Exists flag: %w", err)
+ }
+
+ exists := boolBuf[0] != 0
+
+ if !exists {
+ return &ProxyInformationResponse{
+ Exists: exists,
+ }, nil
+ }
+
+ // Read IP version.
+ ipVerBuf := make([]byte, 1)
+
+ if _, err := io.ReadFull(conn, ipVerBuf); err != nil {
+ return nil, fmt.Errorf("couldn't read ProxyInformationResponse IP version: %w", err)
+ }
+
+ var ipSize int
+
+ if ipVerBuf[0] == 4 {
+ ipSize = IPv4Size
+ } else if ipVerBuf[0] == 6 {
+ ipSize = IPv6Size
+ } else {
+ return nil, fmt.Errorf("invalid IP version in ProxyInformationResponse: %v", ipVerBuf[0])
+ }
+
+ // Read the source IP bytes.
+ ipBytes := make([]byte, ipSize)
+
+ if _, err := io.ReadFull(conn, ipBytes); err != nil {
+ return nil, fmt.Errorf("couldn't read ProxyInformationResponse IP bytes: %w", err)
+ }
+
+ sourceIP := net.IP(ipBytes).String()
+
+ // Read SourcePort and DestPort.
+ portsBuf := make([]byte, 2+2)
+
+ if _, err := io.ReadFull(conn, portsBuf); err != nil {
+ return nil, fmt.Errorf("couldn't read ProxyInformationResponse ports: %w", err)
+ }
+
+ sourcePort := binary.BigEndian.Uint16(portsBuf[0:2])
+ destPort := binary.BigEndian.Uint16(portsBuf[2:4])
+
+ // Read protocol.
+ protoBuf := make([]byte, 1)
+
+ if _, err := io.ReadFull(conn, protoBuf); err != nil {
+ return nil, fmt.Errorf("couldn't read ProxyInformationResponse protocol: %w", err)
+ }
+
+ var protocol string
+
+ if protoBuf[0] == TCP {
+ protocol = "tcp"
+ } else if protoBuf[0] == UDP {
+ protocol = "udp"
+ } else {
+ return nil, fmt.Errorf("invalid protocol value in ProxyInformationResponse: %d", protoBuf[0])
+ }
+
+ return &ProxyInformationResponse{
+ Exists: exists,
+ SourceIP: sourceIP,
+ SourcePort: sourcePort,
+ DestPort: destPort,
+ Protocol: protocol,
+ }, nil
+
+ // ProxyConnectionInformationRequest: 1 byte ID + 2 bytes ProxyID + 2 bytes ConnectionID.
+ case ProxyConnectionInformationRequestID:
+ buf := make([]byte, 2+2)
+
+ if _, err := io.ReadFull(conn, buf); err != nil {
+ return nil, fmt.Errorf("couldn't read ProxyConnectionInformationRequest fields: %w", err)
+ }
+
+ proxyID := binary.BigEndian.Uint16(buf[0:2])
+ connectionID := binary.BigEndian.Uint16(buf[2:4])
+
+ return &ProxyConnectionInformationRequest{
+ ProxyID: proxyID,
+ ConnectionID: connectionID,
+ }, nil
+
+ // ProxyConnectionInformationResponse:
+ // Format: 1 byte ID + 1 byte Exists + 1 byte IP version + IP bytes + 2 bytes ClientPort.
+ case ProxyConnectionInformationResponseID:
+ // Read Exists flag.
+ boolBuf := make([]byte, 1)
+ if _, err := io.ReadFull(conn, boolBuf); err != nil {
+ return nil, fmt.Errorf("couldn't read ProxyConnectionInformationResponse Exists flag: %w", err)
+ }
+
+ exists := boolBuf[0] != 0
+
+ if !exists {
+ return &ProxyConnectionInformationResponse{
+ Exists: exists,
+ }, nil
+ }
+
+ // Read IP version.
+ ipVerBuf := make([]byte, 1)
+
+ if _, err := io.ReadFull(conn, ipVerBuf); err != nil {
+ return nil, fmt.Errorf("couldn't read ProxyConnectionInformationResponse IP version: %w", err)
+ }
+
+ if ipVerBuf[0] != 4 && ipVerBuf[0] != 6 {
+ return nil, fmt.Errorf("invalid IP version in ProxyConnectionInformationResponse: %v", ipVerBuf[0])
+ }
+
+ var ipSize int
+
+ if ipVerBuf[0] == 4 {
+ ipSize = IPv4Size
+ } else {
+ ipSize = IPv6Size
+ }
+
+ // Read IP bytes.
+ ipBytes := make([]byte, ipSize)
+
+ if _, err := io.ReadFull(conn, ipBytes); err != nil {
+ return nil, fmt.Errorf("couldn't read ProxyConnectionInformationResponse IP bytes: %w", err)
+ }
+
+ clientIP := net.IP(ipBytes).String()
+
+ // Read ClientPort.
+ portBuf := make([]byte, 2)
+
+ if _, err := io.ReadFull(conn, portBuf); err != nil {
+ return nil, fmt.Errorf("couldn't read ProxyConnectionInformationResponse ClientPort: %w", err)
+ }
+
+ clientPort := binary.BigEndian.Uint16(portBuf)
+
+ return &ProxyConnectionInformationResponse{
+ Exists: exists,
+ ClientIP: clientIP,
+ ClientPort: clientPort,
+ }, nil
+ default:
+ return nil, fmt.Errorf("unknown command id: %v", cmdID)
+ }
+}
diff --git a/backend/sshappbackend/gaslighter/gaslighter.go b/backend/sshappbackend/gaslighter/gaslighter.go
new file mode 100644
index 0000000..ecccec7
--- /dev/null
+++ b/backend/sshappbackend/gaslighter/gaslighter.go
@@ -0,0 +1,30 @@
+package gaslighter
+
+import "io"
+
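+// Gaslighter is an io.Reader that replays a single pre-read byte (typically the command ID already
+// consumed by the caller) before passing reads through to ProxiedReader, so that Unmarshal can
+// consume a complete packet from the start.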
+type Gaslighter struct {
+ Byte byte
+ HasGaslit bool
+ ProxiedReader io.Reader
+}
+
+func (gaslighter *Gaslighter) Read(p []byte) (n int, err error) {
+ if gaslighter.HasGaslit {
+ return gaslighter.ProxiedReader.Read(p)
+ }
+
+ if len(p) == 0 {
+ return 0, nil
+ }
+
+ p[0] = gaslighter.Byte
+ gaslighter.HasGaslit = true
+
+ if len(p) > 1 {
+ n, err := gaslighter.ProxiedReader.Read(p[1:])
+
+ return n + 1, err
+ } else {
+ return 1, nil
+ }
+}
diff --git a/backend/sshappbackend/local-code/fs.go b/backend/sshappbackend/local-code/fs.go
new file mode 100644
index 0000000..7fe43e4
--- /dev/null
+++ b/backend/sshappbackend/local-code/fs.go
@@ -0,0 +1,8 @@
+package main
+
+import (
+ "embed"
+)
+
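+// binFiles embeds the prebuilt remote runtime binaries under remote-bin/ so they can be uploaded
+// to the target host over SFTP at startup.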
+//go:embed remote-bin
+var binFiles embed.FS
diff --git a/backend/sshappbackend/local-code/logger.go b/backend/sshappbackend/local-code/logger.go
new file mode 100644
index 0000000..d8ed3f9
--- /dev/null
+++ b/backend/sshappbackend/local-code/logger.go
@@ -0,0 +1,23 @@
+package main
+
+import (
+ "strings"
+
+ "github.com/charmbracelet/log"
+)
+
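+// WriteLogger forwards each non-empty line written to it to the logger, prefixed with "Process:".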
+type WriteLogger struct{}
+
+func (writer WriteLogger) Write(p []byte) (n int, err error) {
+ logSplit := strings.Split(string(p), "\n")
+
+ for _, line := range logSplit {
+ if line == "" {
+ continue
+ }
+
+ log.Infof("Process: %s", line)
+ }
+
+ return len(p), err
+}
diff --git a/backend/sshappbackend/local-code/main.go b/backend/sshappbackend/local-code/main.go
new file mode 100644
index 0000000..34a85d0
--- /dev/null
+++ b/backend/sshappbackend/local-code/main.go
@@ -0,0 +1,868 @@
+package main
+
+import (
+ "bytes"
+ "crypto/md5"
+ "encoding/hex"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "io"
+ "math/rand/v2"
+ "net"
+ "os"
+ "strings"
+ "sync"
+ "time"
+
+ "git.terah.dev/imterah/hermes/backend/backendutil"
+ "git.terah.dev/imterah/hermes/backend/commonbackend"
+ "git.terah.dev/imterah/hermes/backend/sshappbackend/datacommands"
+ "git.terah.dev/imterah/hermes/backend/sshappbackend/gaslighter"
+ "git.terah.dev/imterah/hermes/backend/sshappbackend/local-code/porttranslation"
+ "github.com/charmbracelet/log"
+ "github.com/go-playground/validator/v10"
+ "github.com/pkg/sftp"
+ "golang.org/x/crypto/ssh"
+)
+
+var validatorInstance *validator.Validate
+
+type TCPProxy struct {
+ proxyInformation *commonbackend.AddProxy
+ connections map[uint16]net.Conn
+}
+
+type UDPProxy struct {
+ proxyInformation *commonbackend.AddProxy
+ portTranslation *porttranslation.PortTranslation
+}
+
+type SSHAppBackendData struct {
+ IP string `json:"ip" validate:"required"`
+ Port uint16 `json:"port" validate:"required"`
+ Username string `json:"username" validate:"required"`
+ PrivateKey string `json:"privateKey" validate:"required"`
+ ListenOnIPs []string `json:"listenOnIPs"`
+}
+
+type SSHAppBackend struct {
+ config *SSHAppBackendData
+ conn *ssh.Client
+ listener net.Listener
+ currentSock net.Conn
+
+ tcpProxies map[uint16]*TCPProxy
+ udpProxies map[uint16]*UDPProxy
+
+	// globalNonCriticalMessageLock serializes messages that don't need low-latency, high-speed
+	// transmission. This ensures each request is safely matched to the correct reply.
+	globalNonCriticalMessageLock sync.Mutex
+	// globalNonCriticalMessageChan carries replies for non-critical messages that expect a response.
+ globalNonCriticalMessageChan chan interface{}
+}
+
+func (backend *SSHAppBackend) StartBackend(configBytes []byte) (bool, error) {
+ log.Info("SSHAppBackend is initializing...")
+
+ if validatorInstance == nil {
+ validatorInstance = validator.New()
+ }
+
+ backend.globalNonCriticalMessageChan = make(chan interface{})
+ backend.tcpProxies = map[uint16]*TCPProxy{}
+ backend.udpProxies = map[uint16]*UDPProxy{}
+
+ var backendData SSHAppBackendData
+
+ if err := json.Unmarshal(configBytes, &backendData); err != nil {
+ return false, err
+ }
+
+ if err := validatorInstance.Struct(&backendData); err != nil {
+ return false, err
+ }
+
+ backend.config = &backendData
+
+ if len(backend.config.ListenOnIPs) == 0 {
+ backend.config.ListenOnIPs = []string{"0.0.0.0"}
+ }
+
+ signer, err := ssh.ParsePrivateKey([]byte(backendData.PrivateKey))
+
+ if err != nil {
+ log.Warnf("Failed to initialize: %s", err.Error())
+ return false, err
+ }
+
+ auth := ssh.PublicKeys(signer)
+
+ config := &ssh.ClientConfig{
+ HostKeyCallback: ssh.InsecureIgnoreHostKey(),
+ User: backendData.Username,
+ Auth: []ssh.AuthMethod{
+ auth,
+ },
+ }
+
+ conn, err := ssh.Dial("tcp", fmt.Sprintf("%s:%d", backendData.IP, backendData.Port), config)
+
+ if err != nil {
+ log.Warnf("Failed to initialize: %s", err.Error())
+ return false, err
+ }
+
+ backend.conn = conn
+
+ log.Debug("SSHAppBackend has connected successfully.")
+ log.Debug("Getting CPU architecture...")
+
+ session, err := backend.conn.NewSession()
+
+ if err != nil {
+ log.Warnf("Failed to create session: %s", err.Error())
+ conn.Close()
+ backend.conn = nil
+ return false, err
+ }
+
+ var stdoutBuf bytes.Buffer
+ session.Stdout = &stdoutBuf
+
+ err = session.Run("uname -m")
+
+ if err != nil {
+ log.Warnf("Failed to run uname command: %s", err.Error())
+ conn.Close()
+ backend.conn = nil
+ return false, err
+ }
+
+ cpuArchBytes := make([]byte, stdoutBuf.Len())
+ stdoutBuf.Read(cpuArchBytes)
+
+ cpuArch := string(cpuArchBytes)
+ cpuArch = cpuArch[:len(cpuArch)-1]
+
+ var backendBinary string
+
+ // Ordered in (subjective) popularity
+ if cpuArch == "x86_64" {
+ backendBinary = "remote-bin/rt-amd64"
+ } else if cpuArch == "aarch64" {
+ backendBinary = "remote-bin/rt-arm64"
+ } else if cpuArch == "arm" {
+ backendBinary = "remote-bin/rt-arm"
+ } else if len(cpuArch) == 4 && string(cpuArch[0]) == "i" && strings.HasSuffix(cpuArch, "86") {
+ backendBinary = "remote-bin/rt-386"
+ } else {
+ log.Warn("Failed to determine executable to use: CPU architecture not compiled/supported currently")
+ conn.Close()
+ backend.conn = nil
+ return false, fmt.Errorf("CPU architecture not compiled/supported currently")
+ }
+
+ log.Debug("Checking if we need to copy the application...")
+
+ var binary []byte
+ needsToCopyBinary := true
+
+ session, err = backend.conn.NewSession()
+
+ if err != nil {
+ log.Warnf("Failed to create session: %s", err.Error())
+ conn.Close()
+ backend.conn = nil
+ return false, err
+ }
+
+ session.Stdout = &stdoutBuf
+
+	err = session.Run("[ -f /tmp/sshappbackend.runtime ] && md5sum /tmp/sshappbackend.runtime | cut -d \" \" -f 1")
+
+	if err != nil {
+		// A non-zero exit status only means the runtime binary doesn't exist on the remote host yet.
+		var exitErr *ssh.ExitError
+
+		if !errors.As(err, &exitErr) {
+			log.Warnf("Failed to calculate hash of possibly existing backend: %s", err.Error())
+			conn.Close()
+			backend.conn = nil
+			return false, err
+		}
+	}
+
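+	// If the md5sum command produced output, the runtime binary already exists on the remote host;
+	// compare its hash against the embedded binary to decide whether we need to re-upload it.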
+ fileExists := stdoutBuf.Len() != 0
+
+ if fileExists {
+ remoteMD5HashStringBuf := make([]byte, stdoutBuf.Len())
+ stdoutBuf.Read(remoteMD5HashStringBuf)
+
+ remoteMD5HashString := string(remoteMD5HashStringBuf)
+ remoteMD5HashString = remoteMD5HashString[:len(remoteMD5HashString)-1]
+
+ remoteMD5Hash, err := hex.DecodeString(remoteMD5HashString)
+
+ if err != nil {
+ log.Warnf("Failed to decode hex: %s", err.Error())
+ conn.Close()
+ backend.conn = nil
+ return false, err
+ }
+
+ binary, err = binFiles.ReadFile(backendBinary)
+
+ if err != nil {
+ log.Warnf("Failed to read file in the embedded FS: %s", err.Error())
+ conn.Close()
+ backend.conn = nil
+ return false, fmt.Errorf("(embedded FS): %s", err.Error())
+ }
+
+ localMD5Hash := md5.Sum(binary)
+
+ log.Infof("remote: %s, local: %s", remoteMD5HashString, hex.EncodeToString(localMD5Hash[:]))
+
+		if bytes.Equal(localMD5Hash[:], remoteMD5Hash) {
+ needsToCopyBinary = false
+ }
+ }
+
+ if needsToCopyBinary {
+ log.Debug("Copying binary...")
+
+ sftpInstance, err := sftp.NewClient(conn)
+
+ if err != nil {
+ log.Warnf("Failed to initialize SFTP: %s", err.Error())
+ conn.Close()
+ backend.conn = nil
+ return false, err
+ }
+
+ defer sftpInstance.Close()
+
+ if len(binary) == 0 {
+ binary, err = binFiles.ReadFile(backendBinary)
+
+ if err != nil {
+ log.Warnf("Failed to read file in the embedded FS: %s", err.Error())
+ conn.Close()
+ backend.conn = nil
+ return false, fmt.Errorf("(embedded FS): %s", err.Error())
+ }
+ }
+
+ var file *sftp.File
+
+ if fileExists {
+ file, err = sftpInstance.OpenFile("/tmp/sshappbackend.runtime", os.O_WRONLY)
+ } else {
+ file, err = sftpInstance.Create("/tmp/sshappbackend.runtime")
+ }
+
+ if err != nil {
+ log.Warnf("Failed to create (or open) file: %s", err.Error())
+ conn.Close()
+ backend.conn = nil
+ return false, err
+ }
+
+ _, err = file.Write(binary)
+
+ if err != nil {
+ log.Warnf("Failed to write file: %s", err.Error())
+ conn.Close()
+ backend.conn = nil
+ return false, err
+ }
+
+ err = file.Chmod(0755)
+
+ if err != nil {
+ log.Warnf("Failed to change permissions on file: %s", err.Error())
+ conn.Close()
+ backend.conn = nil
+ return false, err
+ }
+
+ log.Debug("Done copying file.")
+ sftpInstance.Close()
+ } else {
+ log.Debug("Skipping copying as there's a copy on disk already.")
+ }
+
+ log.Debug("Initializing Unix socket...")
+
+ socketPath := fmt.Sprintf("/tmp/sock-%d.sock", rand.Uint())
+ listener, err := conn.ListenUnix(socketPath)
+
+ if err != nil {
+ log.Warnf("Failed to listen on socket: %s", err.Error())
+ conn.Close()
+ backend.conn = nil
+ return false, err
+ }
+
+ log.Debug("Starting process...")
+
+ session, err = backend.conn.NewSession()
+
+ if err != nil {
+ log.Warnf("Failed to create session: %s", err.Error())
+ conn.Close()
+ backend.conn = nil
+ return false, err
+ }
+
+ backend.listener = listener
+
+ session.Stdout = WriteLogger{}
+ session.Stderr = WriteLogger{}
+
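+	// Keep the remote runtime alive: restart it whenever it exits, and only give up on unexpected
+	// SSH-level failures.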
+ go func() {
+ for {
+ err := session.Run(fmt.Sprintf("HERMES_LOG_LEVEL=\"%s\" HERMES_API_SOCK=\"%s\" /tmp/sshappbackend.runtime", os.Getenv("HERMES_LOG_LEVEL"), socketPath))
+
+			var exitErr *ssh.ExitError
+			var exitMissingErr *ssh.ExitMissingError
+
+			if err != nil && !errors.As(err, &exitErr) && !errors.As(err, &exitMissingErr) {
+				log.Errorf("Critically failed during execution of remote code: %s", err.Error())
+				return
+			} else {
+				log.Warn("Remote code exited. Restarting...")
+			}
+ }
+ }()
+
+ go backend.sockServerHandler()
+
+ log.Debug("Started process. Waiting for Unix socket connection...")
+
+ for backend.currentSock == nil {
+ time.Sleep(10 * time.Millisecond)
+ }
+
+ log.Debug("Detected connection. Sending initialization command...")
+
+ proxyStatusRaw, err := backend.SendNonCriticalMessage(&commonbackend.Start{
+ Arguments: []byte{},
+ })
+
+ if err != nil {
+ return false, err
+ }
+
+ proxyStatus, ok := proxyStatusRaw.(*commonbackend.BackendStatusResponse)
+
+ if !ok {
+		return false, fmt.Errorf("received invalid response type: %T", proxyStatusRaw)
+ }
+
+ if proxyStatus.StatusCode == commonbackend.StatusFailure {
+ if proxyStatus.Message == "" {
+ return false, fmt.Errorf("failed to initialize backend in remote code")
+ } else {
+ return false, fmt.Errorf("failed to initialize backend in remote code: %s", proxyStatus.Message)
+ }
+ }
+
+ log.Info("SSHAppBackend has initialized successfully.")
+
+ return true, nil
+}
+
+func (backend *SSHAppBackend) StopBackend() (bool, error) {
+ err := backend.conn.Close()
+
+ if err != nil {
+ return false, err
+ }
+
+ return true, nil
+}
+
+func (backend *SSHAppBackend) GetBackendStatus() (bool, error) {
+ return backend.conn != nil, nil
+}
+
+func (backend *SSHAppBackend) StartProxy(command *commonbackend.AddProxy) (bool, error) {
+ proxyStatusRaw, err := backend.SendNonCriticalMessage(command)
+
+ if err != nil {
+ return false, err
+ }
+
+ proxyStatus, ok := proxyStatusRaw.(*datacommands.ProxyStatusResponse)
+
+ if !ok {
+		return false, fmt.Errorf("received invalid response type: %T", proxyStatusRaw)
+ }
+
+ if !proxyStatus.IsActive {
+ return false, fmt.Errorf("failed to initialize proxy in remote code")
+ }
+
+ if command.Protocol == "tcp" {
+ backend.tcpProxies[proxyStatus.ProxyID] = &TCPProxy{
+ proxyInformation: command,
+ }
+
+ backend.tcpProxies[proxyStatus.ProxyID].connections = map[uint16]net.Conn{}
+ } else if command.Protocol == "udp" {
+ backend.udpProxies[proxyStatus.ProxyID] = &UDPProxy{
+ proxyInformation: command,
+ portTranslation: &porttranslation.PortTranslation{},
+ }
+
+ backend.udpProxies[proxyStatus.ProxyID].portTranslation.UDPAddr = &net.UDPAddr{
+ IP: net.ParseIP(command.SourceIP),
+ Port: int(command.SourcePort),
+ }
+
+ udpMessageCommand := &datacommands.UDPProxyData{}
+ udpMessageCommand.ProxyID = proxyStatus.ProxyID
+
+ backend.udpProxies[proxyStatus.ProxyID].portTranslation.WriteFrom = func(ip string, port uint16, data []byte) {
+ udpMessageCommand.ClientIP = ip
+ udpMessageCommand.ClientPort = port
+ udpMessageCommand.DataLength = uint16(len(data))
+
+ marshalledCommand, err := datacommands.Marshal(udpMessageCommand)
+
+ if err != nil {
+ log.Warnf("Failed to marshal UDP message header")
+ return
+ }
+
+ if _, err := backend.currentSock.Write(marshalledCommand); err != nil {
+ log.Warnf("Failed to write UDP message header")
+ return
+ }
+
+ if _, err := backend.currentSock.Write(data); err != nil {
+ log.Warnf("Failed to write UDP message")
+ return
+ }
+ }
+
+ go func() {
+ for {
+ time.Sleep(3 * time.Minute)
+
+ // Checks if the proxy still exists before continuing
+ _, ok := backend.udpProxies[proxyStatus.ProxyID]
+
+ if !ok {
+ return
+ }
+
+ // Then attempt to run cleanup tasks
+ log.Debug("Running UDP proxy cleanup tasks (invoking CleanupPorts() on portTranslation)")
+ backend.udpProxies[proxyStatus.ProxyID].portTranslation.CleanupPorts()
+ }
+ }()
+ }
+
+ return true, nil
+}
+
+func (backend *SSHAppBackend) StopProxy(command *commonbackend.RemoveProxy) (bool, error) {
+ if command.Protocol == "tcp" {
+ for proxyIndex, proxy := range backend.tcpProxies {
+ if proxy.proxyInformation.DestPort != command.DestPort {
+ continue
+ }
+
+ onDisconnect := &datacommands.TCPConnectionClosed{
+ ProxyID: proxyIndex,
+ }
+
+ for connectionIndex, connection := range proxy.connections {
+ connection.Close()
+ delete(proxy.connections, connectionIndex)
+
+ onDisconnect.ConnectionID = connectionIndex
+ disconnectionCommandMarshalled, err := datacommands.Marshal(onDisconnect)
+
+ if err != nil {
+ log.Errorf("failed to marshal disconnection message: %s", err.Error())
+ }
+
+ backend.currentSock.Write(disconnectionCommandMarshalled)
+ }
+
+ proxyStatusRaw, err := backend.SendNonCriticalMessage(&datacommands.RemoveProxy{
+ ProxyID: proxyIndex,
+ })
+
+ if err != nil {
+ return false, err
+ }
+
+ proxyStatus, ok := proxyStatusRaw.(*datacommands.ProxyStatusResponse)
+
+ if !ok {
+ log.Warn("Failed to stop proxy: typecast failed")
+ return true, fmt.Errorf("failed to stop proxy: typecast failed")
+ }
+
+ if proxyStatus.IsActive {
+ log.Warn("Failed to stop proxy: still running")
+ return true, fmt.Errorf("failed to stop proxy: still running")
+			}
+
+			delete(backend.tcpProxies, proxyIndex)
+
+			return true, nil
+		}
+ } else if command.Protocol == "udp" {
+ for proxyIndex, proxy := range backend.udpProxies {
+ if proxy.proxyInformation.DestPort != command.DestPort {
+ continue
+ }
+
+ proxyStatusRaw, err := backend.SendNonCriticalMessage(&datacommands.RemoveProxy{
+ ProxyID: proxyIndex,
+ })
+
+ if err != nil {
+ return false, err
+ }
+
+ proxyStatus, ok := proxyStatusRaw.(*datacommands.ProxyStatusResponse)
+
+ if !ok {
+ log.Warn("Failed to stop proxy: typecast failed")
+ return true, fmt.Errorf("failed to stop proxy: typecast failed")
+ }
+
+ if proxyStatus.IsActive {
+ log.Warn("Failed to stop proxy: still running")
+ return true, fmt.Errorf("failed to stop proxy: still running")
+ }
+
+ proxy.portTranslation.StopAllPorts()
+			delete(backend.udpProxies, proxyIndex)
+
+			return true, nil
+		}
+ }
+
+ return false, fmt.Errorf("could not find the proxy")
+}
+
+func (backend *SSHAppBackend) GetAllClientConnections() []*commonbackend.ProxyClientConnection {
+ connections := []*commonbackend.ProxyClientConnection{}
+ informationRequest := &datacommands.ProxyConnectionInformationRequest{}
+
+ for proxyID, tcpProxy := range backend.tcpProxies {
+ informationRequest.ProxyID = proxyID
+
+ for connectionID := range tcpProxy.connections {
+ informationRequest.ConnectionID = connectionID
+
+ proxyStatusRaw, err := backend.SendNonCriticalMessage(informationRequest)
+
+ if err != nil {
+ log.Warnf("Failed to get connection information for Proxy ID: %d, Connection ID: %d: %s", proxyID, connectionID, err.Error())
+ return connections
+ }
+
+ connectionStatus, ok := proxyStatusRaw.(*datacommands.ProxyConnectionInformationResponse)
+
+ if !ok {
+ log.Warn("Failed to get connection response: typecast failed")
+ return connections
+ }
+
+ if !connectionStatus.Exists {
+ log.Warnf("Connection with proxy ID: %d, Connection ID: %d is reported to not exist!", proxyID, connectionID)
+ tcpProxy.connections[connectionID].Close()
+ }
+
+ connections = append(connections, &commonbackend.ProxyClientConnection{
+ SourceIP: tcpProxy.proxyInformation.SourceIP,
+ SourcePort: tcpProxy.proxyInformation.SourcePort,
+ DestPort: tcpProxy.proxyInformation.DestPort,
+ ClientIP: connectionStatus.ClientIP,
+ ClientPort: connectionStatus.ClientPort,
+ })
+ }
+ }
+
+ return connections
+}
+
+// We don't have any parameter limitations, so we should be good.
+func (backend *SSHAppBackend) CheckParametersForConnections(clientParameters *commonbackend.CheckClientParameters) *commonbackend.CheckParametersResponse {
+ return &commonbackend.CheckParametersResponse{
+ IsValid: true,
+ }
+}
+
+func (backend *SSHAppBackend) CheckParametersForBackend(arguments []byte) *commonbackend.CheckParametersResponse {
+ var backendData SSHAppBackendData
+
+ if validatorInstance == nil {
+ validatorInstance = validator.New()
+ }
+
+ if err := json.Unmarshal(arguments, &backendData); err != nil {
+ return &commonbackend.CheckParametersResponse{
+ IsValid: false,
+ Message: fmt.Sprintf("could not read json: %s", err.Error()),
+ }
+ }
+
+ if err := validatorInstance.Struct(&backendData); err != nil {
+ return &commonbackend.CheckParametersResponse{
+ IsValid: false,
+ Message: fmt.Sprintf("failed validation of parameters: %s", err.Error()),
+ }
+ }
+
+ return &commonbackend.CheckParametersResponse{
+ IsValid: true,
+ }
+}
+
+func (backend *SSHAppBackend) OnTCPConnectionOpened(proxyID, connectionID uint16) {
+ conn, err := net.Dial("tcp", fmt.Sprintf("%s:%d", backend.tcpProxies[proxyID].proxyInformation.SourceIP, backend.tcpProxies[proxyID].proxyInformation.SourcePort))
+
+	if err != nil {
+		log.Warnf("failed to dial sock: %s", err.Error())
+		return
+	}
+
+ go func() {
+ dataBuf := make([]byte, 65535)
+
+ tcpData := &datacommands.TCPProxyData{
+ ProxyID: proxyID,
+ ConnectionID: connectionID,
+ }
+
+ for {
+ len, err := conn.Read(dataBuf)
+
+ if err != nil {
+ if errors.Is(err, net.ErrClosed) {
+ return
+ } else if err.Error() != "EOF" {
+ log.Warnf("failed to read from sock: %s", err.Error())
+ }
+
+ conn.Close()
+ break
+ }
+
+ tcpData.DataLength = uint16(len)
+ marshalledMessageCommand, err := datacommands.Marshal(tcpData)
+
+ if err != nil {
+ log.Warnf("failed to marshal message data: %s", err.Error())
+
+ conn.Close()
+ break
+ }
+
+ if _, err := backend.currentSock.Write(marshalledMessageCommand); err != nil {
+ log.Warnf("failed to send marshalled message data: %s", err.Error())
+
+ conn.Close()
+ break
+ }
+
+ if _, err := backend.currentSock.Write(dataBuf[:len]); err != nil {
+ log.Warnf("failed to send raw message data: %s", err.Error())
+
+ conn.Close()
+ break
+ }
+ }
+
+ onDisconnect := &datacommands.TCPConnectionClosed{
+ ProxyID: proxyID,
+ ConnectionID: connectionID,
+ }
+
+ disconnectionCommandMarshalled, err := datacommands.Marshal(onDisconnect)
+
+ if err != nil {
+ log.Errorf("failed to marshal disconnection message: %s", err.Error())
+ }
+
+ backend.currentSock.Write(disconnectionCommandMarshalled)
+ }()
+
+ backend.tcpProxies[proxyID].connections[connectionID] = conn
+}
+
+func (backend *SSHAppBackend) OnTCPConnectionClosed(proxyID, connectionID uint16) {
+ proxy, ok := backend.tcpProxies[proxyID]
+
+	if !ok {
+		log.Warn("Could not find TCP proxy")
+		return
+	}
+
+	connection, ok := proxy.connections[connectionID]
+
+	if !ok {
+		log.Warn("Could not find connection in TCP proxy")
+		return
+	}
+
+ connection.Close()
+ delete(proxy.connections, connectionID)
+}
+
+func (backend *SSHAppBackend) HandleTCPMessage(message *datacommands.TCPProxyData, data []byte) {
+ proxy, ok := backend.tcpProxies[message.ProxyID]
+
+	if !ok {
+		log.Warn("Could not find TCP proxy")
+		return
+	}
+
+	connection, ok := proxy.connections[message.ConnectionID]
+
+	if !ok {
+		log.Warn("Could not find connection in TCP proxy")
+		return
+	}
+
+ connection.Write(data)
+}
+
+func (backend *SSHAppBackend) HandleUDPMessage(message *datacommands.UDPProxyData, data []byte) {
+ proxy, ok := backend.udpProxies[message.ProxyID]
+
+	if !ok {
+		log.Warn("Could not find UDP proxy")
+		return
+	}
+
+ if _, err := proxy.portTranslation.WriteTo(message.ClientIP, message.ClientPort, data); err != nil {
+ log.Warnf("Failed to write to UDP: %s", err.Error())
+ }
+}
+
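+// SendNonCriticalMessage marshals the message (trying the data protocol first, then falling back to
+// the common backend protocol), writes it over the socket, and blocks until sockServerHandler
+// delivers the reply on globalNonCriticalMessageChan.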
+func (backend *SSHAppBackend) SendNonCriticalMessage(iface interface{}) (interface{}, error) {
+ if backend.currentSock == nil {
+ return nil, fmt.Errorf("socket connection not initialized yet")
+ }
+
+ bytes, err := datacommands.Marshal(iface)
+
+ if err != nil && err.Error() == "unsupported command type" {
+ bytes, err = commonbackend.Marshal(iface)
+
+ if err != nil {
+ return nil, err
+ }
+ } else if err != nil {
+ return nil, err
+ }
+
+ backend.globalNonCriticalMessageLock.Lock()
+
+ if _, err := backend.currentSock.Write(bytes); err != nil {
+ backend.globalNonCriticalMessageLock.Unlock()
+ return nil, fmt.Errorf("failed to write message: %s", err.Error())
+ }
+
+ reply, ok := <-backend.globalNonCriticalMessageChan
+
+ if !ok {
+ backend.globalNonCriticalMessageLock.Unlock()
+ return nil, fmt.Errorf("failed to get reply back: chan not OK")
+ }
+
+ backend.globalNonCriticalMessageLock.Unlock()
+ return reply, nil
+}
+
+func (backend *SSHAppBackend) sockServerHandler() {
+ for {
+ conn, err := backend.listener.Accept()
+
+		if err != nil {
+			log.Warnf("Failed to accept remote connection: %s", err.Error())
+			continue
+		}
+
+ log.Debug("Successfully connected.")
+
+ backend.currentSock = conn
+
+ commandID := make([]byte, 1)
+
+ gaslighter := &gaslighter.Gaslighter{}
+ gaslighter.ProxiedReader = conn
+
+ dataBuffer := make([]byte, 65535)
+
+ var commandRaw interface{}
+
+ for {
+ if _, err := conn.Read(commandID); err != nil {
+ log.Warnf("Failed to read command ID: %s", err.Error())
+ return
+ }
+
+ gaslighter.Byte = commandID[0]
+ gaslighter.HasGaslit = false
+
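+			// Command IDs above 100 belong to the sshappbackend data protocol; lower IDs are common backend commands.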
+ if gaslighter.Byte > 100 {
+ commandRaw, err = datacommands.Unmarshal(gaslighter)
+ } else {
+ commandRaw, err = commonbackend.Unmarshal(gaslighter)
+ }
+
+			if err != nil {
+				log.Warnf("Failed to parse command: %s", err.Error())
+				continue
+			}
+
+ switch command := commandRaw.(type) {
+ case *datacommands.TCPConnectionOpened:
+ backend.OnTCPConnectionOpened(command.ProxyID, command.ConnectionID)
+ case *datacommands.TCPConnectionClosed:
+ backend.OnTCPConnectionClosed(command.ProxyID, command.ConnectionID)
+ case *datacommands.TCPProxyData:
+ if _, err := io.ReadFull(conn, dataBuffer[:command.DataLength]); err != nil {
+ log.Warnf("Failed to read entire data buffer: %s", err.Error())
+ break
+ }
+
+ backend.HandleTCPMessage(command, dataBuffer[:command.DataLength])
+ case *datacommands.UDPProxyData:
+ if _, err := io.ReadFull(conn, dataBuffer[:command.DataLength]); err != nil {
+ log.Warnf("Failed to read entire data buffer: %s", err.Error())
+ break
+ }
+
+ backend.HandleUDPMessage(command, dataBuffer[:command.DataLength])
+ default:
+ select {
+ case backend.globalNonCriticalMessageChan <- command:
+ default:
+ }
+ }
+ }
+ }
+}
+
+func main() {
+ logLevel := os.Getenv("HERMES_LOG_LEVEL")
+
+ if logLevel != "" {
+ switch logLevel {
+ case "debug":
+ log.SetLevel(log.DebugLevel)
+
+ case "info":
+ log.SetLevel(log.InfoLevel)
+
+ case "warn":
+ log.SetLevel(log.WarnLevel)
+
+ case "error":
+ log.SetLevel(log.ErrorLevel)
+
+ case "fatal":
+ log.SetLevel(log.FatalLevel)
+ }
+ }
+
+ backend := &SSHAppBackend{}
+
+ application := backendutil.NewHelper(backend)
+ err := application.Start()
+
+ if err != nil {
+ log.Fatalf("failed execution in application: %s", err.Error())
+ }
+}
diff --git a/backend/sshappbackend/local-code/porttranslation/translation.go b/backend/sshappbackend/local-code/porttranslation/translation.go
new file mode 100644
index 0000000..b8c0454
--- /dev/null
+++ b/backend/sshappbackend/local-code/porttranslation/translation.go
@@ -0,0 +1,112 @@
+package porttranslation
+
+import (
+ "fmt"
+ "net"
+ "sync"
+ "time"
+)
+
+type connectionData struct {
+ udpConn *net.UDPConn
+ buf []byte
+ hasBeenAliveFor time.Time
+}
+
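+// PortTranslation fans a single UDP proxy out to per-client sockets: WriteTo lazily creates one
+// upstream UDP connection per (client IP, client port) pair, and replies are pushed back to the
+// client through the WriteFrom callback.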
+type PortTranslation struct {
+ UDPAddr *net.UDPAddr
+ WriteFrom func(ip string, port uint16, data []byte)
+
+ newConnectionLock sync.Mutex
+ connections map[string]map[uint16]*connectionData
+}
+
+func (translation *PortTranslation) CleanupPorts() {
+ if translation.connections == nil {
+ translation.connections = map[string]map[uint16]*connectionData{}
+ return
+ }
+
+ for connectionIPIndex, connectionPorts := range translation.connections {
+ anyAreAlive := false
+
+ for connectionPortIndex, connectionData := range connectionPorts {
+ if time.Now().Before(connectionData.hasBeenAliveFor.Add(3 * time.Minute)) {
+ anyAreAlive = true
+ continue
+ }
+
+ connectionData.udpConn.Close()
+ delete(connectionPorts, connectionPortIndex)
+ }
+
+ if !anyAreAlive {
+ delete(translation.connections, connectionIPIndex)
+ }
+ }
+}
+
+func (translation *PortTranslation) StopAllPorts() {
+ if translation.connections == nil {
+ return
+ }
+
+ for connectionIPIndex, connectionPorts := range translation.connections {
+ for connectionPortIndex, connectionData := range connectionPorts {
+ connectionData.udpConn.Close()
+ delete(connectionPorts, connectionPortIndex)
+ }
+
+ delete(translation.connections, connectionIPIndex)
+ }
+
+ translation.connections = nil
+}
+
+func (translation *PortTranslation) WriteTo(ip string, port uint16, data []byte) (int, error) {
+ if translation.connections == nil {
+ translation.connections = map[string]map[uint16]*connectionData{}
+ }
+
+ connectionPortData, ok := translation.connections[ip]
+
+ if !ok {
+ translation.connections[ip] = map[uint16]*connectionData{}
+ connectionPortData = translation.connections[ip]
+ }
+
+ connectionStruct, ok := connectionPortData[port]
+
+ if !ok {
+ connectionPortData[port] = &connectionData{}
+ connectionStruct = connectionPortData[port]
+
+ udpConn, err := net.DialUDP("udp", nil, translation.UDPAddr)
+
+ if err != nil {
+ return 0, fmt.Errorf("failed to initialize UDP socket: %s", err.Error())
+ }
+
+ connectionStruct.udpConn = udpConn
+ connectionStruct.buf = make([]byte, 65535)
+
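+		// Pump replies from the upstream UDP socket back to the originating client via WriteFrom,
+		// and drop the mapping once the socket errors out.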
+ go func() {
+ for {
+ n, err := udpConn.Read(connectionStruct.buf)
+
+ if err != nil {
+ udpConn.Close()
+ delete(connectionPortData, port)
+
+ return
+ }
+
+ connectionStruct.hasBeenAliveFor = time.Now()
+ translation.WriteFrom(ip, port, connectionStruct.buf[:n])
+ }
+ }()
+ }
+
+ connectionStruct.hasBeenAliveFor = time.Now()
+ return connectionStruct.udpConn.Write(data)
+}
diff --git a/backend/sshappbackend/remote-code/backendutil_custom/application.go b/backend/sshappbackend/remote-code/backendutil_custom/application.go
new file mode 100644
index 0000000..2747f28
--- /dev/null
+++ b/backend/sshappbackend/remote-code/backendutil_custom/application.go
@@ -0,0 +1,306 @@
+package backendutil_custom
+
+import (
+ "io"
+ "net"
+ "os"
+
+ "git.terah.dev/imterah/hermes/backend/backendutil"
+ "git.terah.dev/imterah/hermes/backend/commonbackend"
+ "git.terah.dev/imterah/hermes/backend/sshappbackend/datacommands"
+ "git.terah.dev/imterah/hermes/backend/sshappbackend/gaslighter"
+ "github.com/charmbracelet/log"
+)
+
+type BackendApplicationHelper struct {
+ Backend BackendInterface
+ SocketPath string
+
+ socket net.Conn
+}
+
+func (helper *BackendApplicationHelper) Start() error {
+ log.Debug("BackendApplicationHelper is starting")
+ err := backendutil.ConfigureProfiling()
+
+ if err != nil {
+ return err
+ }
+
+ log.Debug("Currently waiting for Unix socket connection...")
+
+ helper.socket, err = net.Dial("unix", helper.SocketPath)
+
+ if err != nil {
+ return err
+ }
+
+ helper.Backend.OnSocketConnection(helper.socket)
+
+	log.Debug("Successfully connected")
+
+ gaslighter := &gaslighter.Gaslighter{}
+ gaslighter.ProxiedReader = helper.socket
+
+ commandID := make([]byte, 1)
+
+ for {
+ if _, err := helper.socket.Read(commandID); err != nil {
+ return err
+ }
+
+ gaslighter.Byte = commandID[0]
+ gaslighter.HasGaslit = false
+
+ var commandRaw interface{}
+
+ if gaslighter.Byte > 100 {
+ commandRaw, err = datacommands.Unmarshal(gaslighter)
+ } else {
+ commandRaw, err = commonbackend.Unmarshal(gaslighter)
+ }
+
+ if err != nil {
+ return err
+ }
+
+ switch command := commandRaw.(type) {
+ case *datacommands.ProxyConnectionsRequest:
+ connections := helper.Backend.GetAllClientConnections(command.ProxyID)
+
+ serverParams := &datacommands.ProxyConnectionsResponse{
+ Connections: connections,
+ }
+
+ byteData, err := datacommands.Marshal(serverParams)
+
+ if err != nil {
+ return err
+ }
+
+ if _, err = helper.socket.Write(byteData); err != nil {
+ return err
+ }
+ case *datacommands.RemoveProxy:
+ ok, err := helper.Backend.StopProxy(command)
+ var hasAnyFailed bool
+
+ if !ok {
+				log.Warnf("failed to remove proxy (ID %d): RemoveProxy returned a failure state", command.ProxyID)
+ hasAnyFailed = true
+ } else if err != nil {
+ log.Warnf("failed to remove proxy (ID %d): %s", command.ProxyID, err.Error())
+ hasAnyFailed = true
+ }
+
+ response := &datacommands.ProxyStatusResponse{
+ ProxyID: command.ProxyID,
+ IsActive: hasAnyFailed,
+ }
+
+ responseMarshalled, err := datacommands.Marshal(response)
+
+ if err != nil {
+				log.Errorf("failed to marshal response: %s", err.Error())
+ continue
+ }
+
+ helper.socket.Write(responseMarshalled)
+ case *datacommands.ProxyInformationRequest:
+ response := helper.Backend.ResolveProxy(command.ProxyID)
+ responseMarshalled, err := datacommands.Marshal(response)
+
+ if err != nil {
+				log.Errorf("failed to marshal response: %s", err.Error())
+ continue
+ }
+
+ helper.socket.Write(responseMarshalled)
+ case *datacommands.ProxyConnectionInformationRequest:
+ response := helper.Backend.ResolveConnection(command.ProxyID, command.ConnectionID)
+ responseMarshalled, err := datacommands.Marshal(response)
+
+ if err != nil {
+				log.Errorf("failed to marshal response: %s", err.Error())
+ continue
+ }
+
+ helper.socket.Write(responseMarshalled)
+ case *datacommands.TCPConnectionClosed:
+ helper.Backend.OnTCPConnectionClosed(command.ProxyID, command.ConnectionID)
+ case *datacommands.TCPProxyData:
+ bytes := make([]byte, command.DataLength)
+ _, err := io.ReadFull(helper.socket, bytes)
+
+ if err != nil {
+ log.Warn("failed to read TCP data")
+ }
+
+ helper.Backend.HandleTCPMessage(command, bytes)
+ case *datacommands.UDPProxyData:
+ bytes := make([]byte, command.DataLength)
+ _, err := io.ReadFull(helper.socket, bytes)
+
+ if err != nil {
+				log.Warn("failed to read UDP data")
+ }
+
+ helper.Backend.HandleUDPMessage(command, bytes)
+ case *commonbackend.Start:
+ ok, err := helper.Backend.StartBackend(command.Arguments)
+
+ var (
+ message string
+ statusCode int
+ )
+
+ if err != nil {
+ message = err.Error()
+ statusCode = commonbackend.StatusFailure
+ } else {
+ statusCode = commonbackend.StatusSuccess
+ }
+
+ response := &commonbackend.BackendStatusResponse{
+ IsRunning: ok,
+ StatusCode: statusCode,
+ Message: message,
+ }
+
+ responseMarshalled, err := commonbackend.Marshal(response)
+
+ if err != nil {
+				log.Errorf("failed to marshal response: %s", err.Error())
+ continue
+ }
+
+ helper.socket.Write(responseMarshalled)
+ case *commonbackend.Stop:
+ ok, err := helper.Backend.StopBackend()
+
+ var (
+ message string
+ statusCode int
+ )
+
+ if err != nil {
+ message = err.Error()
+ statusCode = commonbackend.StatusFailure
+ } else {
+ statusCode = commonbackend.StatusSuccess
+ }
+
+ response := &commonbackend.BackendStatusResponse{
+ IsRunning: !ok,
+ StatusCode: statusCode,
+ Message: message,
+ }
+
+ responseMarshalled, err := commonbackend.Marshal(response)
+
+ if err != nil {
+				log.Errorf("failed to marshal response: %s", err.Error())
+ continue
+ }
+
+ helper.socket.Write(responseMarshalled)
+ case *commonbackend.BackendStatusRequest:
+ ok, err := helper.Backend.GetBackendStatus()
+
+ var (
+ message string
+ statusCode int
+ )
+
+ if err != nil {
+ message = err.Error()
+ statusCode = commonbackend.StatusFailure
+ } else {
+ statusCode = commonbackend.StatusSuccess
+ }
+
+ response := &commonbackend.BackendStatusResponse{
+ IsRunning: ok,
+ StatusCode: statusCode,
+ Message: message,
+ }
+
+ responseMarshalled, err := commonbackend.Marshal(response)
+
+ if err != nil {
+				log.Errorf("failed to marshal response: %s", err.Error())
+ continue
+ }
+
+ helper.socket.Write(responseMarshalled)
+ case *commonbackend.AddProxy:
+ id, ok, err := helper.Backend.StartProxy(command)
+ var hasAnyFailed bool
+
+ if !ok {
+				log.Warnf("failed to add proxy (%s:%d -> remote:%d): StartProxy returned a failure state", command.SourceIP, command.SourcePort, command.DestPort)
+ hasAnyFailed = true
+ } else if err != nil {
+ log.Warnf("failed to add proxy (%s:%d -> remote:%d): %s", command.SourceIP, command.SourcePort, command.DestPort, err.Error())
+ hasAnyFailed = true
+ }
+
+ response := &datacommands.ProxyStatusResponse{
+ ProxyID: id,
+ IsActive: !hasAnyFailed,
+ }
+
+ responseMarshalled, err := datacommands.Marshal(response)
+
+ if err != nil {
+				log.Errorf("failed to marshal response: %s", err.Error())
+ continue
+ }
+
+ helper.socket.Write(responseMarshalled)
+ case *commonbackend.CheckClientParameters:
+ resp := helper.Backend.CheckParametersForConnections(command)
+ resp.InResponseTo = "checkClientParameters"
+
+ byteData, err := commonbackend.Marshal(resp)
+
+ if err != nil {
+ return err
+ }
+
+ if _, err = helper.socket.Write(byteData); err != nil {
+ return err
+ }
+ case *commonbackend.CheckServerParameters:
+ resp := helper.Backend.CheckParametersForBackend(command.Arguments)
+ resp.InResponseTo = "checkServerParameters"
+
+ byteData, err := commonbackend.Marshal(resp)
+
+ if err != nil {
+ return err
+ }
+
+ if _, err = helper.socket.Write(byteData); err != nil {
+ return err
+ }
+ default:
+			log.Warnf("Unsupported command received: %T", command)
+ }
+ }
+}
+
+func NewHelper(backend BackendInterface) *BackendApplicationHelper {
+ socketPath, ok := os.LookupEnv("HERMES_API_SOCK")
+
+ if !ok {
+ log.Warn("HERMES_API_SOCK is not defined! This will cause an issue unless the backend manually overwrites it")
+ }
+
+ helper := &BackendApplicationHelper{
+ Backend: backend,
+ SocketPath: socketPath,
+ }
+
+ return helper
+}
diff --git a/backend/sshappbackend/remote-code/backendutil_custom/structure.go b/backend/sshappbackend/remote-code/backendutil_custom/structure.go
new file mode 100644
index 0000000..65c5a23
--- /dev/null
+++ b/backend/sshappbackend/remote-code/backendutil_custom/structure.go
@@ -0,0 +1,26 @@
+package backendutil_custom
+
+import (
+ "net"
+
+ "git.terah.dev/imterah/hermes/backend/commonbackend"
+ "git.terah.dev/imterah/hermes/backend/sshappbackend/datacommands"
+)
+
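+// BackendInterface is the contract the remote runtime implements; BackendApplicationHelper drives
+// it from commands received over the Unix socket.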
+type BackendInterface interface {
+ StartBackend(arguments []byte) (bool, error)
+ StopBackend() (bool, error)
+ GetBackendStatus() (bool, error)
+ StartProxy(command *commonbackend.AddProxy) (uint16, bool, error)
+ StopProxy(command *datacommands.RemoveProxy) (bool, error)
+ GetAllProxies() []uint16
+ ResolveProxy(proxyID uint16) *datacommands.ProxyInformationResponse
+ GetAllClientConnections(proxyID uint16) []uint16
+ ResolveConnection(proxyID, connectionID uint16) *datacommands.ProxyConnectionInformationResponse
+ CheckParametersForConnections(clientParameters *commonbackend.CheckClientParameters) *commonbackend.CheckParametersResponse
+ CheckParametersForBackend(arguments []byte) *commonbackend.CheckParametersResponse
+ OnTCPConnectionClosed(proxyID, connectionID uint16)
+ HandleTCPMessage(message *datacommands.TCPProxyData, data []byte)
+ HandleUDPMessage(message *datacommands.UDPProxyData, data []byte)
+ OnSocketConnection(sock net.Conn)
+}
diff --git a/backend/sshappbackend/remote-code/main.go b/backend/sshappbackend/remote-code/main.go
new file mode 100644
index 0000000..d56a7a3
--- /dev/null
+++ b/backend/sshappbackend/remote-code/main.go
@@ -0,0 +1,460 @@
+package main
+
+import (
+ "errors"
+ "fmt"
+ "net"
+ "os"
+ "strconv"
+ "strings"
+ "sync"
+
+ "git.terah.dev/imterah/hermes/backend/commonbackend"
+ "git.terah.dev/imterah/hermes/backend/sshappbackend/datacommands"
+ "git.terah.dev/imterah/hermes/backend/sshappbackend/remote-code/backendutil_custom"
+ "github.com/charmbracelet/log"
+)
+
+type TCPProxy struct {
+ connectionIDIndex uint16
+ connectionIDLock sync.Mutex
+
+ proxyInformation *commonbackend.AddProxy
+ connections map[uint16]net.Conn
+ server net.Listener
+}
+
+type UDPProxy struct {
+ server *net.UDPConn
+ proxyInformation *commonbackend.AddProxy
+}
+
+type SSHRemoteAppBackend struct {
+ proxyIDIndex uint16
+ proxyIDLock sync.Mutex
+
+ tcpProxies map[uint16]*TCPProxy
+ udpProxies map[uint16]*UDPProxy
+
+ isRunning bool
+
+ sock net.Conn
+}
+
+func (backend *SSHRemoteAppBackend) StartBackend(arguments []byte) (bool, error) {
+ backend.tcpProxies = map[uint16]*TCPProxy{}
+ backend.udpProxies = map[uint16]*UDPProxy{}
+
+ backend.isRunning = true
+
+ return true, nil
+}
+
+func (backend *SSHRemoteAppBackend) StopBackend() (bool, error) {
+ for tcpProxyIndex, tcpProxy := range backend.tcpProxies {
+ for _, tcpConnection := range tcpProxy.connections {
+ tcpConnection.Close()
+ }
+
+ tcpProxy.server.Close()
+ delete(backend.tcpProxies, tcpProxyIndex)
+ }
+
+ for udpProxyIndex, udpProxy := range backend.udpProxies {
+ udpProxy.server.Close()
+ delete(backend.udpProxies, udpProxyIndex)
+ }
+
+ backend.isRunning = false
+ return true, nil
+}
+
+func (backend *SSHRemoteAppBackend) GetBackendStatus() (bool, error) {
+ return backend.isRunning, nil
+}
+
+func (backend *SSHRemoteAppBackend) StartProxy(command *commonbackend.AddProxy) (uint16, bool, error) {
+ // Allocate a new proxy ID
+ backend.proxyIDLock.Lock()
+ proxyID := backend.proxyIDIndex
+ backend.proxyIDIndex++
+ backend.proxyIDLock.Unlock()
+
+ if command.Protocol == "tcp" {
+ backend.tcpProxies[proxyID] = &TCPProxy{
+ connections: map[uint16]net.Conn{},
+ proxyInformation: command,
+ }
+
+ server, err := net.Listen("tcp", fmt.Sprintf(":%d", command.DestPort))
+
+ if err != nil {
+ return 0, false, fmt.Errorf("failed to open server: %s", err.Error())
+ }
+
+ backend.tcpProxies[proxyID].server = server
+
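+		// Accept loop: assign each incoming connection an ID, announce it with TCPConnectionOpened,
+		// then stream its traffic as TCPProxyData headers followed by the raw payload.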
+ go func() {
+ for {
+ conn, err := server.Accept()
+
+ if err != nil {
+ log.Warnf("failed to accept connection: %s", err.Error())
+ return
+ }
+
+ go func() {
+ backend.tcpProxies[proxyID].connectionIDLock.Lock()
+ connectionID := backend.tcpProxies[proxyID].connectionIDIndex
+ backend.tcpProxies[proxyID].connectionIDIndex++
+ backend.tcpProxies[proxyID].connectionIDLock.Unlock()
+
+ backend.tcpProxies[proxyID].connections[connectionID] = conn
+
+ dataBuf := make([]byte, 65535)
+
+ onConnection := &datacommands.TCPConnectionOpened{
+ ProxyID: proxyID,
+ ConnectionID: connectionID,
+ }
+
+ connectionCommandMarshalled, err := datacommands.Marshal(onConnection)
+
+ if err != nil {
+ log.Errorf("failed to marshal connection message: %s", err.Error())
+ }
+
+ backend.sock.Write(connectionCommandMarshalled)
+
+ tcpData := &datacommands.TCPProxyData{
+ ProxyID: proxyID,
+ ConnectionID: connectionID,
+ }
+
+ for {
+ len, err := conn.Read(dataBuf)
+
+ if err != nil {
+ if errors.Is(err, net.ErrClosed) {
+ return
+ } else if err.Error() != "EOF" {
+ log.Warnf("failed to read from sock: %s", err.Error())
+ }
+
+ conn.Close()
+ break
+ }
+
+ tcpData.DataLength = uint16(len)
+ marshalledMessageCommand, err := datacommands.Marshal(tcpData)
+
+ if err != nil {
+ log.Warnf("failed to marshal message data: %s", err.Error())
+
+ conn.Close()
+ break
+ }
+
+ if _, err := backend.sock.Write(marshalledMessageCommand); err != nil {
+ log.Warnf("failed to send marshalled message data: %s", err.Error())
+
+ conn.Close()
+ break
+ }
+
+ if _, err := backend.sock.Write(dataBuf[:len]); err != nil {
+ log.Warnf("failed to send raw message data: %s", err.Error())
+
+ conn.Close()
+ break
+ }
+ }
+
+ onDisconnect := &datacommands.TCPConnectionClosed{
+ ProxyID: proxyID,
+ ConnectionID: connectionID,
+ }
+
+ disconnectionCommandMarshalled, err := datacommands.Marshal(onDisconnect)
+
+ if err != nil {
+ log.Errorf("failed to marshal disconnection message: %s", err.Error())
+ }
+
+ backend.sock.Write(disconnectionCommandMarshalled)
+ }()
+ }
+ }()
+ } else if command.Protocol == "udp" {
+ backend.udpProxies[proxyID] = &UDPProxy{
+ proxyInformation: command,
+ }
+
+ server, err := net.ListenUDP("udp", &net.UDPAddr{
+ IP: net.IPv4(0, 0, 0, 0),
+ Port: int(command.DestPort),
+ })
+
+ if err != nil {
+ return 0, false, fmt.Errorf("failed to open server: %s", err.Error())
+ }
+
+ backend.udpProxies[proxyID].server = server
+ dataBuf := make([]byte, 65535)
+
+ udpProxyData := &datacommands.UDPProxyData{
+ ProxyID: proxyID,
+ }
+
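+		// Forward each datagram arriving on the public UDP socket to the local side, tagged with the
+		// sender's IP, port, and payload length.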
+ go func() {
+ for {
+ len, addr, err := server.ReadFromUDP(dataBuf)
+
+ if err != nil {
+ log.Warnf("failed to read from UDP socket: %s", err.Error())
+ continue
+ }
+
+ udpProxyData.ClientIP = addr.IP.String()
+ udpProxyData.ClientPort = uint16(addr.Port)
+ udpProxyData.DataLength = uint16(len)
+
+ marshalledMessageCommand, err := datacommands.Marshal(udpProxyData)
+
+ if err != nil {
+ log.Warnf("failed to marshal message data: %s", err.Error())
+ continue
+ }
+
+ if _, err := backend.sock.Write(marshalledMessageCommand); err != nil {
+ log.Warnf("failed to send marshalled message data: %s", err.Error())
+ continue
+ }
+
+ if _, err := backend.sock.Write(dataBuf[:len]); err != nil {
+ log.Warnf("failed to send raw message data: %s", err.Error())
+ continue
+ }
+ }
+ }()
+ }
+
+ return proxyID, true, nil
+}
+
+func (backend *SSHRemoteAppBackend) StopProxy(command *datacommands.RemoveProxy) (bool, error) {
+ tcpProxy, ok := backend.tcpProxies[command.ProxyID]
+
+ if !ok {
+ udpProxy, ok := backend.udpProxies[command.ProxyID]
+
+ if !ok {
+ return ok, fmt.Errorf("could not find proxy")
+ }
+
+ udpProxy.server.Close()
+ delete(backend.udpProxies, command.ProxyID)
+ } else {
+ for _, tcpConnection := range tcpProxy.connections {
+ tcpConnection.Close()
+ }
+
+ tcpProxy.server.Close()
+ delete(backend.tcpProxies, command.ProxyID)
+ }
+
+ return true, nil
+}
+
+func (backend *SSHRemoteAppBackend) GetAllProxies() []uint16 {
+ proxyList := make([]uint16, len(backend.tcpProxies)+len(backend.udpProxies))
+
+ currentPos := 0
+
+ for tcpProxy := range backend.tcpProxies {
+ proxyList[currentPos] = tcpProxy
+ currentPos += 1
+ }
+
+ for udpProxy := range backend.udpProxies {
+ proxyList[currentPos] = udpProxy
+ currentPos += 1
+ }
+
+ return proxyList
+}
+
+func (backend *SSHRemoteAppBackend) ResolveProxy(proxyID uint16) *datacommands.ProxyInformationResponse {
+ var proxyInformation *commonbackend.AddProxy
+ response := &datacommands.ProxyInformationResponse{}
+
+ tcpProxy, ok := backend.tcpProxies[proxyID]
+
+ if !ok {
+ udpProxy, ok := backend.udpProxies[proxyID]
+
+ if !ok {
+ response.Exists = false
+ return response
+ }
+
+ proxyInformation = udpProxy.proxyInformation
+ } else {
+ proxyInformation = tcpProxy.proxyInformation
+ }
+
+ response.Exists = true
+ response.SourceIP = proxyInformation.SourceIP
+ response.SourcePort = proxyInformation.SourcePort
+ response.DestPort = proxyInformation.DestPort
+ response.Protocol = proxyInformation.Protocol
+
+ return response
+}
+
+func (backend *SSHRemoteAppBackend) GetAllClientConnections(proxyID uint16) []uint16 {
+ tcpProxy, ok := backend.tcpProxies[proxyID]
+
+ if !ok {
+ return []uint16{}
+ }
+
+ connectionsArray := make([]uint16, len(tcpProxy.connections))
+ currentPos := 0
+
+ for connectionIndex := range tcpProxy.connections {
+ connectionsArray[currentPos] = connectionIndex
+ currentPos++
+ }
+
+ return connectionsArray
+}
+
+func (backend *SSHRemoteAppBackend) ResolveConnection(proxyID, connectionID uint16) *datacommands.ProxyConnectionInformationResponse {
+ response := &datacommands.ProxyConnectionInformationResponse{}
+ tcpProxy, ok := backend.tcpProxies[proxyID]
+
+ if !ok {
+ response.Exists = false
+ return response
+ }
+
+ connection, ok := tcpProxy.connections[connectionID]
+
+ if !ok {
+ response.Exists = false
+ return response
+ }
+
+ addr := connection.RemoteAddr().String()
+ ip := addr[:strings.LastIndex(addr, ":")]
+ port, err := strconv.Atoi(addr[strings.LastIndex(addr, ":")+1:])
+
+ if err != nil {
+ log.Warnf("failed to parse client port: %s", err.Error())
+ response.Exists = false
+
+ return response
+ }
+
+ response.ClientIP = ip
+ response.ClientPort = uint16(port)
+
+ return response
+}
+
+func (backend *SSHRemoteAppBackend) CheckParametersForConnections(clientParameters *commonbackend.CheckClientParameters) *commonbackend.CheckParametersResponse {
+ return &commonbackend.CheckParametersResponse{
+ IsValid: true,
+ }
+}
+
+func (backend *SSHRemoteAppBackend) CheckParametersForBackend(arguments []byte) *commonbackend.CheckParametersResponse {
+ return &commonbackend.CheckParametersResponse{
+ IsValid: true,
+ }
+}
+
+func (backend *SSHRemoteAppBackend) HandleTCPMessage(message *datacommands.TCPProxyData, data []byte) {
+ tcpProxy, ok := backend.tcpProxies[message.ProxyID]
+
+ if !ok {
+ log.Warnf("could not find tcp proxy (ID %d)", message.ProxyID)
+ return
+ }
+
+ connection, ok := tcpProxy.connections[message.ConnectionID]
+
+ if !ok {
+ log.Warnf("could not find tcp proxy (ID %d) with connection ID (%d)", message.ProxyID, message.ConnectionID)
+ return
+ }
+
+ connection.Write(data)
+}
+
+func (backend *SSHRemoteAppBackend) HandleUDPMessage(message *datacommands.UDPProxyData, data []byte) {
+ udpProxy, ok := backend.udpProxies[message.ProxyID]
+
+ if !ok {
+ return
+ }
+
+ udpProxy.server.WriteToUDP(data, &net.UDPAddr{
+ IP: net.ParseIP(message.ClientIP),
+ Port: int(message.ClientPort),
+ })
+}
+
+func (backend *SSHRemoteAppBackend) OnTCPConnectionClosed(proxyID, connectionID uint16) {
+ tcpProxy, ok := backend.tcpProxies[proxyID]
+
+ if !ok {
+ return
+ }
+
+ connection, ok := tcpProxy.connections[connectionID]
+
+ if !ok {
+ return
+ }
+
+ connection.Close()
+ delete(tcpProxy.connections, connectionID)
+}
+
+func (backend *SSHRemoteAppBackend) OnSocketConnection(sock net.Conn) {
+ backend.sock = sock
+}
+
+func main() {
+ logLevel := os.Getenv("HERMES_LOG_LEVEL")
+
+ if logLevel != "" {
+ switch logLevel {
+ case "debug":
+ log.SetLevel(log.DebugLevel)
+
+ case "info":
+ log.SetLevel(log.InfoLevel)
+
+ case "warn":
+ log.SetLevel(log.WarnLevel)
+
+ case "error":
+ log.SetLevel(log.ErrorLevel)
+
+ case "fatal":
+ log.SetLevel(log.FatalLevel)
+ }
+ }
+
+ backend := &SSHRemoteAppBackend{}
+
+ application := backendutil_custom.NewHelper(backend)
+ err := application.Start()
+
+ if err != nil {
+ log.Fatalf("failed execution in application: %s", err.Error())
+ }
+}
diff --git a/backend/sshbackend/main.go b/backend/sshbackend/main.go
new file mode 100644
index 0000000..d46b330
--- /dev/null
+++ b/backend/sshbackend/main.go
@@ -0,0 +1,619 @@
+package main
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "net"
+ "os"
+ "slices"
+ "strconv"
+ "strings"
+ "sync"
+ "time"
+
+ "git.terah.dev/imterah/hermes/backend/backendutil"
+ "git.terah.dev/imterah/hermes/backend/commonbackend"
+ "github.com/charmbracelet/log"
+ "github.com/go-playground/validator/v10"
+ "golang.org/x/crypto/ssh"
+)
+
+var validatorInstance *validator.Validate
+
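+// ConnWithTimeout wraps a net.Conn and refreshes the read/write deadline before every Read and Write.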
+type ConnWithTimeout struct {
+ net.Conn
+ ReadTimeout time.Duration
+ WriteTimeout time.Duration
+}
+
+func (c *ConnWithTimeout) Read(b []byte) (int, error) {
+ err := c.Conn.SetReadDeadline(time.Now().Add(c.ReadTimeout))
+
+ if err != nil {
+ return 0, err
+ }
+
+ return c.Conn.Read(b)
+}
+
+func (c *ConnWithTimeout) Write(b []byte) (int, error) {
+ err := c.Conn.SetWriteDeadline(time.Now().Add(c.WriteTimeout))
+
+ if err != nil {
+ return 0, err
+ }
+
+ return c.Conn.Write(b)
+}
+
+type SSHListener struct {
+ SourceIP string
+ SourcePort uint16
+ DestPort uint16
+ Protocol string // Will be either 'tcp' or 'udp'
+ Listeners []net.Listener
+}
+
+type SSHBackendData struct {
+ IP string `json:"ip" validate:"required"`
+ Port uint16 `json:"port" validate:"required"`
+ Username string `json:"username" validate:"required"`
+ PrivateKey string `json:"privateKey" validate:"required"`
+ DisablePIDCheck bool `json:"disablePIDCheck"`
+ ListenOnIPs []string `json:"listenOnIPs"`
+}
+
+type SSHBackend struct {
+ config *SSHBackendData
+ conn *ssh.Client
+ clients []*commonbackend.ProxyClientConnection
+ proxies []*SSHListener
+ arrayPropMutex sync.Mutex
+ pid int
+ isReady bool
+ inReinitLoop bool
+}
+
+func (backend *SSHBackend) StartBackend(bytes []byte) (bool, error) {
+ log.Info("SSHBackend is initializing...")
+
+ if validatorInstance == nil {
+ validatorInstance = validator.New()
+ }
+
+ if backend.inReinitLoop {
+ for !backend.isReady {
+ time.Sleep(100 * time.Millisecond)
+ }
+ }
+
+ var backendData SSHBackendData
+
+ if err := json.Unmarshal(bytes, &backendData); err != nil {
+ return false, err
+ }
+
+ if err := validatorInstance.Struct(&backendData); err != nil {
+ return false, err
+ }
+
+ backend.config = &backendData
+
+ if len(backend.config.ListenOnIPs) == 0 {
+ backend.config.ListenOnIPs = []string{"0.0.0.0"}
+ }
+
+ signer, err := ssh.ParsePrivateKey([]byte(backendData.PrivateKey))
+
+ if err != nil {
+ return false, err
+ }
+
+ auth := ssh.PublicKeys(signer)
+
+ config := &ssh.ClientConfig{
+ HostKeyCallback: ssh.InsecureIgnoreHostKey(),
+ User: backendData.Username,
+ Auth: []ssh.AuthMethod{
+ auth,
+ },
+ }
+
+ addr := fmt.Sprintf("%s:%d", backendData.IP, backendData.Port)
+	timeout := 10 * time.Second
+
+ rawTCPConn, err := net.DialTimeout("tcp", addr, timeout)
+
+ if err != nil {
+ return false, err
+ }
+
+ connWithTimeout := &ConnWithTimeout{
+ Conn: rawTCPConn,
+ ReadTimeout: timeout,
+ WriteTimeout: timeout,
+ }
+
+ c, chans, reqs, err := ssh.NewClientConn(connWithTimeout, addr, config)
+
+ if err != nil {
+ return false, err
+ }
+
+ client := ssh.NewClient(c, chans, reqs)
+ backend.conn = client
+
+ if !backendData.DisablePIDCheck {
+ if backend.pid != 0 {
+ session, err := client.NewSession()
+
+ if err != nil {
+ return false, err
+ }
+
+ err = session.Run(fmt.Sprintf("kill -9 %d", backend.pid))
+
+ if err != nil {
+ log.Warnf("Failed to kill process: %s", err.Error())
+ }
+ }
+
+ session, err := client.NewSession()
+
+ if err != nil {
+ return false, err
+ }
+
+ // Get the parent PID of the shell so we can kill it if we disconnect
+ output, err := session.Output("ps --no-headers -fp $$ | awk '{print $3}'")
+
+ if err != nil {
+ return false, err
+ }
+
+		// Strip the trailing newline and convert to int
+ backend.pid, err = strconv.Atoi(string(output)[:len(output)-1])
+
+ if err != nil {
+ return false, err
+ }
+ }
+
+ go backend.backendDisconnectHandler()
+ go backend.backendKeepaliveHandler()
+
+ log.Info("SSHBackend has initialized successfully.")
+
+ return true, nil
+}
+
+func (backend *SSHBackend) StopBackend() (bool, error) {
+ err := backend.conn.Close()
+
+ if err != nil {
+ return false, err
+ }
+
+ return true, nil
+}
+
+func (backend *SSHBackend) GetBackendStatus() (bool, error) {
+ return backend.conn != nil, nil
+}
+
+func (backend *SSHBackend) StartProxy(command *commonbackend.AddProxy) (bool, error) {
+ listenerObject := &SSHListener{
+ SourceIP: command.SourceIP,
+ SourcePort: command.SourcePort,
+ DestPort: command.DestPort,
+ Protocol: command.Protocol,
+ Listeners: []net.Listener{},
+ }
+
+ for _, ipListener := range backend.config.ListenOnIPs {
+ ip := net.TCPAddr{
+ IP: net.ParseIP(ipListener),
+ Port: int(command.DestPort),
+ }
+
+ listener, err := backend.conn.ListenTCP(&ip)
+
+ if err != nil {
+			// In case we error out, clean up any listeners we already opened
+ for _, listener := range listenerObject.Listeners {
+ err = listener.Close()
+
+ if err != nil {
+ log.Warnf("failed to close listener upon failure cleanup: %s", err.Error())
+ }
+ }
+
+ return false, err
+ }
+
+ listenerObject.Listeners = append(listenerObject.Listeners, listener)
+
+ go func() {
+ for {
+ forwardedConn, err := listener.Accept()
+
+ if err != nil {
+ log.Warnf("failed to accept listener connection: %s", err.Error())
+
+ if err.Error() == "EOF" {
+ return
+ }
+
+ continue
+ }
+
+ sourceConn, err := net.Dial("tcp", fmt.Sprintf("%s:%d", command.SourceIP, command.SourcePort))
+
+ if err != nil {
+ log.Warnf("failed to dial source connection: %s", err.Error())
+ continue
+ }
+
+ clientIPAndPort := forwardedConn.RemoteAddr().String()
+ clientIP := clientIPAndPort[:strings.LastIndex(clientIPAndPort, ":")]
+ clientPort, err := strconv.Atoi(clientIPAndPort[strings.LastIndex(clientIPAndPort, ":")+1:])
+
+ if err != nil {
+ log.Warnf("failed to parse client port: %s", err.Error())
+ continue
+ }
+
+ advertisedConn := &commonbackend.ProxyClientConnection{
+ SourceIP: command.SourceIP,
+ SourcePort: command.SourcePort,
+ DestPort: command.DestPort,
+ ClientIP: clientIP,
+ ClientPort: uint16(clientPort),
+
+ // FIXME (imterah): shouldn't protocol be in here?
+ // Protocol: command.Protocol,
+ }
+
+ backend.arrayPropMutex.Lock()
+ backend.clients = append(backend.clients, advertisedConn)
+ backend.arrayPropMutex.Unlock()
+
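+				// cleanupJob closes both sides of the proxied connection and removes it from
+				// the advertised client list. Both copy goroutines defer it, so the second
+				// invocation only produces "failed to close"/"couldn't find client" warnings.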
+ cleanupJob := func() {
+ defer backend.arrayPropMutex.Unlock()
+ err := sourceConn.Close()
+
+ if err != nil {
+ log.Warnf("failed to close source connection: %s", err.Error())
+ }
+
+ err = forwardedConn.Close()
+
+ if err != nil {
+ log.Warnf("failed to close forwarded/proxied connection: %s", err.Error())
+ }
+
+ backend.arrayPropMutex.Lock()
+
+ for clientIndex, clientInstance := range backend.clients {
+ // Check if memory addresses are equal for the pointer
+ if clientInstance == advertisedConn {
+ // Splice out the clientInstance by clientIndex
+
+ // TODO: change approach. It works but it's a bit wonky imho
+ backend.clients = slices.Delete(backend.clients, clientIndex, clientIndex+1)
+ return
+ }
+ }
+
+ log.Warn("failed to delete client from clients metadata: couldn't find client in the array")
+ }
+
+ sourceBuffer := make([]byte, 65535)
+ forwardedBuffer := make([]byte, 65535)
+
+ go func() {
+ defer cleanupJob()
+
+ for {
+ len, err := forwardedConn.Read(forwardedBuffer)
+
+ if err != nil {
+ if err.Error() != "EOF" && !errors.Is(err, net.ErrClosed) {
+ log.Errorf("failed to read from forwarded connection: %s", err.Error())
+ }
+
+ return
+ }
+
+ if _, err = sourceConn.Write(forwardedBuffer[:len]); err != nil {
+ if err.Error() != "EOF" && !errors.Is(err, net.ErrClosed) {
+ log.Errorf("failed to write to source connection: %s", err.Error())
+ }
+
+ return
+ }
+ }
+ }()
+
+ go func() {
+ defer cleanupJob()
+
+ for {
+ len, err := sourceConn.Read(sourceBuffer)
+
+ if err != nil {
+ if err.Error() != "EOF" && !errors.Is(err, net.ErrClosed) {
+ log.Errorf("failed to read from source connection: %s", err.Error())
+ }
+
+ return
+ }
+
+ if _, err = forwardedConn.Write(sourceBuffer[:len]); err != nil {
+ if err.Error() != "EOF" && !errors.Is(err, net.ErrClosed) {
+ log.Errorf("failed to write to forwarded connection: %s", err.Error())
+ }
+
+ return
+ }
+ }
+ }()
+ }
+ }()
+ }
+
+ backend.arrayPropMutex.Lock()
+ backend.proxies = append(backend.proxies, listenerObject)
+ backend.arrayPropMutex.Unlock()
+
+ return true, nil
+}
+
+func (backend *SSHBackend) StopProxy(command *commonbackend.RemoveProxy) (bool, error) {
+ defer backend.arrayPropMutex.Unlock()
+ backend.arrayPropMutex.Lock()
+
+ for proxyIndex, proxy := range backend.proxies {
+ if command.SourceIP == proxy.SourceIP && command.SourcePort == proxy.SourcePort && command.DestPort == proxy.DestPort && command.Protocol == proxy.Protocol {
+ for _, listener := range proxy.Listeners {
+ err := listener.Close()
+
+ if err != nil {
+ log.Warnf("failed to stop listener in StopProxy: %s", err.Error())
+ }
+ }
+
+ // Splice out the proxy instance by proxyIndex
+ // TODO: change approach. It works but it's a bit wonky imho
+ backend.proxies = slices.Delete(backend.proxies, proxyIndex, proxyIndex+1)
+ return true, nil
+ }
+ }
+
+ return false, fmt.Errorf("could not find the proxy")
+}
+
+func (backend *SSHBackend) GetAllClientConnections() []*commonbackend.ProxyClientConnection {
+ defer backend.arrayPropMutex.Unlock()
+ backend.arrayPropMutex.Lock()
+
+ return backend.clients
+}
+
+func (backend *SSHBackend) CheckParametersForConnections(clientParameters *commonbackend.CheckClientParameters) *commonbackend.CheckParametersResponse {
+ if clientParameters.Protocol != "tcp" {
+ return &commonbackend.CheckParametersResponse{
+ IsValid: false,
+ Message: "Only TCP is supported for SSH",
+ }
+ }
+
+ return &commonbackend.CheckParametersResponse{
+ IsValid: true,
+ }
+}
+
+func (backend *SSHBackend) CheckParametersForBackend(arguments []byte) *commonbackend.CheckParametersResponse {
+ var backendData SSHBackendData
+
+ if validatorInstance == nil {
+ validatorInstance = validator.New()
+ }
+
+ if err := json.Unmarshal(arguments, &backendData); err != nil {
+ return &commonbackend.CheckParametersResponse{
+ IsValid: false,
+ Message: fmt.Sprintf("could not read json: %s", err.Error()),
+ }
+ }
+
+ if err := validatorInstance.Struct(&backendData); err != nil {
+ return &commonbackend.CheckParametersResponse{
+ IsValid: false,
+ Message: fmt.Sprintf("failed validation of parameters: %s", err.Error()),
+ }
+ }
+
+ return &commonbackend.CheckParametersResponse{
+ IsValid: true,
+ }
+}
+
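+// backendKeepaliveHandler sends an OpenSSH keepalive request every 5 seconds
+// and stops once a keepalive fails (the disconnect handler takes over from there).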
+func (backend *SSHBackend) backendKeepaliveHandler() {
+ for {
+ if backend.conn != nil {
+ _, _, err := backend.conn.SendRequest("keepalive@openssh.com", true, nil)
+
+ if err != nil {
+ log.Warn("Keepalive message failed!")
+ return
+ }
+ }
+
+ time.Sleep(5 * time.Second)
+ }
+}
+
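+// backendDisconnectHandler blocks until the SSH connection drops, then retries
+// the connection every 5 seconds and re-registers all known proxies once a
+// reconnect succeeds.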
+func (backend *SSHBackend) backendDisconnectHandler() {
+ for {
+ if backend.conn != nil {
+ backend.conn.Wait()
+ backend.conn.Close()
+
+ backend.isReady = false
+ backend.inReinitLoop = true
+
+ log.Info("Disconnected from the remote SSH server. Attempting to reconnect in 5 seconds...")
+ } else {
+ log.Info("Retrying reconnection in 5 seconds...")
+ }
+
+ time.Sleep(5 * time.Second)
+
+		// Make the connection nil to accurately report our status in case GetBackendStatus is called
+ backend.conn = nil
+
+		// Reuse the connection setup from the second half of StartBackend
+ signer, err := ssh.ParsePrivateKey([]byte(backend.config.PrivateKey))
+
+ if err != nil {
+ log.Errorf("Failed to parse private key: %s", err.Error())
+ return
+ }
+
+ auth := ssh.PublicKeys(signer)
+
+ config := &ssh.ClientConfig{
+ HostKeyCallback: ssh.InsecureIgnoreHostKey(),
+ User: backend.config.Username,
+ Auth: []ssh.AuthMethod{
+ auth,
+ },
+ }
+
+ addr := fmt.Sprintf("%s:%d", backend.config.IP, backend.config.Port)
+		timeout := 10 * time.Second
+
+ rawTCPConn, err := net.DialTimeout("tcp", addr, timeout)
+
+ if err != nil {
+ log.Errorf("Failed to establish connection to the server: %s", err.Error())
+ continue
+ }
+
+ connWithTimeout := &ConnWithTimeout{
+ Conn: rawTCPConn,
+ ReadTimeout: timeout,
+ WriteTimeout: timeout,
+ }
+
+ c, chans, reqs, err := ssh.NewClientConn(connWithTimeout, addr, config)
+
+ if err != nil {
+ log.Errorf("Failed to create SSH client connection: %s", err.Error())
+ rawTCPConn.Close()
+ continue
+ }
+
+ client := ssh.NewClient(c, chans, reqs)
+ backend.conn = client
+
+ if !backend.config.DisablePIDCheck {
+ if backend.pid != 0 {
+ session, err := client.NewSession()
+
+ if err != nil {
+ log.Warnf("Failed to create SSH command session: %s", err.Error())
+ return
+ }
+
+ err = session.Run(fmt.Sprintf("kill -9 %d", backend.pid))
+
+ if err != nil {
+ log.Warnf("Failed to kill process: %s", err.Error())
+ }
+ }
+
+ session, err := client.NewSession()
+
+ if err != nil {
+ log.Warnf("Failed to create SSH command session: %s", err.Error())
+ return
+ }
+
+ // Get the parent PID of the shell so we can kill it if we disconnect
+ output, err := session.Output("ps --no-headers -fp $$ | awk '{print $3}'")
+
+ if err != nil {
+ log.Warnf("Failed to execute command to fetch PID: %s", err.Error())
+ return
+ }
+
+			// Strip the trailing newline and convert to int
+ backend.pid, err = strconv.Atoi(string(output)[:len(output)-1])
+
+ if err != nil {
+ log.Warnf("Failed to parse PID: %s", err.Error())
+ return
+ }
+ }
+
+ go backend.backendKeepaliveHandler()
+
+ log.Info("SSHBackend has reconnected successfully. Attempting to set up proxies again...")
+
+ for _, proxy := range backend.proxies {
+ ok, err := backend.StartProxy(&commonbackend.AddProxy{
+ SourceIP: proxy.SourceIP,
+ SourcePort: proxy.SourcePort,
+ DestPort: proxy.DestPort,
+ Protocol: proxy.Protocol,
+ })
+
+ if err != nil {
+ log.Errorf("Failed to set up proxy: %s", err.Error())
+ continue
+ }
+
+ if !ok {
+ log.Errorf("Failed to set up proxy: OK status is false")
+ continue
+ }
+ }
+
+ log.Info("SSHBackend has reinitialized and restored state successfully.")
+ }
+}
+
+func main() {
+ logLevel := os.Getenv("HERMES_LOG_LEVEL")
+
+ if logLevel != "" {
+ switch logLevel {
+ case "debug":
+ log.SetLevel(log.DebugLevel)
+
+ case "info":
+ log.SetLevel(log.InfoLevel)
+
+ case "warn":
+ log.SetLevel(log.WarnLevel)
+
+ case "error":
+ log.SetLevel(log.ErrorLevel)
+
+ case "fatal":
+ log.SetLevel(log.FatalLevel)
+ }
+ }
+
+ backend := &SSHBackend{}
+
+ application := backendutil.NewHelper(backend)
+ err := application.Start()
+
+ if err != nil {
+ log.Fatalf("failed execution in application: %s", err.Error())
+ }
+}
diff --git a/docker-compose.yml b/docker-compose.yml
index 9cf879f..a549035 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,30 +1,19 @@
services:
api:
- image: ghcr.io/greysoh/nextnet:latest
- container_name: nextnet-api
+ image: ghcr.io/imterah/hermes:latest
+ container_name: hermes-api
restart: always
- env_file:
- - .env
+ environment:
+ DATABASE_URL: postgresql://${POSTGRES_USERNAME}:${POSTGRES_PASSWORD}@nextnet-postgres:5432/${POSTGRES_DB}?schema=nextnet
+ HERMES_POSTGRES_DSN: postgres://${POSTGRES_USERNAME}:${POSTGRES_PASSWORD}@nextnet-postgres:5432/${POSTGRES_DB}
+ HERMES_JWT_SECRET: ${JWT_SECRET}
+ HERMES_DATABASE_BACKEND: postgresql
depends_on:
- db
ports:
- 3000:3000
-
- # NOTE: For this to work correctly, the nextnet-api must be version > 0.1.1
- # or have a version with backported username support, incl. logins
- lom:
- image: ghcr.io/greysoh/nextnet-lom:latest
- container_name: nextnet-lom
- restart: always
- ports:
- - 2222:2222
- depends_on:
- - api
- volumes:
- - ssh_key_data:/app/keys
-
db:
- image: postgres:15.4
+ image: postgres:17.2
container_name: nextnet-postgres
restart: unless-stopped
environment:
@@ -33,7 +22,6 @@ services:
POSTGRES_USER: ${POSTGRES_USERNAME}
volumes:
- postgres_data:/var/lib/postgresql/data
-
volumes:
postgres_data:
ssh_key_data:
diff --git a/docs/nextnet_to_hermes_migration.md b/docs/nextnet_to_hermes_migration.md
new file mode 100644
index 0000000..dd15318
--- /dev/null
+++ b/docs/nextnet_to_hermes_migration.md
@@ -0,0 +1,43 @@
+# NextNet to Hermes migration
+## General Environment Variables
+Below are existing environment variables that carry over from NextNet to Hermes with their values unchanged; only the variable names change:
+ * `IS_SIGNUP_ENABLED` -> `HERMES_SIGNUP_ENABLED`
+ * `UNSAFE_ADMIN_SIGNUP` -> `HERMES_UNSAFE_ADMIN_SIGNUP_ENABLED`
+
+Below are new environment variables that may need to be set up:
+ * `HERMES_FORCE_DISABLE_REFRESH_TOKEN_EXPIRY`: Disables refresh token expiry for Hermes. Instead of the singular token structure used
+ by NextNet, there is now a refresh token and JWT token combination.
+ * `HERMES_LOG_LEVEL`: Log level for Hermes and its backends.
+ * `HERMES_DEVELOPMENT_MODE`: Development mode for Hermes, disabling security features.
+ * `HERMES_LISTENING_ADDRESS`: Address to listen on for the API server. Example: `0.0.0.0:8000`.
+ * `HERMES_TRUSTED_HTTP_PROXIES`: List of trusted HTTP proxies separated by commas.
+## Database-Related Environment Variables
+ * `HERMES_DATABASE_BACKEND`: Can be either `sqlite` for the embedded SQLite database, or `postgresql` for PostgreSQL support (see the example below).
+ * `HERMES_SQLITE_FILEPATH`: Path for the SQLite database to use.
+ * `HERMES_POSTGRES_DSN`: PostgreSQL DSN for Golang. An example value which should work with minimal changes for PostgreSQL databases is `postgres://username:password@localhost:5432/database_name`.
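+
+For example, a minimal database configuration could look like either of the sketches below (the SQLite file path is only an illustrative placeholder; pick exactly one backend):
+
+```bash
+# Option A: embedded SQLite (hypothetical file path)
+HERMES_DATABASE_BACKEND=sqlite
+HERMES_SQLITE_FILEPATH=/var/lib/hermes/hermes.db
+
+# Option B: PostgreSQL
+HERMES_DATABASE_BACKEND=postgresql
+HERMES_POSTGRES_DSN=postgres://username:password@localhost:5432/database_name
+```
+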
+## Migration steps
+1. Remove all old environment variables.
+2. Add these variables (see the sketch after these steps):
+ - `HERMES_MIGRATE_POSTGRES_DATABASE` -> `${POSTGRES_DB}`
+ - `HERMES_DATABASE_BACKEND` -> `postgresql`
+ - `HERMES_POSTGRES_DSN` -> `postgres://${POSTGRES_USERNAME}:${POSTGRES_PASSWORD}@nextnet-postgres:5432/${POSTGRES_DB}`
+ - `DATABASE_URL` -> `postgresql://${POSTGRES_USERNAME}:${POSTGRES_PASSWORD}@nextnet-postgres:5432/${POSTGRES_DB}?schema=nextnet`
+ - `HERMES_JWT_SECRET` -> Random data (recommended to use `head -c 500 /dev/random | sha512sum | cut -d " " -f 1` to seed the data)
+3. Switch the API docker image from `ghcr.io/imterah/nextnet:latest` to `ghcr.io/imterah/hermes-backend-migration:latest`
+4. Change the exposed ports from `3000:3000` to `3000:8000`.
+5. Start the Docker compose stack.
+6. Check the container logs and make sure no errors were printed.
+7. Copy the backup as instructed in the log file.
+8. IF THE MIGRATION SUCCEEDED, DO NOT RESTART THE CONTAINER: YOU WILL LOSE ALL YOUR DATA. If the migration fails, follow the steps mentioned in the logs. You do not need to copy the DB backup if the migration failed to connect to or read the database.
+9. If successful, remove the environment variables `HERMES_MIGRATE_POSTGRES_DATABASE` and `DATABASE_URL`.
+10. Switch the API docker image from `ghcr.io/imterah/hermes-backend-migration:latest` to `ghcr.io/imterah/hermes:latest`.
+11. Start the backend.
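+
+As a rough sketch, the values from step 2 can be assembled like this before adding them to the API service (the `POSTGRES_*` values are whatever your existing stack already uses):
+
+```bash
+# Generate a random JWT secret, as recommended above
+HERMES_JWT_SECRET="$(head -c 500 /dev/random | sha512sum | cut -d " " -f 1)"
+
+# Variables to add to the API service for the migration run
+HERMES_MIGRATE_POSTGRES_DATABASE="${POSTGRES_DB}"
+HERMES_DATABASE_BACKEND="postgresql"
+HERMES_POSTGRES_DSN="postgres://${POSTGRES_USERNAME}:${POSTGRES_PASSWORD}@nextnet-postgres:5432/${POSTGRES_DB}"
+DATABASE_URL="postgresql://${POSTGRES_USERNAME}:${POSTGRES_PASSWORD}@nextnet-postgres:5432/${POSTGRES_DB}?schema=nextnet"
+```
+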
+## Failed Migration / Manual Restoration Steps
+1. Complete the ordinary migration steps up to step 4.
+2. Add the `entrypoint` option in the API compose section, and set it to `/bin/bash`
+3. Add the `command` option in the API compose section, and set it to `"-c 'sleep 10000'"`
+4. Get a shell in the container (likely named `nextnet-api`): `docker exec -it nextnet-api /bin/bash`
+5. Copy the base64 section (excluding the `BEGIN` and `END` portions) of the backup, and run the following command to begin the transfer (steps 5-8 are consolidated in the sketch after this list): `cat >> /tmp/db.json.gz.b64 << EOF`
+6. Paste in the base64 data, and then press enter, type `EOF`, and then press enter again. This should return you to the shell prompt.
+7. Decode the base64 backup: `cat /tmp/db.json.gz.b64 | base64 -d > /tmp/db.json.gz`
+8. Run the migration script: `./entrypoint.sh`
+9. When done, remove the `entrypoint` and `command` sections, and then jump to step 9 in the ordinary migration steps.
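+
+For reference, steps 5-8 boil down to something like the following inside the container (the base64 payload between the heredoc markers comes from your own backup output):
+
+```bash
+# Steps 5-6: paste the base64 body of the backup between the heredoc markers
+cat >> /tmp/db.json.gz.b64 << EOF
+<paste the base64 data here>
+EOF
+
+# Step 7: decode the backup
+cat /tmp/db.json.gz.b64 | base64 -d > /tmp/db.json.gz
+
+# Step 8: run the migration script
+./entrypoint.sh
+```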
diff --git a/docs/profiling.md b/docs/profiling.md
new file mode 100644
index 0000000..06ceb7d
--- /dev/null
+++ b/docs/profiling.md
@@ -0,0 +1,6 @@
+# Profiling
+To profile any backend code based on `backendutil`, follow these steps:
+1. Rebuild the backend with the `debug` build tag: `cd $BACKEND_HERE; GOOS=linux go build -tags debug .; cd ..`
+2. Copy the binary to the target machine (if applicable), and stop the API server.
+3. To profile CPU utilization, write `cpu` to the file `/tmp/hermes.backendlauncher.profilebackends`: `echo -n "cpu" > /tmp/hermes.backendlauncher.profilebackends`. To profile memory instead, write `mem`.
+4. Start the API server with development mode and debug logging enabled (a consolidated example follows these steps).
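+
+Putting these steps together, a CPU profiling run could look roughly like the sketch below (`$BACKEND_HERE` and the final start command are placeholders for your backend directory and your actual API deployment, and the `true` value for `HERMES_DEVELOPMENT_MODE` is assumed here):
+
+```bash
+# Step 1: rebuild the backend with the debug build tag
+cd $BACKEND_HERE; GOOS=linux go build -tags debug .; cd ..
+
+# Step 2: copy the binary to the target machine if needed, and stop the API server (not shown)
+
+# Step 3: request CPU profiling (write "mem" instead to profile memory)
+echo -n "cpu" > /tmp/hermes.backendlauncher.profilebackends
+
+# Step 4: start the API server with development mode and debug logging enabled
+HERMES_DEVELOPMENT_MODE=true HERMES_LOG_LEVEL=debug ./your-api-server-command
+```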
diff --git a/docs/troubleshooting.md b/docs/troubleshooting.md
new file mode 100644
index 0000000..75324d3
--- /dev/null
+++ b/docs/troubleshooting.md
@@ -0,0 +1,4 @@
+# Troubleshooting
+
+* I'm using SSH tunneling, and I can't reach any of the tunnels publicly.
+  - Be sure to enable `GatewayPorts` in your sshd configuration (`/etc/ssh/sshd_config` on most systems), as in the example below. Also check the firewall rules on both the host and your network.
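+
+    A minimal check-and-fix sketch, assuming a systemd-managed sshd (the service name and the exact `GatewayPorts` value you want may differ on your system):
+
+    ```bash
+    # Print the effective setting; "gatewayports no" means remote forwards only bind to loopback
+    sudo sshd -T | grep -i gatewayports
+
+    # Enable it and restart sshd so the change takes effect
+    echo "GatewayPorts yes" | sudo tee -a /etc/ssh/sshd_config
+    sudo systemctl restart sshd
+    ```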
diff --git a/frontend/commands/users/create.go b/frontend/commands/users/create.go
new file mode 100644
index 0000000..3d6c94b
--- /dev/null
+++ b/frontend/commands/users/create.go
@@ -0,0 +1,115 @@
+package users
+
+import (
+ "errors"
+ "fmt"
+ "os"
+ "syscall"
+
+ "git.terah.dev/imterah/hermes/apiclient"
+ "git.terah.dev/imterah/hermes/frontend/config"
+ "github.com/charmbracelet/log"
+ "github.com/urfave/cli/v2"
+ "golang.org/x/term"
+ "gopkg.in/yaml.v3"
+)
+
+func CreateUserCommand(cCtx *cli.Context) error {
+ configPath := cCtx.String("config-path")
+
+ var configContents *config.Config
+
+ _, err := os.Stat(configPath)
+
+ if err != nil {
+ if errors.Is(err, os.ErrNotExist) {
+ configContents = &config.Config{}
+ } else {
+ return fmt.Errorf("failed to get configuration file information: %s", err.Error())
+ }
+ } else {
+ configContents, err = config.ReadAndParseConfig(configPath)
+
+ if err != nil {
+ return fmt.Errorf("failed to read and parse configuration file: %s", err.Error())
+ }
+ }
+
+ username := cCtx.String("username")
+
+ if username == "" {
+ if configContents.Username == "" {
+ return fmt.Errorf("username not specified and username is not in the configuration file")
+ }
+
+ username = configContents.Username
+ }
+
+ var password string
+
+ if cCtx.Bool("ask-password") {
+ fmt.Print("Password: ")
+ passwordBytes, err := term.ReadPassword(int(syscall.Stdin))
+ fmt.Print("\n")
+
+ if err != nil {
+ return fmt.Errorf("failed to read password from console: %s", err.Error())
+ }
+
+ password = string(passwordBytes)
+ } else {
+ password = cCtx.String("password")
+
+ if password == "" {
+ return fmt.Errorf("password is not specified and password asking is not enabled")
+ }
+ }
+
+ var serverURL string
+
+ if cCtx.String("server-url") == "" {
+ if configContents.APIPath == "" {
+ return fmt.Errorf("server URL not specified and server URL is not in the configuration file")
+ }
+
+ serverURL = configContents.APIPath
+ } else {
+ serverURL = cCtx.String("server-url")
+ }
+
+ fullName := cCtx.String("full-name")
+ email := cCtx.String("email")
+ isBot := cCtx.Bool("user-is-bot")
+
+ log.Info("Creating user...")
+
+ api := &apiclient.HermesAPIClient{
+ URL: serverURL,
+ }
+
+ refreshToken, err := api.UserCreate(fullName, username, email, password, isBot)
+
+ if err != nil {
+ return fmt.Errorf("failed to create user: %s", err.Error())
+ }
+
+ log.Info("Successfully created user.")
+
+ if cCtx.Bool("do-not-save-configuration") {
+ return nil
+ }
+
+ configContents.Username = username
+ configContents.RefreshToken = refreshToken
+ configContents.APIPath = serverURL
+
+ data, err := yaml.Marshal(configContents)
+
+ if err != nil {
+ return fmt.Errorf("failed to marshal configuration data: %s", err.Error())
+ }
+
+	if err := os.WriteFile(configPath, data, 0644); err != nil {
+		return fmt.Errorf("failed to write configuration file: %s", err.Error())
+	}
+
+ return nil
+}
diff --git a/frontend/commands/users/login.go b/frontend/commands/users/login.go
new file mode 100644
index 0000000..8248b1f
--- /dev/null
+++ b/frontend/commands/users/login.go
@@ -0,0 +1,98 @@
+package users
+
+import (
+ "errors"
+ "fmt"
+ "os"
+ "syscall"
+
+ "git.terah.dev/imterah/hermes/apiclient"
+ "git.terah.dev/imterah/hermes/frontend/config"
+ "github.com/charmbracelet/log"
+ "github.com/urfave/cli/v2"
+ "golang.org/x/term"
+ "gopkg.in/yaml.v3"
+)
+
+func GetRefreshTokenCommand(cCtx *cli.Context) error {
+ configPath := cCtx.String("config-path")
+
+ var configContents *config.Config
+
+ _, err := os.Stat(configPath)
+
+ if err != nil {
+ if errors.Is(err, os.ErrNotExist) {
+ configContents = &config.Config{}
+ } else {
+ return fmt.Errorf("failed to get configuration file information: %s", err.Error())
+ }
+ } else {
+ configContents, err = config.ReadAndParseConfig(configPath)
+
+ if err != nil {
+ return fmt.Errorf("failed to read and parse configuration file: %s", err.Error())
+ }
+ }
+
+ var username string
+ var password string
+
+ if cCtx.String("username") == "" {
+ if configContents.Username == "" {
+ return fmt.Errorf("username not specified and username is not in the configuration file")
+ }
+
+ username = configContents.Username
+ } else {
+ username = cCtx.String("username")
+ }
+
+ if cCtx.Bool("ask-password") {
+ fmt.Print("Password: ")
+ passwordBytes, err := term.ReadPassword(int(syscall.Stdin))
+ fmt.Print("\n")
+
+ if err != nil {
+ return fmt.Errorf("failed to read password from console: %s", err.Error())
+ }
+
+ password = string(passwordBytes)
+ } else {
+ password = cCtx.String("password")
+
+ if password == "" {
+ return fmt.Errorf("password is not specified and password asking is not enabled")
+ }
+ }
+
+ serverURL := cCtx.String("server-url")
+ log.Info("Authenticating with API...")
+
+ api := &apiclient.HermesAPIClient{
+ URL: serverURL,
+ }
+
+ refreshToken, err := api.UserGetRefreshToken(&username, nil, password)
+
+ if err != nil {
+ return fmt.Errorf("failed to authenticate with the API: %s", err.Error())
+ }
+
+ configContents.Username = username
+ configContents.RefreshToken = refreshToken
+ configContents.APIPath = serverURL
+
+ log.Info("Writing configuration file...")
+ data, err := yaml.Marshal(configContents)
+
+ if err != nil {
+ return fmt.Errorf("failed to marshal configuration data: %s", err.Error())
+ }
+
+ log.Infof("config path: %s", configPath)
+
+	if err := os.WriteFile(configPath, data, 0644); err != nil {
+		return fmt.Errorf("failed to write configuration file: %s", err.Error())
+	}
+
+ return nil
+}
diff --git a/frontend/config/config.go b/frontend/config/config.go
new file mode 100644
index 0000000..ff9cb69
--- /dev/null
+++ b/frontend/config/config.go
@@ -0,0 +1,30 @@
+package config
+
+import (
+ "os"
+
+ "gopkg.in/yaml.v3"
+)
+
+type Config struct {
+	Username     string `yaml:"username"`
+	RefreshToken string `yaml:"token"`
+	APIPath      string `yaml:"api_path"`
+}
+
+func ReadAndParseConfig(configFile string) (*Config, error) {
+ configFileContents, err := os.ReadFile(configFile)
+
+ if err != nil {
+ return nil, err
+ }
+
+ config := &Config{}
+ err = yaml.Unmarshal(configFileContents, config)
+
+ if err != nil {
+ return nil, err
+ }
+
+ return config, nil
+}
diff --git a/frontend/dev.env b/frontend/dev.env
new file mode 100644
index 0000000..ccd7c30
--- /dev/null
+++ b/frontend/dev.env
@@ -0,0 +1 @@
+HERMES_LOG_LEVEL=debug
diff --git a/frontend/main.go b/frontend/main.go
new file mode 100644
index 0000000..95b366f
--- /dev/null
+++ b/frontend/main.go
@@ -0,0 +1,143 @@
+package main
+
+import (
+ "os"
+ "path"
+
+ "git.terah.dev/imterah/hermes/frontend/commands/users"
+ "github.com/charmbracelet/log"
+ "github.com/urfave/cli/v2"
+)
+
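+// Example usage, assuming the built binary is named after the app ("hermcli")
+// and that the server URL below is a placeholder for your own deployment:
+//
+//	hermcli login --username alice --server-url https://hermes.example.com --ask-password
+//	hermcli users create --full-name "Alice Example" --username alice \
+//	    --email alice@example.com --server-url https://hermes.example.com --ask-password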
+func main() {
+ logLevel := os.Getenv("HERMES_LOG_LEVEL")
+
+ if logLevel != "" {
+ switch logLevel {
+ case "debug":
+ log.SetLevel(log.DebugLevel)
+
+ case "info":
+ log.SetLevel(log.InfoLevel)
+
+ case "warn":
+ log.SetLevel(log.WarnLevel)
+
+ case "error":
+ log.SetLevel(log.ErrorLevel)
+
+ case "fatal":
+ log.SetLevel(log.FatalLevel)
+ }
+ }
+
+ configDir, err := os.UserConfigDir()
+
+ if err != nil {
+ log.Fatalf("Failed to get configuration directory: %s", err.Error())
+ }
+
+ app := &cli.App{
+ Name: "hermcli",
+ Usage: "client for Hermes -- port forwarding across boundaries",
+ Flags: []cli.Flag{
+ &cli.StringFlag{
+ Name: "config-path",
+ Aliases: []string{"config", "cp", "c"},
+ Value: path.Join(configDir, "hermcli.yml"),
+ },
+ },
+ Commands: []*cli.Command{
+ {
+ Name: "login",
+ Usage: "log in to the API",
+ Action: users.GetRefreshTokenCommand,
+ Aliases: []string{"l"},
+ Flags: []cli.Flag{
+ &cli.StringFlag{
+ Name: "username",
+ Aliases: []string{"user", "u"},
+ Usage: "username to authenticate as",
+ },
+ &cli.StringFlag{
+ Name: "password",
+ Aliases: []string{"pass", "p"},
+ Usage: "password to authenticate with",
+ },
+ &cli.StringFlag{
+ Name: "server-url",
+ Aliases: []string{"server", "s"},
+ Usage: "URL of the server to authenticate with",
+ },
+ &cli.BoolFlag{
+ Name: "ask-password",
+ Aliases: []string{"ask-pass", "ap"},
+ Usage: "asks you the password to authenticate with",
+ },
+ },
+ },
+ {
+ Name: "users",
+ Usage: "user management commands",
+ Aliases: []string{"u"},
+ Subcommands: []*cli.Command{
+ {
+ Name: "create",
+ Aliases: []string{"c"},
+ Usage: "create a user",
+ Action: users.CreateUserCommand,
+ Flags: []cli.Flag{
+ &cli.StringFlag{
+ Name: "full-name",
+ Aliases: []string{"name", "n"},
+ Usage: "full name for the user",
+ Required: true,
+ },
+ &cli.StringFlag{
+ Name: "username",
+ Aliases: []string{"user", "us"},
+ Usage: "username to give the user",
+ Required: true,
+ },
+ &cli.StringFlag{
+ Name: "email",
+ Aliases: []string{"e"},
+ Usage: "email to give the user",
+ Required: true,
+ },
+ &cli.StringFlag{
+ Name: "password",
+ Aliases: []string{"pass", "p"},
+ Usage: "password to give the user",
+ },
+ &cli.StringFlag{
+ Name: "server-url",
+ Aliases: []string{"server", "s"},
+ Usage: "URL of the server to connect with",
+ },
+ &cli.BoolFlag{
+ Name: "ask-password",
+ Aliases: []string{"ask-pass", "ap"},
+ Usage: "asks you the password to give the user",
+ },
+ &cli.BoolFlag{
+ Name: "user-is-bot",
+ Aliases: []string{"user-bot", "ub", "u"},
+ Usage: "if set, makes the user flagged as a bot",
+ },
+ &cli.BoolFlag{
+ Name: "do-not-save-configuration",
+ Aliases: []string{"no-save", "ns"},
+ Usage: "doesn't save the authenticated user credentials",
+ },
+ },
+ },
+ },
+ },
+ },
+ }
+
+ if err := app.Run(os.Args); err != nil {
+ log.Fatal(err)
+ }
+}
diff --git a/go.mod b/go.mod
new file mode 100644
index 0000000..b390542
--- /dev/null
+++ b/go.mod
@@ -0,0 +1,67 @@
+module git.terah.dev/imterah/hermes
+
+go 1.23.3
+
+require (
+ github.com/charmbracelet/log v0.4.0
+ github.com/gin-gonic/gin v1.10.0
+ github.com/go-playground/validator/v10 v10.23.0
+ github.com/golang-jwt/jwt/v5 v5.2.1
+ github.com/pkg/sftp v1.13.7
+ github.com/urfave/cli/v2 v2.27.5
+ golang.org/x/crypto v0.31.0
+ golang.org/x/exp v0.0.0-20231006140011-7918f672742d
+ golang.org/x/term v0.28.0
+ gopkg.in/yaml.v3 v3.0.1
+ gorm.io/driver/postgres v1.5.11
+ gorm.io/driver/sqlite v1.5.7
+ gorm.io/gorm v1.25.12
+)
+
+require (
+ github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
+ github.com/bytedance/sonic v1.12.6 // indirect
+ github.com/bytedance/sonic/loader v0.2.1 // indirect
+ github.com/charmbracelet/lipgloss v0.10.0 // indirect
+ github.com/cloudwego/base64x v0.1.4 // indirect
+ github.com/cloudwego/iasm v0.2.0 // indirect
+ github.com/cpuguy83/go-md2man/v2 v2.0.5 // indirect
+ github.com/gabriel-vasile/mimetype v1.4.7 // indirect
+ github.com/gin-contrib/sse v0.1.0 // indirect
+ github.com/go-logfmt/logfmt v0.6.0 // indirect
+ github.com/go-playground/locales v0.14.1 // indirect
+ github.com/go-playground/universal-translator v0.18.1 // indirect
+ github.com/goccy/go-json v0.10.4 // indirect
+ github.com/jackc/pgpassfile v1.0.0 // indirect
+ github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
+ github.com/jackc/pgx/v5 v5.7.2 // indirect
+ github.com/jackc/puddle/v2 v2.2.2 // indirect
+ github.com/jinzhu/inflection v1.0.0 // indirect
+ github.com/jinzhu/now v1.1.5 // indirect
+ github.com/json-iterator/go v1.1.12 // indirect
+ github.com/klauspost/cpuid/v2 v2.2.9 // indirect
+ github.com/kr/fs v0.1.0 // indirect
+ github.com/kr/text v0.2.0 // indirect
+ github.com/leodido/go-urn v1.4.0 // indirect
+ github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
+ github.com/mattn/go-isatty v0.0.20 // indirect
+ github.com/mattn/go-runewidth v0.0.15 // indirect
+ github.com/mattn/go-sqlite3 v1.14.24 // indirect
+ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
+ github.com/modern-go/reflect2 v1.0.2 // indirect
+ github.com/muesli/reflow v0.3.0 // indirect
+ github.com/muesli/termenv v0.15.2 // indirect
+ github.com/pelletier/go-toml/v2 v2.2.3 // indirect
+ github.com/rivo/uniseg v0.4.7 // indirect
+ github.com/rogpeppe/go-internal v1.13.1 // indirect
+ github.com/russross/blackfriday/v2 v2.1.0 // indirect
+ github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
+ github.com/ugorji/go/codec v1.2.12 // indirect
+ github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 // indirect
+ golang.org/x/arch v0.12.0 // indirect
+ golang.org/x/net v0.33.0 // indirect
+ golang.org/x/sync v0.10.0 // indirect
+ golang.org/x/sys v0.29.0 // indirect
+ golang.org/x/text v0.21.0 // indirect
+ google.golang.org/protobuf v1.36.0 // indirect
+)
diff --git a/go.sum b/go.sum
new file mode 100644
index 0000000..dd30942
--- /dev/null
+++ b/go.sum
@@ -0,0 +1,190 @@
+github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k=
+github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8=
+github.com/bytedance/sonic v1.12.6 h1:/isNmCUF2x3Sh8RAp/4mh4ZGkcFAX/hLrzrK3AvpRzk=
+github.com/bytedance/sonic v1.12.6/go.mod h1:B8Gt/XvtZ3Fqj+iSKMypzymZxw/FVwgIGKzMzT9r/rk=
+github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
+github.com/bytedance/sonic/loader v0.2.1 h1:1GgorWTqf12TA8mma4DDSbaQigE2wOgQo7iCjjJv3+E=
+github.com/bytedance/sonic/loader v0.2.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
+github.com/charmbracelet/lipgloss v0.10.0 h1:KWeXFSexGcfahHX+54URiZGkBFazf70JNMtwg/AFW3s=
+github.com/charmbracelet/lipgloss v0.10.0/go.mod h1:Wig9DSfvANsxqkRsqj6x87irdy123SR4dOXlKa91ciE=
+github.com/charmbracelet/log v0.4.0 h1:G9bQAcx8rWA2T3pWvx7YtPTPwgqpk7D68BX21IRW8ZM=
+github.com/charmbracelet/log v0.4.0/go.mod h1:63bXt/djrizTec0l11H20t8FDSvA4CRZJ1KH22MdptM=
+github.com/cloudwego/base64x v0.1.4 h1:jwCgWpFanWmN8xoIUHa2rtzmkd5J2plF/dnLS6Xd/0Y=
+github.com/cloudwego/base64x v0.1.4/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w=
+github.com/cloudwego/iasm v0.2.0 h1:1KNIy1I1H9hNNFEEH3DVnI4UujN+1zjpuk6gwHLTssg=
+github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY=
+github.com/cpuguy83/go-md2man/v2 v2.0.5 h1:ZtcqGrnekaHpVLArFSe4HK5DoKx1T0rq2DwVB0alcyc=
+github.com/cpuguy83/go-md2man/v2 v2.0.5/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
+github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/gabriel-vasile/mimetype v1.4.7 h1:SKFKl7kD0RiPdbht0s7hFtjl489WcQ1VyPW8ZzUMYCA=
+github.com/gabriel-vasile/mimetype v1.4.7/go.mod h1:GDlAgAyIRT27BhFl53XNAFtfjzOkLaF35JdEG0P7LtU=
+github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=
+github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=
+github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU=
+github.com/gin-gonic/gin v1.10.0/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y=
+github.com/go-logfmt/logfmt v0.6.0 h1:wGYYu3uicYdqXVgoYbvnkrPVXkuLM1p1ifugDMEdRi4=
+github.com/go-logfmt/logfmt v0.6.0/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs=
+github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
+github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
+github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
+github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
+github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
+github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
+github.com/go-playground/validator/v10 v10.23.0 h1:/PwmTwZhS0dPkav3cdK9kV1FsAmrL8sThn8IHr/sO+o=
+github.com/go-playground/validator/v10 v10.23.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
+github.com/goccy/go-json v0.10.4 h1:JSwxQzIqKfmFX1swYPpUThQZp/Ka4wzJdK0LWVytLPM=
+github.com/goccy/go-json v0.10.4/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
+github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk=
+github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
+github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg=
+github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
+github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
+github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
+github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
+github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo=
+github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
+github.com/jackc/pgx/v5 v5.7.2 h1:mLoDLV6sonKlvjIEsV56SkWNCnuNv531l94GaIzO+XI=
+github.com/jackc/pgx/v5 v5.7.2/go.mod h1:ncY89UGWxg82EykZUwSpUKEfccBGGYq1xjrOpsbsfGQ=
+github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo=
+github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
+github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
+github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
+github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ=
+github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
+github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
+github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
+github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
+github.com/klauspost/cpuid/v2 v2.2.9 h1:66ze0taIn2H33fBvCkXuv9BmCwDfafmiIVpKV9kKGuY=
+github.com/klauspost/cpuid/v2 v2.2.9/go.mod h1:rqkxqrZ1EhYM9G+hXH7YdowN5R5RGN6NK4QwQ3WMXF8=
+github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M=
+github.com/kr/fs v0.1.0 h1:Jskdu9ieNAYnjxsi0LbQp1ulIKZV1LAFgK1tWhpZgl8=
+github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=
+github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0=
+github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
+github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
+github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
+github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
+github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
+github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
+github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
+github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
+github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
+github.com/mattn/go-runewidth v0.0.12/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk=
+github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U=
+github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
+github.com/mattn/go-sqlite3 v1.14.24 h1:tpSp2G2KyMnnQu99ngJ47EIkWVmliIizyZBfPrBWDRM=
+github.com/mattn/go-sqlite3 v1.14.24/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
+github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
+github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
+github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
+github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
+github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
+github.com/muesli/reflow v0.3.0 h1:IFsN6K9NfGtjeggFP+68I4chLZV2yIKsXJFNZ+eWh6s=
+github.com/muesli/reflow v0.3.0/go.mod h1:pbwTDkVPibjO2kyvBQRBxTWEEGDGq0FlB1BIKtnHY/8=
+github.com/muesli/termenv v0.15.2 h1:GohcuySI0QmI3wN8Ok9PtKGkgkFIk7y6Vpb5PvrY+Wo=
+github.com/muesli/termenv v0.15.2/go.mod h1:Epx+iuz8sNs7mNKhxzH4fWXGNpZwUaJKRS1noLXviQ8=
+github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M=
+github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc=
+github.com/pkg/sftp v1.13.7 h1:uv+I3nNJvlKZIQGSr8JVQLNHFU9YhhNpvC14Y6KgmSM=
+github.com/pkg/sftp v1.13.7/go.mod h1:KMKI0t3T6hfA+lTR/ssZdunHo+uwq7ghoN09/FSu3DY=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
+github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
+github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
+github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
+github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII=
+github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o=
+github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
+github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
+github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
+github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
+github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
+github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
+github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
+github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
+github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
+github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
+github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE=
+github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
+github.com/urfave/cli/v2 v2.27.5 h1:WoHEJLdsXr6dDWoJgMq/CboDmyY/8HMMH1fTECbih+w=
+github.com/urfave/cli/v2 v2.27.5/go.mod h1:3Sevf16NykTbInEnD0yKkjDAeZDS0A6bzhBH5hrMvTQ=
+github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 h1:gEOO8jv9F4OT7lGCjxCBTO/36wtF6j2nSip77qHd4x4=
+github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1/go.mod h1:Ohn+xnUBiLI6FVj/9LpzZWtj1/D6lUovWYBkxHVV3aM=
+github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
+golang.org/x/arch v0.12.0 h1:UsYJhbzPYGsT0HbEdmYcqtCv8UNGvnaL561NnIUvaKg=
+golang.org/x/arch v0.12.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
+golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4=
+golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U=
+golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
+golang.org/x/exp v0.0.0-20231006140011-7918f672742d h1:jtJma62tbqLibJ5sFQz8bKtEM8rJBtfilJ2qTU199MI=
+golang.org/x/exp v0.0.0-20231006140011-7918f672742d/go.mod h1:ldy0pHrwJyGW56pPQzzkH36rKxoZW1tw7ZJpeKx+hdo=
+golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
+golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
+golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
+golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
+golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
+golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
+golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I=
+golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
+golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ=
+golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
+golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
+golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
+golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
+golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
+golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0=
+golang.org/x/term v0.28.0 h1:/Ts8HFuMR2E6IP/jlo7QVLZHggjKQbhu/7H0LJFr3Gg=
+golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
+golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
+golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
+golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
+golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
+golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
+golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
+golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
+golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+google.golang.org/protobuf v1.36.0 h1:mjIs9gYtt56AzC4ZaffQuh88TZurBGhIJMBZGSxNerQ=
+google.golang.org/protobuf v1.36.0/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
+gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
+gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gorm.io/driver/postgres v1.5.11 h1:ubBVAfbKEUld/twyKZ0IYn9rSQh448EdelLYk9Mv314=
+gorm.io/driver/postgres v1.5.11/go.mod h1:DX3GReXH+3FPWGrrgffdvCk3DQ1dwDPdmbenSkweRGI=
+gorm.io/driver/sqlite v1.5.7 h1:8NvsrhP0ifM7LX9G4zPB97NwovUakUxc+2V2uuf3Z1I=
+gorm.io/driver/sqlite v1.5.7/go.mod h1:U+J8craQU6Fzkcvu8oLeAQmi50TkwPEhHDEjQZXDah4=
+gorm.io/gorm v1.25.12 h1:I0u8i2hWQItBq1WfE0o2+WuL9+8L21K9e2HHSTE/0f8=
+gorm.io/gorm v1.25.12/go.mod h1:xh7N7RHfYlNc5EmcI/El95gXusucDrQnHXe0+CgWcLQ=
+nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50=
diff --git a/init.sh b/init.sh
index a09f3f0..65aaee0 100644
--- a/init.sh
+++ b/init.sh
@@ -1,5 +1,17 @@
-pushd api > /dev/null 2> /dev/null
-source init.sh
-git config --local include.path .gitconfig
-popd > /dev/null 2> /dev/null
\ No newline at end of file
+#!/usr/bin/env bash
+if [ ! -f "backend/.env" ]; then
+  cp backend/dev.env backend/.env
+fi
+
+if [ ! -d "backend/.tmp" ]; then
+ mkdir backend/.tmp
+fi
+
+if [ ! -f "frontend/.env" ]; then
+ cp frontend/dev.env frontend/.env
+fi
+
+set -a
+source backend/.env
+source frontend/.env
+set +a
diff --git a/lom/.gitignore b/lom/.gitignore
deleted file mode 100644
index 37df925..0000000
--- a/lom/.gitignore
+++ /dev/null
@@ -1,133 +0,0 @@
-# Output
-out
-
-# Logs
-logs
-*.log
-npm-debug.log*
-yarn-debug.log*
-yarn-error.log*
-lerna-debug.log*
-.pnpm-debug.log*
-
-# Diagnostic reports (https://nodejs.org/api/report.html)
-report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
-
-# Runtime data
-pids
-*.pid
-*.seed
-*.pid.lock
-
-# Directory for instrumented libs generated by jscoverage/JSCover
-lib-cov
-
-# Coverage directory used by tools like istanbul
-coverage
-*.lcov
-
-# nyc test coverage
-.nyc_output
-
-# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
-.grunt
-
-# Bower dependency directory (https://bower.io/)
-bower_components
-
-# node-waf configuration
-.lock-wscript
-
-# Compiled binary addons (https://nodejs.org/api/addons.html)
-build/Release
-
-# Dependency directories
-node_modules/
-jspm_packages/
-
-# Snowpack dependency directory (https://snowpack.dev/)
-web_modules/
-
-# TypeScript cache
-*.tsbuildinfo
-
-# Optional npm cache directory
-.npm
-
-# Optional eslint cache
-.eslintcache
-
-# Optional stylelint cache
-.stylelintcache
-
-# Microbundle cache
-.rpt2_cache/
-.rts2_cache_cjs/
-.rts2_cache_es/
-.rts2_cache_umd/
-
-# Optional REPL history
-.node_repl_history
-
-# Output of 'npm pack'
-*.tgz
-
-# Yarn Integrity file
-.yarn-integrity
-
-# dotenv environment variable files
-.env
-.env.development.local
-.env.test.local
-.env.production.local
-.env.local
-
-# parcel-bundler cache (https://parceljs.org/)
-.cache
-.parcel-cache
-
-# Next.js build output
-.next
-out
-
-# Nuxt.js build / generate output
-.nuxt
-dist
-
-# Gatsby files
-.cache/
-# Comment in the public line in if your project uses Gatsby and not Next.js
-# https://nextjs.org/blog/next-9-1#public-directory-support
-# public
-
-# vuepress build output
-.vuepress/dist
-
-# vuepress v2.x temp and cache directory
-.temp
-.cache
-
-# Docusaurus cache and generated files
-.docusaurus
-
-# Serverless directories
-.serverless/
-
-# FuseBox cache
-.fusebox/
-
-# DynamoDB Local files
-.dynamodb/
-
-# TernJS port file
-.tern-port
-
-# Stores VSCode versions used for testing VSCode extensions
-.vscode-test
-
-# yarn v2
-.yarn/cache
-.yarn/unplugged
-.yarn/build-state.yml
-.yarn/install-state.gz
-.pnp.*
diff --git a/lom/Dockerfile b/lom/Dockerfile
deleted file mode 100644
index 266d5d9..0000000
--- a/lom/Dockerfile
+++ /dev/null
@@ -1,14 +0,0 @@
-FROM node:20.11.1-bookworm
-LABEL org.opencontainers.image.source="https://github.com/greysoh/nextnet"
-WORKDIR /app/
-COPY src /app/src
-COPY tsconfig.json /app/
-COPY package.json /app/
-COPY package-lock.json /app/
-COPY docker-entrypoint.sh /app/
-RUN npm install --save-dev
-RUN npm run build
-RUN rm out/**/*.ts out/**/*.map
-RUN rm -rf src
-RUN npm prune --production
-ENTRYPOINT sh docker-entrypoint.sh
\ No newline at end of file
diff --git a/lom/LICENSE b/lom/LICENSE
deleted file mode 100644
index 8914588..0000000
--- a/lom/LICENSE
+++ /dev/null
@@ -1,28 +0,0 @@
-BSD 3-Clause License
-
-Copyright (c) 2024, Greyson
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are met:
-
-1. Redistributions of source code must retain the above copyright notice, this
- list of conditions and the following disclaimer.
-
-2. Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
-
-3. Neither the name of the copyright holder nor the names of its
- contributors may be used to endorse or promote products derived from
- this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/lom/README.md b/lom/README.md
deleted file mode 100644
index 16cb312..0000000
--- a/lom/README.md
+++ /dev/null
@@ -1,2 +0,0 @@
-# NextNet LOM
-Lights Out Management, NextNet style
\ No newline at end of file
diff --git a/lom/diff.diff b/lom/diff.diff
deleted file mode 100644
index 089939f..0000000
--- a/lom/diff.diff
+++ /dev/null
@@ -1,149 +0,0 @@
-diff --git a/api/src/backendimpl/passyfire-reimpl/routes.ts b/api/src/backendimpl/passyfire-reimpl/routes.ts
-index 2961483..4519a87 100644
---- a/api/src/backendimpl/passyfire-reimpl/routes.ts
-+++ b/api/src/backendimpl/passyfire-reimpl/routes.ts
-@@ -47,25 +47,6 @@ export function route(instance: PassyFireBackendProvider) {
-
- for (const spoofedRoute of unsupportedSpoofedRoutes) {
- fastify.post(spoofedRoute, (req, res) => {
-- if (typeof req.body != "string")
-- return res.status(400).send({
-- error: "Invalid token",
-- });
--
-- try {
-- JSON.parse(req.body);
-- } catch (e) {
-- return res.status(400).send({
-- error: "Invalid token",
-- });
-- }
--
-- // @ts-expect-error
-- if (!req.body.token)
-- return res.status(400).send({
-- error: "Invalid token",
-- });
--
- return res.status(403).send({
- error: "Invalid scope(s)",
- });
-diff --git a/lom/src/commands/backends.ts b/lom/src/commands/backends.ts
-index baba3d1..d16cac1 100644
---- a/lom/src/commands/backends.ts
-+++ b/lom/src/commands/backends.ts
-@@ -16,6 +16,18 @@ type BackendLookupSuccess = {
- }[];
- };
-
-+const addRequiredOptions = {
-+ ssh: [
-+ "sshKey",
-+ "username",
-+ "host",
-+ ],
-+
-+ passyfire: [
-+ "host"
-+ ]
-+};
-+
- export async function run(
- argv: string[],
- println: PrintLine,
-@@ -58,12 +70,12 @@ export async function run(
- );
-
- addBackend.option(
-- "-u, --username",
-+ "-u, --username ",
- "(SSH, PassyFire) Username to authenticate with. With PassyFire, it's the username you create",
- );
-
- addBackend.option(
-- "-h, --host",
-+ "-h, --host ",
- "(SSH, PassyFire) Host to connect to. With PassyFire, it's what you listen on",
- );
-
-@@ -86,10 +98,70 @@ export async function run(
- );
-
- addBackend.option(
-- "-p, --password",
-+ "-p, --password ",
- "(PassyFire) What password you want to use for the primary user",
- );
-
-+ addBackend.action(async(name: string, provider: string, options: {
-+ description?: string,
-+ forceCustomParameters?: boolean,
-+ customParameters?: string,
-+
-+ // SSH (mostly)
-+ sshKey?: string,
-+ username?: string,
-+ host?: string,
-+
-+ // PassyFire (mostly)
-+ isProxied?: boolean,
-+ proxiedPortStr?: number,
-+ guest?: boolean,
-+ userAsk?: boolean,
-+ password?: string
-+ }) => {
-+ // Yes it can index for what we need it to do.
-+ // @ts-expect-error
-+ const isUnsupportedPlatform: boolean = !addRequiredOptions[provider];
-+
-+ if (isUnsupportedPlatform) {
-+ println("WARNING: Platform is not natively supported by the LOM yet!\n");
-+ }
-+
-+ let connectionDetails: string = "";
-+
-+ if (options.forceCustomParameters || isUnsupportedPlatform) {
-+ if (typeof options.customParameters != "string") {
-+ return println("ERROR: You are missing the custom parameters option!\n");
-+ }
-+
-+ connectionDetails = options.customParameters;
-+ } else if (provider == "ssh") {
-+ for (const argument of addRequiredOptions["ssh"]) {
-+ // No.
-+ // @ts-expect-error
-+ const hasArgument = options[argument] as any;
-+
-+ if (!hasArgument) {
-+ return println("ERROR: Missing argument '%s'\n", hasArgument);
-+ };
-+ };
-+
-+ // todo!
-+ } else if (provider == "passyfire") {
-+ for (const argument of addRequiredOptions["passyfire"]) {
-+ // No.
-+ // @ts-expect-error
-+ const hasArgument = options[argument];
-+
-+ if (!hasArgument) {
-+ return println("ERROR: Missing argument '%s'\n", hasArgument);
-+ };
-+ };
-+
-+ // todo!
-+ }
-+ });
-+
- const removeBackend = new SSHCommand(println, "rm");
- removeBackend.description("Removes a backend");
-  removeBackend.argument("<id>", "ID of the backend");
-@@ -269,7 +341,7 @@ export async function run(
-
- // It would make sense to check this, then parse argv, however this causes issues with
- // the application name not displaying correctly.
--
-+
- if (argv.length == 1) {
- println("No arguments specified!\n\n");
- program.help();
diff --git a/lom/docker-entrypoint.sh b/lom/docker-entrypoint.sh
deleted file mode 100755
index 36942f1..0000000
--- a/lom/docker-entrypoint.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/bin/bash
-export NODE_ENV="production"
-
-if [[ "$SERVER_BASE_URL" == "" ]]; then
- export SERVER_BASE_URL="http://nextnet-api:3000/"
-fi
-
-npm start
diff --git a/lom/eslint.config.js b/lom/eslint.config.js
deleted file mode 100644
index 0afc6f7..0000000
--- a/lom/eslint.config.js
+++ /dev/null
@@ -1,19 +0,0 @@
-import globals from "globals";
-import pluginJs from "@eslint/js";
-import tseslint from "typescript-eslint";
-
-export default [
- pluginJs.configs.recommended,
- ...tseslint.configs.recommended,
-
- {
- languageOptions: {
- globals: globals.node,
- },
-
- rules: {
- "@typescript-eslint/no-explicit-any": "off",
- "no-constant-condition": "warn",
- },
- },
-];
diff --git a/lom/package-lock.json b/lom/package-lock.json
deleted file mode 100644
index cb94dbc..0000000
--- a/lom/package-lock.json
+++ /dev/null
@@ -1,2440 +0,0 @@
-{
- "name": "nextnet-lom",
- "version": "1.1.2",
- "lockfileVersion": 3,
- "requires": true,
- "packages": {
- "": {
- "name": "nextnet-lom",
- "version": "1.1.2",
- "license": "BSD-3-Clause",
- "dependencies": {
- "axios": "^1.7.4",
- "commander": "^12.0.0",
- "patch-package": "^8.0.0",
- "ssh2": "^1.15.0",
- "string-argv": "^0.3.2"
- },
- "devDependencies": {
- "@eslint/js": "^9.2.0",
- "@types/node": "^20.12.8",
- "@types/ssh2": "^1.15.0",
- "@types/yargs": "^17.0.32",
- "eslint": "^8.57.0",
- "globals": "^15.2.0",
- "nodemon": "^3.0.3",
- "typescript": "^5.3.3",
- "typescript-eslint": "^7.8.0"
- }
- },
- "node_modules/@eslint-community/eslint-utils": {
- "version": "4.4.0",
- "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz",
- "integrity": "sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==",
- "dev": true,
- "dependencies": {
- "eslint-visitor-keys": "^3.3.0"
- },
- "engines": {
- "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
- },
- "peerDependencies": {
- "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0"
- }
- },
- "node_modules/@eslint-community/regexpp": {
- "version": "4.10.0",
- "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.10.0.tgz",
- "integrity": "sha512-Cu96Sd2By9mCNTx2iyKOmq10v22jUVQv0lQnlGNy16oE9589yE+QADPbrMGCkA51cKZSg3Pu/aTJVTGfL/qjUA==",
- "dev": true,
- "engines": {
- "node": "^12.0.0 || ^14.0.0 || >=16.0.0"
- }
- },
- "node_modules/@eslint/eslintrc": {
- "version": "2.1.4",
- "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz",
- "integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==",
- "dev": true,
- "dependencies": {
- "ajv": "^6.12.4",
- "debug": "^4.3.2",
- "espree": "^9.6.0",
- "globals": "^13.19.0",
- "ignore": "^5.2.0",
- "import-fresh": "^3.2.1",
- "js-yaml": "^4.1.0",
- "minimatch": "^3.1.2",
- "strip-json-comments": "^3.1.1"
- },
- "engines": {
- "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
- },
- "funding": {
- "url": "https://opencollective.com/eslint"
- }
- },
- "node_modules/@eslint/eslintrc/node_modules/globals": {
- "version": "13.24.0",
- "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz",
- "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==",
- "dev": true,
- "dependencies": {
- "type-fest": "^0.20.2"
- },
- "engines": {
- "node": ">=8"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/@eslint/js": {
- "version": "9.2.0",
- "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.2.0.tgz",
- "integrity": "sha512-ESiIudvhoYni+MdsI8oD7skpprZ89qKocwRM2KEvhhBJ9nl5MRh7BXU5GTod7Mdygq+AUl+QzId6iWJKR/wABA==",
- "dev": true,
- "engines": {
- "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
- }
- },
- "node_modules/@humanwhocodes/config-array": {
- "version": "0.11.14",
- "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.14.tgz",
- "integrity": "sha512-3T8LkOmg45BV5FICb15QQMsyUSWrQ8AygVfC7ZG32zOalnqrilm018ZVCw0eapXux8FtA33q8PSRSstjee3jSg==",
- "dev": true,
- "dependencies": {
- "@humanwhocodes/object-schema": "^2.0.2",
- "debug": "^4.3.1",
- "minimatch": "^3.0.5"
- },
- "engines": {
- "node": ">=10.10.0"
- }
- },
- "node_modules/@humanwhocodes/module-importer": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz",
- "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==",
- "dev": true,
- "engines": {
- "node": ">=12.22"
- },
- "funding": {
- "type": "github",
- "url": "https://github.com/sponsors/nzakas"
- }
- },
- "node_modules/@humanwhocodes/object-schema": {
- "version": "2.0.3",
- "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz",
- "integrity": "sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==",
- "dev": true
- },
- "node_modules/@nodelib/fs.scandir": {
- "version": "2.1.5",
- "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
- "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==",
- "dev": true,
- "dependencies": {
- "@nodelib/fs.stat": "2.0.5",
- "run-parallel": "^1.1.9"
- },
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/@nodelib/fs.stat": {
- "version": "2.0.5",
- "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz",
- "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==",
- "dev": true,
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/@nodelib/fs.walk": {
- "version": "1.2.8",
- "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz",
- "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==",
- "dev": true,
- "dependencies": {
- "@nodelib/fs.scandir": "2.1.5",
- "fastq": "^1.6.0"
- },
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/@types/json-schema": {
- "version": "7.0.15",
- "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz",
- "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==",
- "dev": true
- },
- "node_modules/@types/node": {
- "version": "20.12.8",
- "resolved": "https://registry.npmjs.org/@types/node/-/node-20.12.8.tgz",
- "integrity": "sha512-NU0rJLJnshZWdE/097cdCBbyW1h4hEg0xpovcoAQYHl8dnEyp/NAOiE45pvc+Bd1Dt+2r94v2eGFpQJ4R7g+2w==",
- "dev": true,
- "dependencies": {
- "undici-types": "~5.26.4"
- }
- },
- "node_modules/@types/semver": {
- "version": "7.5.8",
- "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.8.tgz",
- "integrity": "sha512-I8EUhyrgfLrcTkzV3TSsGyl1tSuPrEDzr0yd5m90UgNxQkyDXULk3b6MlQqTCpZpNtWe1K0hzclnZkTcLBe2UQ==",
- "dev": true
- },
- "node_modules/@types/ssh2": {
- "version": "1.15.0",
- "resolved": "https://registry.npmjs.org/@types/ssh2/-/ssh2-1.15.0.tgz",
- "integrity": "sha512-YcT8jP5F8NzWeevWvcyrrLB3zcneVjzYY9ZDSMAMboI+2zR1qYWFhwsyOFVzT7Jorn67vqxC0FRiw8YyG9P1ww==",
- "dev": true,
- "dependencies": {
- "@types/node": "^18.11.18"
- }
- },
- "node_modules/@types/ssh2/node_modules/@types/node": {
- "version": "18.19.31",
- "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.31.tgz",
- "integrity": "sha512-ArgCD39YpyyrtFKIqMDvjz79jto5fcI/SVUs2HwB+f0dAzq68yqOdyaSivLiLugSziTpNXLQrVb7RZFmdZzbhA==",
- "dev": true,
- "dependencies": {
- "undici-types": "~5.26.4"
- }
- },
- "node_modules/@types/yargs": {
- "version": "17.0.32",
- "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.32.tgz",
- "integrity": "sha512-xQ67Yc/laOG5uMfX/093MRlGGCIBzZMarVa+gfNKJxWAIgykYpVGkBdbqEzGDDfCrVUj6Hiff4mTZ5BA6TmAog==",
- "dev": true,
- "dependencies": {
- "@types/yargs-parser": "*"
- }
- },
- "node_modules/@types/yargs-parser": {
- "version": "21.0.3",
- "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.3.tgz",
- "integrity": "sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==",
- "dev": true
- },
- "node_modules/@typescript-eslint/eslint-plugin": {
- "version": "7.8.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.8.0.tgz",
- "integrity": "sha512-gFTT+ezJmkwutUPmB0skOj3GZJtlEGnlssems4AjkVweUPGj7jRwwqg0Hhg7++kPGJqKtTYx+R05Ftww372aIg==",
- "dev": true,
- "dependencies": {
- "@eslint-community/regexpp": "^4.10.0",
- "@typescript-eslint/scope-manager": "7.8.0",
- "@typescript-eslint/type-utils": "7.8.0",
- "@typescript-eslint/utils": "7.8.0",
- "@typescript-eslint/visitor-keys": "7.8.0",
- "debug": "^4.3.4",
- "graphemer": "^1.4.0",
- "ignore": "^5.3.1",
- "natural-compare": "^1.4.0",
- "semver": "^7.6.0",
- "ts-api-utils": "^1.3.0"
- },
- "engines": {
- "node": "^18.18.0 || >=20.0.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- },
- "peerDependencies": {
- "@typescript-eslint/parser": "^7.0.0",
- "eslint": "^8.56.0"
- },
- "peerDependenciesMeta": {
- "typescript": {
- "optional": true
- }
- }
- },
- "node_modules/@typescript-eslint/parser": {
- "version": "7.8.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.8.0.tgz",
- "integrity": "sha512-KgKQly1pv0l4ltcftP59uQZCi4HUYswCLbTqVZEJu7uLX8CTLyswqMLqLN+2QFz4jCptqWVV4SB7vdxcH2+0kQ==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/scope-manager": "7.8.0",
- "@typescript-eslint/types": "7.8.0",
- "@typescript-eslint/typescript-estree": "7.8.0",
- "@typescript-eslint/visitor-keys": "7.8.0",
- "debug": "^4.3.4"
- },
- "engines": {
- "node": "^18.18.0 || >=20.0.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- },
- "peerDependencies": {
- "eslint": "^8.56.0"
- },
- "peerDependenciesMeta": {
- "typescript": {
- "optional": true
- }
- }
- },
- "node_modules/@typescript-eslint/scope-manager": {
- "version": "7.8.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.8.0.tgz",
- "integrity": "sha512-viEmZ1LmwsGcnr85gIq+FCYI7nO90DVbE37/ll51hjv9aG+YZMb4WDE2fyWpUR4O/UrhGRpYXK/XajcGTk2B8g==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/types": "7.8.0",
- "@typescript-eslint/visitor-keys": "7.8.0"
- },
- "engines": {
- "node": "^18.18.0 || >=20.0.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/@typescript-eslint/type-utils": {
- "version": "7.8.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.8.0.tgz",
- "integrity": "sha512-H70R3AefQDQpz9mGv13Uhi121FNMh+WEaRqcXTX09YEDky21km4dV1ZXJIp8QjXc4ZaVkXVdohvWDzbnbHDS+A==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/typescript-estree": "7.8.0",
- "@typescript-eslint/utils": "7.8.0",
- "debug": "^4.3.4",
- "ts-api-utils": "^1.3.0"
- },
- "engines": {
- "node": "^18.18.0 || >=20.0.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- },
- "peerDependencies": {
- "eslint": "^8.56.0"
- },
- "peerDependenciesMeta": {
- "typescript": {
- "optional": true
- }
- }
- },
- "node_modules/@typescript-eslint/types": {
- "version": "7.8.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.8.0.tgz",
- "integrity": "sha512-wf0peJ+ZGlcH+2ZS23aJbOv+ztjeeP8uQ9GgwMJGVLx/Nj9CJt17GWgWWoSmoRVKAX2X+7fzEnAjxdvK2gqCLw==",
- "dev": true,
- "engines": {
- "node": "^18.18.0 || >=20.0.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/@typescript-eslint/typescript-estree": {
- "version": "7.8.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.8.0.tgz",
- "integrity": "sha512-5pfUCOwK5yjPaJQNy44prjCwtr981dO8Qo9J9PwYXZ0MosgAbfEMB008dJ5sNo3+/BN6ytBPuSvXUg9SAqB0dg==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/types": "7.8.0",
- "@typescript-eslint/visitor-keys": "7.8.0",
- "debug": "^4.3.4",
- "globby": "^11.1.0",
- "is-glob": "^4.0.3",
- "minimatch": "^9.0.4",
- "semver": "^7.6.0",
- "ts-api-utils": "^1.3.0"
- },
- "engines": {
- "node": "^18.18.0 || >=20.0.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- },
- "peerDependenciesMeta": {
- "typescript": {
- "optional": true
- }
- }
- },
- "node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
- "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
- "dev": true,
- "dependencies": {
- "balanced-match": "^1.0.0"
- }
- },
- "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": {
- "version": "9.0.4",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz",
- "integrity": "sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==",
- "dev": true,
- "dependencies": {
- "brace-expansion": "^2.0.1"
- },
- "engines": {
- "node": ">=16 || 14 >=14.17"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
- "node_modules/@typescript-eslint/utils": {
- "version": "7.8.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.8.0.tgz",
- "integrity": "sha512-L0yFqOCflVqXxiZyXrDr80lnahQfSOfc9ELAAZ75sqicqp2i36kEZZGuUymHNFoYOqxRT05up760b4iGsl02nQ==",
- "dev": true,
- "dependencies": {
- "@eslint-community/eslint-utils": "^4.4.0",
- "@types/json-schema": "^7.0.15",
- "@types/semver": "^7.5.8",
- "@typescript-eslint/scope-manager": "7.8.0",
- "@typescript-eslint/types": "7.8.0",
- "@typescript-eslint/typescript-estree": "7.8.0",
- "semver": "^7.6.0"
- },
- "engines": {
- "node": "^18.18.0 || >=20.0.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- },
- "peerDependencies": {
- "eslint": "^8.56.0"
- }
- },
- "node_modules/@typescript-eslint/visitor-keys": {
- "version": "7.8.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.8.0.tgz",
- "integrity": "sha512-q4/gibTNBQNA0lGyYQCmWRS5D15n8rXh4QjK3KV+MBPlTYHpfBUT3D3PaPR/HeNiI9W6R7FvlkcGhNyAoP+caA==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/types": "7.8.0",
- "eslint-visitor-keys": "^3.4.3"
- },
- "engines": {
- "node": "^18.18.0 || >=20.0.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/@ungap/structured-clone": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz",
- "integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==",
- "dev": true
- },
- "node_modules/@yarnpkg/lockfile": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/@yarnpkg/lockfile/-/lockfile-1.1.0.tgz",
- "integrity": "sha512-GpSwvyXOcOOlV70vbnzjj4fW5xW/FdUF6nQEt1ENy7m4ZCczi1+/buVUPAqmGfqznsORNFzUMjctTIp8a9tuCQ=="
- },
- "node_modules/abbrev": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz",
- "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==",
- "dev": true
- },
- "node_modules/acorn": {
- "version": "8.11.3",
- "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.11.3.tgz",
- "integrity": "sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg==",
- "dev": true,
- "bin": {
- "acorn": "bin/acorn"
- },
- "engines": {
- "node": ">=0.4.0"
- }
- },
- "node_modules/acorn-jsx": {
- "version": "5.3.2",
- "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz",
- "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==",
- "dev": true,
- "peerDependencies": {
- "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0"
- }
- },
- "node_modules/ajv": {
- "version": "6.12.6",
- "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
- "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
- "dev": true,
- "dependencies": {
- "fast-deep-equal": "^3.1.1",
- "fast-json-stable-stringify": "^2.0.0",
- "json-schema-traverse": "^0.4.1",
- "uri-js": "^4.2.2"
- },
- "funding": {
- "type": "github",
- "url": "https://github.com/sponsors/epoberezkin"
- }
- },
- "node_modules/ansi-regex": {
- "version": "5.0.1",
- "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
- "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
- "dev": true,
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/ansi-styles": {
- "version": "4.3.0",
- "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
- "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
- "dependencies": {
- "color-convert": "^2.0.1"
- },
- "engines": {
- "node": ">=8"
- },
- "funding": {
- "url": "https://github.com/chalk/ansi-styles?sponsor=1"
- }
- },
- "node_modules/anymatch": {
- "version": "3.1.3",
- "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz",
- "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==",
- "dev": true,
- "dependencies": {
- "normalize-path": "^3.0.0",
- "picomatch": "^2.0.4"
- },
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/argparse": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
- "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==",
- "dev": true
- },
- "node_modules/array-union": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz",
- "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==",
- "dev": true,
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/asn1": {
- "version": "0.2.6",
- "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz",
- "integrity": "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==",
- "dependencies": {
- "safer-buffer": "~2.1.0"
- }
- },
- "node_modules/asynckit": {
- "version": "0.4.0",
- "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
- "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
- },
- "node_modules/at-least-node": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz",
- "integrity": "sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==",
- "engines": {
- "node": ">= 4.0.0"
- }
- },
- "node_modules/axios": {
- "version": "1.7.4",
- "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.4.tgz",
- "integrity": "sha512-DukmaFRnY6AzAALSH4J2M3k6PkaC+MfaAGdEERRWcC9q3/TWQwLpHR8ZRLKTdQ3aBDL64EdluRDjJqKw+BPZEw==",
- "dependencies": {
- "follow-redirects": "^1.15.6",
- "form-data": "^4.0.0",
- "proxy-from-env": "^1.1.0"
- }
- },
- "node_modules/balanced-match": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
- "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="
- },
- "node_modules/bcrypt-pbkdf": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz",
- "integrity": "sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==",
- "dependencies": {
- "tweetnacl": "^0.14.3"
- }
- },
- "node_modules/binary-extensions": {
- "version": "2.2.0",
- "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz",
- "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==",
- "dev": true,
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/brace-expansion": {
- "version": "1.1.11",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
- "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
- "dependencies": {
- "balanced-match": "^1.0.0",
- "concat-map": "0.0.1"
- }
- },
- "node_modules/braces": {
- "version": "3.0.3",
- "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
- "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
- "dependencies": {
- "fill-range": "^7.1.1"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/buildcheck": {
- "version": "0.0.6",
- "resolved": "https://registry.npmjs.org/buildcheck/-/buildcheck-0.0.6.tgz",
- "integrity": "sha512-8f9ZJCUXyT1M35Jx7MkBgmBMo3oHTTBIPLiY9xyL0pl3T5RwcPEY8cUHr5LBNfu/fk6c2T4DJZuVM/8ZZT2D2A==",
- "optional": true,
- "engines": {
- "node": ">=10.0.0"
- }
- },
- "node_modules/call-bind": {
- "version": "1.0.7",
- "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz",
- "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==",
- "dependencies": {
- "es-define-property": "^1.0.0",
- "es-errors": "^1.3.0",
- "function-bind": "^1.1.2",
- "get-intrinsic": "^1.2.4",
- "set-function-length": "^1.2.1"
- },
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/callsites": {
- "version": "3.1.0",
- "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz",
- "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==",
- "dev": true,
- "engines": {
- "node": ">=6"
- }
- },
- "node_modules/chalk": {
- "version": "4.1.2",
- "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
- "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==",
- "dependencies": {
- "ansi-styles": "^4.1.0",
- "supports-color": "^7.1.0"
- },
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/chalk/chalk?sponsor=1"
- }
- },
- "node_modules/chalk/node_modules/has-flag": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
- "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/chalk/node_modules/supports-color": {
- "version": "7.2.0",
- "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
- "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
- "dependencies": {
- "has-flag": "^4.0.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/chokidar": {
- "version": "3.5.3",
- "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz",
- "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==",
- "dev": true,
- "funding": [
- {
- "type": "individual",
- "url": "https://paulmillr.com/funding/"
- }
- ],
- "dependencies": {
- "anymatch": "~3.1.2",
- "braces": "~3.0.2",
- "glob-parent": "~5.1.2",
- "is-binary-path": "~2.1.0",
- "is-glob": "~4.0.1",
- "normalize-path": "~3.0.0",
- "readdirp": "~3.6.0"
- },
- "engines": {
- "node": ">= 8.10.0"
- },
- "optionalDependencies": {
- "fsevents": "~2.3.2"
- }
- },
- "node_modules/ci-info": {
- "version": "3.9.0",
- "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz",
- "integrity": "sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==",
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/sibiraj-s"
- }
- ],
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/color-convert": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
- "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
- "dependencies": {
- "color-name": "~1.1.4"
- },
- "engines": {
- "node": ">=7.0.0"
- }
- },
- "node_modules/color-name": {
- "version": "1.1.4",
- "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
- "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
- },
- "node_modules/combined-stream": {
- "version": "1.0.8",
- "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
- "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
- "dependencies": {
- "delayed-stream": "~1.0.0"
- },
- "engines": {
- "node": ">= 0.8"
- }
- },
- "node_modules/commander": {
- "version": "12.0.0",
- "resolved": "https://registry.npmjs.org/commander/-/commander-12.0.0.tgz",
- "integrity": "sha512-MwVNWlYjDTtOjX5PiD7o5pK0UrFU/OYgcJfjjK4RaHZETNtjJqrZa9Y9ds88+A+f+d5lv+561eZ+yCKoS3gbAA==",
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/concat-map": {
- "version": "0.0.1",
- "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
- "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg=="
- },
- "node_modules/cpu-features": {
- "version": "0.0.10",
- "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.10.tgz",
- "integrity": "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==",
- "hasInstallScript": true,
- "optional": true,
- "dependencies": {
- "buildcheck": "~0.0.6",
- "nan": "^2.19.0"
- },
- "engines": {
- "node": ">=10.0.0"
- }
- },
- "node_modules/cross-spawn": {
- "version": "7.0.3",
- "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
- "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==",
- "dependencies": {
- "path-key": "^3.1.0",
- "shebang-command": "^2.0.0",
- "which": "^2.0.1"
- },
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/debug": {
- "version": "4.3.4",
- "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
- "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==",
- "dev": true,
- "dependencies": {
- "ms": "2.1.2"
- },
- "engines": {
- "node": ">=6.0"
- },
- "peerDependenciesMeta": {
- "supports-color": {
- "optional": true
- }
- }
- },
- "node_modules/deep-is": {
- "version": "0.1.4",
- "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz",
- "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==",
- "dev": true
- },
- "node_modules/define-data-property": {
- "version": "1.1.4",
- "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz",
- "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==",
- "dependencies": {
- "es-define-property": "^1.0.0",
- "es-errors": "^1.3.0",
- "gopd": "^1.0.1"
- },
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/delayed-stream": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
- "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
- "engines": {
- "node": ">=0.4.0"
- }
- },
- "node_modules/dir-glob": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz",
- "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==",
- "dev": true,
- "dependencies": {
- "path-type": "^4.0.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/doctrine": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz",
- "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==",
- "dev": true,
- "dependencies": {
- "esutils": "^2.0.2"
- },
- "engines": {
- "node": ">=6.0.0"
- }
- },
- "node_modules/es-define-property": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz",
- "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==",
- "dependencies": {
- "get-intrinsic": "^1.2.4"
- },
- "engines": {
- "node": ">= 0.4"
- }
- },
- "node_modules/es-errors": {
- "version": "1.3.0",
- "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz",
- "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==",
- "engines": {
- "node": ">= 0.4"
- }
- },
- "node_modules/escape-string-regexp": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz",
- "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==",
- "dev": true,
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/eslint": {
- "version": "8.57.0",
- "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.0.tgz",
- "integrity": "sha512-dZ6+mexnaTIbSBZWgou51U6OmzIhYM2VcNdtiTtI7qPNZm35Akpr0f6vtw3w1Kmn5PYo+tZVfh13WrhpS6oLqQ==",
- "dev": true,
- "dependencies": {
- "@eslint-community/eslint-utils": "^4.2.0",
- "@eslint-community/regexpp": "^4.6.1",
- "@eslint/eslintrc": "^2.1.4",
- "@eslint/js": "8.57.0",
- "@humanwhocodes/config-array": "^0.11.14",
- "@humanwhocodes/module-importer": "^1.0.1",
- "@nodelib/fs.walk": "^1.2.8",
- "@ungap/structured-clone": "^1.2.0",
- "ajv": "^6.12.4",
- "chalk": "^4.0.0",
- "cross-spawn": "^7.0.2",
- "debug": "^4.3.2",
- "doctrine": "^3.0.0",
- "escape-string-regexp": "^4.0.0",
- "eslint-scope": "^7.2.2",
- "eslint-visitor-keys": "^3.4.3",
- "espree": "^9.6.1",
- "esquery": "^1.4.2",
- "esutils": "^2.0.2",
- "fast-deep-equal": "^3.1.3",
- "file-entry-cache": "^6.0.1",
- "find-up": "^5.0.0",
- "glob-parent": "^6.0.2",
- "globals": "^13.19.0",
- "graphemer": "^1.4.0",
- "ignore": "^5.2.0",
- "imurmurhash": "^0.1.4",
- "is-glob": "^4.0.0",
- "is-path-inside": "^3.0.3",
- "js-yaml": "^4.1.0",
- "json-stable-stringify-without-jsonify": "^1.0.1",
- "levn": "^0.4.1",
- "lodash.merge": "^4.6.2",
- "minimatch": "^3.1.2",
- "natural-compare": "^1.4.0",
- "optionator": "^0.9.3",
- "strip-ansi": "^6.0.1",
- "text-table": "^0.2.0"
- },
- "bin": {
- "eslint": "bin/eslint.js"
- },
- "engines": {
- "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
- },
- "funding": {
- "url": "https://opencollective.com/eslint"
- }
- },
- "node_modules/eslint-scope": {
- "version": "7.2.2",
- "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz",
- "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==",
- "dev": true,
- "dependencies": {
- "esrecurse": "^4.3.0",
- "estraverse": "^5.2.0"
- },
- "engines": {
- "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
- },
- "funding": {
- "url": "https://opencollective.com/eslint"
- }
- },
- "node_modules/eslint-visitor-keys": {
- "version": "3.4.3",
- "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz",
- "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==",
- "dev": true,
- "engines": {
- "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
- },
- "funding": {
- "url": "https://opencollective.com/eslint"
- }
- },
- "node_modules/eslint/node_modules/@eslint/js": {
- "version": "8.57.0",
- "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.0.tgz",
- "integrity": "sha512-Ys+3g2TaW7gADOJzPt83SJtCDhMjndcDMFVQ/Tj9iA1BfJzFKD9mAUXT3OenpuPHbI6P/myECxRJrofUsDx/5g==",
- "dev": true,
- "engines": {
- "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
- }
- },
- "node_modules/eslint/node_modules/glob-parent": {
- "version": "6.0.2",
- "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz",
- "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==",
- "dev": true,
- "dependencies": {
- "is-glob": "^4.0.3"
- },
- "engines": {
- "node": ">=10.13.0"
- }
- },
- "node_modules/eslint/node_modules/globals": {
- "version": "13.24.0",
- "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz",
- "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==",
- "dev": true,
- "dependencies": {
- "type-fest": "^0.20.2"
- },
- "engines": {
- "node": ">=8"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/espree": {
- "version": "9.6.1",
- "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz",
- "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==",
- "dev": true,
- "dependencies": {
- "acorn": "^8.9.0",
- "acorn-jsx": "^5.3.2",
- "eslint-visitor-keys": "^3.4.1"
- },
- "engines": {
- "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
- },
- "funding": {
- "url": "https://opencollective.com/eslint"
- }
- },
- "node_modules/esquery": {
- "version": "1.5.0",
- "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz",
- "integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==",
- "dev": true,
- "dependencies": {
- "estraverse": "^5.1.0"
- },
- "engines": {
- "node": ">=0.10"
- }
- },
- "node_modules/esrecurse": {
- "version": "4.3.0",
- "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz",
- "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==",
- "dev": true,
- "dependencies": {
- "estraverse": "^5.2.0"
- },
- "engines": {
- "node": ">=4.0"
- }
- },
- "node_modules/estraverse": {
- "version": "5.3.0",
- "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz",
- "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==",
- "dev": true,
- "engines": {
- "node": ">=4.0"
- }
- },
- "node_modules/esutils": {
- "version": "2.0.3",
- "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz",
- "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==",
- "dev": true,
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/fast-deep-equal": {
- "version": "3.1.3",
- "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
- "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==",
- "dev": true
- },
- "node_modules/fast-glob": {
- "version": "3.3.2",
- "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz",
- "integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==",
- "dev": true,
- "dependencies": {
- "@nodelib/fs.stat": "^2.0.2",
- "@nodelib/fs.walk": "^1.2.3",
- "glob-parent": "^5.1.2",
- "merge2": "^1.3.0",
- "micromatch": "^4.0.4"
- },
- "engines": {
- "node": ">=8.6.0"
- }
- },
- "node_modules/fast-json-stable-stringify": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz",
- "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==",
- "dev": true
- },
- "node_modules/fast-levenshtein": {
- "version": "2.0.6",
- "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz",
- "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==",
- "dev": true
- },
- "node_modules/fastq": {
- "version": "1.17.1",
- "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.17.1.tgz",
- "integrity": "sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==",
- "dev": true,
- "dependencies": {
- "reusify": "^1.0.4"
- }
- },
- "node_modules/file-entry-cache": {
- "version": "6.0.1",
- "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz",
- "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==",
- "dev": true,
- "dependencies": {
- "flat-cache": "^3.0.4"
- },
- "engines": {
- "node": "^10.12.0 || >=12.0.0"
- }
- },
- "node_modules/fill-range": {
- "version": "7.1.1",
- "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
- "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
- "dependencies": {
- "to-regex-range": "^5.0.1"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/find-up": {
- "version": "5.0.0",
- "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz",
- "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==",
- "dev": true,
- "dependencies": {
- "locate-path": "^6.0.0",
- "path-exists": "^4.0.0"
- },
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/find-yarn-workspace-root": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/find-yarn-workspace-root/-/find-yarn-workspace-root-2.0.0.tgz",
- "integrity": "sha512-1IMnbjt4KzsQfnhnzNd8wUEgXZ44IzZaZmnLYx7D5FZlaHt2gW20Cri8Q+E/t5tIj4+epTBub+2Zxu/vNILzqQ==",
- "dependencies": {
- "micromatch": "^4.0.2"
- }
- },
- "node_modules/flat-cache": {
- "version": "3.2.0",
- "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.2.0.tgz",
- "integrity": "sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==",
- "dev": true,
- "dependencies": {
- "flatted": "^3.2.9",
- "keyv": "^4.5.3",
- "rimraf": "^3.0.2"
- },
- "engines": {
- "node": "^10.12.0 || >=12.0.0"
- }
- },
- "node_modules/flat-cache/node_modules/rimraf": {
- "version": "3.0.2",
- "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
- "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
- "dev": true,
- "dependencies": {
- "glob": "^7.1.3"
- },
- "bin": {
- "rimraf": "bin.js"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
- "node_modules/flatted": {
- "version": "3.3.1",
- "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.1.tgz",
- "integrity": "sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw==",
- "dev": true
- },
- "node_modules/follow-redirects": {
- "version": "1.15.6",
- "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz",
- "integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==",
- "funding": [
- {
- "type": "individual",
- "url": "https://github.com/sponsors/RubenVerborgh"
- }
- ],
- "engines": {
- "node": ">=4.0"
- },
- "peerDependenciesMeta": {
- "debug": {
- "optional": true
- }
- }
- },
- "node_modules/form-data": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz",
- "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==",
- "dependencies": {
- "asynckit": "^0.4.0",
- "combined-stream": "^1.0.8",
- "mime-types": "^2.1.12"
- },
- "engines": {
- "node": ">= 6"
- }
- },
- "node_modules/fs-extra": {
- "version": "9.1.0",
- "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz",
- "integrity": "sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==",
- "dependencies": {
- "at-least-node": "^1.0.0",
- "graceful-fs": "^4.2.0",
- "jsonfile": "^6.0.1",
- "universalify": "^2.0.0"
- },
- "engines": {
- "node": ">=10"
- }
- },
- "node_modules/fs.realpath": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
- "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw=="
- },
- "node_modules/fsevents": {
- "version": "2.3.3",
- "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
- "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==",
- "dev": true,
- "hasInstallScript": true,
- "optional": true,
- "os": [
- "darwin"
- ],
- "engines": {
- "node": "^8.16.0 || ^10.6.0 || >=11.0.0"
- }
- },
- "node_modules/function-bind": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
- "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==",
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/get-intrinsic": {
- "version": "1.2.4",
- "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz",
- "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==",
- "dependencies": {
- "es-errors": "^1.3.0",
- "function-bind": "^1.1.2",
- "has-proto": "^1.0.1",
- "has-symbols": "^1.0.3",
- "hasown": "^2.0.0"
- },
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/glob": {
- "version": "7.2.3",
- "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
- "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
- "dependencies": {
- "fs.realpath": "^1.0.0",
- "inflight": "^1.0.4",
- "inherits": "2",
- "minimatch": "^3.1.1",
- "once": "^1.3.0",
- "path-is-absolute": "^1.0.0"
- },
- "engines": {
- "node": "*"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
- "node_modules/glob-parent": {
- "version": "5.1.2",
- "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
- "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==",
- "dev": true,
- "dependencies": {
- "is-glob": "^4.0.1"
- },
- "engines": {
- "node": ">= 6"
- }
- },
- "node_modules/globals": {
- "version": "15.2.0",
- "resolved": "https://registry.npmjs.org/globals/-/globals-15.2.0.tgz",
- "integrity": "sha512-FQ5YwCHZM3nCmtb5FzEWwdUc9K5d3V/w9mzcz8iGD1gC/aOTHc6PouYu0kkKipNJqHAT7m51sqzQjEjIP+cK0A==",
- "dev": true,
- "engines": {
- "node": ">=18"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/globby": {
- "version": "11.1.0",
- "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz",
- "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==",
- "dev": true,
- "dependencies": {
- "array-union": "^2.1.0",
- "dir-glob": "^3.0.1",
- "fast-glob": "^3.2.9",
- "ignore": "^5.2.0",
- "merge2": "^1.4.1",
- "slash": "^3.0.0"
- },
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/globby/node_modules/slash": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz",
- "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==",
- "dev": true,
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/gopd": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz",
- "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==",
- "dependencies": {
- "get-intrinsic": "^1.1.3"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/graceful-fs": {
- "version": "4.2.11",
- "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz",
- "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="
- },
- "node_modules/graphemer": {
- "version": "1.4.0",
- "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz",
- "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==",
- "dev": true
- },
- "node_modules/has-flag": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz",
- "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==",
- "dev": true,
- "engines": {
- "node": ">=4"
- }
- },
- "node_modules/has-property-descriptors": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz",
- "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==",
- "dependencies": {
- "es-define-property": "^1.0.0"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/has-proto": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz",
- "integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==",
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/has-symbols": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz",
- "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==",
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/hasown": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
- "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==",
- "dependencies": {
- "function-bind": "^1.1.2"
- },
- "engines": {
- "node": ">= 0.4"
- }
- },
- "node_modules/ignore": {
- "version": "5.3.1",
- "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.1.tgz",
- "integrity": "sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw==",
- "dev": true,
- "engines": {
- "node": ">= 4"
- }
- },
- "node_modules/ignore-by-default": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/ignore-by-default/-/ignore-by-default-1.0.1.tgz",
- "integrity": "sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA==",
- "dev": true
- },
- "node_modules/import-fresh": {
- "version": "3.3.0",
- "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz",
- "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==",
- "dev": true,
- "dependencies": {
- "parent-module": "^1.0.0",
- "resolve-from": "^4.0.0"
- },
- "engines": {
- "node": ">=6"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/imurmurhash": {
- "version": "0.1.4",
- "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz",
- "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==",
- "dev": true,
- "engines": {
- "node": ">=0.8.19"
- }
- },
- "node_modules/inflight": {
- "version": "1.0.6",
- "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
- "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==",
- "dependencies": {
- "once": "^1.3.0",
- "wrappy": "1"
- }
- },
- "node_modules/inherits": {
- "version": "2.0.4",
- "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
- "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
- },
- "node_modules/is-binary-path": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz",
- "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==",
- "dev": true,
- "dependencies": {
- "binary-extensions": "^2.0.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/is-docker": {
- "version": "2.2.1",
- "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz",
- "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==",
- "bin": {
- "is-docker": "cli.js"
- },
- "engines": {
- "node": ">=8"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/is-extglob": {
- "version": "2.1.1",
- "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
- "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==",
- "dev": true,
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/is-glob": {
- "version": "4.0.3",
- "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz",
- "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==",
- "dev": true,
- "dependencies": {
- "is-extglob": "^2.1.1"
- },
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/is-number": {
- "version": "7.0.0",
- "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
- "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
- "engines": {
- "node": ">=0.12.0"
- }
- },
- "node_modules/is-path-inside": {
- "version": "3.0.3",
- "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz",
- "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==",
- "dev": true,
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/is-wsl": {
- "version": "2.2.0",
- "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz",
- "integrity": "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==",
- "dependencies": {
- "is-docker": "^2.0.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/isarray": {
- "version": "2.0.5",
- "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz",
- "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw=="
- },
- "node_modules/isexe": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
- "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="
- },
- "node_modules/js-yaml": {
- "version": "4.1.0",
- "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
- "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
- "dev": true,
- "dependencies": {
- "argparse": "^2.0.1"
- },
- "bin": {
- "js-yaml": "bin/js-yaml.js"
- }
- },
- "node_modules/json-buffer": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz",
- "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==",
- "dev": true
- },
- "node_modules/json-schema-traverse": {
- "version": "0.4.1",
- "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
- "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==",
- "dev": true
- },
- "node_modules/json-stable-stringify": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/json-stable-stringify/-/json-stable-stringify-1.1.1.tgz",
- "integrity": "sha512-SU/971Kt5qVQfJpyDveVhQ/vya+5hvrjClFOcr8c0Fq5aODJjMwutrOfCU+eCnVD5gpx1Q3fEqkyom77zH1iIg==",
- "dependencies": {
- "call-bind": "^1.0.5",
- "isarray": "^2.0.5",
- "jsonify": "^0.0.1",
- "object-keys": "^1.1.1"
- },
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/json-stable-stringify-without-jsonify": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz",
- "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==",
- "dev": true
- },
- "node_modules/jsonfile": {
- "version": "6.1.0",
- "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz",
- "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==",
- "dependencies": {
- "universalify": "^2.0.0"
- },
- "optionalDependencies": {
- "graceful-fs": "^4.1.6"
- }
- },
- "node_modules/jsonify": {
- "version": "0.0.1",
- "resolved": "https://registry.npmjs.org/jsonify/-/jsonify-0.0.1.tgz",
- "integrity": "sha512-2/Ki0GcmuqSrgFyelQq9M05y7PS0mEwuIzrf3f1fPqkVDVRvZrPZtVSMHxdgo8Aq0sxAOb/cr2aqqA3LeWHVPg==",
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/keyv": {
- "version": "4.5.4",
- "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz",
- "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==",
- "dev": true,
- "dependencies": {
- "json-buffer": "3.0.1"
- }
- },
- "node_modules/klaw-sync": {
- "version": "6.0.0",
- "resolved": "https://registry.npmjs.org/klaw-sync/-/klaw-sync-6.0.0.tgz",
- "integrity": "sha512-nIeuVSzdCCs6TDPTqI8w1Yre34sSq7AkZ4B3sfOBbI2CgVSB4Du4aLQijFU2+lhAFCwt9+42Hel6lQNIv6AntQ==",
- "dependencies": {
- "graceful-fs": "^4.1.11"
- }
- },
- "node_modules/levn": {
- "version": "0.4.1",
- "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz",
- "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==",
- "dev": true,
- "dependencies": {
- "prelude-ls": "^1.2.1",
- "type-check": "~0.4.0"
- },
- "engines": {
- "node": ">= 0.8.0"
- }
- },
- "node_modules/locate-path": {
- "version": "6.0.0",
- "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz",
- "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==",
- "dev": true,
- "dependencies": {
- "p-locate": "^5.0.0"
- },
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/lodash.merge": {
- "version": "4.6.2",
- "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz",
- "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==",
- "dev": true
- },
- "node_modules/merge2": {
- "version": "1.4.1",
- "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz",
- "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==",
- "dev": true,
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/micromatch": {
- "version": "4.0.8",
- "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz",
- "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==",
- "dependencies": {
- "braces": "^3.0.3",
- "picomatch": "^2.3.1"
- },
- "engines": {
- "node": ">=8.6"
- }
- },
- "node_modules/mime-db": {
- "version": "1.52.0",
- "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
- "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
- "engines": {
- "node": ">= 0.6"
- }
- },
- "node_modules/mime-types": {
- "version": "2.1.35",
- "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
- "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
- "dependencies": {
- "mime-db": "1.52.0"
- },
- "engines": {
- "node": ">= 0.6"
- }
- },
- "node_modules/minimatch": {
- "version": "3.1.2",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
- "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
- "dependencies": {
- "brace-expansion": "^1.1.7"
- },
- "engines": {
- "node": "*"
- }
- },
- "node_modules/minimist": {
- "version": "1.2.8",
- "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz",
- "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==",
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/ms": {
- "version": "2.1.2",
- "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
- "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
- "dev": true
- },
- "node_modules/nan": {
- "version": "2.19.0",
- "resolved": "https://registry.npmjs.org/nan/-/nan-2.19.0.tgz",
- "integrity": "sha512-nO1xXxfh/RWNxfd/XPfbIfFk5vgLsAxUR9y5O0cHMJu/AW9U95JLXqthYHjEp+8gQ5p96K9jUp8nbVOxCdRbtw==",
- "optional": true
- },
- "node_modules/natural-compare": {
- "version": "1.4.0",
- "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz",
- "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==",
- "dev": true
- },
- "node_modules/nodemon": {
- "version": "3.0.3",
- "resolved": "https://registry.npmjs.org/nodemon/-/nodemon-3.0.3.tgz",
- "integrity": "sha512-7jH/NXbFPxVaMwmBCC2B9F/V6X1VkEdNgx3iu9jji8WxWcvhMWkmhNWhI5077zknOnZnBzba9hZP6bCPJLSReQ==",
- "dev": true,
- "dependencies": {
- "chokidar": "^3.5.2",
- "debug": "^4",
- "ignore-by-default": "^1.0.1",
- "minimatch": "^3.1.2",
- "pstree.remy": "^1.1.8",
- "semver": "^7.5.3",
- "simple-update-notifier": "^2.0.0",
- "supports-color": "^5.5.0",
- "touch": "^3.1.0",
- "undefsafe": "^2.0.5"
- },
- "bin": {
- "nodemon": "bin/nodemon.js"
- },
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/nodemon"
- }
- },
- "node_modules/nopt": {
- "version": "1.0.10",
- "resolved": "https://registry.npmjs.org/nopt/-/nopt-1.0.10.tgz",
- "integrity": "sha512-NWmpvLSqUrgrAC9HCuxEvb+PSloHpqVu+FqcO4eeF2h5qYRhA7ev6KvelyQAKtegUbC6RypJnlEOhd8vloNKYg==",
- "dev": true,
- "dependencies": {
- "abbrev": "1"
- },
- "bin": {
- "nopt": "bin/nopt.js"
- },
- "engines": {
- "node": "*"
- }
- },
- "node_modules/normalize-path": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz",
- "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==",
- "dev": true,
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/object-keys": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz",
- "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==",
- "engines": {
- "node": ">= 0.4"
- }
- },
- "node_modules/once": {
- "version": "1.4.0",
- "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
- "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==",
- "dependencies": {
- "wrappy": "1"
- }
- },
- "node_modules/open": {
- "version": "7.4.2",
- "resolved": "https://registry.npmjs.org/open/-/open-7.4.2.tgz",
- "integrity": "sha512-MVHddDVweXZF3awtlAS+6pgKLlm/JgxZ90+/NBurBoQctVOOB/zDdVjcyPzQ+0laDGbsWgrRkflI65sQeOgT9Q==",
- "dependencies": {
- "is-docker": "^2.0.0",
- "is-wsl": "^2.1.1"
- },
- "engines": {
- "node": ">=8"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/optionator": {
- "version": "0.9.4",
- "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz",
- "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==",
- "dev": true,
- "dependencies": {
- "deep-is": "^0.1.3",
- "fast-levenshtein": "^2.0.6",
- "levn": "^0.4.1",
- "prelude-ls": "^1.2.1",
- "type-check": "^0.4.0",
- "word-wrap": "^1.2.5"
- },
- "engines": {
- "node": ">= 0.8.0"
- }
- },
- "node_modules/os-tmpdir": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz",
- "integrity": "sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==",
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/p-limit": {
- "version": "3.1.0",
- "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz",
- "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==",
- "dev": true,
- "dependencies": {
- "yocto-queue": "^0.1.0"
- },
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/p-locate": {
- "version": "5.0.0",
- "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz",
- "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==",
- "dev": true,
- "dependencies": {
- "p-limit": "^3.0.2"
- },
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/parent-module": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
- "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==",
- "dev": true,
- "dependencies": {
- "callsites": "^3.0.0"
- },
- "engines": {
- "node": ">=6"
- }
- },
- "node_modules/patch-package": {
- "version": "8.0.0",
- "resolved": "https://registry.npmjs.org/patch-package/-/patch-package-8.0.0.tgz",
- "integrity": "sha512-da8BVIhzjtgScwDJ2TtKsfT5JFWz1hYoBl9rUQ1f38MC2HwnEIkK8VN3dKMKcP7P7bvvgzNDbfNHtx3MsQb5vA==",
- "dependencies": {
- "@yarnpkg/lockfile": "^1.1.0",
- "chalk": "^4.1.2",
- "ci-info": "^3.7.0",
- "cross-spawn": "^7.0.3",
- "find-yarn-workspace-root": "^2.0.0",
- "fs-extra": "^9.0.0",
- "json-stable-stringify": "^1.0.2",
- "klaw-sync": "^6.0.0",
- "minimist": "^1.2.6",
- "open": "^7.4.2",
- "rimraf": "^2.6.3",
- "semver": "^7.5.3",
- "slash": "^2.0.0",
- "tmp": "^0.0.33",
- "yaml": "^2.2.2"
- },
- "bin": {
- "patch-package": "index.js"
- },
- "engines": {
- "node": ">=14",
- "npm": ">5"
- }
- },
- "node_modules/path-exists": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
- "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
- "dev": true,
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/path-is-absolute": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
- "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==",
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/path-key": {
- "version": "3.1.1",
- "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz",
- "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/path-type": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz",
- "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==",
- "dev": true,
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/picomatch": {
- "version": "2.3.1",
- "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
- "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
- "engines": {
- "node": ">=8.6"
- },
- "funding": {
- "url": "https://github.com/sponsors/jonschlinkert"
- }
- },
- "node_modules/prelude-ls": {
- "version": "1.2.1",
- "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz",
- "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==",
- "dev": true,
- "engines": {
- "node": ">= 0.8.0"
- }
- },
- "node_modules/proxy-from-env": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
- "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="
- },
- "node_modules/pstree.remy": {
- "version": "1.1.8",
- "resolved": "https://registry.npmjs.org/pstree.remy/-/pstree.remy-1.1.8.tgz",
- "integrity": "sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==",
- "dev": true
- },
- "node_modules/punycode": {
- "version": "2.3.1",
- "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz",
- "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==",
- "dev": true,
- "engines": {
- "node": ">=6"
- }
- },
- "node_modules/queue-microtask": {
- "version": "1.2.3",
- "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
- "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==",
- "dev": true,
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/feross"
- },
- {
- "type": "patreon",
- "url": "https://www.patreon.com/feross"
- },
- {
- "type": "consulting",
- "url": "https://feross.org/support"
- }
- ]
- },
- "node_modules/readdirp": {
- "version": "3.6.0",
- "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz",
- "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==",
- "dev": true,
- "dependencies": {
- "picomatch": "^2.2.1"
- },
- "engines": {
- "node": ">=8.10.0"
- }
- },
- "node_modules/resolve-from": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz",
- "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==",
- "dev": true,
- "engines": {
- "node": ">=4"
- }
- },
- "node_modules/reusify": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz",
- "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==",
- "dev": true,
- "engines": {
- "iojs": ">=1.0.0",
- "node": ">=0.10.0"
- }
- },
- "node_modules/rimraf": {
- "version": "2.7.1",
- "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz",
- "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==",
- "dependencies": {
- "glob": "^7.1.3"
- },
- "bin": {
- "rimraf": "bin.js"
- }
- },
- "node_modules/run-parallel": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz",
- "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==",
- "dev": true,
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/feross"
- },
- {
- "type": "patreon",
- "url": "https://www.patreon.com/feross"
- },
- {
- "type": "consulting",
- "url": "https://feross.org/support"
- }
- ],
- "dependencies": {
- "queue-microtask": "^1.2.2"
- }
- },
- "node_modules/safer-buffer": {
- "version": "2.1.2",
- "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
- "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
- },
- "node_modules/semver": {
- "version": "7.6.2",
- "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.2.tgz",
- "integrity": "sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==",
- "bin": {
- "semver": "bin/semver.js"
- },
- "engines": {
- "node": ">=10"
- }
- },
- "node_modules/set-function-length": {
- "version": "1.2.2",
- "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz",
- "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==",
- "dependencies": {
- "define-data-property": "^1.1.4",
- "es-errors": "^1.3.0",
- "function-bind": "^1.1.2",
- "get-intrinsic": "^1.2.4",
- "gopd": "^1.0.1",
- "has-property-descriptors": "^1.0.2"
- },
- "engines": {
- "node": ">= 0.4"
- }
- },
- "node_modules/shebang-command": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
- "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
- "dependencies": {
- "shebang-regex": "^3.0.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/shebang-regex": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz",
- "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/simple-update-notifier": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-2.0.0.tgz",
- "integrity": "sha512-a2B9Y0KlNXl9u/vsW6sTIu9vGEpfKu2wRV6l1H3XEas/0gUIzGzBoP/IouTcUQbm9JWZLH3COxyn03TYlFax6w==",
- "dev": true,
- "dependencies": {
- "semver": "^7.5.3"
- },
- "engines": {
- "node": ">=10"
- }
- },
- "node_modules/slash": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/slash/-/slash-2.0.0.tgz",
- "integrity": "sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==",
- "engines": {
- "node": ">=6"
- }
- },
- "node_modules/ssh2": {
- "version": "1.15.0",
- "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.15.0.tgz",
- "integrity": "sha512-C0PHgX4h6lBxYx7hcXwu3QWdh4tg6tZZsTfXcdvc5caW/EMxaB4H9dWsl7qk+F7LAW762hp8VbXOX7x4xUYvEw==",
- "hasInstallScript": true,
- "dependencies": {
- "asn1": "^0.2.6",
- "bcrypt-pbkdf": "^1.0.2"
- },
- "engines": {
- "node": ">=10.16.0"
- },
- "optionalDependencies": {
- "cpu-features": "~0.0.9",
- "nan": "^2.18.0"
- }
- },
- "node_modules/string-argv": {
- "version": "0.3.2",
- "resolved": "https://registry.npmjs.org/string-argv/-/string-argv-0.3.2.tgz",
- "integrity": "sha512-aqD2Q0144Z+/RqG52NeHEkZauTAUWJO8c6yTftGJKO3Tja5tUgIfmIl6kExvhtxSDP7fXB6DvzkfMpCd/F3G+Q==",
- "engines": {
- "node": ">=0.6.19"
- }
- },
- "node_modules/strip-ansi": {
- "version": "6.0.1",
- "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
- "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
- "dev": true,
- "dependencies": {
- "ansi-regex": "^5.0.1"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/strip-json-comments": {
- "version": "3.1.1",
- "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz",
- "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==",
- "dev": true,
- "engines": {
- "node": ">=8"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/supports-color": {
- "version": "5.5.0",
- "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz",
- "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==",
- "dev": true,
- "dependencies": {
- "has-flag": "^3.0.0"
- },
- "engines": {
- "node": ">=4"
- }
- },
- "node_modules/text-table": {
- "version": "0.2.0",
- "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz",
- "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==",
- "dev": true
- },
- "node_modules/tmp": {
- "version": "0.0.33",
- "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz",
- "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==",
- "dependencies": {
- "os-tmpdir": "~1.0.2"
- },
- "engines": {
- "node": ">=0.6.0"
- }
- },
- "node_modules/to-regex-range": {
- "version": "5.0.1",
- "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
- "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
- "dependencies": {
- "is-number": "^7.0.0"
- },
- "engines": {
- "node": ">=8.0"
- }
- },
- "node_modules/touch": {
- "version": "3.1.0",
- "resolved": "https://registry.npmjs.org/touch/-/touch-3.1.0.tgz",
- "integrity": "sha512-WBx8Uy5TLtOSRtIq+M03/sKDrXCLHxwDcquSP2c43Le03/9serjQBIztjRz6FkJez9D/hleyAXTBGLwwZUw9lA==",
- "dev": true,
- "dependencies": {
- "nopt": "~1.0.10"
- },
- "bin": {
- "nodetouch": "bin/nodetouch.js"
- }
- },
- "node_modules/ts-api-utils": {
- "version": "1.3.0",
- "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.3.0.tgz",
- "integrity": "sha512-UQMIo7pb8WRomKR1/+MFVLTroIvDVtMX3K6OUir8ynLyzB8Jeriont2bTAtmNPa1ekAgN7YPDyf6V+ygrdU+eQ==",
- "dev": true,
- "engines": {
- "node": ">=16"
- },
- "peerDependencies": {
- "typescript": ">=4.2.0"
- }
- },
- "node_modules/tweetnacl": {
- "version": "0.14.5",
- "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz",
- "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA=="
- },
- "node_modules/type-check": {
- "version": "0.4.0",
- "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz",
- "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==",
- "dev": true,
- "dependencies": {
- "prelude-ls": "^1.2.1"
- },
- "engines": {
- "node": ">= 0.8.0"
- }
- },
- "node_modules/type-fest": {
- "version": "0.20.2",
- "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz",
- "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==",
- "dev": true,
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/typescript": {
- "version": "5.3.3",
- "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.3.3.tgz",
- "integrity": "sha512-pXWcraxM0uxAS+tN0AG/BF2TyqmHO014Z070UsJ+pFvYuRSq8KH8DmWpnbXe0pEPDHXZV3FcAbJkijJ5oNEnWw==",
- "dev": true,
- "bin": {
- "tsc": "bin/tsc",
- "tsserver": "bin/tsserver"
- },
- "engines": {
- "node": ">=14.17"
- }
- },
- "node_modules/typescript-eslint": {
- "version": "7.8.0",
- "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-7.8.0.tgz",
- "integrity": "sha512-sheFG+/D8N/L7gC3WT0Q8sB97Nm573Yfr+vZFzl/4nBdYcmviBPtwGSX9TJ7wpVg28ocerKVOt+k2eGmHzcgVA==",
- "dev": true,
- "dependencies": {
- "@typescript-eslint/eslint-plugin": "7.8.0",
- "@typescript-eslint/parser": "7.8.0",
- "@typescript-eslint/utils": "7.8.0"
- },
- "engines": {
- "node": "^18.18.0 || >=20.0.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- },
- "peerDependencies": {
- "eslint": "^8.56.0"
- },
- "peerDependenciesMeta": {
- "typescript": {
- "optional": true
- }
- }
- },
- "node_modules/undefsafe": {
- "version": "2.0.5",
- "resolved": "https://registry.npmjs.org/undefsafe/-/undefsafe-2.0.5.tgz",
- "integrity": "sha512-WxONCrssBM8TSPRqN5EmsjVrsv4A8X12J4ArBiiayv3DyyG3ZlIg6yysuuSYdZsVz3TKcTg2fd//Ujd4CHV1iA==",
- "dev": true
- },
- "node_modules/undici-types": {
- "version": "5.26.5",
- "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",
- "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==",
- "dev": true
- },
- "node_modules/universalify": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz",
- "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==",
- "engines": {
- "node": ">= 10.0.0"
- }
- },
- "node_modules/uri-js": {
- "version": "4.4.1",
- "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz",
- "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==",
- "dev": true,
- "dependencies": {
- "punycode": "^2.1.0"
- }
- },
- "node_modules/which": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
- "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
- "dependencies": {
- "isexe": "^2.0.0"
- },
- "bin": {
- "node-which": "bin/node-which"
- },
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/word-wrap": {
- "version": "1.2.5",
- "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz",
- "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==",
- "dev": true,
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/wrappy": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
- "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="
- },
- "node_modules/yaml": {
- "version": "2.4.2",
- "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.4.2.tgz",
- "integrity": "sha512-B3VqDZ+JAg1nZpaEmWtTXUlBneoGx6CPM9b0TENK6aoSu5t73dItudwdgmi6tHlIZZId4dZ9skcAQ2UbcyAeVA==",
- "bin": {
- "yaml": "bin.mjs"
- },
- "engines": {
- "node": ">= 14"
- }
- },
- "node_modules/yocto-queue": {
- "version": "0.1.0",
- "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",
- "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==",
- "dev": true,
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- }
- }
-}
diff --git a/lom/package.json b/lom/package.json
deleted file mode 100644
index 7ee6d83..0000000
--- a/lom/package.json
+++ /dev/null
@@ -1,34 +0,0 @@
-{
- "name": "nextnet-lom",
- "version": "1.1.2",
- "description": "Lights Out Management, NextNet style",
- "main": "index.js",
- "type": "module",
- "scripts": {
- "test": "echo \"Error: no test specified\" && exit 1",
- "build": "tsc",
- "start": "cd out && node --enable-source-maps index.js",
- "dev": "nodemon --watch src --ext ts,js,mjs,json --exec \"tsc && cd out && node --enable-source-maps index.js\""
- },
- "keywords": [],
- "author": "greysoh",
- "license": "BSD-3-Clause",
- "devDependencies": {
- "@eslint/js": "^9.2.0",
- "@types/node": "^20.12.8",
- "@types/ssh2": "^1.15.0",
- "@types/yargs": "^17.0.32",
- "eslint": "^8.57.0",
- "globals": "^15.2.0",
- "nodemon": "^3.0.3",
- "typescript": "^5.3.3",
- "typescript-eslint": "^7.8.0"
- },
- "dependencies": {
- "axios": "^1.7.4",
- "commander": "^12.0.0",
- "patch-package": "^8.0.0",
- "ssh2": "^1.15.0",
- "string-argv": "^0.3.2"
- }
-}
diff --git a/lom/src/commands.ts b/lom/src/commands.ts
deleted file mode 100644
index bb06314..0000000
--- a/lom/src/commands.ts
+++ /dev/null
@@ -1,65 +0,0 @@
-import type { Axios } from "axios";
-
-import { run as connection } from "./commands/connections.js";
-import { run as backends } from "./commands/backends.js";
-import { run as users } from "./commands/users.js";
-
-export type PrintLine = (...str: unknown[]) => void;
-export type KeyboardRead = (disableEcho?: boolean) => Promise<string>;
-
-type Command = (
- args: string[],
- println: PrintLine,
- axios: Axios,
- apiKey: string,
- keyboardRead: KeyboardRead,
-) => Promise<void>;
-
-type Commands = {
- name: string;
- description: string;
- run: Command;
-}[];
-
-export const commands: Commands = [
- {
- name: "help",
- description: "Prints help",
- async run(_args: string[], printf: PrintLine) {
- commands.forEach(command => {
- printf(`${command.name}: ${command.description}\n`);
- });
-
- printf(
- "\nRun a command of your choosing with --help to see more options.\n",
- );
- },
- },
- {
- name: "clear",
- description: "Clears screen",
- async run(_args: string[], printf: PrintLine) {
- printf("\x1B[2J\x1B[3J\x1B[H");
- },
- },
- {
- name: "conn",
- description: "Manages connections for NextNet",
- run: connection,
- },
- {
- name: "user",
- description: "Manages users for NextNet",
- run: users,
- },
- {
- name: "backend",
- description: "Manages backends for NextNet",
- run: backends,
- },
- {
- name: "back",
- description: "(alias) Manages backends for NextNet",
- run: backends,
- },
-];
diff --git a/lom/src/commands/backends.ts b/lom/src/commands/backends.ts
deleted file mode 100644
index 0930bbb..0000000
--- a/lom/src/commands/backends.ts
+++ /dev/null
@@ -1,519 +0,0 @@
-import type { Axios } from "axios";
-
-import { SSHCommand } from "../libs/patchCommander.js";
-import type { PrintLine, KeyboardRead } from "../commands.js";
-
-type BackendLookupSuccess = {
- success: boolean;
- data: {
- id: number;
-
- name: string;
- description: string;
- backend: string;
- connectionDetails?: string;
- logs: string[];
- }[];
-};
-
-const addRequiredOptions = {
- ssh: ["sshKey", "username", "host"],
-
- passyfire: ["host"],
-};
-
-export async function run(
- argv: string[],
- println: PrintLine,
- axios: Axios,
- token: string,
- readKeyboard: KeyboardRead,
-) {
- const program = new SSHCommand(println);
- program.description("Manages backends for NextNet");
- program.version("v1.0.0");
-
- const addBackend = new SSHCommand(println, "add");
-
- addBackend.description("Adds a backend");
-  addBackend.argument("<name>", "Name of the backend");
-
- addBackend.argument(
-    "<provider>",
- "Provider of the backend (ex. passyfire, ssh)",
- );
-
- addBackend.option(
-    "-d, --description <description>",
- "Description for the backend",
- );
-
- addBackend.option(
- "-f, --force-custom-parameters",
- "If turned on, this forces you to use custom parameters",
- );
-
- addBackend.option(
-    "-c, --custom-parameters <parameters>",
- "Custom parameters. Use this if the backend you're using isn't native to SSH yet, or if you manually turn on -f.",
- );
-
- // SSH provider
- addBackend.option(
-    "-k, --ssh-key <key>",
- "(SSH) SSH private key to use to authenticate with the server",
- );
-
- addBackend.option(
-    "-u, --username <username>",
- "(SSH, PassyFire) Username to authenticate with. With PassyFire, it's the username you create",
- );
-
- addBackend.option(
-    "-h, --host <host>",
- "(SSH, PassyFire) Host to connect to. With PassyFire, it's what you listen on",
- );
-
- // PassyFire provider
- addBackend.option(
- "-pe, --is-proxied",
- "(PassyFire) Specify if you're behind a proxy or not so we can get the right IP",
- );
-
- addBackend.option(
-    "-pp, --proxied-port <port>",
- "(PassyFire) If you're behind a proxy, and the port is different, specify the port to return",
- );
-
- addBackend.option("-g, --guest", "(PassyFire) Enable the guest user");
-
- addBackend.option(
- "-ua, --user-ask",
- "(PassyFire) Ask what users you want to create",
- );
-
- addBackend.option(
-    "-p, --password <password>",
- "(PassyFire) What password you want to use for the primary user",
- );
-
- addBackend.action(
- async (
- name: string,
- provider: string,
- options: {
- description?: string;
- forceCustomParameters?: boolean;
- customParameters?: string;
-
- // SSH (mostly)
- sshKey?: string;
- username?: string;
- host?: string;
-
- // PassyFire (mostly)
- isProxied?: boolean;
- proxiedPort?: string;
- guest?: boolean;
- userAsk?: boolean;
- password?: string;
- },
- ) => {
- // @ts-expect-error: Yes it can index for what we need it to do.
- const isUnsupportedPlatform: boolean = !addRequiredOptions[provider];
-
- if (isUnsupportedPlatform) {
- println(
- "WARNING: Platform is not natively supported by the LOM yet!\n",
- );
- }
-
- let connectionDetails: string = "";
-
- if (options.forceCustomParameters || isUnsupportedPlatform) {
- if (typeof options.customParameters != "string") {
- return println(
- "ERROR: You are missing the custom parameters option!\n",
- );
- }
-
- connectionDetails = options.customParameters;
- } else if (provider == "ssh") {
- for (const argument of addRequiredOptions["ssh"]) {
- // @ts-expect-error: No.
- const hasArgument = options[argument];
-
- if (!hasArgument) {
- return println("ERROR: Missing argument '%s'\n", argument);
- }
- }
-
- const unstringifiedArguments: {
- ip?: string;
- port?: number;
- username?: string;
- privateKey?: string;
- } = {};
-
- if (options.host) {
- const sourceSplit: string[] = options.host.split(":");
-
- const sourceIP: string = sourceSplit[0];
- const sourcePort: number =
- sourceSplit.length >= 2 ? parseInt(sourceSplit[1]) : 22;
-
- unstringifiedArguments.ip = sourceIP;
- unstringifiedArguments.port = sourcePort;
- }
-
- unstringifiedArguments.username = options.username;
- unstringifiedArguments.privateKey = options.sshKey?.replaceAll(
- "\\n",
- "\n",
- );
-
- connectionDetails = JSON.stringify(unstringifiedArguments);
- } else if (provider == "passyfire") {
- for (const argument of addRequiredOptions["passyfire"]) {
- // @ts-expect-error: No.
- const hasArgument = options[argument];
-
- if (!hasArgument) {
- return println("ERROR: Missing argument '%s'\n", argument);
- }
- }
-
- const unstringifiedArguments: {
- ip?: string;
- port?: number;
- publicPort?: number;
- isProxied?: boolean;
- users: {
- username: string;
- password: string;
- }[];
- } = {
- users: [],
- };
-
- if (options.guest) {
- unstringifiedArguments.users.push({
- username: "guest",
- password: "guest",
- });
- }
-
- if (options.username) {
- if (!options.password) {
- return println("Password must not be left blank\n");
- }
-
- unstringifiedArguments.users.push({
- username: options.username,
- password: options.password,
- });
- }
-
- if (options.userAsk) {
- let shouldContinueAsking: boolean = true;
-
- while (shouldContinueAsking) {
- println("Creating a user.\nUsername: ");
- const username = await readKeyboard();
-
- let passwordConfirmOne = "a";
- let passwordConfirmTwo = "b";
-
- println("\n");
-
- while (passwordConfirmOne != passwordConfirmTwo) {
- println("Password: ");
- passwordConfirmOne = await readKeyboard(true);
-
- println("\nConfirm password: ");
- passwordConfirmTwo = await readKeyboard(true);
-
- println("\n");
-
- if (passwordConfirmOne != passwordConfirmTwo) {
- println("Passwords do not match! Try again.\n\n");
- }
- }
-
- unstringifiedArguments.users.push({
- username,
- password: passwordConfirmOne,
- });
-
- println("\nShould we continue creating users? (y/n) ");
- shouldContinueAsking = (await readKeyboard())
- .toLowerCase()
- .trim()
- .startsWith("y");
-
- println("\n\n");
- }
- }
-
- if (unstringifiedArguments.users.length == 0) {
- return println(
- "No users will be created with your current arguments! You must have users set up.\n",
- );
- }
-
- unstringifiedArguments.isProxied = Boolean(options.isProxied);
-
- if (options.proxiedPort) {
- unstringifiedArguments.publicPort = parseInt(
- options.proxiedPort ?? "",
- );
-
- if (Number.isNaN(unstringifiedArguments.publicPort)) {
-            println("Port (%s) is not a number.\n", options.proxiedPort);
- return;
- }
- }
-
- if (options.host) {
- const sourceSplit: string[] = options.host.split(":");
-
- if (sourceSplit.length != 2) {
- return println(
-              "Source could not be split (are you missing the ':' in the source to specify the port?)\n",
- );
- }
-
- const sourceIP: string = sourceSplit[0];
- const sourcePort: number = parseInt(sourceSplit[1]);
-
- if (Number.isNaN(sourcePort)) {
-            println("Port (%s) is not a number.\n", sourcePort);
- return;
- }
-
- unstringifiedArguments.ip = sourceIP;
- unstringifiedArguments.port = sourcePort;
- }
-
- connectionDetails = JSON.stringify(unstringifiedArguments);
- }
-
- const response = await axios.post("/api/v1/backends/create", {
- token,
-
- name,
- description: options.description,
- backend: provider,
-
- connectionDetails,
- });
-
- if (response.status != 200) {
- if (process.env.NODE_ENV != "production") console.log(response);
-
- if (response.data.error) {
- println(`Error: ${response.data.error}\n`);
- } else {
- println("Error creating a backend!\n");
- }
-
- return;
- }
-
- println("Successfully created the backend.\n");
- },
- );
-
- const removeBackend = new SSHCommand(println, "rm");
- removeBackend.description("Removes a backend");
-  removeBackend.argument("<id>", "ID of the backend");
-
- removeBackend.action(async (idStr: string) => {
- const id: number = parseInt(idStr);
-
- if (Number.isNaN(id)) {
- println("ID (%s) is not a number.\n", idStr);
- return;
- }
-
- const response = await axios.post("/api/v1/backends/remove", {
- token,
- id,
- });
-
- if (response.status != 200) {
- if (process.env.NODE_ENV != "production") console.log(response);
-
- if (response.data.error) {
- println(`Error: ${response.data.error}\n`);
- } else {
- println("Error deleting backend!\n");
- }
-
- return;
- }
-
- println("Backend has been successfully deleted.\n");
- });
-
- const lookupBackend = new SSHCommand(println, "find");
- lookupBackend.description("Looks up a backend based on your arguments");
-
-  lookupBackend.option("-n, --name <name>", "Name of the backend");
-
- lookupBackend.option(
-    "-p, --provider <provider>",
- "Provider of the backend (ex. passyfire, ssh)",
- );
-
- lookupBackend.option(
-    "-d, --description <description>",
- "Description for the backend",
- );
-
- lookupBackend.option(
- "-e, --parse-connection-details",
- "If specified, we automatically parse the connection details to make them human readable, if standard JSON.",
- );
-
- lookupBackend.action(
- async (options: {
- name?: string;
- provider?: string;
- description?: string;
- parseConnectionDetails?: boolean;
- }) => {
- const response = await axios.post("/api/v1/backends/lookup", {
- token,
-
- name: options.name,
- description: options.description,
-
- backend: options.provider,
- });
-
- if (response.status != 200) {
- if (process.env.NODE_ENV != "production") console.log(response);
-
- if (response.data.error) {
- println(`Error: ${response.data.error}\n`);
- } else {
- println("Error looking up backends!\n");
- }
-
- return;
- }
-
- const { data }: BackendLookupSuccess = response.data;
-
- for (const backend of data) {
- println("ID: %s:\n", backend.id);
- println(" - Name: %s\n", backend.name);
- println(" - Description: %s\n", backend.description);
- println(" - Using Backend: %s\n", backend.backend);
-
- if (backend.connectionDetails) {
- if (options.parseConnectionDetails) {
-            // We don't know what we're receiving. We just try to parse it (hence the any type)
- // {} is more accurate but TS yells at us if we do that :(
-
- // eslint-disable-next-line
- let parsedJSONData: any | undefined;
-
- try {
- parsedJSONData = JSON.parse(backend.connectionDetails);
- } catch (e) {
- println(" - Connection Details: %s\n", backend.connectionDetails);
- continue;
- }
-
- if (!parsedJSONData) {
- // Not really an assertion but I don't care right now
- println(
- "Assertion failed: parsedJSONData should not be undefined\n",
- );
- continue;
- }
-
- println(" - Connection details:\n");
-
- for (const key of Object.keys(parsedJSONData)) {
- let value: string | number = parsedJSONData[key];
-
- if (typeof value == "string") {
- value = value.replaceAll("\n", "\n" + " ".repeat(16));
- }
-
- if (typeof value == "object") {
- // TODO: implement?
- value = JSON.stringify(value);
- }
-
- println(" - %s: %s\n", key, value);
- }
- } else {
- println(" - Connection Details: %s\n", backend.connectionDetails);
- }
- }
-
- println("\n");
- }
-
- println("%s backends found.\n", data.length);
- },
- );
-
- const logsCommand = new SSHCommand(println, "logs");
- logsCommand.description("View logs for a backend");
-  logsCommand.argument("<id>", "ID of the backend");
-
- logsCommand.action(async (idStr: string) => {
- const id: number = parseInt(idStr);
-
- if (Number.isNaN(id)) {
- println("ID (%s) is not a number.\n", idStr);
- return;
- }
-
- const response = await axios.post("/api/v1/backends/lookup", {
- token,
- id,
- });
-
- if (response.status != 200) {
- if (process.env.NODE_ENV != "production") console.log(response);
-
- if (response.data.error) {
- println(`Error: ${response.data.error}\n`);
- } else {
- println("Error getting logs!\n");
- }
-
- return;
- }
-
- const { data }: BackendLookupSuccess = response.data;
- const ourBackend = data.find(i => i.id == id);
-
- if (!ourBackend) return println("Could not find the backend!\n");
- ourBackend.logs.forEach(log => println("%s\n", log));
- });
-
- program.addCommand(addBackend);
- program.addCommand(removeBackend);
- program.addCommand(lookupBackend);
- program.addCommand(logsCommand);
-
- program.parse(argv);
-
- // It would make sense to check this, then parse argv, however this causes issues with
- // the application name not displaying correctly.
-
- if (argv.length == 1) {
- println("No arguments specified!\n\n");
- program.help();
- return;
- }
-
- await new Promise(resolve => program.onExit(resolve));
-}
diff --git a/lom/src/commands/connections.ts b/lom/src/commands/connections.ts
deleted file mode 100644
index 29f4eb6..0000000
--- a/lom/src/commands/connections.ts
+++ /dev/null
@@ -1,504 +0,0 @@
-import type { Axios } from "axios";
-
-import { SSHCommand } from "../libs/patchCommander.js";
-import type { PrintLine } from "../commands.js";
-
-// https://stackoverflow.com/questions/37938504/what-is-the-best-way-to-find-all-items-are-deleted-inserted-from-original-arra
-function difference(a: any[], b: any[]) {
- return a.filter(x => b.indexOf(x) < 0);
-}
-
-type InboundConnectionSuccess = {
- success: true;
- data: {
- ip: string;
- port: number;
-
- connectionDetails: {
- sourceIP: string;
- sourcePort: number;
- destPort: number;
- enabled: boolean;
- };
- }[];
-};
-
-type LookupCommandSuccess = {
- success: true;
- data: {
- id: number;
- name: string;
- description: string;
- sourceIP: string;
- sourcePort: number;
- destPort: number;
- providerID: number;
- autoStart: boolean;
- }[];
-};
-
-export async function run(
- argv: string[],
- println: PrintLine,
- axios: Axios,
- token: string,
-) {
- if (argv.length == 1)
- return println(
- "error: no arguments specified! run %s --help to see commands.\n",
- argv[0],
- );
-
- const program = new SSHCommand(println);
- program.description("Manages connections for NextNet");
- program.version("v1.0.0");
-
- const addCommand = new SSHCommand(println, "add");
- addCommand.description("Creates a new connection");
-
- addCommand.argument(
-    "<backend_id>",
-    "The backend ID to use. Can be fetched by the command 'backend find'",
- );
-
-  addCommand.argument("<name>", "The name for the tunnel");
-  addCommand.argument("<protocol>", "The protocol to use. Either TCP or UDP");
-
- addCommand.argument(
-    "<source>",
-    "Source IP and port combo (ex. '192.168.0.63:25565')",
- );
-
-  addCommand.argument("<dest_port>", "Destination port to use");
- addCommand.option("-d, --description", "Description for the tunnel");
-
- addCommand.action(
- async (
- providerIDStr: string,
- name: string,
- protocolRaw: string,
- source: string,
- destPortRaw: string,
- options: {
- description?: string;
- },
- ) => {
- const providerID = parseInt(providerIDStr);
-
- if (Number.isNaN(providerID)) {
- println("ID (%s) is not a number\n", providerIDStr);
- return;
- }
-
- const protocol = protocolRaw.toLowerCase().trim();
-
- if (protocol != "tcp" && protocol != "udp") {
- return println("Protocol is not a valid option (not tcp or udp)\n");
- }
-
- const sourceSplit: string[] = source.split(":");
-
- if (sourceSplit.length != 2) {
- return println(
-          "Source could not be split (are you missing the ':' in the source to specify the port?)\n",
- );
- }
-
- const sourceIP: string = sourceSplit[0];
- const sourcePort: number = parseInt(sourceSplit[1]);
-
- if (Number.isNaN(sourcePort)) {
-        return println("Port is not a number\n");
- }
-
- const destinationPort: number = parseInt(destPortRaw);
-
- if (Number.isNaN(destinationPort)) {
- return println("Destination port could not be parsed into a number\n");
- }
-
- const response = await axios.post("/api/v1/forward/create", {
- token,
-
- name,
- description: options.description,
-
- protocol,
-
- sourceIP,
- sourcePort,
-
- destinationPort,
-
- providerID,
- });
-
- if (response.status != 200) {
- if (process.env.NODE_ENV != "production") console.log(response);
-
- if (response.data.error) {
- println(`Error: ${response.data.error}\n`);
- } else {
- println("Error creating a connection!\n");
- }
-
- return;
- }
-
- println("Successfully created connection.\n");
- },
- );
-
- const lookupCommand = new SSHCommand(println, "find");
-
- lookupCommand.description(
- "Looks up all connections based on the arguments you specify",
- );
-
- lookupCommand.option(
-    "-b, --backend-id <id>",
- "The backend ID to use. Can be fetched by 'back find'",
- );
-
-  lookupCommand.option("-n, --name <name>", "The name for the tunnel");
-
- lookupCommand.option(
-    "-p, --protocol <protocol>",
- "The protocol to use. Either TCP or UDP",
- );
-
- lookupCommand.option(
-    "-s <source>, --source",
-    "Source IP and port combo (ex. '192.168.0.63:25565')",
- );
-
-  lookupCommand.option("-d, --dest-port <port>", "Destination port to use");
-
- lookupCommand.option(
-    "-o, --description <description>",
- "Description for the tunnel",
- );
-
- lookupCommand.action(
- async (options: {
- backendId?: string;
- destPort?: string;
- name?: string;
- protocol?: string;
- source?: string;
- description?: string;
- }) => {
- let numberBackendID: number | undefined;
-
- let sourceIP: string | undefined;
- let sourcePort: number | undefined;
-
- let destPort: number | undefined;
-
- if (options.backendId) {
- numberBackendID = parseInt(options.backendId);
-
- if (Number.isNaN(numberBackendID)) {
- println("ID (%s) is not a number\n", options.backendId);
- return;
- }
- }
-
- if (options.source) {
- const sourceSplit: string[] = options.source.split(":");
-
- if (sourceSplit.length != 2) {
- return println(
-            "Source could not be split (are you missing the ':' in the source to specify the port?)\n",
- );
- }
-
- sourceIP = sourceSplit[0];
- sourcePort = parseInt(sourceSplit[1]);
-
- if (Number.isNaN(sourcePort)) {
-          return println("Port is not a number\n");
- }
- }
-
- if (options.destPort) {
- destPort = parseInt(options.destPort);
-
- if (Number.isNaN(destPort)) {
-          println("Destination port (%s) is not a number\n", options.destPort);
- return;
- }
- }
-
- const response = await axios.post("/api/v1/forward/lookup", {
- token,
-
- name: options.name,
- description: options.description,
-
- protocol: options.protocol,
-
- sourceIP,
- sourcePort,
-
- destinationPort: destPort,
- });
-
- if (response.status != 200) {
- if (process.env.NODE_ENV != "production") console.log(response);
-
- if (response.data.error) {
- println(`Error: ${response.data.error}\n`);
- } else {
- println("Error requesting connections!\n");
- }
-
- return;
- }
-
- const { data }: LookupCommandSuccess = response.data;
-
- for (const connection of data) {
- println(
- "ID: %s%s:\n",
- connection.id,
- connection.autoStart ? " (automatically starts)" : "",
- );
- println(" - Backend ID: %s\n", connection.providerID);
- println(" - Name: %s\n", connection.name);
- if (connection.description)
- println(" - Description: %s\n", connection.description);
- println(
- " - Source: %s:%s\n",
- connection.sourceIP,
- connection.sourcePort,
- );
- println(" - Destination port: %s\n", connection.destPort);
-
- println("\n");
- }
-
- println("%s connections found.\n", data.length);
- },
- );
-
- const startTunnel = new SSHCommand(println, "start");
- startTunnel.description("Starts a tunnel");
-  startTunnel.argument("<id>", "Tunnel ID to start");
-
- startTunnel.action(async (idStr: string) => {
- const id = parseInt(idStr);
-
- if (Number.isNaN(id)) {
- println("ID (%s) is not a number\n", idStr);
- return;
- }
-
- const response = await axios.post("/api/v1/forward/start", {
- token,
- id,
- });
-
- if (response.status != 200) {
- if (process.env.NODE_ENV != "production") console.log(response);
-
- if (response.data.error) {
- println(`Error: ${response.data.error}\n`);
- } else {
- println("Error starting the connection!\n");
- }
-
- return;
- }
-
- println("Successfully started tunnel.\n");
- return;
- });
-
- const stopTunnel = new SSHCommand(println, "stop");
- stopTunnel.description("Stops a tunnel");
-  stopTunnel.argument("<id>", "Tunnel ID to stop");
-
- stopTunnel.action(async (idStr: string) => {
- const id = parseInt(idStr);
-
- if (Number.isNaN(id)) {
- println("ID (%s) is not a number\n", idStr);
- return;
- }
-
- const response = await axios.post("/api/v1/forward/stop", {
- token,
- id,
- });
-
- if (response.status != 200) {
- if (process.env.NODE_ENV != "production") console.log(response);
-
- if (response.data.error) {
- println(`Error: ${response.data.error}\n`);
- } else {
- println("Error stopping a connection!\n");
- }
-
- return;
- }
-
- println("Successfully stopped tunnel.\n");
- });
-
- const getInbound = new SSHCommand(println, "get-inbound");
- getInbound.description("Shows all current connections");
-  getInbound.argument("<id>", "Tunnel ID to view inbound connections of");
- getInbound.option("-t, --tail", "Live-view of connection list");
- getInbound.option(
-    "-s, --tail-pull-rate <ms>",
- "Controls the speed to pull at (in ms)",
- );
-
- getInbound.action(
- async (
- idStr: string,
- options: {
- tail?: boolean;
- tailPullRate?: string;
- },
-    ): Promise<void> => {
- const pullRate: number = options.tailPullRate
- ? parseInt(options.tailPullRate)
- : 2000;
- const id = parseInt(idStr);
-
- if (Number.isNaN(id)) {
- println("ID (%s) is not a number\n", idStr);
- return;
- }
-
- if (Number.isNaN(pullRate)) {
- println("Pull rate is not a number\n");
- return;
- }
-
- if (options.tail) {
- let previousEntries: string[] = [];
-
- // FIXME?
- // eslint-disable-next-line no-constant-condition
- while (true) {
- const response = await axios.post("/api/v1/forward/connections", {
- token,
- id,
- });
-
- if (response.status != 200) {
- if (process.env.NODE_ENV != "production") console.log(response);
-
- if (response.data.error) {
- println(`Error: ${response.data.error}\n`);
- } else {
- println("Error requesting inbound connections!\n");
- }
-
- return;
- }
-
- const { data }: InboundConnectionSuccess = response.data;
- const simplifiedArray: string[] = data.map(i => `${i.ip}:${i.port}`);
-
- const insertedItems: string[] = difference(
- simplifiedArray,
- previousEntries,
- );
-
- const removedItems: string[] = difference(
- previousEntries,
- simplifiedArray,
- );
-
- insertedItems.forEach(i => println("CONNECTED: %s\n", i));
- removedItems.forEach(i => println("DISCONNECTED: %s\n", i));
-
- previousEntries = simplifiedArray;
-
- await new Promise(i => setTimeout(i, pullRate));
- }
- } else {
- const response = await axios.post("/api/v1/forward/connections", {
- token,
- id,
- });
-
- if (response.status != 200) {
- if (process.env.NODE_ENV != "production") console.log(response);
-
- if (response.data.error) {
- println(`Error: ${response.data.error}\n`);
- } else {
- println("Error requesting connections!\n");
- }
-
- return;
- }
-
- const { data }: InboundConnectionSuccess = response.data;
-
- if (data.length == 0) {
- println("There are currently no connected clients.\n");
- return;
- }
-
- println(
- "Connected clients (for source: %s:%s):\n",
- data[0].connectionDetails.sourceIP,
- data[0].connectionDetails.sourcePort,
- );
-
- for (const entry of data) {
- println(" - %s:%s\n", entry.ip, entry.port);
- }
- }
- },
- );
-
- const removeTunnel = new SSHCommand(println, "rm");
- removeTunnel.description("Removes a tunnel");
-  removeTunnel.argument("<id>", "Tunnel ID to remove");
-
- removeTunnel.action(async (idStr: string) => {
- const id = parseInt(idStr);
-
- if (Number.isNaN(id)) {
- println("ID (%s) is not a number\n", idStr);
- return;
- }
-
- const response = await axios.post("/api/v1/forward/remove", {
- token,
- id,
- });
-
- if (response.status != 200) {
- if (process.env.NODE_ENV != "production") console.log(response);
-
- if (response.data.error) {
- println(`Error: ${response.data.error}\n`);
- } else {
- println("Error deleting connection!\n");
- }
-
- return;
- }
-
- println("Successfully deleted connection.\n");
- });
-
- program.addCommand(addCommand);
- program.addCommand(lookupCommand);
- program.addCommand(startTunnel);
- program.addCommand(stopTunnel);
- program.addCommand(getInbound);
- program.addCommand(removeTunnel);
-
- program.parse(argv);
- await new Promise(resolve => program.onExit(resolve));
-}
diff --git a/lom/src/commands/users.ts b/lom/src/commands/users.ts
deleted file mode 100644
index 2c9452b..0000000
--- a/lom/src/commands/users.ts
+++ /dev/null
@@ -1,215 +0,0 @@
-import type { Axios } from "axios";
-
-import { SSHCommand } from "../libs/patchCommander.js";
-import type { PrintLine, KeyboardRead } from "../commands.js";
-
-type UserLookupSuccess = {
- success: true;
- data: {
- id: number;
- isServiceAccount: boolean;
- username: string;
- name: string;
- email: string;
- }[];
-};
-
-export async function run(
- argv: string[],
- println: PrintLine,
- axios: Axios,
- apiKey: string,
- readKeyboard: KeyboardRead,
-) {
- if (argv.length == 1)
- return println(
- "error: no arguments specified! run %s --help to see commands.\n",
- argv[0],
- );
-
- const program = new SSHCommand(println);
- program.description("Manages users for NextNet");
- program.version("v1.0.0");
-
- const addCommand = new SSHCommand(println, "add");
- addCommand.description("Create a new user");
-  addCommand.argument("<username>", "Username of new user");
-  addCommand.argument("<email>", "Email of new user");
- addCommand.argument("[name]", "Name of new user (defaults to username)");
-
- addCommand.option("-p, --password", "Password of User");
- addCommand.option(
- "-a, --ask-password, --ask-pass, --askpass",
- "Asks for a password. Hides output",
- );
-
- addCommand.option(
- "-s, --service-account, --service",
- "Turns the user into a service account",
- );
-
- addCommand.action(
- async (
- username: string,
- email: string,
- name: string,
- options: {
- password?: string;
- askPassword?: boolean;
- isServiceAccount?: boolean;
- },
- ) => {
- if (!options.password && !options.askPassword) {
- println("No password supplied, and askpass has not been supplied.\n");
- return;
- }
-
- let password: string = "";
-
- if (options.askPassword) {
- let passwordConfirmOne = "a";
- let passwordConfirmTwo = "b";
-
- while (passwordConfirmOne != passwordConfirmTwo) {
- println("Password: ");
- passwordConfirmOne = await readKeyboard(true);
-
- println("\nConfirm password: ");
- passwordConfirmTwo = await readKeyboard(true);
-
- println("\n");
-
- if (passwordConfirmOne != passwordConfirmTwo) {
- println("Passwords do not match! Try again.\n\n");
- }
- }
-
- password = passwordConfirmOne;
- } else {
- // @ts-expect-error: From the first check we do, we know this is safe (you MUST specify a password)
- password = options.password;
- }
-
- const response = await axios.post("/api/v1/users/create", {
- name,
- username,
- email,
- password,
-
- allowUnsafeGlobalTokens: options.isServiceAccount,
- });
-
- if (response.status != 200) {
- if (process.env.NODE_ENV != "production") console.log(response);
-
- if (response.data.error) {
- println(`Error: ${response.data.error}\n`);
- } else {
- println("Error creating users!\n");
- }
-
- return;
- }
-
- println("User created successfully.\nToken: %s\n", response.data.token);
- },
- );
-
- const removeCommand = new SSHCommand(println, "rm");
- removeCommand.description("Remove a user");
-  removeCommand.argument("<uid>", "ID of user to remove");
-
- removeCommand.action(async (uidStr: string) => {
- const uid = parseInt(uidStr);
-
- if (Number.isNaN(uid)) {
- println("UID (%s) is not a number.\n", uid);
- return;
- }
-
- const response = await axios.post("/api/v1/users/remove", {
- token: apiKey,
- uid,
- });
-
- if (response.status != 200) {
- if (process.env.NODE_ENV != "production") console.log(response);
-
- if (response.data.error) {
- println(`Error: ${response.data.error}\n`);
- } else {
- println("Error deleting user!\n");
- }
-
- return;
- }
-
- println("User has been successfully deleted.\n");
- });
-
- const lookupCommand = new SSHCommand(println, "find");
- lookupCommand.description("Find a user");
-  lookupCommand.option("-i, --id <id>", "UID of User");
-  lookupCommand.option("-n, --name <name>", "Name of User");
-  lookupCommand.option("-u, --username <username>", "Username of User");
-  lookupCommand.option("-e, --email <email>", "Email of User");
- lookupCommand.option("-s, --service", "The user is a service account");
-
- lookupCommand.action(async options => {
- // FIXME: redundant parseInt calls
-
- if (options.id) {
- const uid = parseInt(options.id);
-
- if (Number.isNaN(uid)) {
- println("UID (%s) is not a number.\n", uid);
- return;
- }
- }
-
- const response = await axios.post("/api/v1/users/lookup", {
- token: apiKey,
- id: options.id ? parseInt(options.id) : undefined,
- name: options.name,
- username: options.username,
- email: options.email,
- service: Boolean(options.service),
- });
-
- if (response.status != 200) {
- if (process.env.NODE_ENV != "production") console.log(response);
-
- if (response.data.error) {
- println(`Error: ${response.data.error}\n`);
- } else {
- println("Error finding users!\n");
- }
-
- return;
- }
-
- const { data }: UserLookupSuccess = response.data;
-
- for (const user of data) {
- println(
- "UID: %s%s:\n",
- user.id,
- user.isServiceAccount ? " (service)" : "",
- );
- println("- Username: %s\n", user.username);
- println("- Name: %s\n", user.name);
- println("- Email: %s\n", user.email);
-
- println("\n");
- }
-
- println("%s users found.\n", data.length);
- });
-
- program.addCommand(addCommand);
- program.addCommand(removeCommand);
- program.addCommand(lookupCommand);
-
- program.parse(argv);
- await new Promise(resolve => program.onExit(resolve));
-}
diff --git a/lom/src/copyID.ts b/lom/src/copyID.ts
deleted file mode 100644
index c306db4..0000000
--- a/lom/src/copyID.ts
+++ /dev/null
@@ -1,48 +0,0 @@
-import { writeFile } from "node:fs/promises";
-import ssh2 from "ssh2";
-
-import { readFromKeyboard } from "./libs/readFromKeyboard.js";
-import type { ClientKeys } from "./index.js";
-
-export async function runCopyID(
- username: string,
- password: string,
- keys: ClientKeys,
- stream: ssh2.ServerChannel,
-) {
- stream.write(
- "Hey there! I think you're using ssh-copy-id. If this is an error, you may close this terminal.\n",
- );
-
- stream.write("Please wait...\n");
-
- const keyData = await readFromKeyboard(stream, true);
- stream.write("Parsing key...\n");
-
- const parsedKey = ssh2.utils.parseKey(keyData);
-
- if (parsedKey instanceof Error) {
- stream.write(parsedKey.message + "\n");
- return stream.close();
- }
-
- stream.write("Passed checks. Writing changes...\n");
-
- keys.push({
- username,
- password,
- publicKey: keyData,
- });
-
- try {
- await writeFile("../keys/clients.json", JSON.stringify(keys, null, 2));
- } catch (e) {
- console.log(e);
- return stream.write(
- "ERROR: Failed to save changes! If you're the administrator, view the console for details.\n",
- );
- }
-
- stream.write("Success!\n");
- return stream.close();
-}
diff --git a/lom/src/index.ts b/lom/src/index.ts
deleted file mode 100644
index c2d93ef..0000000
--- a/lom/src/index.ts
+++ /dev/null
@@ -1,242 +0,0 @@
-import { readFile, writeFile, mkdir } from "node:fs/promises";
-import { timingSafeEqual } from "node:crypto";
-import { format } from "node:util";
-
-import parseArgsStringToArgv from "string-argv";
-import baseAxios from "axios";
-import ssh2 from "ssh2";
-
-import { readFromKeyboard } from "./libs/readFromKeyboard.js";
-import { commands } from "./commands.js";
-import { runCopyID } from "./copyID.js";
-
-export type ClientKeys = {
- publicKey: string;
- username: string;
- password: string;
-}[];
-
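-// Constant-time comparison: if the lengths differ we still run timingSafeEqual (against the
-// input itself) and reject afterwards, so the check does not leak timing information.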
-function checkValue(input: Buffer, allowed: Buffer): boolean {
- const autoReject = input.length !== allowed.length;
- if (autoReject) allowed = input;
-
- const isMatch = timingSafeEqual(input, allowed);
- return !autoReject && isMatch;
-}
-
-let serverKeyFile: Buffer | string | undefined;
-let clientKeys: ClientKeys = [];
-
-const serverBaseURL: string =
- process.env.SERVER_BASE_URL ?? "http://127.0.0.1:3000/";
-
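-// validateStatus: () => true keeps axios from throwing on non-2xx responses,
-// so the handlers below branch on response.status instead of using try/catch.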
-const axios = baseAxios.create({
- baseURL: serverBaseURL,
- validateStatus: () => true,
-});
-
-try {
- clientKeys = JSON.parse(await readFile("../keys/clients.json", "utf8"));
-} catch (e) {
- console.log("INFO: We don't have the client key file.");
-}
-
-try {
- serverKeyFile = await readFile("../keys/host.key");
-} catch (e) {
- console.log(
- "ERROR: Failed to read the host key file! Creating new keypair...",
- );
- await mkdir("../keys").catch(() => null);
-
- const keyPair: { private: string; public: string } = await new Promise(
- resolve =>
- ssh2.utils.generateKeyPair("ed25519", (err, keyPair) => resolve(keyPair)),
- );
-
- await writeFile("../keys/host.key", keyPair.private);
- await writeFile("../keys/host.pub", keyPair.public);
-
- serverKeyFile = keyPair.private;
-}
-
-if (!serverKeyFile) throw new Error("Somehow failed to fetch the key file!");
-
-const server: ssh2.Server = new ssh2.Server({
- hostKeys: [serverKeyFile],
- banner: "NextNet-LOM (c) NextNet project et al.",
-});
-
-server.on("connection", client => {
- let token: string = "";
-
- let username: string = "";
- let password: string = "";
-
- client.on("authentication", async auth => {
- if (auth.method == "password") {
- const response = await axios.post("/api/v1/users/login", {
- username: auth.username,
- password: auth.password,
- });
-
- if (response.status == 403) {
- return auth.reject(["password", "publickey"]);
- }
-
- token = response.data.token;
-
- username = auth.username;
- password = auth.password;
-
- auth.accept();
- } else if (auth.method == "publickey") {
- const userData = {
- username: "",
- password: "",
- };
-
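-      // A stored key matches if the username, key algorithm, and key blob all line up
-      // (compared in constant time), or if the client's signature verifies against it.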
- for (const rawKey of clientKeys) {
- const key = ssh2.utils.parseKey(rawKey.publicKey);
-
- if (key instanceof Error) {
- console.log(key);
- continue;
- }
-
- if (
- (rawKey.username == auth.username &&
- auth.key.algo == key.type &&
- checkValue(auth.key.data, key.getPublicSSH())) ||
- (auth.signature &&
- key.verify(auth.blob as Buffer, auth.signature, auth.key.algo))
- ) {
- userData.username = rawKey.username;
- userData.password = rawKey.password;
- }
- }
-
- if (!userData.username || !userData.password)
- return auth.reject(["password", "publickey"]);
-
- const response = await axios.post("/api/v1/users/login", userData);
-
- if (response.status == 403) {
- return auth.reject(["password", "publickey"]);
- }
-
- token = response.data.token;
-
- username = userData.username;
- password = userData.password;
-
- auth.accept();
- } else {
- return auth.reject(["password", "publickey"]);
- }
- });
-
- client.on("ready", () => {
- client.on("session", accept => {
- const conn = accept();
-
- conn.on("exec", async (accept, reject, info) => {
- const stream = accept();
-
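-        // ssh-copy-id installs keys by running a shell snippet that appends to ~/.ssh/authorized_keys;
-        // detect that pattern and hand the session to runCopyID instead of the command interpreter.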
- if (
- info.command.includes(".ssh/authorized_keys") &&
- info.command.startsWith("exec sh -c")
- ) {
- return await runCopyID(username, password, clientKeys, stream);
- }
-
- // Matches on ; and &&
- const commandsRecv = info.command.split(/;|&&/).map(i => i.trim());
-
- function println(...data: unknown[]) {
- stream.write(format(...data).replaceAll("\n", "\r\n"));
- }
-
- for (const command of commandsRecv) {
- const argv = parseArgsStringToArgv(command);
-
- if (argv[0] == "exit") {
- stream.close();
- } else {
- const command = commands.find(i => i.name == argv[0]);
-
- if (!command) {
- stream.write(`Unknown command ${argv[0]}.\r\n`);
-
- continue;
- }
-
- await command.run(argv, println, axios, token, disableEcho =>
- readFromKeyboard(stream, disableEcho),
- );
- }
- }
-
- return stream.close();
- });
-
- // We're dumb. We don't really care.
- conn.on("pty", accept => accept());
- conn.on("window-change", accept => {
- if (typeof accept != "function") return;
- accept();
- });
-
- conn.on("shell", async accept => {
- const stream = accept();
- stream.write(
- "Welcome to NextNet LOM. Run 'help' to see commands.\r\n\r\n~$ ",
- );
-
- function println(...data: unknown[]) {
- stream.write(format(...data).replaceAll("\n", "\r\n"));
- }
-
- // FIXME (greysoh): wtf? this isn't setting correctly.
- // @eslint-disable-next-line
- while (true) {
- const line = await readFromKeyboard(stream);
- stream.write("\r\n");
-
- if (line == "") {
- stream.write(`~$ `);
- continue;
- }
-
- const argv = parseArgsStringToArgv(line);
-
- if (argv[0] == "exit") {
- stream.close();
- } else {
- const command = commands.find(i => i.name == argv[0]);
-
- if (!command) {
- stream.write(
- `Unknown command ${argv[0]}. Run 'help' to see commands.\r\n~$ `,
- );
-
- continue;
- }
-
- await command.run(argv, println, axios, token, disableEcho =>
- readFromKeyboard(stream, disableEcho),
- );
- stream.write("~$ ");
- }
- }
- });
- });
- });
-});
-
-server.listen(
- 2222,
- process.env.NODE_ENV == "production" ? "0.0.0.0" : "127.0.0.1",
-);
-
-console.log("Started server at ::2222");
diff --git a/lom/src/libs/patchCommander.ts b/lom/src/libs/patchCommander.ts
deleted file mode 100644
index f30e492..0000000
--- a/lom/src/libs/patchCommander.ts
+++ /dev/null
@@ -1,108 +0,0 @@
-import { Command, type ParseOptions } from "commander";
-import { PrintLine } from "../commands";
-
-export class SSHCommand extends Command {
- hasRecievedExitSignal: boolean;
- println: PrintLine;
-
- exitEventHandlers: ((...any: unknown[]) => void)[];
- parent: SSHCommand | null;
-
- /**
- * Modified version of the Commander command with slight automated patches, to work with our SSH environment.
- * @param println PrintLine function to use
- * @param name Optional field for the name of the command
- */
- constructor(
- println: PrintLine,
- name?: string,
- disableSSHHelpPatching: boolean = false,
- ) {
-    super(name);
-
-    this.println = println; // stored so createCommand() can pass it on to subcommands
-    this.exitEventHandlers = [];
-
- this.configureOutput({
- writeOut: str => println(str),
- writeErr: str => {
- if (this.hasRecievedExitSignal) return;
- println(str);
- },
- });
-
- if (!disableSSHHelpPatching) {
- const sshCommand = new SSHCommand(println, "help", true);
-
- sshCommand.description("display help for command");
- sshCommand.argument("[command]", "command to show help for");
- sshCommand.action(() => {
- this.hasRecievedExitSignal = true;
-
- if (process.env.NODE_ENV != "production") {
- println(
- "Caught irrecoverable crash (command help call) in patchCommander\n",
- );
- } else {
- println("Aborted\n");
- }
- });
-
- this.addCommand(sshCommand);
- }
- }
-
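-  // Marks this command and every SSHCommand ancestor as exited and runs their exit handlers.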
- recvExitDispatch() {
- this.hasRecievedExitSignal = true;
- this.exitEventHandlers.forEach(eventHandler => eventHandler());
-
- let parentElement = this.parent;
-
- while (parentElement instanceof SSHCommand) {
- parentElement.hasRecievedExitSignal = true;
- parentElement.exitEventHandlers.forEach(eventHandler => eventHandler());
-
- parentElement = parentElement.parent;
- }
- }
-
- onExit(callback: (...any: any[]) => void) {
- this.exitEventHandlers.push(callback);
- if (this.hasRecievedExitSignal) callback();
- }
-
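-  // Override Commander's internal exit plumbing so an "exit" dispatches to our handlers
-  // instead of terminating the process (which would kill the SSH session).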
- _exit() {
- this.recvExitDispatch();
- }
-
- _exitCallback() {
- this.recvExitDispatch();
- }
-
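-  // Wrap the registered action so it is skipped once an exit signal has been received,
-  // and so exit handlers run after the action resolves.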
-  action(fn: (...args: any[]) => void | Promise<void>): this {
- super.action(fn);
-
- // @ts-expect-error: This parameter is private, but we need control over it.
- // prettier-ignore
-    const oldActionHandler: (...args: any[]) => void | Promise<void> = this._actionHandler;
-
- // @ts-expect-error: Overriding private parameters (but this works)
-    this._actionHandler = async (...args: any[]): Promise<void> => {
- if (this.hasRecievedExitSignal) return;
- await oldActionHandler(...args);
-
- this.recvExitDispatch();
- };
-
- return this;
- }
-
- parse(argv?: readonly string[], options?: ParseOptions): this {
- super.parse(["nextruntime", ...(argv ?? [])], options);
- return this;
- }
-
- createCommand(name: string) {
- const command = new SSHCommand(this.println, name);
- return command;
- }
-}
diff --git a/lom/src/libs/readFromKeyboard.ts b/lom/src/libs/readFromKeyboard.ts
deleted file mode 100644
index 9ac19ee..0000000
--- a/lom/src/libs/readFromKeyboard.ts
+++ /dev/null
@@ -1,109 +0,0 @@
-import type { ServerChannel } from "ssh2";
-
-const pullRate = process.env.KEYBOARD_PULLING_RATE
- ? parseInt(process.env.KEYBOARD_PULLING_RATE)
- : 5;
-
-const leftEscape = "\x1B[D";
-const rightEscape = "\x1B[C";
-
-const ourBackspace = "\u0008";
-const clientBackspace = "\x7F";
-
-export async function readFromKeyboard(
- stream: ServerChannel,
- disableEcho: boolean = false,
-): Promise<string> {
-  let promise: (value: string | PromiseLike<string>) => void;
-
- let line = "";
- let lineIndex = 0;
-
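-  // Poll the channel with stream.read() every pullRate ms rather than listening for
-  // 'data' events; each pass consumes whatever bytes have arrived so far.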
-  async function eventLoop(): Promise<unknown> {
- const readStreamDataBuf = stream.read();
- if (readStreamDataBuf == null) return setTimeout(eventLoop, pullRate);
-
- const readStreamData = readStreamDataBuf.toString();
-
- // Fixes several bugs (incl. potential social eng. exploits, ssh-copy-id being broken, etc)
- for (const character of readStreamData.split("")) {
- if (character == "\x03") {
- stream.write("^C");
- return promise("");
- } else if (character == "\r" || character == "\n") {
- return promise(line.replace("\r", ""));
- } else if (character == clientBackspace) {
- if (line.length == 0) return setTimeout(eventLoop, pullRate); // Here because if we do it in the parent if statement, shit breaks
- line = line.substring(0, lineIndex - 1) + line.substring(lineIndex);
-
- if (!disableEcho) {
- const deltaCursor = line.length - lineIndex;
-
- if (deltaCursor == line.length)
- return setTimeout(eventLoop, pullRate);
-
- if (deltaCursor < 0) {
- // Use old technique if the delta is < 0, as the new one is tailored to the start + 1 to end - 1
- stream.write(ourBackspace + " " + ourBackspace);
- } else {
- // Jump forward to the front, and remove the last character
- stream.write(rightEscape.repeat(deltaCursor) + " " + ourBackspace);
-
- // Go backwards & rerender text & go backwards again (wtf?)
- stream.write(
- leftEscape.repeat(deltaCursor + 1) +
- line.substring(lineIndex - 1) +
- leftEscape.repeat(deltaCursor + 1),
- );
- }
-
- lineIndex -= 1;
- }
- } else if (character == "\x1B") {
- if (character == rightEscape) {
- if (lineIndex + 1 > line.length)
- return setTimeout(eventLoop, pullRate);
- lineIndex += 1;
- } else if (character == leftEscape) {
- if (lineIndex - 1 < 0) return setTimeout(eventLoop, pullRate);
- lineIndex -= 1;
- } else {
- return setTimeout(eventLoop, pullRate);
- }
-
- if (!disableEcho) stream.write(character);
- } else {
- lineIndex += 1;
-
- // There isn't a splice method for String prototypes. So, ugh:
- line =
- line.substring(0, lineIndex - 1) +
- character +
- line.substring(lineIndex - 1);
-
- if (!disableEcho) {
- let deltaCursor = line.length - lineIndex;
-
- // wtf?
- if (deltaCursor < 0) {
- console.log(
- "FIXME: somehow, our deltaCursor value is negative! please investigate me",
- );
- deltaCursor = 0;
- }
-
- stream.write(
- line.substring(lineIndex - 1) + leftEscape.repeat(deltaCursor),
- );
- }
- }
- }
-
- setTimeout(eventLoop, pullRate);
- }
-
- return new Promise(resolve => {
- setTimeout(eventLoop, pullRate);
- promise = resolve;
- });
-}
diff --git a/lom/tsconfig.json b/lom/tsconfig.json
deleted file mode 100644
index 99ab26e..0000000
--- a/lom/tsconfig.json
+++ /dev/null
@@ -1,26 +0,0 @@
-{
- "compilerOptions": {
- "target": "es2021",
- "module": "es2022",
- "moduleResolution": "node",
-
- "lib": [
- "es2021"
- ],
-
- "outDir": "./out",
- "rootDir": "./src",
-
- "strict": true,
- "esModuleInterop": true,
- "sourceMap": true,
-
- "declaration": true,
- "declarationMap": true,
-
- "strictPropertyInitialization": false,
- },
-
- "include": ["src/**/*.ts"],
- "exclude": ["node_modules"]
-}
\ No newline at end of file
diff --git a/prod-docker.env b/prod-docker.env
index dcc049b..954c20f 100644
--- a/prod-docker.env
+++ b/prod-docker.env
@@ -1,5 +1,4 @@
-# These are default values, please change these!
-
-POSTGRES_USERNAME=nextnet
-POSTGRES_PASSWORD=nextnet
-POSTGRES_DB=nextnet
\ No newline at end of file
+POSTGRES_USERNAME=hermes
+POSTGRES_PASSWORD=hermes
+POSTGRES_DB=hermes
+JWT_SECRET=hermes
diff --git a/routes/Hermes API/Backend/Create.bru b/routes/Hermes API/Backend/Create.bru
new file mode 100644
index 0000000..777809f
--- /dev/null
+++ b/routes/Hermes API/Backend/Create.bru
@@ -0,0 +1,25 @@
+meta {
+ name: Create
+ type: http
+ seq: 1
+}
+
+post {
+ url: http://127.0.0.1:8000/api/v1/backends/create
+ body: json
+ auth: none
+}
+
+body:json {
+ {
+ "token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhdWQiOlsiMSJdLCJleHAiOjE3MzUwNjY1NjksIm5iZiI6MTczNDk4MDE2OSwiaWF0IjoxNzM0OTgwMTY5fQ.-9IiM8N-azqBLwrAJkKqIzZ6yuumEzErKzSefXWpzaQ",
+ "name": "SSH",
+ "backend": "ssh",
+ "connectionDetails": {
+ "ip": "127.0.0.1",
+ "port": 22,
+ "username": "test",
+ "privateKey": ""
+ }
+ }
+}
diff --git a/routes/Hermes API/Backend/Lookup.bru b/routes/Hermes API/Backend/Lookup.bru
new file mode 100644
index 0000000..8c50b52
--- /dev/null
+++ b/routes/Hermes API/Backend/Lookup.bru
@@ -0,0 +1,19 @@
+meta {
+ name: Lookup
+ type: http
+ seq: 3
+}
+
+post {
+ url: http://127.0.0.1:8000/api/v1/backends/lookup
+ body: json
+ auth: none
+}
+
+body:json {
+ {
+ "token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhdWQiOlsiMSJdLCJleHAiOjE3MzYyMzI2NjEsIm5iZiI6MTczNjE0NjI2MSwiaWF0IjoxNzM2MTQ2MjYxfQ.juoZ74xs-FBnbbT9Zlei1LmcNx7kTEfzymHlVbeMmtQ",
+ "name": "SSH",
+ "id": 1
+ }
+}
diff --git a/routes/Hermes API/Backend/Remove.bru b/routes/Hermes API/Backend/Remove.bru
new file mode 100644
index 0000000..b941754
--- /dev/null
+++ b/routes/Hermes API/Backend/Remove.bru
@@ -0,0 +1,18 @@
+meta {
+ name: Remove
+ type: http
+ seq: 2
+}
+
+post {
+ url: http://127.0.0.1:8000/api/v1/backends/remove
+ body: json
+ auth: none
+}
+
+body:json {
+ {
+ "token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhdWQiOlsiMSJdLCJleHAiOjE3MzUwNjY1NjksIm5iZiI6MTczNDk4MDE2OSwiaWF0IjoxNzM0OTgwMTY5fQ.-9IiM8N-azqBLwrAJkKqIzZ6yuumEzErKzSefXWpzaQ",
+ "id": 69
+ }
+}
diff --git a/routes/Hermes API/Forward/Create.bru b/routes/Hermes API/Forward/Create.bru
new file mode 100644
index 0000000..d26c8ed
--- /dev/null
+++ b/routes/Hermes API/Forward/Create.bru
@@ -0,0 +1,29 @@
+meta {
+ name: Create
+ type: http
+ seq: 1
+}
+
+post {
+ url: http://127.0.0.1:8000/api/v1/forward/create
+ body: json
+ auth: none
+}
+
+body:json {
+ {
+ "token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhdWQiOlsiMSJdLCJleHAiOjE3MzUwNzY0MTEsIm5iZiI6MTczNDk5MDAxMSwiaWF0IjoxNzM0OTkwMDExfQ.N9TLraX4peHt7FKv8tPcHuEzL0K7T2IBEw3piQS_4OY",
+ "name": "Test Route",
+ "description": "This is a test route for SSH",
+
+ "protocol": "tcp",
+
+ "sourceIP": "127.0.0.1",
+ "sourcePort": 8000,
+
+ "destinationPort": 9000,
+
+ "providerID": 2,
+ "autoStart": true
+ }
+}
diff --git a/routes/Hermes API/Forward/Get Inbound Connections.bru b/routes/Hermes API/Forward/Get Inbound Connections.bru
new file mode 100644
index 0000000..7a750cb
--- /dev/null
+++ b/routes/Hermes API/Forward/Get Inbound Connections.bru
@@ -0,0 +1,18 @@
+meta {
+ name: Get Inbound Connections
+ type: http
+ seq: 6
+}
+
+post {
+ url: http://127.0.0.1:8000/api/v1/forward/connections
+ body: json
+ auth: none
+}
+
+body:json {
+ {
+ "token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhdWQiOlsiMSJdLCJleHAiOjE3MzUwNzY0MTEsIm5iZiI6MTczNDk5MDAxMSwiaWF0IjoxNzM0OTkwMDExfQ.N9TLraX4peHt7FKv8tPcHuEzL0K7T2IBEw3piQS_4OY",
+ "id": 2
+ }
+}
diff --git a/routes/Hermes API/Forward/Lookup.bru b/routes/Hermes API/Forward/Lookup.bru
new file mode 100644
index 0000000..54e1b5c
--- /dev/null
+++ b/routes/Hermes API/Forward/Lookup.bru
@@ -0,0 +1,18 @@
+meta {
+ name: Lookup
+ type: http
+ seq: 3
+}
+
+post {
+ url: http://127.0.0.1:8000/api/v1/forward/lookup
+ body: json
+ auth: none
+}
+
+body:json {
+ {
+ "token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhdWQiOlsiMSJdLCJleHAiOjE3MzUwNzY0MTEsIm5iZiI6MTczNDk5MDAxMSwiaWF0IjoxNzM0OTkwMDExfQ.N9TLraX4peHt7FKv8tPcHuEzL0K7T2IBEw3piQS_4OY",
+ "protocol": "tcp"
+ }
+}
diff --git a/routes/Hermes API/Forward/Remove.bru b/routes/Hermes API/Forward/Remove.bru
new file mode 100644
index 0000000..a0519ff
--- /dev/null
+++ b/routes/Hermes API/Forward/Remove.bru
@@ -0,0 +1,18 @@
+meta {
+ name: Remove
+ type: http
+ seq: 2
+}
+
+post {
+ url: http://127.0.0.1:8000/api/v1/forward/remove
+ body: json
+ auth: none
+}
+
+body:json {
+ {
+ "token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhdWQiOlsiMSJdLCJleHAiOjE3MzUwNzY0MTEsIm5iZiI6MTczNDk5MDAxMSwiaWF0IjoxNzM0OTkwMDExfQ.N9TLraX4peHt7FKv8tPcHuEzL0K7T2IBEw3piQS_4OY",
+ "id": 2
+ }
+}
diff --git a/routes/Hermes API/Forward/Start.bru b/routes/Hermes API/Forward/Start.bru
new file mode 100644
index 0000000..acc56f8
--- /dev/null
+++ b/routes/Hermes API/Forward/Start.bru
@@ -0,0 +1,18 @@
+meta {
+ name: Start
+ type: http
+ seq: 4
+}
+
+post {
+ url: http://127.0.0.1:8000/api/v1/forward/start
+ body: json
+ auth: none
+}
+
+body:json {
+ {
+ "token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhdWQiOlsiMSJdLCJleHAiOjE3MzUwNzY0MTEsIm5iZiI6MTczNDk5MDAxMSwiaWF0IjoxNzM0OTkwMDExfQ.N9TLraX4peHt7FKv8tPcHuEzL0K7T2IBEw3piQS_4OY",
+ "id": 2
+ }
+}
diff --git a/routes/Hermes API/Forward/Stop.bru b/routes/Hermes API/Forward/Stop.bru
new file mode 100644
index 0000000..c2fd445
--- /dev/null
+++ b/routes/Hermes API/Forward/Stop.bru
@@ -0,0 +1,18 @@
+meta {
+ name: Stop
+ type: http
+ seq: 5
+}
+
+post {
+ url: http://127.0.0.1:8000/api/v1/forward/stop
+ body: json
+ auth: none
+}
+
+body:json {
+ {
+ "token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhdWQiOlsiMSJdLCJleHAiOjE3MzUwNzY0MTEsIm5iZiI6MTczNDk5MDAxMSwiaWF0IjoxNzM0OTkwMDExfQ.N9TLraX4peHt7FKv8tPcHuEzL0K7T2IBEw3piQS_4OY",
+ "id": 1
+ }
+}
diff --git a/api/routes/NextNet API/Get Permissions.bru b/routes/Hermes API/Get Permissions.bru
similarity index 100%
rename from api/routes/NextNet API/Get Permissions.bru
rename to routes/Hermes API/Get Permissions.bru
diff --git a/api/routes/NextNet API/Users/Create.bru b/routes/Hermes API/Users/Create.bru
similarity index 50%
rename from api/routes/NextNet API/Users/Create.bru
rename to routes/Hermes API/Users/Create.bru
index 31a8f46..bd63237 100644
--- a/api/routes/NextNet API/Users/Create.bru
+++ b/routes/Hermes API/Users/Create.bru
@@ -5,15 +5,16 @@ meta {
}
post {
- url: http://127.0.0.1:3000/api/v1/users/create
+ url: http://127.0.0.1:8000/api/v1/users/create
body: json
auth: inherit
}
body:json {
{
- "name": "Greysoh Hofuh",
- "email": "greyson@hofers.cloud",
+ "name": "Test User",
+ "email": "test@example.com",
+ "username": "testuser",
"password": "hunter123"
}
}
diff --git a/routes/Hermes API/Users/Log In.bru b/routes/Hermes API/Users/Log In.bru
new file mode 100644
index 0000000..6081bf4
--- /dev/null
+++ b/routes/Hermes API/Users/Log In.bru
@@ -0,0 +1,18 @@
+meta {
+ name: Log In
+ type: http
+ seq: 2
+}
+
+post {
+ url: http://127.0.0.1:8000/api/v1/users/login
+ body: json
+ auth: none
+}
+
+body:json {
+ {
+ "email": "test@example.com",
+ "password": "hunter123"
+ }
+}
diff --git a/routes/Hermes API/Users/Lookup.bru b/routes/Hermes API/Users/Lookup.bru
new file mode 100644
index 0000000..088f40c
--- /dev/null
+++ b/routes/Hermes API/Users/Lookup.bru
@@ -0,0 +1,18 @@
+meta {
+ name: Lookup
+ type: http
+ seq: 4
+}
+
+post {
+ url: http://127.0.0.1:8000/api/v1/users/lookup
+ body: json
+ auth: none
+}
+
+body:json {
+ {
+ "token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhdWQiOlsiMSJdLCJleHAiOjE3MzQ4OTQwNTEsIm5iZiI6MTczNDg5Mzg3MSwiaWF0IjoxNzM0ODkzODcxfQ.l4GbSKejqeRxSze9Kjj3A-8mxKqUuOz58iHzPOraNmo",
+ "name": "Test User"
+ }
+}
diff --git a/routes/Hermes API/Users/Refresh JWT Token.bru b/routes/Hermes API/Users/Refresh JWT Token.bru
new file mode 100644
index 0000000..ef15e23
--- /dev/null
+++ b/routes/Hermes API/Users/Refresh JWT Token.bru
@@ -0,0 +1,17 @@
+meta {
+ name: Refresh JWT Token
+ type: http
+ seq: 5
+}
+
+post {
+ url: http://127.0.0.1:8000/api/v1/users/refresh
+ body: json
+ auth: none
+}
+
+body:json {
+ {
+ "token": "0AqAPZ6P7RTM1DsV18R3MzFmhyoHgn8xMCO35K1zLv6U1J3mUwOUADbT9ZaRsUZRtNLn87YRqyexZzHDgbaQAg9GGLV46g9+UWJKOUgCCSA="
+ }
+}
diff --git a/routes/Hermes API/Users/Remove.bru b/routes/Hermes API/Users/Remove.bru
new file mode 100644
index 0000000..55e4ec5
--- /dev/null
+++ b/routes/Hermes API/Users/Remove.bru
@@ -0,0 +1,17 @@
+meta {
+ name: Remove
+ type: http
+ seq: 3
+}
+
+post {
+ url: http://127.0.0.1:8000/api/v1/users/remove
+ body: json
+ auth: inherit
+}
+
+body:json {
+ {
+ "token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhdWQiOlsiMSJdLCJleHAiOjE3MzQ4ODg4NDMsIm5iZiI6MTczNDg4ODY2MywiaWF0IjoxNzM0ODg4NjYzfQ.diHYYJaQrVAPvWpeqsriH2sbJnuCaBCAzbAL4JoYzqc"
+ }
+}
diff --git a/api/routes/NextNet API/bruno.json b/routes/Hermes API/bruno.json
similarity index 62%
rename from api/routes/NextNet API/bruno.json
rename to routes/Hermes API/bruno.json
index af51919..4da5f41 100644
--- a/api/routes/NextNet API/bruno.json
+++ b/routes/Hermes API/bruno.json
@@ -1,5 +1,5 @@
{
"version": "1",
- "name": "NextNet API",
+ "name": "Hermes",
"type": "collection"
-}
\ No newline at end of file
+}
diff --git a/api/routes/NextNet API/runroute.sh b/routes/Hermes API/runroute.sh
similarity index 100%
rename from api/routes/NextNet API/runroute.sh
rename to routes/Hermes API/runroute.sh
diff --git a/shell.nix b/shell.nix
index aa8bd62..ea1aa8e 100644
--- a/shell.nix
+++ b/shell.nix
@@ -1,24 +1,15 @@
{
  pkgs ? import <nixpkgs> { },
}: pkgs.mkShell {
- buildInputs = with pkgs; [
+ buildInputs = with pkgs; [
# api/
- nodejs
- openssl
- postgresql
- lsof
- ];
+ go
+ gopls
+ ];
shellHook = ''
- export PRISMA_QUERY_ENGINE_BINARY=${pkgs.prisma-engines}/bin/query-engine
- export PRISMA_QUERY_ENGINE_LIBRARY=${pkgs.prisma-engines}/lib/libquery_engine.node
- export PRISMA_SCHEMA_ENGINE_BINARY=${pkgs.prisma-engines}/bin/schema-engine
-
- if [ ! -d ".tmp" ]; then
- echo "Hello and welcome to the NextNet project!"
- mkdir .tmp
+ if [ -f init.sh ]; then
+ source init.sh
fi
-
- source init.sh
'';
}