commit 562201a342

@@ -0,0 +1,5 @@
.git
.github
*.md
tests/
docs/
@@ -49,5 +49,4 @@ coverage
*.sw?

*.tsbuildinfo
.vite/*

.vite/*
@@ -2,7 +2,7 @@
> 🎉 I'm excited to announce that WGDashboard is officially listed on DigitalOcean's Marketplace! For more information, please visit [Host WGDashboard & WireGuard with DigitalOcean](https://docs.wgdashboard.dev/host-wgdashboard-wireguard-with-digitalocean.html) for more information!

> [!NOTE]
> **Help Wanted 🎉**: Localizing WGDashboard to other languages! If you're willing to help, please visit https://github.com/donaldzou/WGDashboard/issues/397. Many thanks!
> **Help Wanted 🎉**: Localizing WGDashboard to other languages! If you're willing to help, please visit https://github.com/WGDashboard/WGDashboard/issues/397. Many thanks!
@@ -22,7 +22,7 @@
</p>

<p align="center">
<a href="https://github.com/donaldzou/wireguard-dashboard/releases/latest"><img src="https://img.shields.io/github/v/release/donaldzou/wireguard-dashboard?style=for-the-badge"></a>
<a href="https://github.com/WGDashboard/WGDashboard/releases/latest"><img src="https://img.shields.io/github/v/release/donaldzou/wireguard-dashboard?style=for-the-badge"></a>
<a href="https://wakatime.com/badge/github/donaldzou/WGDashboard"><img src="https://wakatime.com/badge/github/donaldzou/WGDashboard.svg?style=for-the-badge" alt="wakatime"></a>
<a href="https://hitscounter.dev"><img src="https://hitscounter.dev/api/hit?url=https%3A%2F%2Fgithub.com%2Fdonaldzou%2FWGDashboard&label=Visitor&icon=github&color=%230a58ca&style=for-the-badge"></a>
<img src="https://img.shields.io/docker/pulls/donaldzou/wgdashboard?logo=docker&label=Docker%20Image%20Pulls&labelColor=ffffff&style=for-the-badge">
@@ -0,0 +1,76 @@
FROM golang:1.24 AS awg-go

RUN git clone https://github.com/WGDashboard/amneziawg-go /awg
WORKDIR /awg
RUN go mod download && \
    go mod verify && \
    go build -ldflags '-linkmode external -extldflags "-fno-PIC -static"' -v -o /usr/bin

FROM alpine:latest AS awg-tools

RUN apk update && apk add --no-cache \
    make git build-base linux-headers \
    && git clone https://github.com/WGDashboard/amneziawg-tools \
    && cd amneziawg-tools/src \
    && make \
    && chmod +x wg*

FROM alpine:latest
LABEL maintainer="dselen@nerthus.nl"

RUN apk update && apk add --no-cache \
    iproute2 iptables bash curl wget unzip procps sudo \
    tzdata wireguard-tools python3 py3-psutil py3-bcrypt openresolv

COPY --from=awg-go /usr/bin/amneziawg-go /usr/bin/amneziawg-go
COPY --from=awg-tools /amneziawg-tools/src/wg /usr/bin/awg
COPY --from=awg-tools /amneziawg-tools/src/wg-quick/linux.bash /usr/bin/awg-quick

# Declaring environment variables, change Peernet to an address you like, standard is a 24 bit subnet.
ARG wg_net="10.0.0.1" \
    wg_port="51820"

# Following ENV variables are changable on container runtime because /entrypoint.sh handles that. See compose.yaml for more info.
ENV TZ="Europe/Amsterdam" \
    global_dns="9.9.9.9" \
    wgd_port="10086" \
    public_ip=""

# Using WGDASH -- like wg_net functionally as a ARG command. But it is needed in entrypoint.sh so it needs to be exported as environment variable.
ENV WGDASH=/opt/wgdashboard

# Doing WireGuard Dashboard installation measures. Modify the git clone command to get the preferred version, with a specific branch for example.
RUN mkdir /data \
    && mkdir /configs \
    && mkdir -p ${WGDASH}/src \
    && mkdir -p /etc/amnezia/amneziawg
COPY ./src ${WGDASH}/src

# Generate basic WireGuard interface. Echoing the WireGuard interface config for readability, adjust if you want it for efficiency.
# Also setting the pipefail option, verbose: https://github.com/hadolint/hadolint/wiki/DL4006.
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
RUN out_adapt=$(ip -o -4 route show to default | awk '{print $NF}') \
    && echo -e "[Interface]\n\
Address = ${wg_net}/24\n\
PrivateKey =\n\
PostUp = iptables -t nat -I POSTROUTING 1 -s ${wg_net}/24 -o ${out_adapt} -j MASQUERADE\n\
PostUp = iptables -I FORWARD -i wg0 -o wg0 -j DROP\n\
PreDown = iptables -t nat -D POSTROUTING -s ${wg_net}/24 -o ${out_adapt} -j MASQUERADE\n\
PreDown = iptables -D FORWARD -i wg0 -o wg0 -j DROP\n\
ListenPort = ${wg_port}\n\
SaveConfig = true\n\
DNS = ${global_dns}" > /configs/wg0.conf.template \
    && chmod 600 /configs/wg0.conf.template

# Defining a way for Docker to check the health of the container. In this case: checking the gunicorn process.
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
    CMD sh -c 'pgrep gunicorn > /dev/null && pgrep tail > /dev/null' || exit 1

# Copy the basic entrypoint.sh script.
COPY ./docker/entrypoint.sh /entrypoint.sh

# Exposing the default WireGuard Dashboard port for web access.
EXPOSE 10086
WORKDIR $WGDASH

ENTRYPOINT ["/bin/bash", "/entrypoint.sh"]
@@ -1,53 +1,103 @@
FROM golang:1.24 AS awg-go
#
# AWG GOLANG BUILDING STAGE
# Base: Alpine
#

RUN git clone https://github.com/WGDashboard/amneziawg-go /awg
WORKDIR /awg
FROM golang:1.25-alpine AS awg-go

RUN apk add --no-cache \
    git \
    gcc \
    musl-dev

# Standard working directory for WGDashboard
RUN mkdir -p /workspace && \
    git clone https://github.com/WGDashboard/amneziawg-go /workspace/awg

ENV CGO_ENABLED=1

WORKDIR /workspace/awg
RUN go mod download && \
    go mod verify && \
    go build -ldflags '-linkmode external -extldflags "-fno-PIC -static"' -v -o /usr/bin

#
# AWG TOOLS BUILDING STAGE
# Base: Debian
#
FROM alpine:latest AS awg-tools

RUN apk update && apk add --no-cache \
    make git build-base linux-headers \
    && git clone https://github.com/WGDashboard/amneziawg-tools \
    && cd amneziawg-tools/src \
    && make \
    && chmod +x wg*
RUN apk add --no-cache \
    make \
    git \
    build-base \
    linux-headers \
    ca-certificates

FROM alpine:latest
RUN mkdir -p /workspace && \
    git clone https://github.com/WGDashboard/amneziawg-tools /workspace/awg-tools

WORKDIR /workspace/awg-tools/src
RUN make && chmod +x wg*

#
# PIP DEPENDENCY BUILDING
# Base: Alpine
#
FROM python:3.13-alpine AS pip-builder

RUN apk add --no-cache \
    build-base \
    pkgconfig \
    python3-dev \
    libffi-dev \
    linux-headers \
    && mkdir -p /opt/wgdashboard/src \
    && python3 -m venv /opt/wgdashboard/src/venv

COPY ./src/requirements.txt /opt/wgdashboard/src
RUN . /opt/wgdashboard/src/venv/bin/activate && \
    pip3 install --upgrade pip && \
    pip3 install -r /opt/wgdashboard/src/requirements.txt

#
# WGDashboard RUNNING STAGE
# Base: Alpine
#
FROM python:3.13-alpine AS final
LABEL maintainer="dselen@nerthus.nl"

RUN apk update && apk add --no-cache \
    iproute2 iptables bash curl wget unzip procps sudo \
    tzdata wireguard-tools python3 py3-psutil py3-bcrypt openresolv
# Install only the runtime dependencies
RUN apk add --no-cache \
    iproute2 iptables \
    bash curl \
    wget unzip \
    procps sudo \
    tzdata wireguard-tools \
    openresolv openrc

# Copy only the final binaries from the builders
COPY --from=awg-go /usr/bin/amneziawg-go /usr/bin/amneziawg-go
COPY --from=awg-tools /amneziawg-tools/src/wg /usr/bin/awg
COPY --from=awg-tools /amneziawg-tools/src/wg-quick/linux.bash /usr/bin/awg-quick
COPY --from=awg-tools /workspace/awg-tools/src/wg /usr/bin/awg
COPY --from=awg-tools /workspace/awg-tools/src/wg-quick/linux.bash /usr/bin/awg-quick

# Declaring environment variables, change Peernet to an address you like, standard is a 24 bit subnet.
ARG wg_net="10.0.0.1" \
    wg_port="51820"

# Following ENV variables are changable on container runtime because /entrypoint.sh handles that. See compose.yaml for more info.
# Environment variables
ARG wg_net="10.0.0.1"
ARG wg_port="51820"
ENV TZ="Europe/Amsterdam" \
    global_dns="9.9.9.9" \
    wgd_port="10086" \
    public_ip=""
    public_ip="" \
    WGDASH=/opt/wgdashboard

# Using WGDASH -- like wg_net functionally as a ARG command. But it is needed in entrypoint.sh so it needs to be exported as environment variable.
ENV WGDASH=/opt/wgdashboard
# Create directories
RUN mkdir /data /configs -p ${WGDASH}/src /etc/amnezia/amneziawg

# Doing WireGuard Dashboard installation measures. Modify the git clone command to get the preferred version, with a specific branch for example.
RUN mkdir /data \
    && mkdir /configs \
    && mkdir -p ${WGDASH}/src \
    && mkdir -p /etc/amnezia/amneziawg
# Copy app source and prebuilt venv only (no pip cache)
COPY ./src ${WGDASH}/src
COPY --from=pip-builder /opt/wgdashboard/src/venv /opt/wgdashboard/src/venv

# Generate basic WireGuard interface. Echoing the WireGuard interface config for readability, adjust if you want it for efficiency.
# Also setting the pipefail option, verbose: https://github.com/hadolint/hadolint/wiki/DL4006.
# WireGuard interface template
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
RUN out_adapt=$(ip -o -4 route show to default | awk '{print $NF}') \
    && echo -e "[Interface]\n\
@@ -62,15 +112,12 @@ SaveConfig = true\n\
DNS = ${global_dns}" > /configs/wg0.conf.template \
    && chmod 600 /configs/wg0.conf.template

# Defining a way for Docker to check the health of the container. In this case: checking the gunicorn process.
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
    CMD sh -c 'pgrep gunicorn > /dev/null && pgrep tail > /dev/null' || exit 1

# Copy the basic entrypoint.sh script.
COPY ./docker/entrypoint.sh /entrypoint.sh

# Exposing the default WireGuard Dashboard port for web access.
EXPOSE 10086
WORKDIR $WGDASH
WORKDIR $WGDASH/src

ENTRYPOINT ["/bin/bash", "/entrypoint.sh"]
@@ -91,12 +91,23 @@ Updating WGDashboard is currently in **alpha** stage. While the update process m

## ⚙️ Environment Variables

| Variable | Accepted Values | Default | Example | Description |
|---------------|------------------------------------------|-------------------------|------------------------|-----------------------------------------------------------------------------|
| `tz` | Timezone | `Europe/Amsterdam` | `America/New_York` | Sets the container's timezone. Useful for accurate logs and scheduling. |
| `global_dns` | IPv4 and IPv6 addresses | `9.9.9.9` | `8.8.8.8`, `1.1.1.1` | Default DNS for WireGuard clients. |
| `public_ip` | Public IP address | Retrieved automatically | `253.162.134.73` | Used to generate accurate client configs. Needed if container is NAT’d. |
| `wgd_port` | Any port that is allowed for the process | `10086` | `443` | This port is used to set the WGDashboard web port. |
| Variable | Accepted Values | Default | Example | Description |
| ------------------ | ---------------------------------------- | ----------------------- | --------------------- | ----------------------------------------------------------------------- |
| `tz` | Timezone | `Europe/Amsterdam` | `America/New_York` | Sets the container's timezone. Useful for accurate logs and scheduling. |
| `global_dns` | IPv4 and IPv6 addresses | `9.9.9.9` | `8.8.8.8`, `1.1.1.1` | Default DNS for WireGuard clients. |
| `public_ip` | Public IP address | Retrieved automatically | `253.162.134.73` | Used to generate accurate client configs. Needed if container is NAT’d. |
| `wgd_port` | Any port that is allowed for the process | `10086` | `443` | This port is used to set the WGDashboard web port. |
| `username` | Any non‐empty string | `-` | `admin` | Username for the WGDashboard web interface account. |
| `password` | Any non‐empty string | `-` | `s3cr3tP@ss` | Password for the WGDashboard web interface account (stored hashed). |
| `enable_totp` | `true`, `false` | `true` | `false` | Enable TOTP‐based two‐factor authentication for the account. |
| `wg_autostart` | Wireguard interface name | `false` | `true` | Auto‐start the WireGuard client when the container launches. |
| `email_server` | SMTP server address | `-` | `smtp.gmail.com` | SMTP server for sending email notifications. |
| `email_port` | SMTP port number | `-` | `587` | Port for connecting to the SMTP server. |
| `email_encryption` | `TLS`, `SSL`, etc. | `-` | `TLS` | Encryption method for email communication. |
| `email_username` | Any non-empty string | `-` | `user@example.com` | Username for SMTP authentication. |
| `email_password` | Any non-empty string | `-` | `app_password` | Password for SMTP authentication. |
| `email_from` | Valid email address | `-` | `noreply@example.com` | Email address used as the sender for notifications. |
| `email_template` | Path to template file | `-` | `your-template` | Custom template for email notifications. |

---
@@ -122,7 +133,7 @@ Examples:
To build from source:

```bash
git clone https://github.com/donaldzou/WGDashboard.git
git clone https://github.com/WGDashboard/WGDashboard.git
cd WGDashboard
docker build . -f docker/Dockerfile -t yourname/wgdashboard:latest
```
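
A minimal sketch of running the built image with the environment variables documented above. This example is not part of this commit: the capability flag, tun device, volume names, and port mappings are assumptions drawn from the Dockerfile and entrypoint elsewhere in this change set, so adjust them to your setup (or use the provided compose.yaml instead).

```bash
# Hypothetical run command; image tag matches the build example above.
# NET_ADMIN and /dev/net/tun are assumed requirements so the container
# can create and configure the WireGuard interface.
docker run -d \
  --name wgdashboard \
  --cap-add NET_ADMIN \
  --device /dev/net/tun \
  -e tz="Europe/Amsterdam" \
  -e global_dns="9.9.9.9" \
  -e wgd_port="10086" \
  -e username="admin" \
  -e password="s3cr3tP@ss" \
  -p 10086:10086/tcp \
  -p 51820:51820/udp \
  -v wgd_data:/data \
  -v wgd_conf:/etc/wireguard \
  yourname/wgdashboard:latest
```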
@@ -1,10 +1,55 @@
#!/bin/bash

# Path to the configuration file (exists because of previous function).
config_file="/data/wg-dashboard.ini"

trap 'stop_service' SIGTERM

# Hash password with bcrypt
hash_password() {
    python3 -c "import bcrypt; print(bcrypt.hashpw('$1'.encode(), bcrypt.gensalt(12)).decode())"
}

# Function to set or update section/key/value in the INI file
set_ini() {
    local section="$1" key="$2" value="$3"
    local current_value

    # Add section if it doesn't exist
    grep -q "^\[${section}\]" "$config_file" \
        || printf "\n[%s]\n" "${section}" >> "$config_file"

    # Check current value if key exists
    if grep -q "^[[:space:]]*${key}[[:space:]]*=" "$config_file"; then
        current_value=$(grep "^[[:space:]]*${key}[[:space:]]*=" "$config_file" | cut -d= -f2- | xargs)

        # Don't display actual value if it's a password field
        if [[ "$key" == *"password"* ]]; then
            if [ "$current_value" = "$value" ]; then
                echo "- $key is already set correctly (value hidden)"
                return 0
            fi
            sed -i "/^\[${section}\]/,/^\[/{s|^[[:space:]]*${key}[[:space:]]*=.*|${key} = ${value}|}" "$config_file"
            echo "- Updated $key (value hidden)"
        else
            if [ "$current_value" = "$value" ]; then
                echo "- $key is already set correctly ($value)"
                return 0
            fi
            sed -i "/^\[${section}\]/,/^\[/{s|^[[:space:]]*${key}[[:space:]]*=.*|${key} = ${value}|}" "$config_file"
            echo "- Updated $key to: $value"
        fi
    else
        sed -i "/^\[${section}\]/a ${key} = ${value}" "$config_file"

        # Don't display actual value if it's a password field
        if [[ "$key" == *"password"* ]]; then
            echo "- Added new setting $key (value hidden)"
        else
            echo "- Added new setting $key: $value"
        fi
    fi
}

stop_service() {
    echo "[WGDashboard] Stopping WGDashboard..."
    /bin/bash ./wgd.sh stop
@@ -12,79 +57,62 @@ stop_service() {
}

echo "------------------------- START ----------------------------"
echo "Starting the WireGuard Dashboard Docker container."
echo "Starting the WGDashboard Docker container."

ensure_installation() {
    # When using a custom directory to store the files, this part moves over and makes sure the installation continues.
    echo "Quick-installing..."

    # Make the wgd.sh script executable.
    chmod +x "${WGDASH}"/src/wgd.sh
    cd "${WGDASH}"/src || exit

    # Github issue: https://github.com/donaldzou/WGDashboard/issues/723
    echo "Checking for stale pids..."
    if [[ -f ${WGDASH}/src/gunicorn.pid ]]; then
        echo "Found stale pid, removing..."
        rm ${WGDASH}/src/gunicorn.pid
    fi

    # Removing clear shell command from the wgd.sh script to enhance docker logging.
    echo "Removing clear command from wgd.sh for better Docker logging."
    sed -i '/clear/d' ./wgd.sh

    # Create the databases directory if it does not exist yet.
    # Create required directories and links
    if [ ! -d "/data/db" ]; then
        echo "Creating database dir"
        mkdir /data/db
        mkdir -p /data/db
    fi

    # Linking the database on the persistent directory location to where WGDashboard expects.
    if [ ! -d "${WGDASH}/src/db" ]; then
        ln -s /data/db "${WGDASH}/src/db"
    fi

    # Create the wg-dashboard.ini file if it does not exist yet.
    if [ ! -f "${config_file}" ]; then
        echo "Creating wg-dashboard.ini file"
        touch "${config_file}"
    fi

    # Link the wg-dashboard.ini file from the persistent directory to where WGDashboard expects it.
    if [ ! -f "${WGDASH}/src/wg-dashboard.ini" ]; then
        ln -s "${config_file}" "${WGDASH}/src/wg-dashboard.ini"
    fi

    # Create the Python virtual environment.
    python3 -m venv "${WGDASH}"/src/venv
    . "${WGDASH}/src/venv/bin/activate"

    # Due to this pip dependency being available as a system package we can just move it to the venv.
    echo "Moving PIP dependency from ephemerality to runtime environment: psutil"
    mv /usr/lib/python3.12/site-packages/psutil* "${WGDASH}"/src/venv/lib/python3.12/site-packages

    # Due to this pip dependency being available as a system package we can just move it to the venv.
    echo "Moving PIP dependency from ephemerality to runtime environment: bcrypt"
    mv /usr/lib/python3.12/site-packages/bcrypt* "${WGDASH}"/src/venv/lib/python3.12/site-packages

    # Use the bash interpreter to install WGDashboard according to the wgd.sh script.
    /bin/bash ./wgd.sh install

    echo "Looks like the installation succeeded. Moving on."

    # This first step is to ensure the wg0.conf file exists, and if not, then its copied over from the ephemeral container storage.
    # This is done so WGDashboard it works out of the box, it also sets a randomly generated private key.

    # Setup WireGuard if needed
    if [ ! -f "/etc/wireguard/wg0.conf" ]; then
        echo "Standard wg0 Configuration file not found, grabbing template."
        cp -a "/configs/wg0.conf.template" "/etc/wireguard/wg0.conf"

        echo "Setting a secure private key."

        local privateKey
        privateKey=$(wg genkey)
        sed -i "s|^PrivateKey *=.*$|PrivateKey = ${privateKey}|g" /etc/wireguard/wg0.conf

        echo "Done setting template."
    else
        echo "Existing wg0 configuration file found, using that."
@@ -93,63 +121,75 @@ ensure_installation() {

set_envvars() {
    printf "\n------------- SETTING ENVIRONMENT VARIABLES ----------------\n"

    # Check if the file is empty
    # Check if config file is empty
    if [ ! -s "${config_file}" ]; then
        echo "Config file is empty. Creating [Peers] section."

        # Create [Peers] section with initial values
        {
            echo "[Peers]"
            echo "peer_global_dns = ${global_dns}"
            echo "remote_endpoint = ${public_ip}"
            echo -e "\n[Server]"
            echo "app_port = ${wgd_port}"
        } > "${config_file}"

    else
        echo "Config file is not empty, using pre-existing."
        echo "Config file is empty. Creating initial structure."
    fi

    echo "Verifying current variables..."

    # Check and update the DNS if it has changed
    current_dns=$(grep "peer_global_dns = " "${config_file}" | awk '{print $NF}')
    if [ "${global_dns}" == "$current_dns" ]; then
        echo "DNS is set correctly, moving on."

    else
        echo "Changing default DNS..."
        sed -i "s/^peer_global_dns = .*/peer_global_dns = ${global_dns}/" "${config_file}"

    echo "Checking basic configuration:"
    set_ini Peers peer_global_dns "${global_dns}"

    if [ -z "${public_ip}" ]; then
        public_ip=$(curl -s ifconfig.me)
        echo "Automatically detected public IP: ${public_ip}"
    fi

    # Checking the current set public IP and changing it if it has changed.
    current_public_ip=$(grep "remote_endpoint = " "${config_file}" | awk '{print $NF}')
    if [ "${public_ip}" == "" ]; then
        default_ip=$(curl -s ifconfig.me)

        echo "Trying to fetch the Public-IP using ifconfig.me: ${default_ip}"
        sed -i "s/^remote_endpoint = .*/remote_endpoint = ${default_ip}/" "${config_file}"
    elif [ "${current_public_ip}" != "${public_ip}" ]; then
        sed -i "s/^remote_endpoint = .*/remote_endpoint = ${public_ip}/" "${config_file}"
    else
        echo "Public-IP is correct, moving on."

    set_ini Peers remote_endpoint "${public_ip}"
    set_ini Server app_port "${wgd_port}"

    # Account settings - process all parameters
    [[ -n "$username" ]] && echo "Configuring user account:"
    # Basic account variables
    [[ -n "$username" ]] && set_ini Account username "${username}"

    if [[ -n "$password" ]]; then
        echo "- Setting password"
        set_ini Account password "$(hash_password "${password}")"
    fi

    # Checking the current WGDashboard web port and changing if needed.
    current_wgd_port=$(grep "app_port = " "${config_file}" | awk '{print $NF}')
    if [ "${current_wgd_port}" == "${wgd_port}" ]; then
        echo "Current WGD port is set correctly, moving on."
    else
        echo "Changing default WGD port..."
        sed -i "s/^app_port = .*/app_port = ${wgd_port}/" "${config_file}"

    # Additional account variables
    [[ -n "$enable_totp" ]] && set_ini Account enable_totp "${enable_totp}"
    [[ -n "$totp_verified" ]] && set_ini Account totp_verified "${totp_verified}"
    [[ -n "$totp_key" ]] && set_ini Account totp_key "${totp_key}"

    # Welcome session
    [[ -n "$welcome_session" ]] && set_ini Other welcome_session "${welcome_session}"
    # If username and password are set but welcome_session isn't, disable it
    if [[ -n "$username" && -n "$password" && -z "$welcome_session" ]]; then
        set_ini Other welcome_session "false"
    fi

    # Autostart WireGuard
    if [[ -n "$wg_autostart" ]]; then
        echo "Configuring WireGuard autostart:"
        set_ini WireGuardConfiguration autostart "${wg_autostart}"
    fi

    # Email (check if any settings need to be configured)
    email_vars=("email_server" "email_port" "email_encryption" "email_username" "email_password" "email_from" "email_template")
    for var in "${email_vars[@]}"; do
        if [ -n "${!var}" ]; then
            echo "Configuring email settings:"
            break
        fi
    done

    # Email (iterate through all possible fields)
    email_fields=("server:email_server" "port:email_port" "encryption:email_encryption"
                  "username:email_username" "email_password:email_password"
                  "send_from:email_from" "email_template:email_template")

    for field_pair in "${email_fields[@]}"; do
        IFS=: read -r field var <<< "$field_pair"
        [[ -n "${!var}" ]] && set_ini Email "$field" "${!var}"
    done
}

# === CORE SERVICES ===
start_core() {
# Start service and monitor logs
start_and_monitor() {
    printf "\n---------------------- STARTING CORE -----------------------\n"

    # Due to some instances complaining about this, making sure its there every time.
    mkdir -p /dev/net
    mknod /dev/net/tun c 10 200
@@ -157,23 +197,19 @@

    # Actually starting WGDashboard
    echo "Activating Python venv and executing the WireGuard Dashboard service."
    /bin/bash ./wgd.sh start
}

ensure_blocking() {
    bash ./wgd.sh start

    # Wait a second before continuing, to give the python program some time to get ready.
    sleep 1s
    sleep 1
    echo -e "\nEnsuring container continuation."

    # Find and tail the latest error and access logs if they exist
    # Find and monitor log file
    local logdir="${WGDASH}/src/log"

    latestErrLog=$(find "$logdir" -name "error_*.log" -type f -print | sort -r | head -n 1)

    # Only tail the logs if they are found
    if [ -n "$latestErrLog" ]; then
        tail -f "$latestErrLog" &

        # Wait for the tail process to end.
        wait $!
    else
@@ -182,8 +218,7 @@ ensure_blocking() {
    fi
}

# Execute functions for the WireGuard Dashboard services, then set the environment variables
# Main execution flow
ensure_installation
set_envvars
start_core
ensure_blocking
start_and_monitor
@@ -5,6 +5,7 @@
  "packages": {
    "": {
      "dependencies": {
        "axios": "^1.9.0",
        "marked": "^15.0.7",
        "openai": "^4.89.0",
        "pinia-plugin-persistedstate": "^4.2.0"

@@ -163,6 +164,17 @@
      "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==",
      "license": "MIT"
    },
    "node_modules/axios": {
      "version": "1.9.0",
      "resolved": "https://registry.npmmirror.com/axios/-/axios-1.9.0.tgz",
      "integrity": "sha512-re4CqKTJaURpzbLHtIi6XpDv20/CnpXOtjRY5/CU32L8gU8ek9UIivcfvSWvmKEngmVbrUtPpdDwWDWL7DNHvg==",
      "license": "MIT",
      "dependencies": {
        "follow-redirects": "^1.15.6",
        "form-data": "^4.0.0",
        "proxy-from-env": "^1.1.0"
      }
    },
    "node_modules/braces": {
      "version": "3.0.3",
      "resolved": "https://registry.npmmirror.com/braces/-/braces-3.0.3.tgz",
@@ -444,6 +456,26 @@
        "node": ">=8"
      }
    },
    "node_modules/follow-redirects": {
      "version": "1.15.9",
      "resolved": "https://registry.npmmirror.com/follow-redirects/-/follow-redirects-1.15.9.tgz",
      "integrity": "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==",
      "funding": [
        {
          "type": "individual",
          "url": "https://github.com/sponsors/RubenVerborgh"
        }
      ],
      "license": "MIT",
      "engines": {
        "node": ">=4.0"
      },
      "peerDependenciesMeta": {
        "debug": {
          "optional": true
        }
      }
    },
    "node_modules/form-data": {
      "version": "4.0.2",
      "resolved": "https://registry.npmmirror.com/form-data/-/form-data-4.0.2.tgz",

@@ -998,6 +1030,12 @@
      "integrity": "sha512-hkT3yDPFbs95mNCy1+7qNKC6Pro+/ibzYxtM2iqEigpf0sVw+bg4Zh9/snjsBcf990vfIsg5+1U7VyiyBb3etg==",
      "license": "MIT"
    },
    "node_modules/proxy-from-env": {
      "version": "1.1.0",
      "resolved": "https://registry.npmmirror.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
      "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==",
      "license": "MIT"
    },
    "node_modules/quansync": {
      "version": "0.2.10",
      "resolved": "https://registry.npmmirror.com/quansync/-/quansync-0.2.10.tgz",
@@ -1,5 +1,6 @@
{
  "dependencies": {
    "axios": "^1.9.0",
    "marked": "^15.0.7",
    "openai": "^4.89.0",
    "pinia-plugin-persistedstate": "^4.2.0"
@@ -0,0 +1,232 @@
import datetime

from tzlocal import get_localzone

from functools import wraps

from flask import Blueprint, render_template, abort, request, Flask, current_app, session, redirect, url_for
import os

from modules.WireguardConfiguration import WireguardConfiguration
from modules.DashboardConfig import DashboardConfig
from modules.Email import EmailSender


def ResponseObject(status=True, message=None, data=None, status_code = 200) -> Flask.response_class:
    response = Flask.make_response(current_app, {
        "status": status,
        "message": message,
        "data": data
    })
    response.status_code = status_code
    response.content_type = "application/json"
    return response


from modules.DashboardClients import DashboardClients
def createClientBlueprint(wireguardConfigurations: dict[WireguardConfiguration], dashboardConfig: DashboardConfig, dashboardClients: DashboardClients):

    client = Blueprint('client', __name__, template_folder=os.path.abspath("./static/dist/WGDashboardClient"))
    prefix = f'{dashboardConfig.GetConfig("Server", "app_prefix")[1]}/client'

    def login_required(f):
        @wraps(f)
        def func(*args, **kwargs):
            if session.get("Email") is None or session.get("TotpVerified") is None or not session.get("TotpVerified") or session.get("Role") != "client":
                return ResponseObject(False, "Unauthorized access.", data=None, status_code=401)

            if not dashboardClients.GetClient(session.get("ClientID")):
                session.clear()
                return ResponseObject(False, "Unauthorized access.", data=None, status_code=401)

            return f(*args, **kwargs)
        return func

    @client.before_request
    def clientBeforeRequest():
        if not dashboardConfig.GetConfig("Clients", "enable")[1]:
            abort(404)

        if request.method.lower() == 'options':
            return ResponseObject(True)

    @client.post(f'{prefix}/api/signup')
    def ClientAPI_SignUp():
        data = request.get_json()
        status, msg = dashboardClients.SignUp(**data)
        return ResponseObject(status, msg)

    @client.get(f'{prefix}/api/signin/oidc/providers')
    def ClientAPI_SignIn_OIDC_GetProviders():
        _, oidc = dashboardConfig.GetConfig("OIDC", "client_enable")
        if not oidc:
            return ResponseObject(status=False, message="OIDC is disabled")

        return ResponseObject(data=dashboardClients.OIDC.GetProviders())

    @client.post(f'{prefix}/api/signin/oidc')
    def ClientAPI_SignIn_OIDC():
        _, oidc = dashboardConfig.GetConfig("OIDC", "client_enable")
        if not oidc:
            return ResponseObject(status=False, message="OIDC is disabled")

        data = request.get_json()
        status, oidcData = dashboardClients.SignIn_OIDC(**data)
        if not status:
            return ResponseObject(status, oidcData)

        session['Email'] = oidcData.get('email')
        session['Role'] = 'client'
        session['TotpVerified'] = True

        return ResponseObject()

    @client.post(f'{prefix}/api/signin')
    def ClientAPI_SignIn():
        data = request.get_json()
        status, msg = dashboardClients.SignIn(**data)
        if status:
            session['Email'] = data.get('Email')
            session['Role'] = 'client'
            session['TotpVerified'] = False
        return ResponseObject(status, msg)

    @client.post(f'{prefix}/api/resetPassword/generateResetToken')
    def ClientAPI_ResetPassword_GenerateResetToken():
        date = datetime.datetime.now(tz=datetime.timezone.utc).strftime('%Y-%m-%d %H:%M:%S UTC')

        emailSender = EmailSender(dashboardConfig)
        if not emailSender.ready():
            return ResponseObject(False, "We can't send you an email due to your Administrator has not setup email service. Please contact your administrator.")

        data = request.get_json()
        email = data.get('Email', None)
        if not email:
            return ResponseObject(False, "Please provide a valid Email")

        u = dashboardClients.SignIn_UserExistence(email)
        if not u:
            return ResponseObject(False, "Please provide a valid Email")

        token = dashboardClients.GenerateClientPasswordResetToken(u.get('ClientID'))

        status, msg = emailSender.send(
            email, "[WGDashboard | Client] Reset Password",
            f"Hi {email}, \n\nIt looks like you're trying to reset your password at {date} \n\nEnter this 6 digits code on the Forgot Password to continue:\n\n{token}\n\nThis code will expire in 30 minutes for your security. If you didn’t request a password reset, you can safely ignore this email—your current password will remain unchanged.\n\nIf you need help, feel free to contact support.\n\nBest regards,\n\nWGDashboard"
        )

        return ResponseObject(status, msg)

    @client.post(f'{prefix}/api/resetPassword/validateResetToken')
    def ClientAPI_ResetPassword_ValidateResetToken():
        data = request.get_json()
        email = data.get('Email', None)
        token = data.get('Token', None)
        if not all([email, token]):
            return ResponseObject(False, "Please provide a valid Email")

        u = dashboardClients.SignIn_UserExistence(email)
        if not u:
            return ResponseObject(False, "Please provide a valid Email")

        return ResponseObject(status=dashboardClients.ValidateClientPasswordResetToken(u.get('ClientID'), token))

    @client.post(f'{prefix}/api/resetPassword')
    def ClientAPI_ResetPassword():
        data = request.get_json()
        email = data.get('Email', None)
        token = data.get('Token', None)
        password = data.get('Password', None)
        confirmPassword = data.get('ConfirmPassword', None)
        if not all([email, token, password, confirmPassword]):
            return ResponseObject(False, "Please provide a valid Email")

        u = dashboardClients.SignIn_UserExistence(email)
        if not u:
            return ResponseObject(False, "Please provide a valid Email")

        if not dashboardClients.ValidateClientPasswordResetToken(u.get('ClientID'), token):
            return ResponseObject(False, "Verification code is either invalid or expired")

        status, msg = dashboardClients.ResetClientPassword(u.get('ClientID'), password, confirmPassword)

        dashboardClients.RevokeClientPasswordResetToken(u.get('ClientID'), token)

        return ResponseObject(status, msg)

    @client.get(f'{prefix}/api/signout')
    def ClientAPI_SignOut():
        if session.get("SignInMethod") == "OIDC":
            dashboardClients.SignOut_OIDC()
        session.clear()
        return ResponseObject(True)

    @client.get(f'{prefix}/api/signin/totp')
    def ClientAPI_SignIn_TOTP():
        token = request.args.get('Token', None)
        if not token:
            return ResponseObject(False, "Please provide TOTP token")

        status, msg = dashboardClients.SignIn_GetTotp(token)
        return ResponseObject(status, msg)

    @client.post(f'{prefix}/api/signin/totp')
    def ClientAPI_SignIn_ValidateTOTP():
        data = request.get_json()
        token = data.get('Token', None)
        userProvidedTotp = data.get('UserProvidedTOTP', None)
        if not all([token, userProvidedTotp]):
            return ResponseObject(False, "Please fill in all fields")
        status, msg = dashboardClients.SignIn_GetTotp(token, userProvidedTotp)
        if status:
            if session.get('Email') is None:
                return ResponseObject(False, "Sign in status is invalid", status_code=401)
            session['TotpVerified'] = True
            profile = dashboardClients.GetClientProfile(session.get("ClientID"))

            return ResponseObject(True, data={
                "Email": session.get('Email'),
                "Profile": profile
            })
        return ResponseObject(status, msg)

    @client.get(prefix)
    def ClientIndex():
        return render_template('client.html')

    @client.get(f'{prefix}/api/serverInformation')
    def ClientAPI_ServerInformation():
        return ResponseObject(data={
            "ServerTimezone": str(get_localzone())
        })

    @client.get(f'{prefix}/api/validateAuthentication')
    @login_required
    def ClientAPI_ValidateAuthentication():
        return ResponseObject(True)

    @client.get(f'{prefix}/api/configurations')
    @login_required
    def ClientAPI_Configurations():
        return ResponseObject(True, data=dashboardClients.GetClientAssignedPeers(session['ClientID']))

    @client.get(f'{prefix}/api/settings/getClientProfile')
    @login_required
    def ClientAPI_Settings_GetClientProfile():
        return ResponseObject(data={
            "Email": session.get("Email"),
            "SignInMethod": session.get("SignInMethod"),
            "Profile": dashboardClients.GetClientProfile(session.get("ClientID"))
        })

    @client.post(f'{prefix}/api/settings/updatePassword')
    @login_required
    def ClientAPI_Settings_UpdatePassword():
        data = request.get_json()
        status, message = dashboardClients.UpdateClientPassword(session['ClientID'], **data)

        return ResponseObject(status, message)

    return client
src/dashboard.py: 2717 lines changed (file diff suppressed because it is too large)
@@ -1,26 +1,26 @@
import os.path
import dashboard, configparser
import dashboard
from datetime import datetime
global sqldb, cursor, DashboardConfig, WireguardConfigurations, AllPeerJobs, JobLogger
global sqldb, cursor, DashboardConfig, WireguardConfigurations, AllPeerJobs, JobLogger, Dash
app_host, app_port = dashboard.gunicornConfig()
date = datetime.today().strftime('%Y_%m_%d_%H_%M_%S')

def post_worker_init(worker):
    dashboard.startThreads()
    dashboard.DashboardPlugins.startThreads()

worker_class = 'gthread'
workers = 1
threads = 1
threads = 2
bind = f"{app_host}:{app_port}"
daemon = True
pidfile = './gunicorn.pid'
wsgi_app = "dashboard:app"
accesslog = f"./log/access_{date}.log"
log_level = "debug"
loglevel = "info"
capture_output = True
errorlog = f"./log/error_{date}.log"
pythonpath = "., ./modules"

print(f"[Gunicorn] WGDashboard w/ Gunicorn will be running on {bind}", flush=True)
print(f"[Gunicorn] Access log file is at {accesslog}", flush=True)
print(f"[Gunicorn] Error log file is at {errorlog}", flush=True)
print(f"[Gunicorn] Error log file is at {errorlog}", flush=True)
@@ -0,0 +1,92 @@
import os
import random
import re
import subprocess
import uuid

from .Peer import Peer
from .Utilities import ValidateIPAddressesWithRange, ValidateDNSAddress, GenerateWireguardPublicKey


class AmneziaWGPeer(Peer):
    def __init__(self, tableData, configuration):
        self.advanced_security = tableData["advanced_security"]
        super().__init__(tableData, configuration)

    def updatePeer(self, name: str, private_key: str,
                   preshared_key: str,
                   dns_addresses: str, allowed_ip: str, endpoint_allowed_ip: str, mtu: int,
                   keepalive: int, advanced_security: str) -> tuple[bool, str] or tuple[bool, None]:
        if not self.configuration.getStatus():
            self.configuration.toggleConfiguration()

        existingAllowedIps = [item for row in list(
            map(lambda x: [q.strip() for q in x.split(',')],
                map(lambda y: y.allowed_ip,
                    list(filter(lambda k: k.id != self.id, self.configuration.getPeersList()))))) for item in row]

        if allowed_ip in existingAllowedIps:
            return False, "Allowed IP already taken by another peer"
        if not ValidateIPAddressesWithRange(endpoint_allowed_ip):
            return False, f"Endpoint Allowed IPs format is incorrect"
        if len(dns_addresses) > 0 and not ValidateDNSAddress(dns_addresses):
            return False, f"DNS format is incorrect"

        if type(mtu) is str:
            mtu = 0

        if type(keepalive) is str:
            keepalive = 0

        if mtu < 0 or mtu > 1460:
            return False, "MTU format is not correct"
        if keepalive < 0:
            return False, "Persistent Keepalive format is not correct"
        if advanced_security != "on" and advanced_security != "off":
            return False, "Advanced Security can only be on or off"
        if len(private_key) > 0:
            pubKey = GenerateWireguardPublicKey(private_key)
            if not pubKey[0] or pubKey[1] != self.id:
                return False, "Private key does not match with the public key"
        try:
            rd = random.Random()
            uid = str(uuid.UUID(int=rd.getrandbits(128), version=4))
            pskExist = len(preshared_key) > 0

            if pskExist:
                with open(uid, "w+") as f:
                    f.write(preshared_key)
            newAllowedIPs = allowed_ip.replace(" ", "")
            updateAllowedIp = subprocess.check_output(
                f"{self.configuration.Protocol} set {self.configuration.Name} peer {self.id} allowed-ips {newAllowedIPs} {f'preshared-key {uid}' if pskExist else 'preshared-key /dev/null'}",
                shell=True, stderr=subprocess.STDOUT)

            if pskExist: os.remove(uid)

            if len(updateAllowedIp.decode().strip("\n")) != 0:
                return False, "Update peer failed when updating Allowed IPs"
            saveConfig = subprocess.check_output(f"{self.configuration.Protocol}-quick save {self.configuration.Name}",
                                                 shell=True, stderr=subprocess.STDOUT)
            if f"wg showconf {self.configuration.Name}" not in saveConfig.decode().strip('\n'):
                return False, "Update peer failed when saving the configuration"

            with self.configuration.engine.begin() as conn:
                conn.execute(
                    self.configuration.peersTable.update().values({
                        "name": name,
                        "private_key": private_key,
                        "DNS": dns_addresses,
                        "endpoint_allowed_ip": endpoint_allowed_ip,
                        "mtu": mtu,
                        "keepalive": keepalive,
                        "preshared_key": preshared_key,
                        "advanced_security": advanced_security
                    }).where(
                        self.configuration.peersTable.c.id == self.id
                    )
                )
            self.configuration.getPeers()
            return True, None
        except subprocess.CalledProcessError as exc:
            return False, exc.output.decode("UTF-8").strip()
@ -0,0 +1,324 @@
|
|||
"""
|
||||
AmneziaWG Configuration
|
||||
"""
|
||||
import random, sqlalchemy, os, subprocess, re, uuid
|
||||
from flask import current_app
|
||||
from .PeerJobs import PeerJobs
|
||||
from .AmneziaWGPeer import AmneziaWGPeer
|
||||
from .PeerShareLinks import PeerShareLinks
|
||||
from .Utilities import RegexMatch
|
||||
from .WireguardConfiguration import WireguardConfiguration
|
||||
from .DashboardWebHooks import DashboardWebHooks
|
||||
|
||||
|
||||
class AmneziaWireguardConfiguration(WireguardConfiguration):
|
||||
def __init__(self, DashboardConfig,
|
||||
AllPeerJobs: PeerJobs,
|
||||
AllPeerShareLinks: PeerShareLinks,
|
||||
DashboardWebHooks: DashboardWebHooks,
|
||||
name: str = None, data: dict = None, backup: dict = None, startup: bool = False):
|
||||
self.Jc = 0
|
||||
self.Jmin = 0
|
||||
self.Jmax = 0
|
||||
self.S1 = 0
|
||||
self.S2 = 0
|
||||
self.H1 = 1
|
||||
self.H2 = 2
|
||||
self.H3 = 3
|
||||
self.H4 = 4
|
||||
|
||||
super().__init__(DashboardConfig, AllPeerJobs, AllPeerShareLinks, DashboardWebHooks, name, data, backup, startup, wg=False)
|
||||
|
||||
def toJson(self):
|
||||
self.Status = self.getStatus()
|
||||
return {
|
||||
"Status": self.Status,
|
||||
"Name": self.Name,
|
||||
"PrivateKey": self.PrivateKey,
|
||||
"PublicKey": self.PublicKey,
|
||||
"Address": self.Address,
|
||||
"ListenPort": self.ListenPort,
|
||||
"PreUp": self.PreUp,
|
||||
"PreDown": self.PreDown,
|
||||
"PostUp": self.PostUp,
|
||||
"PostDown": self.PostDown,
|
||||
"SaveConfig": self.SaveConfig,
|
||||
"Info": self.configurationInfo.model_dump(),
|
||||
"DataUsage": {
|
||||
"Total": sum(list(map(lambda x: x.cumu_data + x.total_data, self.Peers))),
|
||||
"Sent": sum(list(map(lambda x: x.cumu_sent + x.total_sent, self.Peers))),
|
||||
"Receive": sum(list(map(lambda x: x.cumu_receive + x.total_receive, self.Peers)))
|
||||
},
|
||||
"ConnectedPeers": len(list(filter(lambda x: x.status == "running", self.Peers))),
|
||||
"TotalPeers": len(self.Peers),
|
||||
"Protocol": self.Protocol,
|
||||
"Table": self.Table,
|
||||
"Jc": self.Jc,
|
||||
"Jmin": self.Jmin,
|
||||
"Jmax": self.Jmax,
|
||||
"S1": self.S1,
|
||||
"S2": self.S2,
|
||||
"H1": self.H1,
|
||||
"H2": self.H2,
|
||||
"H3": self.H3,
|
||||
"H4": self.H4
|
||||
}
|
||||
|
||||
def createDatabase(self, dbName = None):
|
||||
if dbName is None:
|
||||
dbName = self.Name
|
||||
|
||||
|
||||
self.peersTable = sqlalchemy.Table(
|
||||
dbName, self.metadata,
|
||||
sqlalchemy.Column('id', sqlalchemy.String(255), nullable=False, primary_key=True),
|
||||
sqlalchemy.Column('private_key', sqlalchemy.String(255)),
|
||||
sqlalchemy.Column('DNS', sqlalchemy.Text),
|
||||
sqlalchemy.Column('advanced_security', sqlalchemy.String(255)),
|
||||
sqlalchemy.Column('endpoint_allowed_ip', sqlalchemy.Text),
|
||||
sqlalchemy.Column('name', sqlalchemy.Text),
|
||||
sqlalchemy.Column('total_receive', sqlalchemy.Float),
|
||||
sqlalchemy.Column('total_sent', sqlalchemy.Float),
|
||||
sqlalchemy.Column('total_data', sqlalchemy.Float),
|
||||
sqlalchemy.Column('endpoint', sqlalchemy.String(255)),
|
||||
sqlalchemy.Column('status', sqlalchemy.String(255)),
|
||||
sqlalchemy.Column('latest_handshake', sqlalchemy.String(255)),
|
||||
sqlalchemy.Column('allowed_ip', sqlalchemy.String(255)),
|
||||
sqlalchemy.Column('cumu_receive', sqlalchemy.Float),
|
||||
sqlalchemy.Column('cumu_sent', sqlalchemy.Float),
|
||||
sqlalchemy.Column('cumu_data', sqlalchemy.Float),
|
||||
sqlalchemy.Column('mtu', sqlalchemy.Integer),
|
||||
sqlalchemy.Column('keepalive', sqlalchemy.Integer),
|
||||
sqlalchemy.Column('remote_endpoint', sqlalchemy.String(255)),
|
||||
sqlalchemy.Column('preshared_key', sqlalchemy.String(255)),
|
||||
extend_existing=True
|
||||
)
|
||||
self.peersRestrictedTable = sqlalchemy.Table(
|
||||
f'{dbName}_restrict_access', self.metadata,
|
||||
sqlalchemy.Column('id', sqlalchemy.String(255), nullable=False, primary_key=True),
|
||||
sqlalchemy.Column('private_key', sqlalchemy.String(255)),
|
||||
sqlalchemy.Column('DNS', sqlalchemy.Text),
|
||||
sqlalchemy.Column('advanced_security', sqlalchemy.String(255)),
|
||||
sqlalchemy.Column('endpoint_allowed_ip', sqlalchemy.Text),
|
||||
sqlalchemy.Column('name', sqlalchemy.Text),
|
||||
sqlalchemy.Column('total_receive', sqlalchemy.Float),
|
||||
sqlalchemy.Column('total_sent', sqlalchemy.Float),
|
||||
sqlalchemy.Column('total_data', sqlalchemy.Float),
|
||||
sqlalchemy.Column('endpoint', sqlalchemy.String(255)),
|
||||
sqlalchemy.Column('status', sqlalchemy.String(255)),
|
||||
sqlalchemy.Column('latest_handshake', sqlalchemy.String(255)),
|
||||
sqlalchemy.Column('allowed_ip', sqlalchemy.String(255)),
|
||||
sqlalchemy.Column('cumu_receive', sqlalchemy.Float),
|
||||
sqlalchemy.Column('cumu_sent', sqlalchemy.Float),
|
||||
sqlalchemy.Column('cumu_data', sqlalchemy.Float),
|
||||
sqlalchemy.Column('mtu', sqlalchemy.Integer),
|
||||
sqlalchemy.Column('keepalive', sqlalchemy.Integer),
|
||||
sqlalchemy.Column('remote_endpoint', sqlalchemy.String(255)),
|
||||
sqlalchemy.Column('preshared_key', sqlalchemy.String(255)),
|
||||
extend_existing=True
|
||||
)
|
||||
self.peersTransferTable = sqlalchemy.Table(
|
||||
f'{dbName}_transfer', self.metadata,
|
||||
sqlalchemy.Column('id', sqlalchemy.String(255), nullable=False),
|
||||
sqlalchemy.Column('total_receive', sqlalchemy.Float),
|
||||
sqlalchemy.Column('total_sent', sqlalchemy.Float),
|
||||
sqlalchemy.Column('total_data', sqlalchemy.Float),
|
||||
sqlalchemy.Column('cumu_receive', sqlalchemy.Float),
|
||||
sqlalchemy.Column('cumu_sent', sqlalchemy.Float),
|
||||
sqlalchemy.Column('cumu_data', sqlalchemy.Float),
|
||||
sqlalchemy.Column('time', (sqlalchemy.DATETIME if self.DashboardConfig.GetConfig("Database", "type")[1] == 'sqlite' else sqlalchemy.TIMESTAMP),
|
||||
server_default=sqlalchemy.func.now()),
|
||||
extend_existing=True
|
||||
)
|
||||
self.peersDeletedTable = sqlalchemy.Table(
|
||||
f'{dbName}_deleted', self.metadata,
|
||||
sqlalchemy.Column('id', sqlalchemy.String(255), nullable=False),
|
||||
sqlalchemy.Column('private_key', sqlalchemy.String(255)),
|
||||
sqlalchemy.Column('DNS', sqlalchemy.Text),
|
||||
sqlalchemy.Column('advanced_security', sqlalchemy.String(255)),
|
||||
sqlalchemy.Column('endpoint_allowed_ip', sqlalchemy.Text),
|
||||
sqlalchemy.Column('name', sqlalchemy.Text),
|
||||
sqlalchemy.Column('total_receive', sqlalchemy.Float),
|
||||
sqlalchemy.Column('total_sent', sqlalchemy.Float),
|
||||
sqlalchemy.Column('total_data', sqlalchemy.Float),
|
||||
sqlalchemy.Column('endpoint', sqlalchemy.String(255)),
|
||||
sqlalchemy.Column('status', sqlalchemy.String(255)),
|
||||
sqlalchemy.Column('latest_handshake', sqlalchemy.String(255)),
|
||||
sqlalchemy.Column('allowed_ip', sqlalchemy.String(255)),
|
||||
sqlalchemy.Column('cumu_receive', sqlalchemy.Float),
|
||||
sqlalchemy.Column('cumu_sent', sqlalchemy.Float),
|
||||
sqlalchemy.Column('cumu_data', sqlalchemy.Float),
|
||||
sqlalchemy.Column('mtu', sqlalchemy.Integer),
|
||||
sqlalchemy.Column('keepalive', sqlalchemy.Integer),
|
||||
sqlalchemy.Column('remote_endpoint', sqlalchemy.String(255)),
|
||||
sqlalchemy.Column('preshared_key', sqlalchemy.String(255)),
|
||||
extend_existing=True
|
||||
)
|
||||
self.infoTable = sqlalchemy.Table(
|
||||
'ConfigurationsInfo', self.metadata,
|
||||
sqlalchemy.Column('ID', sqlalchemy.String(255), primary_key=True),
|
||||
sqlalchemy.Column('Info', sqlalchemy.Text),
|
||||
extend_existing=True
|
||||
)
|
||||
|
||||
self.peersHistoryEndpointTable = sqlalchemy.Table(
|
||||
f'{dbName}_history_endpoint', self.metadata,
|
||||
sqlalchemy.Column('id', sqlalchemy.String(255), nullable=False),
|
||||
sqlalchemy.Column('endpoint', sqlalchemy.String(255), nullable=False),
|
||||
sqlalchemy.Column('time',
|
||||
(sqlalchemy.DATETIME if self.DashboardConfig.GetConfig("Database", "type")[1] == 'sqlite' else sqlalchemy.TIMESTAMP)),
|
||||
extend_existing=True
|
||||
)
|
||||
|
||||
self.metadata.create_all(self.engine)
|
||||
|
||||
def getPeers(self):
|
||||
self.Peers.clear()
|
||||
current_app.logger.info(f"Refreshing {self.Name} peer list")
|
||||
|
||||
if self.configurationFileChanged():
|
||||
with open(self.configPath, 'r') as configFile:
|
||||
p = []
|
||||
pCounter = -1
|
||||
content = configFile.read().split('\n')
|
||||
try:
|
||||
if "[Peer]" not in content:
|
||||
current_app.logger.info(f"{self.Name} config has no [Peer] section")
|
||||
return
|
||||
|
||||
peerStarts = content.index("[Peer]")
|
||||
content = content[peerStarts:]
|
||||
for i in content:
|
||||
if not RegexMatch("#(.*)", i) and not RegexMatch(";(.*)", i):
|
||||
if i == "[Peer]":
|
||||
pCounter += 1
|
||||
p.append({})
|
||||
p[pCounter]["name"] = ""
|
||||
else:
|
||||
if len(i) > 0:
|
||||
split = re.split(r'\s*=\s*', i, 1)
|
||||
if len(split) == 2:
|
||||
p[pCounter][split[0]] = split[1]
|
||||
|
||||
if RegexMatch("#Name# = (.*)", i):
|
||||
split = re.split(r'\s*=\s*', i, 1)
|
||||
if len(split) == 2:
|
||||
p[pCounter]["name"] = split[1]
|
||||
with self.engine.begin() as conn:
|
||||
for i in p:
|
||||
if "PublicKey" in i.keys():
|
||||
tempPeer = conn.execute(self.peersTable.select().where(
|
||||
self.peersTable.columns.id == i['PublicKey']
|
||||
)).mappings().fetchone()
|
||||
if tempPeer is None:
|
||||
tempPeer = {
|
||||
"id": i['PublicKey'],
|
||||
"advanced_security": i.get('AdvancedSecurity', 'off'),
|
||||
"private_key": "",
|
||||
"DNS": self.DashboardConfig.GetConfig("Peers", "peer_global_DNS")[1],
|
||||
"endpoint_allowed_ip": self.DashboardConfig.GetConfig("Peers", "peer_endpoint_allowed_ip")[
|
||||
1],
|
||||
"name": i.get("name"),
|
||||
"total_receive": 0,
|
||||
"total_sent": 0,
|
||||
"total_data": 0,
|
||||
"endpoint": "N/A",
|
||||
"status": "stopped",
|
||||
"latest_handshake": "N/A",
|
||||
"allowed_ip": i.get("AllowedIPs", "N/A"),
|
||||
"cumu_receive": 0,
|
||||
"cumu_sent": 0,
|
||||
"cumu_data": 0,
|
||||
"mtu": self.DashboardConfig.GetConfig("Peers", "peer_mtu")[1],
|
||||
"keepalive": self.DashboardConfig.GetConfig("Peers", "peer_keep_alive")[1],
|
||||
"remote_endpoint": self.DashboardConfig.GetConfig("Peers", "remote_endpoint")[1],
|
||||
"preshared_key": i["PresharedKey"] if "PresharedKey" in i.keys() else ""
|
||||
}
|
||||
conn.execute(
|
||||
self.peersTable.insert().values(tempPeer)
|
||||
)
|
||||
else:
|
||||
conn.execute(
|
||||
self.peersTable.update().values({
|
||||
"allowed_ip": i.get("AllowedIPs", "N/A")
|
||||
}).where(
|
||||
self.peersTable.columns.id == i['PublicKey']
|
||||
)
|
||||
)
|
||||
self.Peers.append(AmneziaWGPeer(tempPeer, self))
|
||||
except Exception as e:
|
||||
current_app.logger.error(f"{self.Name} getPeers() Error", e)
|
||||
else:
|
||||
with self.engine.connect() as conn:
|
||||
existingPeers = conn.execute(self.peersTable.select()).mappings().fetchall()
|
||||
for i in existingPeers:
|
||||
self.Peers.append(AmneziaWGPeer(i, self))
|
||||
|
||||
def addPeers(self, peers: list) -> tuple[bool, list, str]:
|
||||
result = {
|
||||
"message": None,
|
||||
"peers": []
|
||||
}
|
||||
try:
|
||||
with self.engine.begin() as conn:
|
||||
for i in peers:
|
||||
newPeer = {
|
||||
"id": i['id'],
|
||||
"private_key": i['private_key'],
|
||||
"DNS": i['DNS'],
|
||||
"endpoint_allowed_ip": i['endpoint_allowed_ip'],
|
||||
"name": i['name'],
|
||||
"total_receive": 0,
|
||||
"total_sent": 0,
|
||||
"total_data": 0,
|
||||
"endpoint": "N/A",
|
||||
"status": "stopped",
|
||||
"latest_handshake": "N/A",
|
||||
"allowed_ip": i.get("allowed_ip", "N/A"),
|
||||
"cumu_receive": 0,
|
||||
"cumu_sent": 0,
|
||||
"cumu_data": 0,
|
||||
"mtu": i['mtu'],
|
||||
"keepalive": i['keepalive'],
|
||||
"remote_endpoint": self.DashboardConfig.GetConfig("Peers", "remote_endpoint")[1],
|
||||
"preshared_key": i["preshared_key"],
|
||||
"advanced_security": i['advanced_security']
|
||||
}
|
||||
conn.execute(
|
||||
self.peersTable.insert().values(newPeer)
|
||||
)
|
||||
for p in peers:
|
||||
presharedKeyExist = len(p['preshared_key']) > 0
|
||||
rd = random.Random()
|
||||
uid = str(uuid.UUID(int=rd.getrandbits(128), version=4))
|
||||
if presharedKeyExist:
|
||||
with open(uid, "w+") as f:
|
||||
f.write(p['preshared_key'])
|
||||
|
||||
subprocess.check_output(
|
||||
f"{self.Protocol} set {self.Name} peer {p['id']} allowed-ips {p['allowed_ip'].replace(' ', '')}{f' preshared-key {uid}' if presharedKeyExist else ''}",
|
||||
shell=True, stderr=subprocess.STDOUT)
|
||||
if presharedKeyExist:
|
||||
os.remove(uid)
|
||||
subprocess.check_output(
|
||||
f"{self.Protocol}-quick save {self.Name}", shell=True, stderr=subprocess.STDOUT)
|
||||
self.getPeers()
|
||||
for p in peers:
|
||||
p = self.searchPeer(p['id'])
|
||||
if p[0]:
|
||||
result['peers'].append(p[1])
|
||||
self.DashboardWebHooks.RunWebHook("peer_created", {
|
||||
"configuration": self.Name,
|
||||
"peers": list(map(lambda k : k['id'], peers))
|
||||
})
|
||||
except Exception as e:
|
||||
current_app.logger.error("Add peers error", exc_info=e)
|
||||
return False, [], str(e)
|
||||
return True, result['peers'], ""
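# A minimal sketch of the dict shape addPeers() expects per entry, based on the
# keys read above. All values and the variable name `awgConfig` are illustrative
# placeholders, not real keys or dashboard material.
examplePeer = {
    "id": "PEER_PUBLIC_KEY",
    "private_key": "",
    "DNS": "1.1.1.1",
    "endpoint_allowed_ip": "0.0.0.0/0",
    "name": "laptop",
    "allowed_ip": "10.0.0.2/32",
    "mtu": "1420",
    "keepalive": "21",
    "preshared_key": "",
    "advanced_security": "off"
}
status, createdPeers, error = awgConfig.addPeers([examplePeer])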
|
||||
|
||||
def getRestrictedPeers(self):
|
||||
self.RestrictedPeers = []
|
||||
with self.engine.connect() as conn:
|
||||
restricted = conn.execute(self.peersRestrictedTable.select()).mappings().fetchall()
|
||||
for i in restricted:
|
||||
self.RestrictedPeers.append(AmneziaWGPeer(i, self))
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
import configparser
|
||||
import os
|
||||
from sqlalchemy_utils import database_exists, create_database
|
||||
from flask import current_app
|
||||
|
||||
def ConnectionString(database) -> str:
|
||||
parser = configparser.ConfigParser(strict=False)
|
||||
parser.read_file(open('wg-dashboard.ini', "r+"))
|
||||
sqlitePath = os.path.join("db")
|
||||
if not os.path.isdir(sqlitePath):
|
||||
os.mkdir(sqlitePath)
|
||||
if parser.get("Database", "type") == "postgresql":
|
||||
cn = f'postgresql+psycopg://{parser.get("Database", "username")}:{parser.get("Database", "password")}@{parser.get("Database", "host")}/{database}'
|
||||
elif parser.get("Database", "type") == "mysql":
|
||||
cn = f'mysql+pymysql://{parser.get("Database", "username")}:{parser.get("Database", "password")}@{parser.get("Database", "host")}/{database}'
|
||||
else:
|
||||
cn = f'sqlite:///{os.path.join(sqlitePath, f"{database}.db")}'
|
||||
try:
|
||||
if not database_exists(cn):
|
||||
create_database(cn)
|
||||
except Exception as e:
|
||||
current_app.logger.error("Database error. Terminating...", exc_info=e)
|
||||
exit(1)
|
||||
|
||||
return cn
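# Usage sketch: assuming a readable wg-dashboard.ini with a [Database] section,
# the string returned above plugs straight into SQLAlchemy's create_engine().
if __name__ == "__main__":
    import sqlalchemy as db
    engine = db.create_engine(ConnectionString("wgdashboard"))
    with engine.connect() as conn:
        print(conn.execute(db.text("SELECT 1")).scalar())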
|
||||
|
|
@ -0,0 +1,11 @@
|
|||
"""
|
||||
Dashboard API Key
|
||||
"""
|
||||
class DashboardAPIKey:
|
||||
def __init__(self, Key: str, CreatedAt: str, ExpiredAt: str):
|
||||
self.Key = Key
|
||||
self.CreatedAt = CreatedAt
|
||||
self.ExpiredAt = ExpiredAt
|
||||
|
||||
def toJson(self):
|
||||
return self.__dict__
|
||||
|
|
@ -0,0 +1,498 @@
|
|||
import datetime
|
||||
import hashlib
|
||||
import random
|
||||
import uuid
|
||||
|
||||
import bcrypt
|
||||
import pyotp
|
||||
import sqlalchemy as db
|
||||
import requests
|
||||
|
||||
from .ConnectionString import ConnectionString
|
||||
from .DashboardClientsPeerAssignment import DashboardClientsPeerAssignment
|
||||
from .DashboardClientsTOTP import DashboardClientsTOTP
|
||||
from .DashboardOIDC import DashboardOIDC
|
||||
from .Utilities import ValidatePasswordStrength
|
||||
from .DashboardLogger import DashboardLogger
|
||||
from flask import session
|
||||
|
||||
|
||||
class DashboardClients:
|
||||
def __init__(self, wireguardConfigurations):
|
||||
self.logger = DashboardLogger()
|
||||
self.engine = db.create_engine(ConnectionString("wgdashboard"))
|
||||
self.metadata = db.MetaData()
|
||||
self.OIDC = DashboardOIDC("Client")
|
||||
|
||||
self.dashboardClientsTable = db.Table(
|
||||
'DashboardClients', self.metadata,
|
||||
db.Column('ClientID', db.String(255), nullable=False, primary_key=True),
|
||||
db.Column('Email', db.String(255), nullable=False, index=True),
|
||||
db.Column('Password', db.String(500)),
|
||||
db.Column('TotpKey', db.String(500)),
|
||||
db.Column('TotpKeyVerified', db.Integer),
|
||||
db.Column('CreatedDate',
|
||||
(db.DATETIME if 'sqlite:///' in ConnectionString("wgdashboard") else db.TIMESTAMP),
|
||||
server_default=db.func.now()),
|
||||
db.Column('DeletedDate',
|
||||
(db.DATETIME if 'sqlite:///' in ConnectionString("wgdashboard") else db.TIMESTAMP)),
|
||||
extend_existing=True,
|
||||
)
|
||||
|
||||
self.dashboardOIDCClientsTable = db.Table(
|
||||
'DashboardOIDCClients', self.metadata,
|
||||
db.Column('ClientID', db.String(255), nullable=False, primary_key=True),
|
||||
db.Column('Email', db.String(255), nullable=False, index=True),
|
||||
db.Column('ProviderIssuer', db.String(500), nullable=False, index=True),
|
||||
db.Column('ProviderSubject', db.String(500), nullable=False, index=True),
|
||||
db.Column('CreatedDate',
|
||||
(db.DATETIME if 'sqlite:///' in ConnectionString("wgdashboard") else db.TIMESTAMP),
|
||||
server_default=db.func.now()),
|
||||
db.Column('DeletedDate',
|
||||
(db.DATETIME if 'sqlite:///' in ConnectionString("wgdashboard") else db.TIMESTAMP)),
|
||||
extend_existing=True,
|
||||
)
|
||||
|
||||
self.dashboardClientsInfoTable = db.Table(
|
||||
'DashboardClientsInfo', self.metadata,
|
||||
db.Column('ClientID', db.String(255), nullable=False, primary_key=True),
|
||||
db.Column('Name', db.String(500)),
|
||||
extend_existing=True,
|
||||
)
|
||||
|
||||
self.dashboardClientsPasswordResetLinkTable = db.Table(
|
||||
'DashboardClientsPasswordResetLinks', self.metadata,
|
||||
db.Column('ResetToken', db.String(255), nullable=False, primary_key=True),
|
||||
db.Column('ClientID', db.String(255), nullable=False),
|
||||
db.Column('CreatedDate',
|
||||
(db.DATETIME if 'sqlite:///' in ConnectionString("wgdashboard") else db.TIMESTAMP),
|
||||
server_default=db.func.now()),
|
||||
db.Column('ExpiryDate',
|
||||
(db.DATETIME if 'sqlite:///' in ConnectionString("wgdashboard") else db.TIMESTAMP)),
|
||||
extend_existing=True
|
||||
)
|
||||
|
||||
self.metadata.create_all(self.engine)
|
||||
self.Clients = {}
|
||||
self.ClientsRaw = []
|
||||
self.__getClients()
|
||||
self.DashboardClientsTOTP = DashboardClientsTOTP()
|
||||
self.DashboardClientsPeerAssignment = DashboardClientsPeerAssignment(wireguardConfigurations)
|
||||
|
||||
def __getClients(self):
|
||||
with self.engine.connect() as conn:
|
||||
localClients = db.select(
|
||||
self.dashboardClientsTable.c.ClientID,
|
||||
self.dashboardClientsTable.c.Email,
|
||||
db.literal_column("'Local'").label("ClientGroup")
|
||||
).where(
|
||||
self.dashboardClientsTable.c.DeletedDate.is_(None)
|
||||
)
|
||||
|
||||
oidcClients = db.select(
|
||||
self.dashboardOIDCClientsTable.c.ClientID,
|
||||
self.dashboardOIDCClientsTable.c.Email,
|
||||
self.dashboardOIDCClientsTable.c.ProviderIssuer.label("ClientGroup"),
|
||||
).where(
|
||||
self.dashboardOIDCClientsTable.c.DeletedDate.is_(None)
|
||||
)
|
||||
|
||||
union = db.union(localClients, oidcClients).alias("U")
|
||||
|
||||
self.ClientsRaw = conn.execute(
|
||||
db.select(
|
||||
union,
|
||||
self.dashboardClientsInfoTable.c.Name
|
||||
).outerjoin(self.dashboardClientsInfoTable,
|
||||
union.c.ClientID == self.dashboardClientsInfoTable.c.ClientID)
|
||||
).mappings().fetchall()
|
||||
|
||||
groups = set(map(lambda c: c.get('ClientGroup'), self.ClientsRaw))
|
||||
gr = {}
|
||||
for g in groups:
|
||||
gr[(g if g == 'Local' else self.OIDC.GetProviderNameByIssuer(g))] = [
|
||||
dict(x) for x in list(
|
||||
filter(lambda c: c.get('ClientGroup') == g, self.ClientsRaw)
|
||||
)
|
||||
]
|
||||
self.Clients = gr
|
||||
|
||||
def GetAllClients(self):
|
||||
self.__getClients()
|
||||
return self.Clients
|
||||
|
||||
def GetAllClientsRaw(self):
|
||||
self.__getClients()
|
||||
return self.ClientsRaw
|
||||
|
||||
def GetClient(self, ClientID) -> dict[str, str] | None:
|
||||
c = filter(lambda x: x['ClientID'] == ClientID, self.ClientsRaw)
|
||||
client = next((dict(client) for client in c), None)
|
||||
if client is not None:
|
||||
client['ClientGroup'] = self.OIDC.GetProviderNameByIssuer(client['ClientGroup'])
|
||||
return client
|
||||
|
||||
def GetClientProfile(self, ClientID):
|
||||
with self.engine.connect() as conn:
|
||||
return dict(conn.execute(
|
||||
db.select(
|
||||
*[c for c in self.dashboardClientsInfoTable.c if c.name != 'ClientID']
|
||||
).where(
|
||||
self.dashboardClientsInfoTable.c.ClientID == ClientID
|
||||
)
|
||||
).mappings().fetchone())
|
||||
|
||||
def SignIn_ValidatePassword(self, Email, Password) -> bool:
|
||||
if not all([Email, Password]):
|
||||
return False
|
||||
existingClient = self.SignIn_UserExistence(Email)
|
||||
if existingClient:
|
||||
return bcrypt.checkpw(Password.encode("utf-8"), existingClient.get("Password").encode("utf-8"))
|
||||
return False
|
||||
|
||||
def SignIn_UserExistence(self, Email):
|
||||
with self.engine.connect() as conn:
|
||||
existingClient = conn.execute(
|
||||
self.dashboardClientsTable.select().where(
|
||||
self.dashboardClientsTable.c.Email == Email
|
||||
)
|
||||
).mappings().fetchone()
|
||||
return existingClient
|
||||
|
||||
def SignIn_OIDC_UserExistence(self, data: dict[str, str]):
|
||||
with self.engine.connect() as conn:
|
||||
existingClient = conn.execute(
|
||||
self.dashboardOIDCClientsTable.select().where(
|
||||
db.and_(
|
||||
self.dashboardOIDCClientsTable.c.ProviderIssuer == data.get('iss'),
|
||||
self.dashboardOIDCClientsTable.c.ProviderSubject == data.get('sub'),
|
||||
)
|
||||
)
|
||||
).mappings().fetchone()
|
||||
return existingClient
|
||||
|
||||
def SignUp_OIDC(self, data: dict[str, str]) -> tuple[bool, str] | tuple[bool, None]:
|
||||
if not self.SignIn_OIDC_UserExistence(data):
|
||||
with self.engine.begin() as conn:
|
||||
newClientUUID = str(uuid.uuid4())
|
||||
conn.execute(
|
||||
self.dashboardOIDCClientsTable.insert().values({
|
||||
"ClientID": newClientUUID,
|
||||
"Email": data.get('email', ''),
|
||||
"ProviderIssuer": data.get('iss', ''),
|
||||
"ProviderSubject": data.get('sub', '')
|
||||
})
|
||||
)
|
||||
conn.execute(
|
||||
self.dashboardClientsInfoTable.insert().values({
|
||||
"ClientID": newClientUUID,
|
||||
"Name": data.get("name")
|
||||
})
|
||||
)
|
||||
self.logger.log(Message=f"User {data.get('email', '')} from {data.get('iss', '')} signed up")
|
||||
self.__getClients()
|
||||
return True, newClientUUID
|
||||
return False, "User already signed up"
|
||||
|
||||
def SignOut_OIDC(self):
|
||||
sessionPayload = session.get('OIDCPayload')
|
||||
status, oidc_config = self.OIDC.GetProviderConfiguration(session.get('SignInPayload').get("Provider"))
|
||||
signOut = requests.get(
|
||||
oidc_config.get("end_session_endpoint"),
|
||||
params={
|
||||
'id_token_hint': session.get('SignInPayload').get("Payload").get('sid')
|
||||
}
|
||||
)
|
||||
return True
|
||||
|
||||
def SignIn_OIDC(self, **kwargs):
|
||||
status, data = self.OIDC.VerifyToken(**kwargs)
|
||||
if not status:
|
||||
return False, "Sign in failed. Reason: " + data
|
||||
existingClient = self.SignIn_OIDC_UserExistence(data)
|
||||
if not existingClient:
|
||||
status, newClientUUID = self.SignUp_OIDC(data)
|
||||
session['ClientID'] = newClientUUID
|
||||
else:
|
||||
session['ClientID'] = existingClient.get("ClientID")
|
||||
session['SignInMethod'] = 'OIDC'
|
||||
session['SignInPayload'] = {
|
||||
"Provider": kwargs.get('provider'),
|
||||
"Payload": data
|
||||
}
|
||||
return True, data
|
||||
|
||||
def SignIn(self, Email, Password) -> tuple[bool, str]:
|
||||
if not all([Email, Password]):
|
||||
return False, "Please fill in all fields"
|
||||
existingClient = self.SignIn_UserExistence(Email)
|
||||
if existingClient:
|
||||
checkPwd = self.SignIn_ValidatePassword(Email, Password)
|
||||
if checkPwd:
|
||||
session['SignInMethod'] = 'local'
|
||||
session['Email'] = Email
|
||||
session['ClientID'] = existingClient.get("ClientID")
|
||||
return True, self.DashboardClientsTOTP.GenerateToken(existingClient.get("ClientID"))
|
||||
return False, "Email or Password is incorrect"
|
||||
|
||||
def SignIn_GetTotp(self, Token: str, UserProvidedTotp: str = None) -> tuple[bool, str] or tuple[bool, None, str]:
|
||||
status, data = self.DashboardClientsTOTP.GetTotp(Token)
|
||||
|
||||
if not status:
|
||||
return False, "TOTP Token is invalid"
|
||||
if UserProvidedTotp is None:
|
||||
if data.get('TotpKeyVerified') is None:
|
||||
return True, pyotp.totp.TOTP(data.get('TotpKey')).provisioning_uri(name=data.get('Email'),
|
||||
issuer_name="WGDashboard Client")
|
||||
else:
|
||||
totpMatched = pyotp.totp.TOTP(data.get('TotpKey')).verify(UserProvidedTotp)
|
||||
if not totpMatched:
|
||||
return False, "TOTP is does not match"
|
||||
else:
|
||||
self.DashboardClientsTOTP.RevokeToken(Token)
|
||||
if data.get('TotpKeyVerified') is None:
|
||||
with self.engine.begin() as conn:
|
||||
conn.execute(
|
||||
self.dashboardClientsTable.update().values({
|
||||
'TotpKeyVerified': 1
|
||||
}).where(
|
||||
self.dashboardClientsTable.c.ClientID == data.get('ClientID')
|
||||
)
|
||||
)
|
||||
|
||||
return True, None
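# Flow sketch (hypothetical `clients` instance of DashboardClients): SignIn()
# hands back a short-lived token; calling SignIn_GetTotp() without a code returns
# the provisioning URI while the TOTP key is still unverified, and calling it
# again with the 6-digit code verifies it and revokes the token.
ok, token = clients.SignIn("user@example.com", "correct-password")
if ok:
    ok, provisioningUri = clients.SignIn_GetTotp(token)               # enrol the authenticator app
    ok, _ = clients.SignIn_GetTotp(token, UserProvidedTotp="123456")  # verify the generated code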
|
||||
|
||||
def SignUp(self, Email, Password, ConfirmPassword) -> tuple[bool, str] or tuple[bool, None]:
|
||||
try:
|
||||
if not all([Email, Password, ConfirmPassword]):
|
||||
return False, "Please fill in all fields"
|
||||
if Password != ConfirmPassword:
|
||||
return False, "Passwords does not match"
|
||||
|
||||
existingClient = self.SignIn_UserExistence(Email)
|
||||
if existingClient:
|
||||
return False, "Email already signed up"
|
||||
|
||||
pwStrength, msg = ValidatePasswordStrength(Password)
|
||||
if not pwStrength:
|
||||
return pwStrength, msg
|
||||
|
||||
with self.engine.begin() as conn:
|
||||
newClientUUID = str(uuid.uuid4())
|
||||
totpKey = pyotp.random_base32()
|
||||
encodePassword = Password.encode('utf-8')
|
||||
conn.execute(
|
||||
self.dashboardClientsTable.insert().values({
|
||||
"ClientID": newClientUUID,
|
||||
"Email": Email,
|
||||
"Password": bcrypt.hashpw(encodePassword, bcrypt.gensalt()).decode("utf-8"),
|
||||
"TotpKey": totpKey
|
||||
})
|
||||
)
|
||||
conn.execute(
|
||||
self.dashboardClientsInfoTable.insert().values({
|
||||
"ClientID": newClientUUID
|
||||
})
|
||||
)
|
||||
self.logger.log(Message=f"User {Email} signed up")
|
||||
self.__getClients()
|
||||
except Exception as e:
|
||||
self.logger.log(Status="false", Message=f"Sign up failed, reason: {str(e)}")
|
||||
return False, "Signe up failed."
|
||||
|
||||
return True, None
|
||||
|
||||
def GetClientAssignedPeers(self, ClientID):
|
||||
return self.DashboardClientsPeerAssignment.GetAssignedPeers(ClientID)
|
||||
|
||||
def ResetClientPassword(self, ClientID, NewPassword, ConfirmNewPassword) -> tuple[bool, str] | tuple[bool, None]:
|
||||
c = self.GetClient(ClientID)
|
||||
if c is None:
|
||||
return False, "Client does not exist"
|
||||
|
||||
if NewPassword != ConfirmNewPassword:
|
||||
return False, "New passwords does not match"
|
||||
|
||||
pwStrength, msg = ValidatePasswordStrength(NewPassword)
|
||||
if not pwStrength:
|
||||
return pwStrength, msg
|
||||
try:
|
||||
with self.engine.begin() as conn:
|
||||
conn.execute(
|
||||
self.dashboardClientsTable.update().values({
|
||||
"TotpKeyVerified": None,
|
||||
"TotpKey": pyotp.random_base32(),
|
||||
"Password": bcrypt.hashpw(NewPassword.encode('utf-8'), bcrypt.gensalt()).decode("utf-8"),
|
||||
}).where(
|
||||
self.dashboardClientsTable.c.ClientID == ClientID
|
||||
)
|
||||
)
|
||||
self.logger.log(Message=f"User {ClientID} reset password and TOTP")
|
||||
except Exception as e:
|
||||
self.logger.log(Status="false", Message=f"User {ClientID} reset password failed, reason: {str(e)}")
|
||||
return False, "Reset password failed."
|
||||
|
||||
|
||||
return True, None
|
||||
|
||||
def UpdateClientPassword(self, ClientID, CurrentPassword, NewPassword, ConfirmNewPassword) -> tuple[bool, str] | tuple[bool, None]:
|
||||
c = self.GetClient(ClientID)
|
||||
if c is None:
|
||||
return False, "Client does not exist"
|
||||
|
||||
if not all([CurrentPassword, NewPassword, ConfirmNewPassword]):
|
||||
return False, "Please fill in all fields"
|
||||
|
||||
if not self.SignIn_ValidatePassword(c.get('Email'), CurrentPassword):
|
||||
return False, "Current password does not match"
|
||||
|
||||
if NewPassword != ConfirmNewPassword:
|
||||
return False, "New passwords does not match"
|
||||
|
||||
pwStrength, msg = ValidatePasswordStrength(NewPassword)
|
||||
if not pwStrength:
|
||||
return pwStrength, msg
|
||||
try:
|
||||
with self.engine.begin() as conn:
|
||||
conn.execute(
|
||||
self.dashboardClientsTable.update().values({
|
||||
"Password": bcrypt.hashpw(NewPassword.encode('utf-8'), bcrypt.gensalt()).decode("utf-8"),
|
||||
}).where(
|
||||
self.dashboardClientsTable.c.ClientID == ClientID
|
||||
)
|
||||
)
|
||||
self.logger.log(Message=f"User {ClientID} updated password")
|
||||
except Exception as e:
|
||||
self.logger.log(Status="false", Message=f"User {ClientID} update password failed, reason: {str(e)}")
|
||||
return False, "Update password failed."
|
||||
return True, None
|
||||
|
||||
def UpdateClientProfile(self, ClientID, Name):
|
||||
try:
|
||||
with self.engine.begin() as conn:
|
||||
conn.execute(
|
||||
self.dashboardClientsInfoTable.update().values({
|
||||
"Name": Name
|
||||
}).where(
|
||||
self.dashboardClientsInfoTable.c.ClientID == ClientID
|
||||
)
|
||||
)
|
||||
self.logger.log(Message=f"User {ClientID} updated name to {Name}")
|
||||
except Exception as e:
|
||||
self.logger.log(Status="false", Message=f"User {ClientID} failed to update name to {Name}, reason: {str(e)}")
|
||||
return False
|
||||
return True
|
||||
|
||||
def DeleteClient(self, ClientID):
|
||||
try:
|
||||
with self.engine.begin() as conn:
|
||||
client = self.GetClient(ClientID)
|
||||
if client.get("ClientGroup") == "Local":
|
||||
conn.execute(
|
||||
self.dashboardClientsTable.delete().where(
|
||||
self.dashboardClientsTable.c.ClientID == ClientID
|
||||
)
|
||||
)
|
||||
else:
|
||||
conn.execute(
|
||||
self.dashboardOIDCClientsTable.delete().where(
|
||||
self.dashboardOIDCClientsTable.c.ClientID == ClientID
|
||||
)
|
||||
)
|
||||
conn.execute(
|
||||
self.dashboardClientsInfoTable.delete().where(
|
||||
self.dashboardClientsInfoTable.c.ClientID == ClientID
|
||||
)
|
||||
)
|
||||
self.DashboardClientsPeerAssignment.UnassignPeers(ClientID)
|
||||
self.__getClients()
|
||||
except Exception as e:
|
||||
self.logger.log(Status="false", Message=f"Failed to delete {ClientID}")
|
||||
return False
|
||||
return True
|
||||
|
||||
'''
|
||||
For WGDashboard Admin to Manage Clients
|
||||
'''
|
||||
|
||||
def GenerateClientPasswordResetToken(self, ClientID) -> bool | str:
|
||||
c = self.GetClient(ClientID)
|
||||
if c is None:
|
||||
return False
|
||||
|
||||
newToken = str(random.randint(0, 999999)).zfill(6)
|
||||
with self.engine.begin() as conn:
|
||||
conn.execute(
|
||||
self.dashboardClientsPasswordResetLinkTable.update().values({
|
||||
"ExpiryDate": datetime.datetime.now()
|
||||
|
||||
}).where(
|
||||
db.and_(
|
||||
self.dashboardClientsPasswordResetLinkTable.c.ClientID == ClientID,
|
||||
self.dashboardClientsPasswordResetLinkTable.c.ExpiryDate > db.func.now()
|
||||
)
|
||||
)
|
||||
)
|
||||
conn.execute(
|
||||
self.dashboardClientsPasswordResetLinkTable.insert().values({
|
||||
"ResetToken": newToken,
|
||||
"ClientID": ClientID,
|
||||
"CreatedDate": datetime.datetime.now(),
|
||||
"ExpiryDate": datetime.datetime.now() + datetime.timedelta(minutes=30)
|
||||
})
|
||||
)
|
||||
|
||||
return newToken
|
||||
|
||||
def ValidateClientPasswordResetToken(self, ClientID, Token):
|
||||
c = self.GetClient(ClientID)
|
||||
if c is None:
|
||||
return False
|
||||
with self.engine.connect() as conn:
|
||||
t = conn.execute(
|
||||
self.dashboardClientsPasswordResetLinkTable.select().where(
|
||||
db.and_(self.dashboardClientsPasswordResetLinkTable.c.ClientID == ClientID,
|
||||
self.dashboardClientsPasswordResetLinkTable.c.ResetToken == Token,
|
||||
self.dashboardClientsPasswordResetLinkTable.c.ExpiryDate > datetime.datetime.now())
|
||||
|
||||
)
|
||||
).mappings().fetchone()
|
||||
return t is not None
|
||||
|
||||
def RevokeClientPasswordResetToken(self, ClientID, Token):
|
||||
with self.engine.begin() as conn:
|
||||
conn.execute(
|
||||
self.dashboardClientsPasswordResetLinkTable.update().values({
|
||||
"ExpiryDate": datetime.datetime.now()
|
||||
}).where(
|
||||
db.and_(self.dashboardClientsPasswordResetLinkTable.c.ClientID == ClientID,
|
||||
self.dashboardClientsPasswordResetLinkTable.c.ResetToken == Token)
|
||||
)
|
||||
)
|
||||
return True
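# Admin-side reset flow sketch (hypothetical `clients` instance and password):
# generate a 6-digit token for the client, validate it when the client comes
# back, reset the password, then revoke the token so it cannot be replayed.
token = clients.GenerateClientPasswordResetToken(clientID)
if clients.ValidateClientPasswordResetToken(clientID, token):
    clients.ResetClientPassword(clientID, "N3w-Passw0rd!", "N3w-Passw0rd!")
    clients.RevokeClientPasswordResetToken(clientID, token)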
|
||||
|
||||
def GetAssignedPeerClients(self, ConfigurationName, PeerID):
|
||||
c = self.DashboardClientsPeerAssignment.GetAssignedClients(ConfigurationName, PeerID)
|
||||
for a in c:
|
||||
client = self.GetClient(a.ClientID)
|
||||
if client is not None:
|
||||
a.Client = self.GetClient(a.ClientID)
|
||||
return c
|
||||
|
||||
def GetClientAssignedPeersGrouped(self, ClientID):
|
||||
client = self.GetClient(ClientID)
|
||||
if client is not None:
|
||||
p = self.DashboardClientsPeerAssignment.GetAssignedPeers(ClientID)
|
||||
configs = set(map(lambda x : x['configuration_name'], p))
|
||||
d = {}
|
||||
for i in configs:
|
||||
d[i] = list(filter(lambda x : x['configuration_name'] == i, p))
|
||||
return d
|
||||
return None
|
||||
|
||||
def AssignClient(self, ConfigurationName, PeerID, ClientID) -> tuple[bool, dict[str, str]] | tuple[bool, None]:
|
||||
return self.DashboardClientsPeerAssignment.AssignClient(ClientID, ConfigurationName, PeerID)
|
||||
|
||||
def UnassignClient(self, AssignmentID):
|
||||
return self.DashboardClientsPeerAssignment.UnassignClients(AssignmentID)
|
||||
|
||||
|
|
@ -0,0 +1,159 @@
|
|||
import datetime
|
||||
import uuid
|
||||
|
||||
from .ConnectionString import ConnectionString
|
||||
from .DashboardLogger import DashboardLogger
|
||||
import sqlalchemy as db
|
||||
from .WireguardConfiguration import WireguardConfiguration
|
||||
|
||||
class Assignment:
|
||||
def __init__(self, **kwargs):
|
||||
self.AssignmentID: str = kwargs.get('AssignmentID')
|
||||
self.ClientID: str = kwargs.get('ClientID')
|
||||
self.ConfigurationName: str = kwargs.get('ConfigurationName')
|
||||
self.PeerID: str = kwargs.get('PeerID')
|
||||
self.AssignedDate: datetime.datetime = kwargs.get('AssignedDate')
|
||||
self.UnassignedDate: datetime.datetime = kwargs.get('UnassignedDate')
|
||||
self.Client: dict = {
|
||||
"ClientID": self.ClientID
|
||||
}
|
||||
|
||||
def toJson(self):
|
||||
return {
|
||||
"AssignmentID": self.AssignmentID,
|
||||
"Client": self.Client,
|
||||
"ConfigurationName": self.ConfigurationName,
|
||||
"PeerID": self.PeerID,
|
||||
"AssignedDate": self.AssignedDate.strftime("%Y-%m-%d %H:%M:%S"),
|
||||
"UnassignedDate": self.UnassignedDate.strftime("%Y-%m-%d %H:%M:%S") if self.UnassignedDate is not None else self.UnassignedDate
|
||||
}
|
||||
|
||||
class DashboardClientsPeerAssignment:
|
||||
def __init__(self, wireguardConfigurations: dict[str, WireguardConfiguration]):
|
||||
self.logger = DashboardLogger()
|
||||
self.engine = db.create_engine(ConnectionString("wgdashboard"))
|
||||
self.metadata = db.MetaData()
|
||||
self.wireguardConfigurations = wireguardConfigurations
|
||||
self.dashboardClientsPeerAssignmentTable = db.Table(
|
||||
'DashboardClientsPeerAssignment', self.metadata,
|
||||
db.Column('AssignmentID', db.String(255), nullable=False, primary_key=True),
|
||||
db.Column('ClientID', db.String(255), nullable=False, index=True),
|
||||
db.Column('ConfigurationName', db.String(255)),
|
||||
db.Column('PeerID', db.String(500)),
|
||||
db.Column('AssignedDate',
|
||||
(db.DATETIME if 'sqlite:///' in ConnectionString("wgdashboard") else db.TIMESTAMP),
|
||||
server_default=db.func.now()),
|
||||
db.Column('UnassignedDate',
|
||||
(db.DATETIME if 'sqlite:///' in ConnectionString("wgdashboard") else db.TIMESTAMP)),
|
||||
extend_existing=True
|
||||
)
|
||||
self.metadata.create_all(self.engine)
|
||||
self.assignments: list[Assignment] = []
|
||||
self.__getAssignments()
|
||||
|
||||
def __getAssignments(self):
|
||||
with self.engine.connect() as conn:
|
||||
assignments = []
|
||||
get = conn.execute(
|
||||
self.dashboardClientsPeerAssignmentTable.select().where(
|
||||
self.dashboardClientsPeerAssignmentTable.c.UnassignedDate.is_(None)
|
||||
)
|
||||
).mappings().fetchall()
|
||||
for a in get:
|
||||
assignments.append(Assignment(**a))
|
||||
self.assignments = assignments
|
||||
|
||||
|
||||
def AssignClient(self, ClientID, ConfigurationName, PeerID):
|
||||
existing = list(
|
||||
filter(lambda e:
|
||||
e.ClientID == ClientID and
|
||||
e.ConfigurationName == ConfigurationName and
|
||||
e.PeerID == PeerID, self.assignments)
|
||||
)
|
||||
if len(existing) == 0:
|
||||
if ConfigurationName in self.wireguardConfigurations.keys():
|
||||
config = self.wireguardConfigurations.get(ConfigurationName)
|
||||
peer = list(filter(lambda x : x.id == PeerID, config.Peers))
|
||||
if len(peer) == 1:
|
||||
with self.engine.begin() as conn:
|
||||
data = {
|
||||
"AssignmentID": str(uuid.uuid4()),
|
||||
"ClientID": ClientID,
|
||||
"ConfigurationName": ConfigurationName,
|
||||
"PeerID": PeerID
|
||||
}
|
||||
conn.execute(
|
||||
self.dashboardClientsPeerAssignmentTable.insert().values(data)
|
||||
)
|
||||
self.__getAssignments()
|
||||
return True, data
|
||||
return False, None
|
||||
|
||||
def UnassignClients(self, AssignmentID):
|
||||
existing = list(
|
||||
filter(lambda e:
|
||||
e.AssignmentID == AssignmentID, self.assignments)
|
||||
)
|
||||
if not existing:
|
||||
return False
|
||||
with self.engine.begin() as conn:
|
||||
conn.execute(
|
||||
self.dashboardClientsPeerAssignmentTable.update().values({
|
||||
"UnassignedDate": datetime.datetime.now()
|
||||
}).where(
|
||||
self.dashboardClientsPeerAssignmentTable.c.AssignmentID == AssignmentID
|
||||
)
|
||||
)
|
||||
self.__getAssignments()
|
||||
return True
|
||||
|
||||
def UnassignPeers(self, ClientID):
|
||||
with self.engine.begin() as conn:
|
||||
conn.execute(
|
||||
self.dashboardClientsPeerAssignmentTable.update().values({
|
||||
"UnassignedDate": datetime.datetime.now()
|
||||
}).where(
|
||||
db.and_(
|
||||
self.dashboardClientsPeerAssignmentTable.c.ClientID == ClientID,
|
||||
self.dashboardClientsPeerAssignmentTable.c.UnassignedDate.is_(db.null())
|
||||
)
|
||||
)
|
||||
)
|
||||
self.__getAssignments()
|
||||
return True
|
||||
|
||||
def GetAssignedClients(self, ConfigurationName, PeerID) -> list[Assignment]:
|
||||
self.__getAssignments()
|
||||
return list(filter(
|
||||
lambda c : c.ConfigurationName == ConfigurationName and
|
||||
c.PeerID == PeerID, self.assignments))
|
||||
|
||||
def GetAssignedPeers(self, ClientID):
|
||||
self.__getAssignments()
|
||||
|
||||
peers = []
|
||||
assigned = filter(lambda e:
|
||||
e.ClientID == ClientID, self.assignments)
|
||||
|
||||
for a in assigned:
|
||||
peer = filter(lambda e : e.id == a.PeerID,
|
||||
self.wireguardConfigurations[a.ConfigurationName].Peers)
|
||||
for p in peer:
|
||||
peers.append({
|
||||
'assignment_id': a.AssignmentID,
|
||||
'protocol': self.wireguardConfigurations[a.ConfigurationName].Protocol,
|
||||
'id': p.id,
|
||||
'private_key': p.private_key,
|
||||
'name': p.name,
|
||||
'received_data': p.total_receive + p.cumu_receive,
|
||||
'sent_data': p.total_sent + p.cumu_sent,
|
||||
'data': p.total_data + p.cumu_data,
|
||||
'status': p.status,
|
||||
'latest_handshake': p.latest_handshake,
|
||||
'allowed_ip': p.allowed_ip,
|
||||
'jobs': p.jobs,
|
||||
'configuration_name': a.ConfigurationName,
|
||||
'peer_configuration_data': p.downloadPeer()
|
||||
})
|
||||
return peers
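# Assignment sketch (hypothetical variable names): link a client to one peer of
# a loaded configuration, read the assigned peers back, then unassign it again.
ok, assignment = peerAssignment.AssignClient(clientID, "wg0", "PEER_PUBLIC_KEY")
if ok:
    print(peerAssignment.GetAssignedPeers(clientID))
    peerAssignment.UnassignClients(assignment["AssignmentID"])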
|
||||
|
|
@ -0,0 +1,82 @@
|
|||
import datetime
|
||||
import hashlib
|
||||
import uuid
|
||||
|
||||
import sqlalchemy as db
|
||||
from .ConnectionString import ConnectionString
|
||||
|
||||
|
||||
class DashboardClientsTOTP:
|
||||
def __init__(self):
|
||||
self.engine = db.create_engine(ConnectionString("wgdashboard"))
|
||||
self.metadata = db.MetaData()
|
||||
self.dashboardClientsTOTPTable = db.Table(
|
||||
'DashboardClientsTOTPTokens', self.metadata,
|
||||
db.Column("Token", db.String(500), primary_key=True, index=True),
|
||||
db.Column("ClientID", db.String(500), index=True),
|
||||
db.Column(
|
||||
"ExpireTime", (db.DATETIME if 'sqlite:///' in ConnectionString("wgdashboard") else db.TIMESTAMP)
|
||||
)
|
||||
)
|
||||
self.metadata.create_all(self.engine)
|
||||
self.metadata.reflect(self.engine)
|
||||
self.dashboardClientsTable = self.metadata.tables['DashboardClients']
|
||||
|
||||
def GenerateToken(self, ClientID) -> str:
|
||||
token = hashlib.sha512(f"{ClientID}_{datetime.datetime.now()}_{uuid.uuid4()}".encode()).hexdigest()
|
||||
with self.engine.begin() as conn:
|
||||
conn.execute(
|
||||
self.dashboardClientsTOTPTable.update().values({
|
||||
"ExpireTime": datetime.datetime.now()
|
||||
}).where(
|
||||
db.and_(self.dashboardClientsTOTPTable.c.ClientID == ClientID, self.dashboardClientsTOTPTable.c.ExpireTime > datetime.datetime.now())
|
||||
)
|
||||
)
|
||||
conn.execute(
|
||||
self.dashboardClientsTOTPTable.insert().values({
|
||||
"Token": token,
|
||||
"ClientID": ClientID,
|
||||
"ExpireTime": datetime.datetime.now() + datetime.timedelta(minutes=10)
|
||||
})
|
||||
)
|
||||
return token
|
||||
|
||||
def RevokeToken(self, Token) -> bool:
|
||||
try:
|
||||
with self.engine.begin() as conn:
|
||||
conn.execute(
|
||||
self.dashboardClientsTOTPTable.update().values({
|
||||
"ExpireTime": datetime.datetime.now()
|
||||
}).where(
|
||||
self.dashboardClientsTOTPTable.c.Token == Token
|
||||
)
|
||||
)
|
||||
except Exception as e:
|
||||
return False
|
||||
return True
|
||||
|
||||
def GetTotp(self, token: str) -> tuple[bool, dict] or tuple[bool, None]:
|
||||
with self.engine.connect() as conn:
|
||||
totp = conn.execute(
|
||||
db.select(
|
||||
self.dashboardClientsTable.c.ClientID,
|
||||
self.dashboardClientsTable.c.Email,
|
||||
self.dashboardClientsTable.c.TotpKey,
|
||||
self.dashboardClientsTable.c.TotpKeyVerified,
|
||||
).select_from(
|
||||
self.dashboardClientsTOTPTable
|
||||
).where(
|
||||
db.and_(
|
||||
self.dashboardClientsTOTPTable.c.Token == token,
|
||||
self.dashboardClientsTOTPTable.c.ExpireTime > datetime.datetime.now()
|
||||
)
|
||||
).join(
|
||||
self.dashboardClientsTable,
|
||||
self.dashboardClientsTOTPTable.c.ClientID == self.dashboardClientsTable.c.ClientID
|
||||
)
|
||||
).mappings().fetchone()
|
||||
if totp:
|
||||
return True, dict(totp)
|
||||
return False, None
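# Token lifecycle sketch, assuming the client ID already exists in DashboardClients:
# GenerateToken() expires any live token for the client and issues a fresh
# 10-minute one, GetTotp() resolves it back to the joined client row, and
# RevokeToken() expires it immediately.
totp = DashboardClientsTOTP()
token = totp.GenerateToken("example-client-id")
found, clientRow = totp.GetTotp(token)
totp.RevokeToken(token)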
|
||||
|
||||
|
||||
|
|
@ -0,0 +1,285 @@
|
|||
"""
|
||||
Dashboard Configuration
|
||||
"""
|
||||
import configparser, secrets, os, pyotp, ipaddress, bcrypt
|
||||
from sqlalchemy_utils import database_exists, create_database
|
||||
import sqlalchemy as db
|
||||
from datetime import datetime
|
||||
from typing import Any
|
||||
from flask import current_app
|
||||
from .ConnectionString import ConnectionString
|
||||
from .Utilities import (
|
||||
GetRemoteEndpoint, ValidateDNSAddress
|
||||
)
|
||||
from .DashboardAPIKey import DashboardAPIKey
|
||||
|
||||
|
||||
|
||||
class DashboardConfig:
|
||||
DashboardVersion = 'v4.3'
|
||||
ConfigurationPath = os.getenv('CONFIGURATION_PATH', '.')
|
||||
ConfigurationFilePath = os.path.join(ConfigurationPath, 'wg-dashboard.ini')
|
||||
|
||||
def __init__(self):
|
||||
if not os.path.exists(DashboardConfig.ConfigurationFilePath):
|
||||
open(DashboardConfig.ConfigurationFilePath, "x")
|
||||
self.__config = configparser.RawConfigParser(strict=False)
|
||||
self.__config.read_file(open(DashboardConfig.ConfigurationFilePath, "r+"))
|
||||
self.hiddenAttribute = ["totp_key", "auth_req"]
|
||||
self.__default = {
|
||||
"Account": {
|
||||
"username": "admin",
|
||||
"password": "admin",
|
||||
"enable_totp": "false",
|
||||
"totp_verified": "false",
|
||||
"totp_key": pyotp.random_base32()
|
||||
},
|
||||
"Server": {
|
||||
"wg_conf_path": "/etc/wireguard",
|
||||
"awg_conf_path": "/etc/amnezia/amneziawg",
|
||||
"app_prefix": "",
|
||||
"app_ip": "0.0.0.0",
|
||||
"app_port": "10086",
|
||||
"auth_req": "true",
|
||||
"version": DashboardConfig.DashboardVersion,
|
||||
"dashboard_refresh_interval": "60000",
|
||||
"dashboard_peer_list_display": "grid",
|
||||
"dashboard_sort": "status",
|
||||
"dashboard_theme": "dark",
|
||||
"dashboard_api_key": "false",
|
||||
"dashboard_language": "en-US"
|
||||
},
|
||||
"Peers": {
|
||||
"peer_global_DNS": "1.1.1.1",
|
||||
"peer_endpoint_allowed_ip": "0.0.0.0/0",
|
||||
"peer_display_mode": "grid",
|
||||
"remote_endpoint": GetRemoteEndpoint(),
|
||||
"peer_MTU": "1420",
|
||||
"peer_keep_alive": "21"
|
||||
},
|
||||
"Other": {
|
||||
"welcome_session": "true"
|
||||
},
|
||||
"Database":{
|
||||
"type": "sqlite",
|
||||
"host": "",
|
||||
"port": "",
|
||||
"username": "",
|
||||
"password": ""
|
||||
},
|
||||
"Email":{
|
||||
"server": "",
|
||||
"port": "",
|
||||
"encryption": "",
|
||||
"username": "",
|
||||
"email_password": "",
|
||||
"authentication_required": "true",
|
||||
"send_from": "",
|
||||
"email_template": ""
|
||||
},
|
||||
"OIDC": {
|
||||
"admin_enable": "false",
|
||||
"client_enable": "false"
|
||||
},
|
||||
"Clients": {
|
||||
"enable": "true",
|
||||
},
|
||||
"WireGuardConfiguration": {
|
||||
"autostart": ""
|
||||
}
|
||||
}
|
||||
|
||||
for section, keys in self.__default.items():
|
||||
for key, value in keys.items():
|
||||
exist, currentData = self.GetConfig(section, key)
|
||||
if not exist:
|
||||
self.SetConfig(section, key, value, True)
|
||||
|
||||
self.engine = db.create_engine(ConnectionString('wgdashboard'))
|
||||
self.dbMetadata = db.MetaData()
|
||||
self.__createAPIKeyTable()
|
||||
self.DashboardAPIKeys = self.__getAPIKeys()
|
||||
self.APIAccessed = False
|
||||
self.SetConfig("Server", "version", DashboardConfig.DashboardVersion)
|
||||
|
||||
def getConnectionString(self, database) -> str or None:
|
||||
sqlitePath = os.path.join(DashboardConfig.ConfigurationPath, "db")
|
||||
|
||||
if not os.path.isdir(sqlitePath):
|
||||
os.mkdir(sqlitePath)
|
||||
|
||||
if self.GetConfig("Database", "type")[1] == "postgresql":
|
||||
cn = f'postgresql+psycopg2://{self.GetConfig("Database", "username")[1]}:{self.GetConfig("Database", "password")[1]}@{self.GetConfig("Database", "host")[1]}/{database}'
|
||||
elif self.GetConfig("Database", "type")[1] == "mysql":
|
||||
cn = f'mysql+mysqldb://{self.GetConfig("Database", "username")[1]}:{self.GetConfig("Database", "password")[1]}@{self.GetConfig("Database", "host")[1]}/{database}'
|
||||
else:
|
||||
cn = f'sqlite:///{os.path.join(sqlitePath, f"{database}.db")}'
|
||||
if not database_exists(cn):
|
||||
create_database(cn)
|
||||
return cn
|
||||
|
||||
def __createAPIKeyTable(self):
|
||||
self.apiKeyTable = db.Table('DashboardAPIKeys', self.dbMetadata,
|
||||
db.Column("Key", db.String(255), nullable=False, primary_key=True),
|
||||
db.Column("CreatedAt",
|
||||
(db.DATETIME if self.GetConfig('Database', 'type')[1] == 'sqlite' else db.TIMESTAMP),
|
||||
server_default=db.func.now()
|
||||
),
|
||||
db.Column("ExpiredAt",
|
||||
(db.DATETIME if self.GetConfig('Database', 'type')[1] == 'sqlite' else db.TIMESTAMP)
|
||||
)
|
||||
)
|
||||
self.dbMetadata.create_all(self.engine)
|
||||
def __getAPIKeys(self) -> list[DashboardAPIKey]:
|
||||
try:
|
||||
with self.engine.connect() as conn:
|
||||
keys = conn.execute(self.apiKeyTable.select().where(
|
||||
db.or_(self.apiKeyTable.columns.ExpiredAt.is_(None), self.apiKeyTable.columns.ExpiredAt > datetime.now())
|
||||
)).fetchall()
|
||||
fKeys = []
|
||||
for k in keys:
|
||||
fKeys.append(DashboardAPIKey(k[0], k[1].strftime("%Y-%m-%d %H:%M:%S"), (k[2].strftime("%Y-%m-%d %H:%M:%S") if k[2] else None)))
|
||||
return fKeys
|
||||
except Exception as e:
|
||||
current_app.logger.error("API Keys error", exc_info=e)
|
||||
return []
|
||||
|
||||
def createAPIKeys(self, ExpiredAt = None):
|
||||
newKey = secrets.token_urlsafe(32)
|
||||
with self.engine.begin() as conn:
|
||||
conn.execute(
|
||||
self.apiKeyTable.insert().values({
|
||||
"Key": newKey,
|
||||
"ExpiredAt": ExpiredAt
|
||||
})
|
||||
)
|
||||
|
||||
self.DashboardAPIKeys = self.__getAPIKeys()
|
||||
|
||||
def deleteAPIKey(self, key):
|
||||
with self.engine.begin() as conn:
|
||||
conn.execute(
|
||||
self.apiKeyTable.update().values({
|
||||
"ExpiredAt": datetime.now(),
|
||||
}).where(self.apiKeyTable.columns.Key == key)
|
||||
)
|
||||
|
||||
self.DashboardAPIKeys = self.__getAPIKeys()
|
||||
|
||||
def __configValidation(self, section : str, key: str, value: Any) -> tuple[bool, str]:
|
||||
if (type(value) is str and len(value) == 0
|
||||
and section not in ['Email', 'WireGuardConfiguration'] and
|
||||
not (section == 'Peers' and key == 'peer_global_dns')):
|
||||
return False, "Field cannot be empty!"
|
||||
if section == "Peers" and key == "peer_global_dns" and len(value) > 0:
|
||||
return ValidateDNSAddress(value)
|
||||
if section == "Peers" and key == "peer_endpoint_allowed_ip":
|
||||
value = value.split(",")
|
||||
for i in value:
|
||||
i = i.strip()
|
||||
try:
|
||||
ipaddress.ip_network(i, strict=False)
|
||||
except Exception as e:
|
||||
return False, str(e)
|
||||
if section == "Server" and key == "wg_conf_path":
|
||||
if not os.path.exists(value):
|
||||
return False, f"{value} is not a valid path"
|
||||
if section == "Account" and key == "password":
|
||||
if self.GetConfig("Account", "password")[0]:
|
||||
if not self.__checkPassword(
|
||||
value["currentPassword"], self.GetConfig("Account", "password")[1].encode("utf-8")):
|
||||
return False, "Current password does not match."
|
||||
if value["newPassword"] != value["repeatNewPassword"]:
|
||||
return False, "New passwords does not match"
|
||||
return True, ""
|
||||
|
||||
def generatePassword(self, plainTextPassword: str):
|
||||
return bcrypt.hashpw(plainTextPassword.encode("utf-8"), bcrypt.gensalt())
|
||||
|
||||
def __checkPassword(self, plainTextPassword: str, hashedPassword: bytes):
|
||||
return bcrypt.checkpw(plainTextPassword.encode("utf-8"), hashedPassword)
|
||||
|
||||
def SetConfig(self, section: str, key: str, value: str | bool | list[str] | dict[str, str], init: bool = False) -> tuple[bool, str] | tuple[bool, None]:
|
||||
if key in self.hiddenAttribute and not init:
|
||||
return False, None
|
||||
|
||||
if not init:
|
||||
valid, msg = self.__configValidation(section, key, value)
|
||||
if not valid:
|
||||
return False, msg
|
||||
|
||||
if section == "Account" and key == "password":
|
||||
if not init:
|
||||
value = self.generatePassword(value["newPassword"]).decode("utf-8")
|
||||
else:
|
||||
value = self.generatePassword(value).decode("utf-8")
|
||||
|
||||
if section == "Email" and key == "email_template":
|
||||
value = value.encode('unicode_escape').decode('utf-8')
|
||||
|
||||
if section == "Server" and key == "wg_conf_path":
|
||||
if not os.path.exists(value):
|
||||
return False, "Path does not exist"
|
||||
|
||||
if section not in self.__config:
|
||||
if init:
|
||||
self.__config[section] = {}
|
||||
else:
|
||||
return False, "Section does not exist"
|
||||
|
||||
if ((key not in self.__config[section].keys() and init) or
|
||||
(key in self.__config[section].keys())):
|
||||
if type(value) is bool:
|
||||
if value:
|
||||
self.__config[section][key] = "true"
|
||||
else:
|
||||
self.__config[section][key] = "false"
|
||||
elif type(value) in [int, float]:
|
||||
self.__config[section][key] = str(value)
|
||||
elif type(value) is list:
|
||||
self.__config[section][key] = "||".join(value).strip("||")
|
||||
else:
|
||||
self.__config[section][key] = fr"{value}"
|
||||
return self.SaveConfig(), ""
|
||||
else:
|
||||
return False, f"{key} does not exist under {section}"
|
||||
|
||||
def SaveConfig(self) -> bool:
|
||||
try:
|
||||
with open(DashboardConfig.ConfigurationFilePath, "w+", encoding='utf-8') as configFile:
|
||||
self.__config.write(configFile)
|
||||
return True
|
||||
except Exception as e:
|
||||
return False
|
||||
|
||||
def GetConfig(self, section, key) -> tuple[bool, bool] | tuple[bool, str] | tuple[bool, list[str]] | tuple[bool, None]:
|
||||
if section not in self.__config:
|
||||
return False, None
|
||||
|
||||
if key not in self.__config[section]:
|
||||
return False, None
|
||||
|
||||
if section == "Email" and key == "email_template":
|
||||
return True, self.__config[section][key].encode('utf-8').decode('unicode_escape')
|
||||
|
||||
if section == "WireGuardConfiguration" and key == "autostart":
|
||||
return True, list(filter(lambda x: len(x) > 0, self.__config[section][key].split("||")))
|
||||
|
||||
if self.__config[section][key] in ["1", "yes", "true", "on"]:
|
||||
return True, True
|
||||
|
||||
if self.__config[section][key] in ["0", "no", "false", "off"]:
|
||||
return True, False
|
||||
|
||||
|
||||
return True, self.__config[section][key]
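# Reading values follows an (exists, value) convention: strings stored as
# "true"/"false" come back as real booleans, and the autostart list is split on
# "||". A small sketch, assuming a DashboardConfig instance named `cfg`:
exists, theme = cfg.GetConfig("Server", "dashboard_theme")                 # (True, "dark")
exists, totpEnabled = cfg.GetConfig("Account", "enable_totp")              # (True, False)
exists, autostart = cfg.GetConfig("WireGuardConfiguration", "autostart")   # (True, [...])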
|
||||
|
||||
def toJson(self) -> dict[str, dict[Any, Any]]:
|
||||
the_dict = {}
|
||||
|
||||
for section in self.__config.sections():
|
||||
the_dict[section] = {}
|
||||
for key, val in self.__config.items(section):
|
||||
if key not in self.hiddenAttribute:
|
||||
the_dict[section][key] = self.GetConfig(section, key)[1]
|
||||
return the_dict
|
||||
|
|
@ -1,35 +1,44 @@
|
|||
"""
|
||||
Dashboard Logger Class
|
||||
"""
|
||||
import sqlite3, os, uuid
|
||||
import uuid
|
||||
import sqlalchemy as db
|
||||
from flask import current_app
|
||||
from .ConnectionString import ConnectionString
|
||||
|
||||
|
||||
class DashboardLogger:
|
||||
def __init__(self, CONFIGURATION_PATH):
|
||||
self.loggerdb = sqlite3.connect(os.path.join(CONFIGURATION_PATH, 'db', 'wgdashboard_log.db'),
|
||||
isolation_level=None,
|
||||
check_same_thread=False)
|
||||
self.loggerdb.row_factory = sqlite3.Row
|
||||
self.__createLogDatabase()
|
||||
def __init__(self):
|
||||
self.engine = db.create_engine(ConnectionString("wgdashboard_log"))
|
||||
self.metadata = db.MetaData()
|
||||
self.dashboardLoggerTable = db.Table('DashboardLog', self.metadata,
|
||||
|
||||
db.Column('LogID', db.String(255), nullable=False, primary_key=True),
|
||||
db.Column('LogDate',
|
||||
(db.DATETIME if 'sqlite:///' in ConnectionString("wgdashboard") else db.TIMESTAMP),
|
||||
server_default=db.func.now()),
|
||||
db.Column('URL', db.String(255)),
|
||||
db.Column('IP', db.String(255)),
|
||||
|
||||
db.Column('Status', db.String(255), nullable=False),
|
||||
db.Column('Message', db.Text), extend_existing=True,
|
||||
)
|
||||
self.metadata.create_all(self.engine)
|
||||
self.log(Message="WGDashboard started")
|
||||
def __createLogDatabase(self):
|
||||
with self.loggerdb:
|
||||
loggerdbCursor = self.loggerdb.cursor()
|
||||
existingTable = loggerdbCursor.execute("SELECT name from sqlite_master where type='table'").fetchall()
|
||||
existingTable = [t['name'] for t in existingTable]
|
||||
if "DashboardLog" not in existingTable:
|
||||
loggerdbCursor.execute(
|
||||
"CREATE TABLE DashboardLog (LogID VARCHAR NOT NULL, LogDate DATETIME DEFAULT (strftime('%Y-%m-%d %H:%M:%S','now', 'localtime')), URL VARCHAR, IP VARCHAR, Status VARCHAR, Message VARCHAR, PRIMARY KEY (LogID))")
|
||||
if self.loggerdb.in_transaction:
|
||||
self.loggerdb.commit()
|
||||
|
||||
def log(self, URL: str = "", IP: str = "", Status: str = "true", Message: str = "") -> bool:
|
||||
try:
|
||||
loggerdbCursor = self.loggerdb.cursor()
|
||||
loggerdbCursor.execute(
|
||||
"INSERT INTO DashboardLog (LogID, URL, IP, Status, Message) VALUES (?, ?, ?, ?, ?);", (str(uuid.uuid4()), URL, IP, Status, Message,))
|
||||
loggerdbCursor.close()
|
||||
self.loggerdb.commit()
|
||||
with self.engine.begin() as conn:
|
||||
conn.execute(
|
||||
self.dashboardLoggerTable.insert().values(
|
||||
LogID=str(uuid.uuid4()),
|
||||
URL=URL,
|
||||
IP=IP,
|
||||
Status=Status,
|
||||
Message=Message
|
||||
)
|
||||
)
|
||||
return True
|
||||
except Exception as e:
|
||||
print(f"[WGDashboard] Access Log Error: {str(e)}")
|
||||
current_app.logger.error("Access Log Error", exc_info=e)
|
||||
return False
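# Usage sketch: each call writes one row into the DashboardLog table of the
# wgdashboard_log database; URL and IP are optional request context.
logger = DashboardLogger()
logger.log(URL="/api/example", IP="127.0.0.1", Status="true", Message="Example entry")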
|
||||
|
|
@ -0,0 +1,142 @@
|
|||
import os
|
||||
import json
|
||||
import requests
|
||||
from jose import jwt
|
||||
import certifi
|
||||
from flask import current_app
|
||||
|
||||
class DashboardOIDC:
|
||||
ConfigurationPath = os.getenv('CONFIGURATION_PATH', '.')
|
||||
ConfigurationFilePath = os.path.join(ConfigurationPath, 'wg-dashboard-oidc-providers.json')
|
||||
def __init__(self, mode):
|
||||
self.mode = mode
|
||||
self.providers: dict[str, dict] = {}
|
||||
self.provider_secret: dict[str, str] = {}
|
||||
self.__default = {
|
||||
"Admin": {
|
||||
'Provider': {
|
||||
'client_id': '',
|
||||
'client_secret': '',
|
||||
'issuer': '',
|
||||
},
|
||||
},
|
||||
"Client": {
|
||||
'Provider': {
|
||||
'client_id': '',
|
||||
'client_secret': '',
|
||||
'issuer': '',
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
if not os.path.exists(DashboardOIDC.ConfigurationFilePath):
|
||||
with open(DashboardOIDC.ConfigurationFilePath, "w+") as f:
|
||||
encoder = json.JSONEncoder(indent=4)
|
||||
f.write(encoder.encode(self.__default))
|
||||
|
||||
self.ReadFile()
|
||||
|
||||
def GetProviders(self):
|
||||
return self.providers
|
||||
|
||||
def GetProviderNameByIssuer(self, issuer):
|
||||
for (key, val) in self.providers.items():
|
||||
if val.get('openid_configuration').get('issuer') == issuer:
|
||||
return key
|
||||
return issuer
|
||||
|
||||
def VerifyToken(self, provider, code, redirect_uri):
|
||||
try:
|
||||
if not all([provider, code, redirect_uri]):
|
||||
return False, "Please provide all parameters"
|
||||
|
||||
if provider not in self.providers.keys():
|
||||
return False, "Provider does not exist"
|
||||
|
||||
secret = self.provider_secret.get(provider)
|
||||
oidc_config_status, oidc_config = self.GetProviderConfiguration(provider)
|
||||
provider_info = self.providers.get(provider)
|
||||
|
||||
|
||||
data = {
|
||||
"grant_type": "authorization_code",
|
||||
"code": code,
|
||||
"redirect_uri": redirect_uri,
|
||||
"client_id": provider_info.get('client_id'),
|
||||
"client_secret": secrete
|
||||
}
|
||||
|
||||
try:
|
||||
tokens = requests.post(oidc_config.get('token_endpoint'), data=data).json()
|
||||
if not all([tokens.get('access_token'), tokens.get('id_token')]):
|
||||
return False, tokens.get('error_description', None)
|
||||
except Exception as e:
|
||||
current_app.logger.error("Verify token failed", exc_info=e)
|
||||
return False, str(e)
|
||||
|
||||
access_token = tokens.get('access_token')
|
||||
id_token = tokens.get('id_token')
|
||||
jwks_uri = oidc_config.get("jwks_uri")
|
||||
issuer = oidc_config.get("issuer")
|
||||
jwks = requests.get(jwks_uri, verify=certifi.where()).json()
|
||||
|
||||
headers = jwt.get_unverified_header(id_token)
|
||||
kid = headers["kid"]
|
||||
|
||||
key = next(k for k in jwks["keys"] if k["kid"] == kid)
|
||||
|
||||
payload = jwt.decode(
|
||||
id_token,
|
||||
key,
|
||||
algorithms=[key["alg"]],
|
||||
audience=provider_info.get('client_id'),
|
||||
issuer=issuer,
|
||||
access_token=access_token
|
||||
)
|
||||
current_app.logger.debug(payload)
|
||||
return True, payload
|
||||
except Exception as e:
|
||||
current_app.logger.error(f"Verify token failed for provider {provider}. Reason: {str(e)}", exc_info=e)
|
||||
return False, str(e)
|
||||
|
||||
def GetProviderConfiguration(self, provider_name):
|
||||
if not all([provider_name]):
|
||||
return False, None
|
||||
provider = self.providers.get(provider_name)
|
||||
try:
|
||||
oidc_config = requests.get(
|
||||
f"{provider.get('issuer').strip('/')}/.well-known/openid-configuration",
|
||||
verify=certifi.where()
|
||||
).json()
|
||||
except Exception as e:
|
||||
current_app.logger.error("Failed to get OpenID Configuration of " + provider.get('issuer'), exc_info=e)
|
||||
return False, None
|
||||
return True, oidc_config
|
||||
|
||||
def ReadFile(self):
|
||||
decoder = json.JSONDecoder()
|
||||
try:
|
||||
providers = decoder.decode(
|
||||
open(DashboardOIDC.ConfigurationFilePath, 'r').read()
|
||||
)
|
||||
providers = providers[self.mode]
|
||||
for k in providers.keys():
|
||||
if all([providers[k]['client_id'], providers[k]['client_secret'], providers[k]['issuer']]):
|
||||
try:
|
||||
oidc_config = requests.get(
|
||||
f"{providers[k]['issuer'].strip('/')}/.well-known/openid-configuration",
|
||||
timeout=3,
|
||||
verify=certifi.where()
|
||||
).json()
|
||||
self.providers[k] = {
|
||||
'client_id': providers[k]['client_id'],
|
||||
'issuer': providers[k]['issuer'].strip('/'),
|
||||
'openid_configuration': oidc_config
|
||||
}
|
||||
self.provider_secret[k] = providers[k]['client_secret']
|
||||
current_app.logger.info(f"Registered OIDC Provider: {k}")
|
||||
except Exception as e:
|
||||
current_app.logger.error(f"Failed to register OIDC config for {k}", exc_info=e)
|
||||
except Exception as e:
|
||||
current_app.logger.error('Read OIDC file failed. Reason: ' + str(e))
|
||||
return False
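# Sketch of a filled-in wg-dashboard-oidc-providers.json (all values are
# placeholders; the provider key, e.g. "Authentik", is an arbitrary display name
# that GetProviderNameByIssuer() returns for the matching issuer):
#
#   {
#       "Admin": {
#           "Authentik": {
#               "client_id": "wgdashboard-admin",
#               "client_secret": "REPLACE_ME",
#               "issuer": "https://auth.example.com/application/o/wgdashboard/"
#           }
#       },
#       "Client": {
#           "Authentik": {
#               "client_id": "wgdashboard-client",
#               "client_secret": "REPLACE_ME",
#               "issuer": "https://auth.example.com/application/o/wgdashboard/"
#           }
#       }
#   }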
|
||||
|
|
@ -0,0 +1,117 @@
|
|||
import os
|
||||
import sys
|
||||
import importlib.util
|
||||
from pathlib import Path
|
||||
from typing import Dict, Callable, List, Optional
|
||||
import threading
|
||||
|
||||
|
||||
class DashboardPlugins:
|
||||
|
||||
def __init__(self, app, WireguardConfigurations, directory: str = 'plugins'):
|
||||
self.directory = Path('plugins')
|
||||
self.loadedPlugins: dict[str, Callable] = {}
|
||||
self.errorPlugins: List[str] = []
|
||||
self.logger = app.logger
|
||||
self.WireguardConfigurations = WireguardConfigurations
|
||||
|
||||
def startThreads(self):
|
||||
self.loadAllPlugins()
|
||||
self.executeAllPlugins()
|
||||
|
||||
def preparePlugins(self) -> list[Path]:
|
||||
|
||||
readyPlugins = []
|
||||
|
||||
if not self.directory.exists():
|
||||
os.mkdir(self.directory)
|
||||
return []
|
||||
|
||||
for plugin in self.directory.iterdir():
|
||||
if plugin.is_dir():
|
||||
codeFile = plugin / "main.py"
|
||||
if codeFile.exists():
|
||||
self.logger.info(f"Prepared plugin: {plugin.name}")
|
||||
readyPlugins.append(plugin)
|
||||
|
||||
return readyPlugins
|
||||
|
||||
def loadPlugin(self, path: Path) -> Optional[Callable]:
|
||||
pluginName = path.name
|
||||
codeFile = path / "main.py"
|
||||
|
||||
try:
|
||||
spec = importlib.util.spec_from_file_location(
|
||||
f"WGDashboardPlugin_{pluginName}",
|
||||
codeFile
|
||||
)
|
||||
|
||||
if spec is None or spec.loader is None:
|
||||
raise ImportError(f"Failed to create spec for {pluginName}")
|
||||
|
||||
module = importlib.util.module_from_spec(spec)
|
||||
|
||||
plugin_dir_str = str(path)
|
||||
if plugin_dir_str not in sys.path:
|
||||
sys.path.insert(0, plugin_dir_str)
|
||||
|
||||
try:
|
||||
spec.loader.exec_module(module)
|
||||
finally:
|
||||
if plugin_dir_str in sys.path:
|
||||
sys.path.remove(plugin_dir_str)
|
||||
|
||||
if hasattr(module, 'main'):
|
||||
main_func = getattr(module, 'main')
|
||||
if callable(main_func):
|
||||
self.logger.info(f"Successfully loaded plugin [{pluginName}]")
|
||||
return main_func
|
||||
else:
|
||||
raise AttributeError(f"'main' in {pluginName} is not callable")
|
||||
else:
|
||||
raise AttributeError(f"Plugin {pluginName} does not have a 'main' function")
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to load the plugin [{pluginName}]. Reason: {str(e)}")
|
||||
self.errorPlugins.append(pluginName)
|
||||
return None
|
||||
|
||||
def loadAllPlugins(self):
|
||||
self.loadedPlugins.clear()
|
||||
self.errorPlugins.clear()
|
||||
|
||||
preparedPlugins = self.preparePlugins()
|
||||
|
||||
for plugin in preparedPlugins:
|
||||
pluginName = plugin.name
|
||||
mainFunction = self.loadPlugin(plugin)
|
||||
|
||||
if mainFunction:
|
||||
self.loadedPlugins[pluginName] = mainFunction
|
||||
if self.errorPlugins:
|
||||
self.logger.warning(f"Failed to load {len(self.errorPlugins)} plugin(s): {self.errorPlugins}")
|
||||
|
||||
def executePlugin(self, pluginName: str):
|
||||
if pluginName not in self.loadedPlugins.keys():
|
||||
self.logger.error(f"Failed to execute plugin [{pluginName}]. Reason: Not loaded")
|
||||
return False
|
||||
|
||||
plugin = self.loadedPlugins.get(pluginName)
|
||||
|
||||
try:
|
||||
t = threading.Thread(target=plugin, args=(self.WireguardConfigurations,), daemon=True)
|
||||
t.name = f'WGDashboardPlugin_{pluginName}'
|
||||
t.start()
|
||||
|
||||
if t.is_alive():
|
||||
self.logger.info(f"Execute plugin [{pluginName}] success. PID: {t.native_id}")
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to execute plugin [{pluginName}]. Reason: {str(e)}")
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def executeAllPlugins(self):
|
||||
for plugin in self.loadedPlugins.keys():
|
||||
self.executePlugin(plugin)
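# Minimal plugin sketch: each plugin lives in plugins/<name>/main.py and must
# expose a callable `main`, which is started in a daemon thread and handed the
# live WireguardConfigurations dict. The file path and log text below are
# illustrative only.
#
#   # plugins/peer_counter/main.py
#   import time
#
#   def main(WireguardConfigurations):
#       while True:
#           for name, config in WireguardConfigurations.items():
#               print(f"[peer_counter] {name}: {len(config.Peers)} peer(s)")
#           time.sleep(60)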
|
||||
|
|
@ -0,0 +1,287 @@
|
|||
import json
|
||||
import threading
|
||||
import time
|
||||
import urllib.parse
|
||||
import uuid
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
import requests
|
||||
from pydantic import BaseModel, field_serializer
|
||||
import sqlalchemy as db
|
||||
from .ConnectionString import ConnectionString
|
||||
from flask import current_app
|
||||
|
||||
WebHookActions = ['peer_created', 'peer_deleted', 'peer_updated']
|
||||
class WebHook(BaseModel):
|
||||
WebHookID: str = ''
|
||||
PayloadURL: str = ''
|
||||
ContentType: str = 'application/json'
|
||||
Headers: dict[str, dict[str, str]] = {}
|
||||
VerifySSL: bool = True
|
||||
SubscribedActions: list[str] = WebHookActions
|
||||
IsActive: bool = True
|
||||
CreationDate: datetime = ''
|
||||
Notes: str = ''
|
||||
|
||||
class WebHookSessionLog(BaseModel):
|
||||
LogTime: datetime
|
||||
Status: int
|
||||
Message: str = ''
|
||||
|
||||
@field_serializer('LogTime')
|
||||
def logTimeSerializer(self, LogTime: datetime):
|
||||
return LogTime.strftime("%Y-%m-%d %H:%M:%S")
|
||||
|
||||
class WebHookSessionLogs(BaseModel):
|
||||
Logs: list[WebHookSessionLog] = []
|
||||
|
||||
def addLog(self, status: int, message: str):
|
||||
self.Logs.append(WebHookSessionLog(LogTime=datetime.now(), Status=status, Message=message))
|
||||
|
||||
class DashboardWebHooks:
|
||||
def __init__(self, DashboardConfig):
|
||||
self.engine = db.create_engine(ConnectionString("wgdashboard"))
|
||||
self.metadata = db.MetaData()
|
||||
self.webHooksTable = db.Table(
|
||||
'DashboardWebHooks', self.metadata,
|
||||
db.Column('WebHookID', db.String(255), nullable=False, primary_key=True),
|
||||
db.Column('PayloadURL', db.Text, nullable=False),
|
||||
db.Column('ContentType', db.String(255), nullable=False),
|
||||
db.Column('Headers', db.JSON),
|
||||
db.Column('VerifySSL', db.Boolean, nullable=False),
|
||||
db.Column('SubscribedActions', db.JSON),
|
||||
db.Column('IsActive', db.Boolean, nullable=False),
|
||||
db.Column('CreationDate',
|
||||
(db.DATETIME if DashboardConfig.GetConfig("Database", "type")[1] == 'sqlite' else db.TIMESTAMP),
|
||||
server_default=db.func.now(),
|
||||
nullable=False),
|
||||
db.Column('Notes', db.Text),
|
||||
extend_existing=True
|
||||
)
|
||||
self.webHookSessionsTable = db.Table(
|
||||
'DashboardWebHookSessions', self.metadata,
|
||||
db.Column('WebHookSessionID', db.String(255), nullable=False, primary_key=True),
|
||||
db.Column('WebHookID', db.String(255), nullable=False),
|
||||
db.Column('StartDate',
|
||||
(db.DATETIME if DashboardConfig.GetConfig("Database", "type")[1] == 'sqlite' else db.TIMESTAMP),
|
||||
server_default=db.func.now(),
|
||||
nullable=False
|
||||
),
|
||||
db.Column('EndDate',
|
||||
(db.DATETIME if DashboardConfig.GetConfig("Database", "type")[1] == 'sqlite' else db.TIMESTAMP),
|
||||
),
|
||||
db.Column('Data', db.JSON),
|
||||
db.Column('Status', db.INTEGER),
|
||||
db.Column('Logs', db.JSON)
|
||||
)
|
||||
|
||||
self.metadata.create_all(self.engine)
|
||||
self.WebHooks: list[WebHook] = []
|
||||
|
||||
with self.engine.begin() as conn:
|
||||
conn.execute(
|
||||
self.webHookSessionsTable.update().values({
|
||||
"EndDate": datetime.now(),
|
||||
"Status": 2
|
||||
}).where(
|
||||
self.webHookSessionsTable.c.Status == -1
|
||||
)
|
||||
)
|
||||
|
||||
self.__getWebHooks()
|
||||
|
||||
def __getWebHooks(self):
|
||||
with self.engine.connect() as conn:
|
||||
webhooks = conn.execute(
|
||||
self.webHooksTable.select().order_by(
|
||||
self.webHooksTable.c.CreationDate
|
||||
)
|
||||
).mappings().fetchall()
|
||||
self.WebHooks.clear()
|
||||
self.WebHooks = [WebHook(**webhook) for webhook in webhooks]
|
||||
|
||||
def GetWebHooks(self):
|
||||
self.__getWebHooks()
|
||||
return list(map(lambda x : x.model_dump(), self.WebHooks))
|
||||
|
||||
def GetWebHookSessions(self, webHook: WebHook):
|
||||
with self.engine.connect() as conn:
|
||||
sessions = conn.execute(
|
||||
self.webHookSessionsTable.select().where(
|
||||
self.webHookSessionsTable.c.WebHookID == webHook.WebHookID
|
||||
).order_by(
|
||||
db.desc(self.webHookSessionsTable.c.StartDate)
|
||||
)
|
||||
).mappings().fetchall()
|
||||
return sessions
|
||||
|
||||
def CreateWebHook(self) -> WebHook:
|
||||
return WebHook(WebHookID=str(uuid.uuid4()))
|
||||
|
||||
def SearchWebHook(self, webHook: WebHook) -> WebHook | None:
|
||||
try:
|
||||
first = next(filter(lambda x : x.WebHookID == webHook.WebHookID, self.WebHooks))
|
||||
except StopIteration:
|
||||
return None
|
||||
return first
|
||||
|
||||
def SearchWebHookByID(self, webHookID: str) -> WebHook | None:
|
||||
try:
|
||||
first = next(filter(lambda x : x.WebHookID == webHookID, self.WebHooks))
|
||||
except StopIteration:
|
||||
return None
|
||||
return first
|
||||
|
||||
def UpdateWebHook(self, webHook: dict[str, str]) -> tuple[bool, str] | tuple[bool, None]:
|
||||
try:
|
||||
webHook = WebHook(**webHook)
|
||||
|
||||
if len(webHook.PayloadURL) == 0:
|
||||
return False, "Payload URL cannot be empty"
|
||||
|
||||
if len(webHook.ContentType) == 0 or webHook.ContentType not in [
|
||||
'application/json', 'application/x-www-form-urlencoded'
|
||||
]:
|
||||
return False, "Content Type is invalid"
|
||||
|
||||
|
||||
with self.engine.begin() as conn:
|
||||
if self.SearchWebHook(webHook):
|
||||
conn.execute(
|
||||
self.webHooksTable.update().values(
|
||||
webHook.model_dump(exclude={'WebHookID'})
|
||||
).where(
|
||||
self.webHooksTable.c.WebHookID == webHook.WebHookID
|
||||
)
|
||||
)
|
||||
else:
|
||||
webHook.CreationDate = datetime.now()
|
||||
conn.execute(
|
||||
self.webHooksTable.insert().values(
|
||||
webHook.model_dump()
|
||||
)
|
||||
)
|
||||
self.__getWebHooks()
|
||||
except Exception as e:
|
||||
return False, str(e)
|
||||
return True, None
|
||||
|
||||
def DeleteWebHook(self, webHook) -> tuple[bool, str] | tuple[bool, None]:
|
||||
try:
|
||||
webHook = WebHook(**webHook)
|
||||
with self.engine.begin() as conn:
|
||||
conn.execute(
|
||||
self.webHooksTable.delete().where(
|
||||
self.webHooksTable.c.WebHookID == webHook.WebHookID
|
||||
)
|
||||
)
|
||||
self.__getWebHooks()
|
||||
except Exception as e:
|
||||
return False, str(e)
|
||||
return True, None
|
||||
|
||||
def RunWebHook(self, action: str, data):
|
||||
try:
|
||||
if action not in WebHookActions:
|
||||
return False
|
||||
self.__getWebHooks()
|
||||
subscribedWebHooks = filter(lambda webhook: action in webhook.SubscribedActions and webhook.IsActive,
|
||||
self.WebHooks)
|
||||
data['action'] = action
|
||||
for i in subscribedWebHooks:
|
||||
try:
|
||||
ws = WebHookSession(i, data)
|
||||
t = threading.Thread(target=ws.Execute, daemon=True)
|
||||
t.start()
|
||||
current_app.logger.info(f"Requesting {i.PayloadURL}")
|
||||
except Exception as e:
|
||||
current_app.logger.error(f"Requesting {i.PayloadURL} error", e)
|
||||
except Exception as e:
|
||||
current_app.logger.error("Error when running WebHook")
|
||||
|
||||
class WebHookSession:
|
||||
def __init__(self, webHook: WebHook, data: dict[str, str]):
|
||||
self.engine = db.create_engine(ConnectionString("wgdashboard"))
|
||||
self.metadata = db.MetaData()
|
||||
self.webHookSessionsTable = db.Table('DashboardWebHookSessions', self.metadata, autoload_with=self.engine)
|
||||
self.webHook = webHook
|
||||
self.sessionID = str(uuid.uuid4())
|
||||
self.webHookSessionLogs: WebHookSessionLogs = WebHookSessionLogs()
|
||||
self.time = datetime.now()
|
||||
data['time'] = self.time.strftime("%Y-%m-%d %H:%M:%S")
|
||||
data['webhook_id'] = webHook.WebHookID
|
||||
data['webhook_session'] = self.sessionID
|
||||
self.data = data
|
||||
self.Prepare()
|
||||
|
||||
def Prepare(self):
|
||||
with self.engine.begin() as conn:
|
||||
conn.execute(
|
||||
self.webHookSessionsTable.insert().values({
|
||||
"WebHookSessionID": self.sessionID,
|
||||
"WebHookID": self.webHook.WebHookID,
|
||||
"Data": self.data,
|
||||
"StartDate": self.time,
|
||||
"Status": -1,
|
||||
"Logs": self.webHookSessionLogs.model_dump()
|
||||
})
|
||||
)
|
||||
self.UpdateSessionLog(-1, "Preparing webhook session")
|
||||
|
||||
def UpdateSessionLog(self, status, message):
|
||||
self.webHookSessionLogs.addLog(status, message)
|
||||
with self.engine.begin() as conn:
|
||||
conn.execute(
|
||||
self.webHookSessionsTable.update().values({
|
||||
"Logs": self.webHookSessionLogs.model_dump()
|
||||
}).where(
|
||||
self.webHookSessionsTable.c.WebHookSessionID == self.sessionID
|
||||
)
|
||||
)
|
||||
|
||||
def UpdateStatus(self, status: int):
|
||||
with self.engine.begin() as conn:
|
||||
conn.execute(
|
||||
self.webHookSessionsTable.update().values({
|
||||
"Status": status,
|
||||
"EndDate": datetime.now()
|
||||
}).where(
|
||||
self.webHookSessionsTable.c.WebHookSessionID == self.sessionID
|
||||
)
|
||||
)
|
||||
|
||||
def Execute(self):
|
||||
success = False
|
||||
|
||||
for i in range(5):
|
||||
headerDictionary = {
|
||||
'Content-Type': self.webHook.ContentType
|
||||
}
|
||||
for header in self.webHook.Headers.values():
|
||||
if header['key'] not in ['Content-Type']:
|
||||
headerDictionary[header['key']] = header['value']
|
||||
|
||||
if self.webHook.ContentType == "application/json":
|
||||
reqData = json.dumps(self.data)
|
||||
else:
|
||||
for (key, val) in self.data.items():
|
||||
if type(self.data[key]) not in [str, int]:
|
||||
self.data[key] = json.dumps(self.data[key])
|
||||
reqData = urllib.parse.urlencode(self.data)
|
||||
try:
|
||||
req = requests.post(
|
||||
self.webHook.PayloadURL, headers=headerDictionary, timeout=10, data=reqData, verify=self.webHook.VerifySSL
|
||||
)
|
||||
req.raise_for_status()
|
||||
success = True
|
||||
self.UpdateSessionLog(0, "Webhook request finished")
|
||||
self.UpdateSessionLog(0, json.dumps({"returned_data": req.text}))
|
||||
self.UpdateStatus(0)
|
||||
break
|
||||
except requests.exceptions.RequestException as e:
|
||||
self.UpdateSessionLog(1, f"Attempt #{i + 1}/5. Request errored. Reason: " + str(e))
|
||||
time.sleep(10)
|
||||
|
||||
if not success:
|
||||
self.UpdateSessionLog(1, "Webhook request failed & terminated.")
|
||||
self.UpdateStatus(1)
|
||||
|
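For reference, the delivery behaviour of Execute() above reduces to a bounded retry loop: up to five POST attempts, a 10-second pause after each failure, and an early exit on the first 2xx response. A minimal standalone sketch under those assumptions (the URL and helper name are illustrative, not part of WGDashboard):

import json, time
import requests

def deliver(url: str, payload: dict, attempts: int = 5, delay: int = 10) -> bool:
    # Try the webhook endpoint a fixed number of times before giving up.
    for _ in range(attempts):
        try:
            resp = requests.post(url, data=json.dumps(payload),
                                 headers={"Content-Type": "application/json"},
                                 timeout=10)
            resp.raise_for_status()      # any non-2xx response counts as a failure
            return True
        except requests.exceptions.RequestException:
            time.sleep(delay)            # back off before the next attempt
    return False

# deliver("https://example.com/hook", {"action": "peer_created"})   # hypothetical endpoint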
|
@ -31,18 +31,25 @@ class EmailSender:
|
|||
|
||||
def SendFrom(self):
|
||||
return self.DashboardConfig.GetConfig("Email", "send_from")[1]
|
||||
|
||||
# Thank you, @gdeeble from GitHub
|
||||
def AuthenticationRequired(self):
|
||||
return self.DashboardConfig.GetConfig("Email", "authentication_required")[1]
|
||||
|
||||
def ready(self):
|
||||
return len(self.Server()) > 0 and len(self.Port()) > 0 and len(self.Encryption()) > 0 and len(self.Username()) > 0 and len(self.Password()) > 0 and len(self.SendFrom())
|
||||
if self.AuthenticationRequired():
|
||||
return all([self.Server(), self.Port(), self.Encryption(), self.Username(), self.Password(), self.SendFrom()])
|
||||
return all([self.Server(), self.Port(), self.Encryption(), self.SendFrom()])
|
||||
|
||||
def send(self, receiver, subject, body, includeAttachment = False, attachmentName = ""):
|
||||
def send(self, receiver, subject, body, includeAttachment = False, attachmentName = "") -> tuple[bool, str] | tuple[bool, None]:
|
||||
if self.ready():
|
||||
try:
|
||||
self.smtp = smtplib.SMTP(self.Server(), port=int(self.Port()))
|
||||
self.smtp.ehlo()
|
||||
if self.Encryption() == "STARTTLS":
|
||||
self.smtp.starttls()
|
||||
self.smtp.login(self.Username(), self.Password())
|
||||
if self.AuthenticationRequired():
|
||||
self.smtp.login(self.Username(), self.Password())
|
||||
message = MIMEMultipart()
|
||||
message['Subject'] = subject
|
||||
message['From'] = self.SendFrom()
|
||||
|
|
|
|||
|
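The patched ready()/send() pair above makes SMTP authentication optional. A self-contained sketch of that flow with the standard library (host, credentials and addresses are placeholders, not WGDashboard settings):

import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText

def send_mail(server, port, send_from, receiver, subject, body,
              username="", password="", encryption="STARTTLS", authentication_required=True):
    smtp = smtplib.SMTP(server, port=int(port))
    smtp.ehlo()
    if encryption == "STARTTLS":
        smtp.starttls()
    if authentication_required:          # only log in when the relay demands it
        smtp.login(username, password)
    message = MIMEMultipart()
    message["Subject"] = subject
    message["From"] = send_from
    message["To"] = receiver
    message.attach(MIMEText(body, "plain"))
    smtp.sendmail(send_from, receiver, message.as_string())
    smtp.quit()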
|
@ -0,0 +1,88 @@
|
|||
import uuid
|
||||
|
||||
from pydantic import BaseModel, field_serializer
|
||||
import sqlalchemy as db
|
||||
from .ConnectionString import ConnectionString
|
||||
|
||||
|
||||
class NewConfigurationTemplate(BaseModel):
|
||||
TemplateID: str = ''
|
||||
Subnet: str = ''
|
||||
ListenPortStart: int = 0
|
||||
ListenPortEnd: int = 0
|
||||
Notes: str = ""
|
||||
|
||||
class NewConfigurationTemplates:
|
||||
def __init__(self):
|
||||
self.engine = db.create_engine(ConnectionString("wgdashboard"))
|
||||
self.metadata = db.MetaData()
|
||||
self.templatesTable = db.Table(
|
||||
'NewConfigurationTemplates', self.metadata,
|
||||
db.Column('TemplateID', db.String(255), primary_key=True),
|
||||
db.Column('Subnet', db.String(255)),
|
||||
db.Column('ListenPortStart', db.Integer),
|
||||
db.Column('ListenPortEnd', db.Integer),
|
||||
db.Column('Notes', db.Text),
|
||||
)
|
||||
self.metadata.create_all(self.engine)
|
||||
self.Templates: list[NewConfigurationTemplate] = []
|
||||
self.__getTemplates()
|
||||
|
||||
def GetTemplates(self):
|
||||
self.__getTemplates()
|
||||
return list(map(lambda x : x.model_dump(), self.Templates))
|
||||
|
||||
def __getTemplates(self):
|
||||
with self.engine.connect() as conn:
|
||||
templates = conn.execute(
|
||||
self.templatesTable.select()
|
||||
).mappings().fetchall()
|
||||
self.Templates.clear()
|
||||
self.Templates = [NewConfigurationTemplate(**template) for template in templates]
|
||||
|
||||
def CreateTemplate(self) -> NewConfigurationTemplate:
|
||||
return NewConfigurationTemplate(TemplateID=str(uuid.uuid4()))
|
||||
|
||||
def SearchTemplate(self, template: NewConfigurationTemplate):
|
||||
try:
|
||||
first = next(filter(lambda x : x.TemplateID == template.TemplateID, self.Templates))
|
||||
except StopIteration:
|
||||
return None
|
||||
return first
|
||||
|
||||
def UpdateTemplate(self, template: dict[str, str]) -> tuple[bool, str] | tuple[bool, None]:
|
||||
try:
|
||||
template = NewConfigurationTemplate(**template)
|
||||
with self.engine.begin() as conn:
|
||||
if self.SearchTemplate(template):
|
||||
conn.execute(
|
||||
self.templatesTable.update().values(
|
||||
template.model_dump(exclude={'TemplateID'})
|
||||
).where(
|
||||
self.templatesTable.c.TemplateID == template.TemplateID
|
||||
)
|
||||
)
|
||||
else:
|
||||
conn.execute(
|
||||
self.templatesTable.insert().values(
|
||||
template.model_dump()
|
||||
)
|
||||
)
|
||||
self.__getTemplates()
|
||||
except Exception as e:
|
||||
return False, str(e)
|
||||
return True, None
|
||||
|
||||
def DeleteTemplate(self, template: dict[str, str]) -> tuple[bool, str] | tuple[bool, None]:
|
||||
try:
|
||||
template = NewConfigurationTemplate(**template)
|
||||
with self.engine.begin() as conn:
|
||||
conn.execute(
|
||||
self.templatesTable.delete().where(
|
||||
self.templatesTable.c.TemplateID == template.TemplateID
|
||||
)
|
||||
)
|
||||
self.__getTemplates()
|
||||
except Exception as e:
|
||||
return False, str(e)
|
||||
return True, None
|
||||
|
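A hypothetical usage sketch of the template store above (assumes the database behind ConnectionString("wgdashboard") is reachable; the subnet and ports are invented):

templates = NewConfigurationTemplates()
t = templates.CreateTemplate()                       # fresh TemplateID via uuid4
t.Subnet = "10.20.0.1/24"
t.ListenPortStart, t.ListenPortEnd = 51820, 51830
ok, err = templates.UpdateTemplate(t.model_dump())   # upserts the row
assert ok, err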
|
@ -0,0 +1,350 @@
|
|||
"""
|
||||
Peer
|
||||
"""
|
||||
import base64
|
||||
import datetime
|
||||
import json
|
||||
import os, subprocess, uuid, random, re
|
||||
from datetime import timedelta
|
||||
|
||||
import jinja2
|
||||
import sqlalchemy as db
|
||||
from .PeerJob import PeerJob
|
||||
from .PeerShareLink import PeerShareLink
|
||||
from .Utilities import GenerateWireguardPublicKey, ValidateIPAddressesWithRange, ValidateDNSAddress
|
||||
|
||||
|
||||
class Peer:
|
||||
def __init__(self, tableData, configuration):
|
||||
self.configuration = configuration
|
||||
self.id = tableData["id"]
|
||||
self.private_key = tableData["private_key"]
|
||||
self.DNS = tableData["DNS"]
|
||||
self.endpoint_allowed_ip = tableData["endpoint_allowed_ip"]
|
||||
self.name = tableData["name"]
|
||||
self.total_receive = tableData["total_receive"]
|
||||
self.total_sent = tableData["total_sent"]
|
||||
self.total_data = tableData["total_data"]
|
||||
self.endpoint = tableData["endpoint"]
|
||||
self.status = tableData["status"]
|
||||
self.latest_handshake = tableData["latest_handshake"]
|
||||
self.allowed_ip = tableData["allowed_ip"]
|
||||
self.cumu_receive = tableData["cumu_receive"]
|
||||
self.cumu_sent = tableData["cumu_sent"]
|
||||
self.cumu_data = tableData["cumu_data"]
|
||||
self.mtu = tableData["mtu"]
|
||||
self.keepalive = tableData["keepalive"]
|
||||
self.remote_endpoint = tableData["remote_endpoint"]
|
||||
self.preshared_key = tableData["preshared_key"]
|
||||
self.jobs: list[PeerJob] = []
|
||||
self.ShareLink: list[PeerShareLink] = []
|
||||
self.getJobs()
|
||||
self.getShareLink()
|
||||
|
||||
def toJson(self):
|
||||
# self.getJobs()
|
||||
# self.getShareLink()
|
||||
return self.__dict__
|
||||
|
||||
def __repr__(self):
|
||||
return str(self.toJson())
|
||||
|
||||
def updatePeer(self, name: str, private_key: str,
|
||||
preshared_key: str,
|
||||
dns_addresses: str, allowed_ip: str, endpoint_allowed_ip: str, mtu: int,
|
||||
keepalive: int) -> tuple[bool, str] or tuple[bool, None]:
|
||||
if not self.configuration.getStatus():
|
||||
self.configuration.toggleConfiguration()
|
||||
|
||||
existingAllowedIps = [item for row in list(
|
||||
map(lambda x: [q.strip() for q in x.split(',')],
|
||||
map(lambda y: y.allowed_ip,
|
||||
list(filter(lambda k: k.id != self.id, self.configuration.getPeersList()))))) for item in row]
|
||||
|
||||
if allowed_ip in existingAllowedIps:
|
||||
return False, "Allowed IP already taken by another peer"
|
||||
|
||||
if not ValidateIPAddressesWithRange(endpoint_allowed_ip):
|
||||
return False, f"Endpoint Allowed IPs format is incorrect"
|
||||
|
||||
if len(dns_addresses) > 0 and not ValidateDNSAddress(dns_addresses):
|
||||
return False, f"DNS format is incorrect"
|
||||
|
||||
if type(mtu) is str or mtu is None:
|
||||
mtu = 0
|
||||
|
||||
if mtu < 0 or mtu > 1460:
|
||||
return False, "MTU format is not correct"
|
||||
|
||||
if type(keepalive) is str or keepalive is None:
|
||||
keepalive = 0
|
||||
|
||||
if keepalive < 0:
|
||||
return False, "Persistent Keepalive format is not correct"
|
||||
if len(private_key) > 0:
|
||||
pubKey = GenerateWireguardPublicKey(private_key)
|
||||
if not pubKey[0] or pubKey[1] != self.id:
|
||||
return False, "Private key does not match with the public key"
|
||||
try:
|
||||
rd = random.Random()
|
||||
uid = str(uuid.UUID(int=rd.getrandbits(128), version=4))
|
||||
pskExist = len(preshared_key) > 0
|
||||
|
||||
if pskExist:
|
||||
with open(uid, "w+") as f:
|
||||
f.write(preshared_key)
|
||||
newAllowedIPs = allowed_ip.replace(" ", "")
|
||||
updateAllowedIp = subprocess.check_output(
|
||||
f"{self.configuration.Protocol} set {self.configuration.Name} peer {self.id} allowed-ips {newAllowedIPs} {f'preshared-key {uid}' if pskExist else 'preshared-key /dev/null'}",
|
||||
shell=True, stderr=subprocess.STDOUT)
|
||||
|
||||
if pskExist: os.remove(uid)
|
||||
if len(updateAllowedIp.decode().strip("\n")) != 0:
|
||||
return False, "Update peer failed when updating Allowed IPs"
|
||||
saveConfig = subprocess.check_output(f"{self.configuration.Protocol}-quick save {self.configuration.Name}",
|
||||
shell=True, stderr=subprocess.STDOUT)
|
||||
if f"wg showconf {self.configuration.Name}" not in saveConfig.decode().strip('\n'):
|
||||
return False, "Update peer failed when saving the configuration"
|
||||
with self.configuration.engine.begin() as conn:
|
||||
conn.execute(
|
||||
self.configuration.peersTable.update().values({
|
||||
"name": name,
|
||||
"private_key": private_key,
|
||||
"DNS": dns_addresses,
|
||||
"endpoint_allowed_ip": endpoint_allowed_ip,
|
||||
"mtu": mtu,
|
||||
"keepalive": keepalive,
|
||||
"preshared_key": preshared_key
|
||||
}).where(
|
||||
self.configuration.peersTable.c.id == self.id
|
||||
)
|
||||
)
|
||||
return True, None
|
||||
except subprocess.CalledProcessError as exc:
|
||||
return False, exc.output.decode("UTF-8").strip()
|
||||
|
||||
def downloadPeer(self) -> dict[str, str]:
|
||||
final = {
|
||||
"fileName": "",
|
||||
"file": ""
|
||||
}
|
||||
filename = self.name
|
||||
if len(filename) == 0:
|
||||
filename = "UntitledPeer"
|
||||
filename = "".join(filename.split(' '))
|
||||
filename = f"{filename}"
|
||||
illegal_filename = [".", ",", "/", "?", "<", ">", "\\", ":", "*", '|' '\"', "com1", "com2", "com3",
|
||||
"com4", "com5", "com6", "com7", "com8", "com9", "lpt1", "lpt2", "lpt3", "lpt4",
|
||||
"lpt5", "lpt6", "lpt7", "lpt8", "lpt9", "con", "nul", "prn"]
|
||||
for i in illegal_filename:
|
||||
filename = filename.replace(i, "")
|
||||
|
||||
for i in filename:
|
||||
if re.match("^[a-zA-Z0-9_=+.-]$", i):
|
||||
final["fileName"] += i
|
||||
|
||||
interfaceSection = {
|
||||
"PrivateKey": self.private_key,
|
||||
"Address": self.allowed_ip,
|
||||
"MTU": (
|
||||
self.configuration.configurationInfo.OverridePeerSettings.MTU
|
||||
if self.configuration.configurationInfo.OverridePeerSettings.MTU else self.mtu
|
||||
),
|
||||
"DNS": (
|
||||
self.configuration.configurationInfo.OverridePeerSettings.DNS
|
||||
if self.configuration.configurationInfo.OverridePeerSettings.DNS else self.DNS
|
||||
)
|
||||
}
|
||||
|
||||
if self.configuration.Protocol == "awg":
|
||||
interfaceSection.update({
|
||||
"Jc": self.configuration.Jc,
|
||||
"Jmin": self.configuration.Jmin,
|
||||
"Jmax": self.configuration.Jmax,
|
||||
"S1": self.configuration.S1,
|
||||
"S2": self.configuration.S2,
|
||||
"H1": self.configuration.H1,
|
||||
"H2": self.configuration.H2,
|
||||
"H3": self.configuration.H3,
|
||||
"H4": self.configuration.H4
|
||||
})
|
||||
|
||||
peerSection = {
|
||||
"PublicKey": self.configuration.PublicKey,
|
||||
"AllowedIPs": (
|
||||
self.configuration.configurationInfo.OverridePeerSettings.EndpointAllowedIPs
|
||||
if self.configuration.configurationInfo.OverridePeerSettings.EndpointAllowedIPs else self.endpoint_allowed_ip
|
||||
),
|
||||
"Endpoint": f'{(self.configuration.configurationInfo.OverridePeerSettings.PeerRemoteEndpoint if self.configuration.configurationInfo.OverridePeerSettings.PeerRemoteEndpoint else self.configuration.DashboardConfig.GetConfig("Peers", "remote_endpoint")[1])}:{(self.configuration.configurationInfo.OverridePeerSettings.ListenPort if self.configuration.configurationInfo.OverridePeerSettings.ListenPort else self.configuration.ListenPort)}',
|
||||
"PersistentKeepalive": (
|
||||
self.configuration.configurationInfo.OverridePeerSettings.PersistentKeepalive
|
||||
if self.configuration.configurationInfo.OverridePeerSettings.PersistentKeepalive
|
||||
else self.keepalive
|
||||
),
|
||||
"PresharedKey": self.preshared_key
|
||||
}
|
||||
combine = [interfaceSection.items(), peerSection.items()]
|
||||
for s in range(len(combine)):
|
||||
if s == 0:
|
||||
final["file"] += "[Interface]\n"
|
||||
else:
|
||||
final["file"] += "\n[Peer]\n"
|
||||
for (key, val) in combine[s]:
|
||||
if val is not None and ((type(val) is str and len(val) > 0) or (type(val) is int and val > 0)):
|
||||
final["file"] += f"{key} = {val}\n"
|
||||
if self.configuration.Protocol == "awg":
|
||||
final["amneziaVPN"] = json.dumps({
|
||||
"containers": [{
|
||||
"awg": {
|
||||
"isThirdPartyConfig": True,
|
||||
"last_config": final['file'],
|
||||
"port": self.configuration.ListenPort,
|
||||
"transport_proto": "udp"
|
||||
},
|
||||
"container": "amnezia-awg"
|
||||
}],
|
||||
"defaultContainer": "amnezia-awg",
|
||||
"description": self.name,
|
||||
"hostName": (
|
||||
self.configuration.configurationInfo.OverridePeerSettings.PeerRemoteEndpoint
|
||||
if self.configuration.configurationInfo.OverridePeerSettings.PeerRemoteEndpoint
|
||||
else self.configuration.DashboardConfig.GetConfig("Peers", "remote_endpoint")[1])
|
||||
})
|
||||
return final
|
||||
|
||||
def getJobs(self):
|
||||
self.jobs = self.configuration.AllPeerJobs.searchJob(self.configuration.Name, self.id)
|
||||
|
||||
def getShareLink(self):
|
||||
self.ShareLink = self.configuration.AllPeerShareLinks.getLink(self.configuration.Name, self.id)
|
||||
|
||||
def resetDataUsage(self, mode: str):
|
||||
try:
|
||||
with self.configuration.engine.begin() as conn:
|
||||
if mode == "total":
|
||||
conn.execute(
|
||||
self.configuration.peersTable.update().values({
|
||||
"total_data": 0,
|
||||
"cumu_data": 0,
|
||||
"total_receive": 0,
|
||||
"cumu_receive": 0,
|
||||
"total_sent": 0,
|
||||
"cumu_sent": 0
|
||||
}).where(
|
||||
self.configuration.peersTable.c.id == self.id
|
||||
)
|
||||
)
|
||||
self.total_data = 0
|
||||
self.total_receive = 0
|
||||
self.total_sent = 0
|
||||
self.cumu_data = 0
|
||||
self.cumu_sent = 0
|
||||
self.cumu_receive = 0
|
||||
elif mode == "receive":
|
||||
conn.execute(
|
||||
self.configuration.peersTable.update().values({
|
||||
"total_receive": 0,
|
||||
"cumu_receive": 0,
|
||||
}).where(
|
||||
self.configuration.peersTable.c.id == self.id
|
||||
)
|
||||
)
|
||||
self.cumu_receive = 0
|
||||
self.total_receive = 0
|
||||
elif mode == "sent":
|
||||
conn.execute(
|
||||
self.configuration.peersTable.update().values({
|
||||
"total_sent": 0,
|
||||
"cumu_sent": 0
|
||||
}).where(
|
||||
self.configuration.peersTable.c.id == self.id
|
||||
)
|
||||
)
|
||||
self.cumu_sent = 0
|
||||
self.total_sent = 0
|
||||
else:
|
||||
return False
|
||||
except Exception as e:
|
||||
print(e)
|
||||
return False
|
||||
return True
|
||||
|
||||
def getEndpoints(self):
|
||||
result = []
|
||||
with self.configuration.engine.connect() as conn:
|
||||
result = conn.execute(
|
||||
db.select(
|
||||
self.configuration.peersHistoryEndpointTable.c.endpoint
|
||||
).group_by(
|
||||
self.configuration.peersHistoryEndpointTable.c.endpoint
|
||||
).where(
|
||||
self.configuration.peersHistoryEndpointTable.c.id == self.id
|
||||
)
|
||||
).mappings().fetchall()
|
||||
return list(result)
|
||||
|
||||
def getTraffics(self, interval: int = 30, startDate: datetime.datetime = None, endDate: datetime.datetime = None):
|
||||
if startDate is None and endDate is None:
|
||||
endDate = datetime.datetime.now()
|
||||
startDate = endDate - timedelta(minutes=interval)
|
||||
else:
|
||||
endDate = endDate.replace(hour=23, minute=59, second=59, microsecond=999999)
|
||||
startDate = startDate.replace(hour=0, minute=0, second=0, microsecond=0)
|
||||
|
||||
with self.configuration.engine.connect() as conn:
|
||||
result = conn.execute(
|
||||
db.select(
|
||||
self.configuration.peersTransferTable.c.cumu_data,
|
||||
self.configuration.peersTransferTable.c.total_data,
|
||||
self.configuration.peersTransferTable.c.cumu_receive,
|
||||
self.configuration.peersTransferTable.c.total_receive,
|
||||
self.configuration.peersTransferTable.c.cumu_sent,
|
||||
self.configuration.peersTransferTable.c.total_sent,
|
||||
self.configuration.peersTransferTable.c.time
|
||||
).where(
|
||||
db.and_(
|
||||
self.configuration.peersTransferTable.c.id == self.id,
|
||||
self.configuration.peersTransferTable.c.time <= endDate,
|
||||
self.configuration.peersTransferTable.c.time >= startDate,
|
||||
)
|
||||
).order_by(
|
||||
self.configuration.peersTransferTable.c.time
|
||||
)
|
||||
).mappings().fetchall()
|
||||
return list(result)
|
||||
|
||||
|
||||
def getSessions(self, startDate: datetime.datetime = None, endDate: datetime.datetime = None):
|
||||
if endDate is None:
|
||||
endDate = datetime.datetime.now()
|
||||
|
||||
if startDate is None:
|
||||
startDate = endDate
|
||||
|
||||
endDate = endDate.replace(hour=23, minute=59, second=59, microsecond=999999)
|
||||
startDate = startDate.replace(hour=0, minute=0, second=0, microsecond=0)
|
||||
|
||||
|
||||
with self.configuration.engine.connect() as conn:
|
||||
result = conn.execute(
|
||||
db.select(
|
||||
self.configuration.peersTransferTable.c.time
|
||||
).where(
|
||||
db.and_(
|
||||
self.configuration.peersTransferTable.c.id == self.id,
|
||||
self.configuration.peersTransferTable.c.time <= endDate,
|
||||
self.configuration.peersTransferTable.c.time >= startDate,
|
||||
)
|
||||
).order_by(
|
||||
self.configuration.peersTransferTable.c.time
|
||||
)
|
||||
).fetchall()
|
||||
time = list(map(lambda x : x[0], result))
|
||||
return time
|
||||
|
||||
def __duration(self, t1: datetime.datetime, t2: datetime.datetime):
|
||||
delta = t1 - t2
|
||||
|
||||
hours, remainder = divmod(delta.total_seconds(), 3600)
|
||||
minutes, seconds = divmod(remainder, 60)
|
||||
return f"{int(hours):02}:{int(minutes):02}:{int(seconds):02}"
|
||||
|
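A reduced, self-contained sketch of how downloadPeer() assembles the client file: an [Interface] section followed by a [Peer] section, skipping empty strings and non-positive numbers. All values below are placeholders:

interface = {"PrivateKey": "<private-key>", "Address": "10.0.0.2/32", "MTU": 0, "DNS": "1.1.1.1"}
peer = {"PublicKey": "<server-public-key>", "AllowedIPs": "0.0.0.0/0",
        "Endpoint": "vpn.example.com:51820", "PersistentKeepalive": 21, "PresharedKey": ""}

def render(sections):
    out = []
    for title, body in sections:
        out.append(f"[{title}]")
        for key, val in body.items():
            # mirror the filter used above: keep non-empty strings and positive integers
            if val is not None and ((isinstance(val, str) and val) or (isinstance(val, int) and val > 0)):
                out.append(f"{key} = {val}")
        out.append("")
    return "\n".join(out)

print(render([("Interface", interface), ("Peer", peer)]))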
|
@ -23,8 +23,8 @@ class PeerJob:
|
|||
"Field": self.Field,
|
||||
"Operator": self.Operator,
|
||||
"Value": self.Value,
|
||||
"CreationDate": self.CreationDate,
|
||||
"ExpireDate": self.ExpireDate,
|
||||
"CreationDate": self.CreationDate.strftime("%Y-%m-%d %H:%M:%S"),
|
||||
"ExpireDate": (self.ExpireDate.strftime("%Y-%m-%d %H:%M:%S") if self.ExpireDate is not None else None),
|
||||
"Action": self.Action
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -1,53 +1,59 @@
|
|||
"""
|
||||
Peer Job Logger
|
||||
"""
|
||||
import sqlite3, os, uuid
|
||||
import uuid
|
||||
import sqlalchemy as db
|
||||
from flask import current_app
|
||||
from .ConnectionString import ConnectionString
|
||||
from .Log import Log
|
||||
|
||||
class PeerJobLogger:
|
||||
def __init__(self, CONFIGURATION_PATH, AllPeerJobs):
|
||||
self.loggerdb = sqlite3.connect(os.path.join(CONFIGURATION_PATH, 'db', 'wgdashboard_log.db'),
|
||||
check_same_thread=False)
|
||||
self.loggerdb.row_factory = sqlite3.Row
|
||||
def __init__(self, AllPeerJobs, DashboardConfig):
|
||||
self.engine = db.create_engine(ConnectionString("wgdashboard_log"))
|
||||
self.metadata = db.MetaData()
|
||||
self.jobLogTable = db.Table('JobLog', self.metadata,
|
||||
db.Column('LogID', db.String(255), nullable=False, primary_key=True),
|
||||
db.Column('JobID', db.String(255), nullable=False),
|
||||
db.Column('LogDate', (db.DATETIME if DashboardConfig.GetConfig("Database", "type")[1] == 'sqlite' else db.TIMESTAMP),
|
||||
server_default=db.func.now()),
|
||||
db.Column('Status', db.String(255), nullable=False),
|
||||
db.Column('Message', db.Text)
|
||||
)
|
||||
self.logs: list[Log] = []
|
||||
self.__createLogDatabase()
|
||||
self.metadata.create_all(self.engine)
|
||||
self.AllPeerJobs = AllPeerJobs
|
||||
def __createLogDatabase(self):
|
||||
with self.loggerdb:
|
||||
loggerdbCursor = self.loggerdb.cursor()
|
||||
|
||||
existingTable = loggerdbCursor.execute("SELECT name from sqlite_master where type='table'").fetchall()
|
||||
existingTable = [t['name'] for t in existingTable]
|
||||
|
||||
if "JobLog" not in existingTable:
|
||||
loggerdbCursor.execute("CREATE TABLE JobLog (LogID VARCHAR NOT NULL, JobID NOT NULL, LogDate DATETIME DEFAULT (strftime('%Y-%m-%d %H:%M:%S','now', 'localtime')), Status VARCHAR NOT NULL, Message VARCHAR, PRIMARY KEY (LogID))")
|
||||
if self.loggerdb.in_transaction:
|
||||
self.loggerdb.commit()
|
||||
def log(self, JobID: str, Status: bool = True, Message: str = "") -> bool:
|
||||
try:
|
||||
with self.loggerdb:
|
||||
loggerdbCursor = self.loggerdb.cursor()
|
||||
loggerdbCursor.execute(f"INSERT INTO JobLog (LogID, JobID, Status, Message) VALUES (?, ?, ?, ?)",
|
||||
(str(uuid.uuid4()), JobID, Status, Message,))
|
||||
if self.loggerdb.in_transaction:
|
||||
self.loggerdb.commit()
|
||||
with self.engine.begin() as conn:
|
||||
conn.execute(
|
||||
self.jobLogTable.insert().values(
|
||||
{
|
||||
"LogID": str(uuid.uuid4()),
|
||||
"JobID": JobID,
|
||||
"Status": Status,
|
||||
"Message": Message
|
||||
}
|
||||
)
|
||||
)
|
||||
except Exception as e:
|
||||
print(f"[WGDashboard] Peer Job Log Error: {str(e)}")
|
||||
current_app.logger.error(f"Peer Job Log Error", e)
|
||||
return False
|
||||
return True
|
||||
|
||||
def getLogs(self, all: bool = False, configName = None) -> list[Log]:
|
||||
def getLogs(self, configName = None) -> list[Log]:
|
||||
logs: list[Log] = []
|
||||
try:
|
||||
allJobs = self.AllPeerJobs.getAllJobs(configName)
|
||||
allJobsID = ", ".join([f"'{x.JobID}'" for x in allJobs])
|
||||
with self.loggerdb:
|
||||
loggerdbCursor = self.loggerdb.cursor()
|
||||
table = loggerdbCursor.execute(f"SELECT * FROM JobLog WHERE JobID IN ({allJobsID}) ORDER BY LogDate DESC").fetchall()
|
||||
self.logs.clear()
|
||||
allJobsID = [x.JobID for x in allJobs]
|
||||
stmt = self.jobLogTable.select().where(self.jobLogTable.columns.JobID.in_(
|
||||
allJobsID
|
||||
))
|
||||
with self.engine.connect() as conn:
|
||||
table = conn.execute(stmt).fetchall()
|
||||
for l in table:
|
||||
logs.append(
|
||||
Log(l["LogID"], l["JobID"], l["LogDate"], l["Status"], l["Message"]))
|
||||
Log(l.LogID, l.JobID, l.LogDate.strftime("%Y-%m-%d %H:%M:%S"), l.Status, l.Message))
|
||||
except Exception as e:
|
||||
current_app.logger.error(f"Getting Peer Job Log Error", e)
|
||||
return logs
|
||||
return logs
|
||||
return logs
|
||||
|
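The rewritten logger queries the job log with SQLAlchemy Core instead of interpolated SQL strings. A sketch of the same in_() filter against a throwaway in-memory database (table shape copied from above, rows invented):

import sqlalchemy as db

engine = db.create_engine("sqlite://")               # in-memory, for illustration only
meta = db.MetaData()
job_log = db.Table("JobLog", meta,
                   db.Column("LogID", db.String(255), primary_key=True),
                   db.Column("JobID", db.String(255), nullable=False),
                   db.Column("Status", db.String(255), nullable=False),
                   db.Column("Message", db.Text))
meta.create_all(engine)
with engine.begin() as conn:
    conn.execute(job_log.insert(), [
        {"LogID": "1", "JobID": "job-a", "Status": "True", "Message": "created"},
        {"LogID": "2", "JobID": "job-b", "Status": "True", "Message": "created"},
    ])
    rows = conn.execute(job_log.select().where(job_log.c.JobID.in_(["job-a"]))).fetchall()
assert [r.JobID for r in rows] == ["job-a"]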
|
@ -0,0 +1,202 @@
|
|||
"""
|
||||
Peer Jobs
|
||||
"""
|
||||
from .ConnectionString import ConnectionString
|
||||
from .PeerJob import PeerJob
|
||||
from .PeerJobLogger import PeerJobLogger
|
||||
import sqlalchemy as db
|
||||
from datetime import datetime
|
||||
from flask import current_app
|
||||
|
||||
class PeerJobs:
|
||||
def __init__(self, DashboardConfig, WireguardConfigurations):
|
||||
self.Jobs: list[PeerJob] = []
|
||||
self.engine = db.create_engine(ConnectionString('wgdashboard_job'))
|
||||
self.metadata = db.MetaData()
|
||||
self.peerJobTable = db.Table('PeerJobs', self.metadata,
|
||||
db.Column('JobID', db.String(255), nullable=False, primary_key=True),
|
||||
db.Column('Configuration', db.String(255), nullable=False),
|
||||
db.Column('Peer', db.String(255), nullable=False),
|
||||
db.Column('Field', db.String(255), nullable=False),
|
||||
db.Column('Operator', db.String(255), nullable=False),
|
||||
db.Column('Value', db.String(255), nullable=False),
|
||||
db.Column('CreationDate', (db.DATETIME if DashboardConfig.GetConfig("Database", "type")[1] == 'sqlite' else db.TIMESTAMP), nullable=False),
|
||||
db.Column('ExpireDate', (db.DATETIME if DashboardConfig.GetConfig("Database", "type")[1] == 'sqlite' else db.TIMESTAMP)),
|
||||
db.Column('Action', db.String(255), nullable=False),
|
||||
)
|
||||
self.metadata.create_all(self.engine)
|
||||
self.__getJobs()
|
||||
self.JobLogger: PeerJobLogger = PeerJobLogger(self, DashboardConfig)
|
||||
self.WireguardConfigurations = WireguardConfigurations
|
||||
|
||||
def __getJobs(self):
|
||||
self.Jobs.clear()
|
||||
with self.engine.connect() as conn:
|
||||
jobs = conn.execute(self.peerJobTable.select().where(
|
||||
self.peerJobTable.columns.ExpireDate.is_(None)
|
||||
)).mappings().fetchall()
|
||||
for job in jobs:
|
||||
self.Jobs.append(PeerJob(
|
||||
job['JobID'], job['Configuration'], job['Peer'], job['Field'], job['Operator'], job['Value'],
|
||||
job['CreationDate'], job['ExpireDate'], job['Action']))
|
||||
|
||||
def getAllJobs(self, configuration: str = None):
|
||||
if configuration is not None:
|
||||
with self.engine.connect() as conn:
|
||||
jobs = conn.execute(self.peerJobTable.select().where(
|
||||
self.peerJobTable.columns.Configuration == configuration
|
||||
)).mappings().fetchall()
|
||||
j = []
|
||||
for job in jobs:
|
||||
j.append(PeerJob(
|
||||
job['JobID'], job['Configuration'], job['Peer'], job['Field'], job['Operator'], job['Value'],
|
||||
job['CreationDate'], job['ExpireDate'], job['Action']))
|
||||
return j
|
||||
return []
|
||||
|
||||
def toJson(self):
|
||||
return [x.toJson() for x in self.Jobs]
|
||||
|
||||
def searchJob(self, Configuration: str, Peer: str):
|
||||
return list(filter(lambda x: x.Configuration == Configuration and x.Peer == Peer, self.Jobs))
|
||||
|
||||
def searchJobById(self, JobID):
|
||||
return list(filter(lambda x: x.JobID == JobID, self.Jobs))
|
||||
|
||||
def saveJob(self, Job: PeerJob) -> tuple[bool, list] | tuple[bool, str]:
|
||||
import traceback
|
||||
try:
|
||||
with self.engine.begin() as conn:
|
||||
currentJob = self.searchJobById(Job.JobID)
|
||||
if len(currentJob) == 0:
|
||||
conn.execute(
|
||||
self.peerJobTable.insert().values(
|
||||
{
|
||||
"JobID": Job.JobID,
|
||||
"Configuration": Job.Configuration,
|
||||
"Peer": Job.Peer,
|
||||
"Field": Job.Field,
|
||||
"Operator": Job.Operator,
|
||||
"Value": Job.Value,
|
||||
"CreationDate": datetime.now(),
|
||||
"ExpireDate": None,
|
||||
"Action": Job.Action
|
||||
}
|
||||
)
|
||||
)
|
||||
self.JobLogger.log(Job.JobID, Message=f"Job is created if {Job.Field} {Job.Operator} {Job.Value} then {Job.Action}")
|
||||
else:
|
||||
conn.execute(
|
||||
self.peerJobTable.update().values({
|
||||
"Field": Job.Field,
|
||||
"Operator": Job.Operator,
|
||||
"Value": Job.Value,
|
||||
"Action": Job.Action
|
||||
}).where(self.peerJobTable.columns.JobID == Job.JobID)
|
||||
)
|
||||
self.JobLogger.log(Job.JobID, Message=f"Job is updated from if {currentJob[0].Field} {currentJob[0].Operator} {currentJob[0].Value} then {currentJob[0].Action}; to if {Job.Field} {Job.Operator} {Job.Value} then {Job.Action}")
|
||||
self.__getJobs()
|
||||
self.WireguardConfigurations.get(Job.Configuration).searchPeer(Job.Peer)[1].getJobs()
|
||||
return True, list(
|
||||
filter(lambda x: x.Configuration == Job.Configuration and x.Peer == Job.Peer and x.JobID == Job.JobID,
|
||||
self.Jobs))
|
||||
except Exception as e:
|
||||
traceback.print_exc()
|
||||
return False, str(e)
|
||||
|
||||
def deleteJob(self, Job: PeerJob) -> tuple[bool, None] | tuple[bool, str]:
|
||||
try:
|
||||
if len(self.searchJobById(Job.JobID)) == 0:
|
||||
return False, "Job does not exist"
|
||||
with self.engine.begin() as conn:
|
||||
conn.execute(
|
||||
self.peerJobTable.update().values(
|
||||
{
|
||||
"ExpireDate": datetime.now()
|
||||
}
|
||||
).where(self.peerJobTable.columns.JobID == Job.JobID)
|
||||
)
|
||||
self.JobLogger.log(Job.JobID, Message=f"Job is removed due to being deleted or finshed.")
|
||||
self.__getJobs()
|
||||
self.WireguardConfigurations.get(Job.Configuration).searchPeer(Job.Peer)[1].getJobs()
|
||||
return True, None
|
||||
except Exception as e:
|
||||
return False, str(e)
|
||||
|
||||
def updateJobConfigurationName(self, ConfigurationName: str, NewConfigurationName: str) -> tuple[bool, str] | tuple[bool, None]:
|
||||
try:
|
||||
with self.engine.begin() as conn:
|
||||
conn.execute(
|
||||
self.peerJobTable.update().values({
|
||||
"Configuration": NewConfigurationName
|
||||
}).where(self.peerJobTable.columns.Configuration == ConfigurationName)
|
||||
)
|
||||
self.__getJobs()
|
||||
return True, None
|
||||
except Exception as e:
|
||||
return False, str(e)
|
||||
|
||||
def getPeerJobLogs(self, configurationName):
|
||||
return self.JobLogger.getLogs(configurationName)
|
||||
|
||||
|
||||
def runJob(self):
|
||||
current_app.logger.info("Running scheduled jobs")
|
||||
needToDelete = []
|
||||
self.__getJobs()
|
||||
for job in self.Jobs:
|
||||
c = self.WireguardConfigurations.get(job.Configuration)
|
||||
if c is not None:
|
||||
f, fp = c.searchPeer(job.Peer)
|
||||
if f:
|
||||
if job.Field in ["total_receive", "total_sent", "total_data"]:
|
||||
s = job.Field.split("_")[1]
|
||||
x: float = getattr(fp, f"total_{s}") + getattr(fp, f"cumu_{s}")
|
||||
y: float = float(job.Value)
|
||||
else:
|
||||
x: datetime = datetime.now()
|
||||
y: datetime = datetime.strptime(job.Value, "%Y-%m-%d %H:%M:%S")
|
||||
runAction: bool = self.__runJob_Compare(x, y, job.Operator)
|
||||
if runAction:
|
||||
s = False
|
||||
if job.Action == "restrict":
|
||||
s, msg = c.restrictPeers([fp.id])
|
||||
elif job.Action == "delete":
|
||||
s, msg = c.deletePeers([fp.id])
|
||||
elif job.Action == "reset_total_data_usage":
|
||||
s = fp.resetDataUsage("total")
|
||||
c.restrictPeers([fp.id])
|
||||
c.allowAccessPeers([fp.id])
|
||||
if s is True:
|
||||
self.JobLogger.log(job.JobID, s,
|
||||
f"Peer {fp.id} from {c.Name} is successfully {job.Action}ed."
|
||||
)
|
||||
current_app.logger.info(f"Peer {fp.id} from {c.Name} is successfully {job.Action}ed.")
|
||||
needToDelete.append(job)
|
||||
else:
|
||||
current_app.logger.info(f"Peer {fp.id} from {c.Name} is failed {job.Action}ed.")
|
||||
self.JobLogger.log(job.JobID, s,
|
||||
f"Peer {fp.id} from {c.Name} failed {job.Action}ed."
|
||||
)
|
||||
else:
|
||||
current_app.logger.warning(f"Somehow can't find this peer {job.Peer} from {c.Name} failed {job.Action}ed.")
|
||||
self.JobLogger.log(job.JobID, False,
|
||||
f"Somehow can't find this peer {job.Peer} from {c.Name} failed {job.Action}ed."
|
||||
)
|
||||
else:
|
||||
current_app.logger.warning(f"Somehow can't find this peer {job.Peer} from {c.Name} failed {job.Action}ed.")
|
||||
self.JobLogger.log(job.JobID, False,
|
||||
f"Somehow can't find this peer {job.Peer} from {job.Configuration} failed {job.Action}ed."
|
||||
)
|
||||
for j in needToDelete:
|
||||
self.deleteJob(j)
|
||||
|
||||
def __runJob_Compare(self, x: float | datetime, y: float | datetime, operator: str):
|
||||
if operator == "eq":
|
||||
return x == y
|
||||
if operator == "neq":
|
||||
return x != y
|
||||
if operator == "lgt":
|
||||
return x > y
|
||||
if operator == "lst":
|
||||
return x < y
|
||||
|
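__runJob_Compare() maps the stored operator strings onto ordinary comparisons; a table-driven equivalent (a sketch, not the shipped code):

import operator

_OPS = {"eq": operator.eq, "neq": operator.ne, "lgt": operator.gt, "lst": operator.lt}

def compare(x, y, op: str) -> bool:
    return _OPS[op](x, y)

assert compare(3, 2, "lgt") and not compare(3, 3, "neq")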
|
@ -0,0 +1,22 @@
|
|||
from datetime import datetime
|
||||
"""
|
||||
Peer Share Link
|
||||
"""
|
||||
class PeerShareLink:
|
||||
def __init__(self, ShareID:str, Configuration: str, Peer: str, ExpireDate: datetime, SharedDate: datetime):
|
||||
self.ShareID = ShareID
|
||||
self.Peer = Peer
|
||||
self.Configuration = Configuration
|
||||
self.SharedDate = SharedDate
|
||||
self.ExpireDate = ExpireDate
|
||||
if not self.ExpireDate:
|
||||
self.ExpireDate = datetime.strptime("2199-12-31","%Y-%m-%d")
|
||||
|
||||
def toJson(self):
|
||||
return {
|
||||
"ShareID": self.ShareID,
|
||||
"Peer": self.Peer,
|
||||
"Configuration": self.Configuration,
|
||||
"ExpireDate": self.ExpireDate.strftime("%Y-%m-%d %H:%M:%S"),
|
||||
"SharedDate": self.SharedDate.strftime("%Y-%m-%d %H:%M:%S"),
|
||||
}
|
||||
|
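Constructing the model above with no expiry falls back to a far-future date, so unexpired links always serialize cleanly. A quick check, assuming the PeerShareLink class above is in scope (ShareID and peer key are invented):

from datetime import datetime

link = PeerShareLink("share-1", "wg0", "<peer-public-key>", None, datetime.now())
assert link.ExpireDate.year == 2199          # falsy ExpireDate defaults to 2199-12-31
print(link.toJson())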
|
@ -0,0 +1,89 @@
|
|||
from .ConnectionString import ConnectionString
|
||||
from .PeerShareLink import PeerShareLink
|
||||
import sqlalchemy as db
|
||||
from datetime import datetime
|
||||
import uuid
|
||||
|
||||
"""
|
||||
Peer Share Links
|
||||
"""
|
||||
class PeerShareLinks:
|
||||
def __init__(self, DashboardConfig, WireguardConfigurations):
|
||||
self.Links: list[PeerShareLink] = []
|
||||
self.engine = db.create_engine(ConnectionString("wgdashboard"))
|
||||
self.metadata = db.MetaData()
|
||||
self.peerShareLinksTable = db.Table(
|
||||
'PeerShareLinks', self.metadata,
|
||||
db.Column('ShareID', db.String(255), nullable=False, primary_key=True),
|
||||
db.Column('Configuration', db.String(255), nullable=False),
|
||||
db.Column('Peer', db.String(255), nullable=False),
|
||||
db.Column('ExpireDate', (db.DATETIME if DashboardConfig.GetConfig("Database", "type")[1] == 'sqlite' else db.TIMESTAMP)),
|
||||
db.Column('SharedDate', (db.DATETIME if DashboardConfig.GetConfig("Database", "type")[1] == 'sqlite' else db.TIMESTAMP),
|
||||
server_default=db.func.now()),
|
||||
)
|
||||
self.metadata.create_all(self.engine)
|
||||
self.__getSharedLinks()
|
||||
self.wireguardConfigurations = WireguardConfigurations
|
||||
def __getSharedLinks(self):
|
||||
self.Links.clear()
|
||||
with self.engine.connect() as conn:
|
||||
allLinks = conn.execute(
|
||||
self.peerShareLinksTable.select().where(
|
||||
db.or_(self.peerShareLinksTable.columns.ExpireDate.is_(None), self.peerShareLinksTable.columns.ExpireDate > datetime.now())
|
||||
)
|
||||
).mappings().fetchall()
|
||||
for link in allLinks:
|
||||
self.Links.append(PeerShareLink(**link))
|
||||
|
||||
|
||||
|
||||
def getLink(self, Configuration: str, Peer: str) -> list[PeerShareLink]:
|
||||
self.__getSharedLinks()
|
||||
return list(filter(lambda x : x.Configuration == Configuration and x.Peer == Peer, self.Links))
|
||||
|
||||
def getLinkByID(self, ShareID: str) -> list[PeerShareLink]:
|
||||
self.__getSharedLinks()
|
||||
return list(filter(lambda x : x.ShareID == ShareID, self.Links))
|
||||
|
||||
def addLink(self, Configuration: str, Peer: str, ExpireDate: datetime = None) -> tuple[bool, str]:
|
||||
try:
|
||||
newShareID = str(uuid.uuid4())
|
||||
with self.engine.begin() as conn:
|
||||
if len(self.getLink(Configuration, Peer)) > 0:
|
||||
conn.execute(
|
||||
self.peerShareLinksTable.update().values(
|
||||
{
|
||||
"ExpireDate": datetime.now()
|
||||
}
|
||||
).where(db.and_(self.peerShareLinksTable.columns.Configuration == Configuration, self.peerShareLinksTable.columns.Peer == Peer))
|
||||
)
|
||||
|
||||
conn.execute(
|
||||
self.peerShareLinksTable.insert().values(
|
||||
{
|
||||
"ShareID": newShareID,
|
||||
"Configuration": Configuration,
|
||||
"Peer": Peer,
|
||||
"ExpireDate": ExpireDate
|
||||
}
|
||||
)
|
||||
)
|
||||
self.__getSharedLinks()
|
||||
self.wireguardConfigurations.get(Configuration).searchPeer(Peer)[1].getShareLink()
|
||||
except Exception as e:
|
||||
return False, str(e)
|
||||
return True, newShareID
|
||||
|
||||
def updateLinkExpireDate(self, ShareID, ExpireDate: datetime = None) -> tuple[bool, str]:
|
||||
with self.engine.begin() as conn:
|
||||
updated = conn.execute(
|
||||
self.peerShareLinksTable.update().values(
|
||||
{
|
||||
"ExpireDate": ExpireDate
|
||||
}
|
||||
).returning(self.peerShareLinksTable.c.Configuration, self.peerShareLinksTable.c.Peer)
|
||||
.where(self.peerShareLinksTable.columns.ShareID == ShareID)
|
||||
).mappings().fetchone()
|
||||
self.__getSharedLinks()
|
||||
self.wireguardConfigurations.get(updated.Configuration).searchPeer(updated.Peer)[1].getShareLink()
|
||||
return True, ""
|
||||
|
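__getSharedLinks() keeps a link when its expiry is either NULL or still in the future. A self-contained sketch of that or_() filter against a throwaway in-memory table (rows invented):

import sqlalchemy as db
from datetime import datetime, timedelta

engine = db.create_engine("sqlite://")               # in-memory, for illustration only
meta = db.MetaData()
links = db.Table("PeerShareLinks", meta,
                 db.Column("ShareID", db.String(255), primary_key=True),
                 db.Column("ExpireDate", db.DATETIME))
meta.create_all(engine)
with engine.begin() as conn:
    conn.execute(links.insert(), [
        {"ShareID": "a", "ExpireDate": None},
        {"ShareID": "b", "ExpireDate": datetime.now() - timedelta(days=1)},
    ])
    valid = conn.execute(links.select().where(
        db.or_(links.c.ExpireDate.is_(None), links.c.ExpireDate > datetime.now())
    )).fetchall()
assert [row.ShareID for row in valid] == ["a"]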
|
@ -1,4 +1,6 @@
|
|||
import psutil
|
||||
import shutil, subprocess, time, threading, psutil
|
||||
from flask import current_app
|
||||
|
||||
class SystemStatus:
|
||||
def __init__(self):
|
||||
self.CPU = CPU()
|
||||
|
|
@ -8,6 +10,17 @@ class SystemStatus:
|
|||
self.NetworkInterfaces = NetworkInterfaces()
|
||||
self.Processes = Processes()
|
||||
def toJson(self):
|
||||
process = [
|
||||
threading.Thread(target=self.CPU.getCPUPercent),
|
||||
threading.Thread(target=self.CPU.getPerCPUPercent),
|
||||
threading.Thread(target=self.NetworkInterfaces.getData)
|
||||
]
|
||||
for p in process:
|
||||
p.start()
|
||||
for p in process:
|
||||
p.join()
|
||||
|
||||
|
||||
return {
|
||||
"CPU": self.CPU,
|
||||
"Memory": {
|
||||
|
|
@ -16,6 +29,7 @@ class SystemStatus:
|
|||
},
|
||||
"Disks": self.Disks,
|
||||
"NetworkInterfaces": self.NetworkInterfaces,
|
||||
"NetworkInterfacesPriority": self.NetworkInterfaces.getInterfacePriorities(),
|
||||
"Processes": self.Processes
|
||||
}
|
||||
|
||||
|
|
@ -24,14 +38,20 @@ class CPU:
|
|||
def __init__(self):
|
||||
self.cpu_percent: float = 0
|
||||
self.cpu_percent_per_cpu: list[float] = []
|
||||
def getData(self):
|
||||
|
||||
def getCPUPercent(self):
|
||||
try:
|
||||
self.cpu_percent_per_cpu = psutil.cpu_percent(interval=0.5, percpu=True)
|
||||
self.cpu_percent = psutil.cpu_percent(interval=0.5)
|
||||
self.cpu_percent = psutil.cpu_percent(interval=1)
|
||||
except Exception as e:
|
||||
pass
|
||||
current_app.logger.error("Get CPU Percent error", e)
|
||||
|
||||
def getPerCPUPercent(self):
|
||||
try:
|
||||
self.cpu_percent_per_cpu = psutil.cpu_percent(interval=1, percpu=True)
|
||||
except Exception as e:
|
||||
current_app.logger.error("Get Per CPU Percent error", e)
|
||||
|
||||
def toJson(self):
|
||||
self.getData()
|
||||
return self.__dict__
|
||||
|
||||
class Memory:
|
||||
|
|
@ -44,13 +64,15 @@ class Memory:
|
|||
try:
|
||||
if self.__memoryType__ == "virtual":
|
||||
memory = psutil.virtual_memory()
|
||||
self.available = memory.available
|
||||
else:
|
||||
memory = psutil.swap_memory()
|
||||
self.available = memory.free
|
||||
self.total = memory.total
|
||||
self.available = memory.available
|
||||
|
||||
self.percent = memory.percent
|
||||
except Exception as e:
|
||||
pass
|
||||
current_app.logger.error("Get Memory percent error", e)
|
||||
def toJson(self):
|
||||
self.getData()
|
||||
return self.__dict__
|
||||
|
|
@ -62,7 +84,7 @@ class Disks:
|
|||
try:
|
||||
self.disks = list(map(lambda x : Disk(x.mountpoint), psutil.disk_partitions()))
|
||||
except Exception as e:
|
||||
pass
|
||||
current_app.logger.error("Get Disk percent error", e)
|
||||
def toJson(self):
|
||||
self.getData()
|
||||
return self.disks
|
||||
|
|
@ -82,7 +104,7 @@ class Disk:
|
|||
self.used = disk.used
|
||||
self.percent = disk.percent
|
||||
except Exception as e:
|
||||
pass
|
||||
current_app.logger.error("Get Disk percent error", e)
|
||||
def toJson(self):
|
||||
self.getData()
|
||||
return self.__dict__
|
||||
|
|
@ -90,15 +112,38 @@ class Disk:
|
|||
class NetworkInterfaces:
|
||||
def __init__(self):
|
||||
self.interfaces = {}
|
||||
|
||||
def getInterfacePriorities(self):
|
||||
if shutil.which("ip"):
|
||||
result = subprocess.check_output(["ip", "route", "show"]).decode()
|
||||
priorities = {}
|
||||
for line in result.splitlines():
|
||||
if "metric" in line and "dev" in line:
|
||||
parts = line.split()
|
||||
dev = parts[parts.index("dev")+1]
|
||||
metric = int(parts[parts.index("metric")+1])
|
||||
if dev not in priorities:
|
||||
priorities[dev] = metric
|
||||
return priorities
|
||||
return {}
|
||||
|
||||
def getData(self):
|
||||
self.interfaces.clear()
|
||||
try:
|
||||
network = psutil.net_io_counters(pernic=True, nowrap=True)
|
||||
for i in network.keys():
|
||||
self.interfaces[i] = network[i]._asdict()
|
||||
time.sleep(1)
|
||||
network = psutil.net_io_counters(pernic=True, nowrap=True)
|
||||
for i in network.keys():
|
||||
self.interfaces[i]['realtime'] = {
|
||||
'sent': round((network[i].bytes_sent - self.interfaces[i]['bytes_sent']) / 1024 / 1024, 4),
|
||||
'recv': round((network[i].bytes_recv - self.interfaces[i]['bytes_recv']) / 1024 / 1024, 4)
|
||||
}
|
||||
except Exception as e:
|
||||
pass
|
||||
current_app.logger.error("Get network error", e)
|
||||
|
||||
def toJson(self):
|
||||
self.getData()
|
||||
return self.interfaces
|
||||
|
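getInterfacePriorities() pulls the metric for each device out of `ip route show`. A standalone illustration of that parsing on a single, made-up route line:

line = "default via 192.168.1.1 dev eth0 proto dhcp metric 100"
parts = line.split()
dev = parts[parts.index("dev") + 1]               # -> "eth0"
metric = int(parts[parts.index("metric") + 1])    # -> 100
assert (dev, metric) == ("eth0", 100)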
||||
class Process:
|
||||
|
|
@ -126,7 +171,8 @@ class Processes:
|
|||
key=lambda x : x.percent, reverse=True)[:20]
|
||||
break
|
||||
except Exception as e:
|
||||
break
|
||||
current_app.logger.error("Get processes error", e)
|
||||
|
||||
def toJson(self):
|
||||
self.getData()
|
||||
return {
|
||||
|
|
|
|||
|
|
@ -59,6 +59,15 @@ def ValidateDNSAddress(addresses) -> tuple[bool, str]:
|
|||
return False, f"{address} does not appear to be an valid DNS address"
|
||||
return True, ""
|
||||
|
||||
def ValidateEndpointAllowedIPs(IPs) -> tuple[bool, str] | tuple[bool, None]:
|
||||
ips = IPs.replace(" ", "").split(",")
|
||||
for ip in ips:
|
||||
try:
|
||||
ipaddress.ip_network(ip, strict=False)
|
||||
except ValueError as e:
|
||||
return False, str(e)
|
||||
return True, None
|
||||
|
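ValidateEndpointAllowedIPs() accepts a comma-separated list of networks; strict=False tolerates host bits being set. A quick check of the same parsing with the standard library (addresses invented):

import ipaddress

for ip in "0.0.0.0/0, 10.0.0.5/24".replace(" ", "").split(","):
    ipaddress.ip_network(ip, strict=False)   # raises ValueError on malformed input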
||||
def GenerateWireguardPublicKey(privateKey: str) -> tuple[bool, str] | tuple[bool, None]:
|
||||
try:
|
||||
publicKey = subprocess.check_output(f"wg pubkey", input=privateKey.encode(), shell=True,
|
||||
|
|
@ -73,4 +82,23 @@ def GenerateWireguardPrivateKey() -> tuple[bool, str] | tuple[bool, None]:
|
|||
stderr=subprocess.STDOUT)
|
||||
return True, publicKey.decode().strip('\n')
|
||||
except subprocess.CalledProcessError:
|
||||
return False, None
|
||||
return False, None
|
||||
|
||||
def ValidatePasswordStrength(password: str) -> tuple[bool, str] | tuple[bool, None]:
|
||||
# Rules:
|
||||
# - Must be at least 8 characters long
|
||||
# - Must contain at least 1 uppercase and 1 lowercase letter
|
||||
# - Must contain at least 1 number (0-9)
|
||||
# - Must contain at least 1 special character from $&+,:;=?@#|'<>.-^*()%!~_-
|
||||
if len(password) < 8:
|
||||
return False, "Password must be 8 characters or more"
|
||||
if not re.search(r'[a-z]', password):
|
||||
return False, "Password must contain at least 1 lowercase character"
|
||||
if not re.search(r'[A-Z]', password):
|
||||
return False, "Password must contain at least 1 uppercase character"
|
||||
if not re.search(r'\d', password):
|
||||
return False, "Password must contain at least 1 number"
|
||||
if not re.search(r'[$&+,:;=?@#|\'<>.\-^*()%!~_-]', password):
|
||||
return False, "Password must contain at least 1 special character from $&+,:;=?@#|'<>.-^*()%!~_-"
|
||||
|
||||
return True, None
|
||||
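A quick illustration of the rules above, using the same regular expressions against an invented password:

import re

pw = "Wgd@shboard1"
assert len(pw) >= 8
assert re.search(r'[a-z]', pw) and re.search(r'[A-Z]', pw) and re.search(r'\d', pw)
assert re.search(r"[$&+,:;=?@#|'<>.\-^*()%!~_-]", pw)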
File diff suppressed because it is too large
Load Diff
|
|
@ -0,0 +1,21 @@
|
|||
from pydantic import BaseModel
|
||||
|
||||
class OverridePeerSettingsClass(BaseModel):
|
||||
DNS: str = ''
|
||||
EndpointAllowedIPs: str = ''
|
||||
MTU: str | int = ''
|
||||
PersistentKeepalive: int | str = ''
|
||||
PeerRemoteEndpoint: str = ''
|
||||
ListenPort: int | str = ''
|
||||
|
||||
class PeerGroupsClass(BaseModel):
|
||||
GroupName: str = ''
|
||||
Description: str = ''
|
||||
BackgroundColor: str = ''
|
||||
Icon: str = ''
|
||||
Peers: list[str] = []
|
||||
|
||||
class WireguardConfigurationInfo(BaseModel):
|
||||
Description: str = ''
|
||||
OverridePeerSettings: OverridePeerSettingsClass = OverridePeerSettingsClass(**{})
|
||||
PeerGroups: dict[str, PeerGroupsClass] = {}
|
||||
|
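A minimal round-trip of the models above (all values invented), showing that the stored JSON can be re-validated into the same structure:

info = WireguardConfigurationInfo(
    Description="Office VPN",
    OverridePeerSettings=OverridePeerSettingsClass(DNS="1.1.1.1", MTU=1420),
    PeerGroups={"staff": PeerGroupsClass(GroupName="Staff", Peers=["<peer-id>"])}
)
restored = WireguardConfigurationInfo(**info.model_dump())
assert restored == info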
|
@ -7,4 +7,11 @@ flask-cors
|
|||
icmplib
|
||||
gunicorn
|
||||
requests
|
||||
tcconfig
|
||||
tcconfig
|
||||
sqlalchemy
|
||||
sqlalchemy_utils
|
||||
psycopg
|
||||
PyMySQL
|
||||
tzlocal
|
||||
python-jose
|
||||
pydantic
|
||||
Binary file not shown.
Binary file not shown.
File diff suppressed because one or more lines are too long
|
|
@ -1 +0,0 @@
|
|||
import{_ as r,c as i,d as o,w as e,k as l,a as t,j as _,i as a,l as d,S as u}from"./index-oBQzjt8-.js";const m={name:"configuration"},p={class:"mt-md-5 mt-3 text-body"};function f(k,x,h,w,$,v){const n=l("RouterView");return t(),i("div",p,[o(n,null,{default:e(({Component:s,route:c})=>[o(_,{name:"fade2",mode:"out-in"},{default:e(()=>[(t(),a(u,null,{default:e(()=>[(t(),a(d(s),{key:c.path}))]),_:2},1024))]),_:2},1024)]),_:1})])}const B=r(m,[["render",f]]);export{B as default};
|
||||
File diff suppressed because one or more lines are too long
|
|
@ -1 +0,0 @@
|
|||
.fade-enter-active[data-v-dafd6275]{transition-delay:var(--7d032b58)!important}.progress-bar[data-v-851170e4]{width:0;transition:all 1s cubic-bezier(.42,0,.22,1)}.filter a[data-v-ea61b607]{text-decoration:none}
|
||||
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
|
|
@ -1 +0,0 @@
|
|||
function f(e){return e.includes(":")?6:e.includes(".")?4:0}function b(e){const i=f(e);if(!i)throw new Error(`Invalid IP address: ${e}`);let n=0n,o=0n;const r=Object.create(null);if(i===4)for(const s of e.split(".").map(BigInt).reverse())n+=s*2n**o,o+=8n;else{if(e.includes(".")&&(r.ipv4mapped=!0,e=e.split(":").map(t=>{if(t.includes(".")){const[c,l,d,a]=t.split(".").map($=>Number($).toString(16).padStart(2,"0"));return`${c}${l}:${d}${a}`}else return t}).join(":")),e.includes("%")){let t;[,e,t]=/(.+)%(.+)/.exec(e)||[],r.scopeid=t}const s=e.split(":"),u=s.indexOf("");if(u!==-1)for(;s.length<8;)s.splice(u,0,"");for(const t of s.map(c=>BigInt(parseInt(c||"0",16))).reverse())n+=t*2n**o,o+=16n}return r.number=n,r.version=i,r}const p={4:32,6:128},I=e=>e.includes("/")?f(e):0;function m(e){const i=I(e),n=Object.create(null);if(i)n.cidr=e,n.version=i;else{const a=f(e);if(a)n.cidr=`${e}/${p[a]}`,n.version=a;else throw new Error(`Network is not a CIDR or IP: ${e}`)}const[o,r]=n.cidr.split("/");if(!/^[0-9]+$/.test(r))throw new Error(`Network is not a CIDR or IP: ${e}`);n.prefix=r,n.single=r===String(p[n.version]);const{number:s,version:u}=b(o),t=p[u],c=s.toString(2).padStart(t,"0"),l=Number(t-r),d=c.substring(0,t-l);return n.start=BigInt(`0b${d}${"0".repeat(l)}`),n.end=BigInt(`0b${d}${"1".repeat(l)}`),n}export{m as p};
|
||||
File diff suppressed because one or more lines are too long
|
|
@ -1 +0,0 @@
|
|||
import{_ as e,G as t,a as o,c as a,t as c}from"./index-oBQzjt8-.js";const s={name:"localeText",props:{t:""},computed:{getLocaleText(){return t(this.t)}}};function n(r,p,l,_,i,x){return o(),a("span",null,c(this.getLocaleText),1)}const u=e(s,[["render",n]]);export{u as L};
|
||||
|
|
@ -1 +0,0 @@
|
|||
import{L as l}from"./localeText-CuybU_0U.js";import{d as c}from"./dayjs.min-BHDUvWAB.js";import{_ as h,a as o,c as a,b as e,d as i,w as u,f as p,t as n,j as g,n as f,k as _}from"./index-oBQzjt8-.js";const x={name:"message",methods:{dayjs:c,hide(){this.ct(),this.message.show=!1},show(){this.timeout=setTimeout(()=>{this.message.show=!1},5e3)},ct(){clearTimeout(this.timeout)}},components:{LocaleText:l},props:{message:Object},mounted(){this.show()},data(){return{dismiss:!1,timeout:null}}},v=["id"],b={key:0,class:"d-flex"},w={class:"fw-bold d-block",style:{"text-transform":"uppercase"}},y={class:"ms-auto"},k={key:1},T={class:"card-body d-flex align-items-center gap-3"};function M(j,s,C,L,t,m){const d=_("LocaleText");return o(),a("div",{onMouseenter:s[1]||(s[1]=r=>{t.dismiss=!0,this.ct()}),onMouseleave:s[2]||(s[2]=r=>{t.dismiss=!1,this.show()}),class:"card shadow rounded-3 position-relative message ms-auto",id:this.message.id},[e("div",{class:f([{"text-bg-danger":this.message.type==="danger","text-bg-success":this.message.type==="success","text-bg-warning":this.message.type==="warning"},"card-header pos"])},[i(g,{name:"zoom",mode:"out-in"},{default:u(()=>[t.dismiss?(o(),a("div",k,[e("small",{onClick:s[0]||(s[0]=r=>m.hide()),class:"d-block mx-auto w-100 text-center",style:{cursor:"pointer"}},[s[3]||(s[3]=e("i",{class:"bi bi-x-lg me-2"},null,-1)),i(d,{t:"Dismiss"})])])):(o(),a("div",b,[e("small",w,[i(d,{t:"FROM "}),p(" "+n(this.message.from),1)]),e("small",y,n(m.dayjs().format("hh:mm A")),1)]))]),_:1})],2),e("div",T,[e("div",null,n(this.message.content),1)])],40,v)}const z=h(x,[["render",M],["__scopeId","data-v-94c76b54"]]);export{z as M};
|
||||
File diff suppressed because one or more lines are too long
|
|
@ -1 +0,0 @@
|
|||
.protocolBtnGroup a[data-v-b97242f3]{transition:all .2s ease-in-out}
|
||||
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
|
|
@ -1 +0,0 @@
|
|||
import{_ as v,D as g,r as o,o as h,J as x,g as y,a as i,c as n,b as s,d as c,n as w,e as C,w as k,j as F}from"./index-oBQzjt8-.js";import{L as T}from"./localeText-CuybU_0U.js";import"./browser-CjSdxGTc.js";const M={class:"peerSettingContainer w-100 h-100 position-absolute top-0 start-0"},S={class:"container d-flex h-100 w-100"},D={class:"m-auto modal-dialog-centered dashboardModal justify-content-center"},P={class:"card rounded-3 shadow w-100"},j={class:"card-header bg-transparent d-flex align-items-center gap-2 border-0 p-4 pb-0"},B={class:"mb-0"},G={class:"card-body p-4 d-flex flex-column gap-3"},L={style:{height:"300px"},class:"d-flex"},N=["value"],V={key:0,class:"spinner-border m-auto",role:"status"},I={class:"d-flex"},W=["disabled"],$={key:0,class:"d-block"},q={key:1,class:"d-block",id:"check"},z={__name:"peerConfigurationFile",props:{selectedPeer:Object},emits:["close"],setup(u,{emit:p}){const m=p,f=u,r=g(),t=o(!1),l=o(""),a=o(!0);o({error:!1,message:void 0}),h(()=>{const d=x();y("/api/downloadPeer/"+d.params.id,{id:f.selectedPeer.id},e=>{e.status?(l.value=e.data.file,a.value=!1):this.dashboardStore.newMessage("Server",e.message,"danger")})});const b=async()=>{navigator.clipboard&&navigator.clipboard.writeText?navigator.clipboard.writeText(l.value).then(()=>{t.value=!0,setTimeout(()=>{t.value=!1},3e3)}).catch(()=>{r.newMessage("WGDashboard","Failed to copy","danger")}):(document.querySelector("#peerConfigurationFile").select(),document.execCommand("copy")?(t.value=!0,setTimeout(()=>{t.value=!1},3e3)):r.newMessage("WGDashboard","Failed to copy","danger"))};return(d,e)=>(i(),n("div",M,[s("div",S,[s("div",D,[s("div",P,[s("div",j,[s("h4",B,[c(T,{t:"Peer Configuration File"})]),s("button",{type:"button",class:"btn-close ms-auto",onClick:e[0]||(e[0]=_=>m("close"))})]),s("div",G,[s("div",L,[s("textarea",{style:{height:"300px"},class:w(["form-control w-100 rounded-3 animate__fadeIn animate__faster animate__animated",{"d-none":a.value}]),id:"peerConfigurationFile",value:l.value},null,10,N),a.value?(i(),n("div",V,e[2]||(e[2]=[s("span",{class:"visually-hidden"},"Loading...",-1)]))):C("",!0)]),s("div",I,[s("button",{onClick:e[1]||(e[1]=_=>b()),disabled:t.value||a.value,class:"ms-auto btn bg-primary-subtle border-primary-subtle text-primary-emphasis rounded-3 position-relative"},[c(F,{name:"slide-up",mode:"out-in"},{default:k(()=>[t.value?(i(),n("span",q,e[4]||(e[4]=[s("i",{class:"bi bi-check-circle-fill"},null,-1)]))):(i(),n("span",$,e[3]||(e[3]=[s("i",{class:"bi bi-clipboard-fill"},null,-1)])))]),_:1})],8,W)])])])])])]))}},R=v(z,[["__scopeId","data-v-b0ea2d46"]]);export{R as default};
|
||||
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
|
|
@ -1 +0,0 @@
|
|||
@media screen and (max-width: 768px){#qrcode[data-v-7c287bf3]{width:100%!important;height:auto!important;aspect-ratio:1/1}}
|
||||
|
|
@ -1 +0,0 @@
|
|||
import{b as i}from"./browser-CjSdxGTc.js";import{L as c}from"./localeText-CuybU_0U.js";import{_ as l,D as p,g as _,k as m,a as n,c as r,b as e,d as u,n as h,e as f}from"./index-oBQzjt8-.js";const g={name:"peerQRCode",components:{LocaleText:c},props:{selectedPeer:Object},setup(){return{dashboardStore:p()}},data(){return{loading:!0}},mounted(){_("/api/downloadPeer/"+this.$route.params.id,{id:this.selectedPeer.id},o=>{if(this.loading=!1,o.status){let t="";if(this.selectedPeer.configuration.Protocol==="awg"){let a={containers:[{awg:{isThirdPartyConfig:!0,last_config:o.data.file,port:this.selectedPeer.configuration.ListenPort,transport_proto:"udp"},container:"amnezia-awg"}],defaultContainer:"amnezia-awg",description:this.selectedPeer.name,hostName:this.dashboardStore.Configuration.Peers.remote_endpoint};t=btoa(JSON.stringify(a))}else t=o.data.file;i.toCanvas(document.querySelector("#qrcode"),t,a=>{a&&console.error(a)})}else this.dashboardStore.newMessage("Server",o.message,"danger")})}},b={class:"peerSettingContainer w-100 h-100 position-absolute top-0 start-0"},v={class:"container d-flex h-100 w-100"},C={class:"m-auto modal-dialog-centered dashboardModal justify-content-center"},w={class:"card rounded-3 shadow"},P={class:"card-header bg-transparent d-flex align-items-center gap-2 border-0 p-4 pb-0"},x={class:"mb-0"},S={class:"card-body p-4"},y={class:"d-flex"},L={key:0,class:"spinner-border m-auto",role:"status"};function k(o,t,a,N,s,$){const d=m("LocaleText");return n(),r("div",b,[e("div",v,[e("div",C,[e("div",w,[e("div",P,[e("h4",x,[u(d,{t:"QR Code"})]),e("button",{type:"button",class:"btn-close ms-auto",onClick:t[0]||(t[0]=Q=>this.$emit("close"))})]),e("div",S,[e("div",y,[e("canvas",{id:"qrcode",class:h(["rounded-3 shadow animate__animated animate__fadeIn animate__faster",{"d-none":s.loading}])},null,2),s.loading?(n(),r("div",L,t[1]||(t[1]=[e("span",{class:"visually-hidden"},"Loading...",-1)]))):f("",!0)])])])])])])}const q=l(g,[["render",k],["__scopeId","data-v-7c287bf3"]]);export{q as default};
|
||||
|
|
@ -1 +0,0 @@
|
|||
.searchPeersContainer[data-v-b741afe7]{width:100%}
|
||||
|
|
@ -1 +0,0 @@
|
|||
import{_ as u,q as m,G as p,r as b,W as f,a2 as h,o as g,a as v,i as y,w as _,b as e,m as x,y as w,d as S,j as B}from"./index-oBQzjt8-.js";import{L as T}from"./localeText-CuybU_0U.js";const C={class:"fixed-bottom w-100 bottom-0 z-2",style:{"z-index":"1"}},P={class:"container-fluid"},k={class:"row g-0"},L={class:"col-md-9 col-lg-10 d-flex justify-content-center py-2"},V={class:"rounded-3 p-2 border shadow searchPeersContainer bg-body-tertiary"},j={class:"d-flex gap-1 align-items-center px-2"},z=["placeholder"],D={__name:"peerSearchBar",emits:["close"],setup(G,{emit:n}){const l=m(()=>p("Search Peers..."));let t;const o=b(""),r=f(),i=()=>{t?(clearTimeout(t),t=setTimeout(()=>{r.searchString=o.value},300)):t=setTimeout(()=>{r.searchString=o.value},300)},d=n,c=h("searchBar");return g(()=>{c.value.focus()}),(M,s)=>(v(),y(B,{name:"slideUp",appear:"",type:"animation",style:{"animation-delay":"1s"}},{default:_(()=>[e("div",C,[e("div",P,[e("div",k,[s[5]||(s[5]=e("div",{class:"col-md-3 col-lg-2"},null,-1)),e("div",L,[e("div",V,[e("div",j,[s[4]||(s[4]=e("h6",{class:"mb-0 me-2"},[e("label",{for:"searchPeers"},[e("i",{class:"bi bi-search"})])],-1)),x(e("input",{ref:"searchBar",class:"flex-grow-1 form-control rounded-3 bg-secondary-subtle border-1 border-secondary-subtle",placeholder:l.value,id:"searchPeers",onKeyup:s[0]||(s[0]=a=>i()),"onUpdate:modelValue":s[1]||(s[1]=a=>o.value=a)},null,40,z),[[w,o.value]]),e("button",{onClick:s[2]||(s[2]=a=>d("close")),style:{"white-space":"nowrap"},class:"btn bg-secondary-subtle text-secondary-emphasis border-secondary-subtle rounded-3 d-flex align-items-center"},[e("span",null,[s[3]||(s[3]=e("i",{class:"bi bi-x-circle-fill me-2"},null,-1)),S(T,{t:"Done"})])])])])])])])])]),_:1}))}},W=u(D,[["__scopeId","data-v-b741afe7"]]);export{W as default};
|
||||
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
|
|
@ -1 +0,0 @@
|
|||
.card[data-v-8cfb4d4d]{border-color:var(--bs-border-color)!important}textarea[data-v-6e705c87]:focus,input[data-v-6e705c87]:focus{box-shadow:none;border-color:var(--bs-border-color)!important}textarea[data-v-6e705c87]{padding:var(--bs-card-spacer-y) var(--bs-card-spacer-x)}
|
||||
File diff suppressed because one or more lines are too long
|
|
@ -1 +0,0 @@
|
|||
import{L as r}from"./localeText-CuybU_0U.js";import{a as t,c as n,f as i,i as s,e as a}from"./index-oBQzjt8-.js";const d={key:0,class:"badge wireguardBg rounded-3 shadow"},c={key:1,class:"badge amneziawgBg rounded-3 shadow"},u={__name:"protocolBadge",props:{protocol:String,mini:!1},setup(e){return(m,o)=>e.protocol==="wg"?(t(),n("span",d,[o[0]||(o[0]=i(" WireGuard ")),e.mini?a("",!0):(t(),s(r,{key:0,t:"Configuration"}))])):e.protocol==="awg"?(t(),n("span",c,[o[1]||(o[1]=i(" AmneziaWG ")),e.mini?a("",!0):(t(),s(r,{key:0,t:"Configuration"}))])):a("",!0)}};export{u as _};
|
||||
File diff suppressed because one or more lines are too long
|
|
@ -1 +0,0 @@
|
|||
@media screen and (max-width: 992px){.apiKey-card-body{&[data-v-a76253c8]{flex-direction:column!important;align-items:start!important}div.ms-auto[data-v-a76253c8]{margin-left:0!important}div[data-v-a76253c8]{width:100%;align-items:start!important}small[data-v-a76253c8]{margin-right:auto}}}.apiKey-move[data-v-100ee9f9],.apiKey-enter-active[data-v-100ee9f9],.apiKey-leave-active[data-v-100ee9f9]{transition:all .5s ease}.apiKey-enter-from[data-v-100ee9f9],.apiKey-leave-to[data-v-100ee9f9]{opacity:0;transform:translateY(30px) scale(.9)}.apiKey-leave-active[data-v-100ee9f9]{position:absolute;width:100%}.dropdown-menu[data-v-4e34593e]{width:100%}.list-group{&[data-v-4aa2aed9]:first-child{border-top-left-radius:var(--bs-border-radius-lg);border-top-right-radius:var(--bs-border-radius-lg)}&[data-v-4aa2aed9]:last-child{border-bottom-left-radius:var(--bs-border-radius-lg);border-bottom-right-radius:var(--bs-border-radius-lg)}}
|
||||
|
|
@ -1 +0,0 @@
|
|||
import{_,r,D as p,g as u,c as m,b as t,d as c,J as h,a as f,k as b}from"./index-oBQzjt8-.js";import{b as v}from"./browser-CjSdxGTc.js";import{L as y}from"./localeText-CuybU_0U.js";const g={name:"share",components:{LocaleText:y},async setup(){const o=h(),e=r(!1),i=p(),n=r(""),s=r(void 0),l=r(new Blob);await u("/api/getDashboardTheme",{},d=>{n.value=d.data});const a=o.query.ShareID;return a===void 0||a.length===0?(s.value=void 0,e.value=!0):await u("/api/sharePeer/get",{ShareID:a},d=>{d.status?(s.value=d.data,l.value=new Blob([s.value.file],{type:"text/plain"})):s.value=void 0,e.value=!0}),{store:i,theme:n,peerConfiguration:s,blob:l}},mounted(){this.peerConfiguration&&v.toCanvas(document.querySelector("#qrcode"),this.peerConfiguration.file,o=>{o&&console.error(o)})},methods:{download(){const o=new Blob([this.peerConfiguration.file],{type:"text/plain"}),e=URL.createObjectURL(o),i=`${this.peerConfiguration.fileName}.conf`,n=document.createElement("a");n.href=e,n.download=i,n.click()}},computed:{getBlob(){return URL.createObjectURL(this.blob)}}},w=["data-bs-theme"],x={class:"m-auto text-body",style:{width:"500px"}},C={key:0,class:"text-center position-relative",style:{}},U={class:"position-absolute w-100 h-100 top-0 start-0 d-flex animate__animated animate__fadeInUp",style:{"animation-delay":"0.1s"}},I={class:"m-auto"},L={key:1,class:"d-flex align-items-center flex-column gap-3"},k={class:"h1 dashboardLogo text-center animate__animated animate__fadeInUp"},B={id:"qrcode",class:"rounded-3 shadow animate__animated animate__fadeInUp mb-3",ref:"qrcode"},D={class:"text-muted animate__animated animate__fadeInUp mb-1",style:{"animation-delay":"0.2s"}},R=["download","href"];function q(o,e,i,n,s,l){const a=b("LocaleText");return f(),m("div",{class:"container-fluid login-container-fluid d-flex main pt-5 overflow-scroll","data-bs-theme":this.theme},[t("div",x,[this.peerConfiguration?(f(),m("div",L,[t("div",k,[e[1]||(e[1]=t("h6",null,"WGDashboard",-1)),c(a,{t:"Scan QR Code with the WireGuard App to add peer"})]),t("canvas",B,null,512),t("p",D,[c(a,{t:"or click the button below to download the "}),e[2]||(e[2]=t("samp",null,".conf",-1)),c(a,{t:" file"})]),t("a",{download:this.peerConfiguration.fileName+".conf",href:l.getBlob,class:"btn btn-lg bg-primary-subtle text-primary-emphasis border-1 border-primary-subtle animate__animated animate__fadeInUp shadow-sm",style:{"animation-delay":"0.25s"}},e[3]||(e[3]=[t("i",{class:"bi bi-download"},null,-1)]),8,R)])):(f(),m("div",C,[e[0]||(e[0]=t("div",{class:"animate__animated animate__fadeInUp"},[t("h1",{style:{"font-size":"20rem",filter:"blur(1rem)","animation-duration":"7s"},class:"animate__animated animate__flash animate__infinite"},[t("i",{class:"bi bi-file-binary"})])],-1)),t("div",U,[t("h3",I,[c(a,{t:"Oh no... This link is either expired or invalid."})])])]))])],8,w)}const N=_(g,[["render",q],["__scopeId","data-v-1b44aacd"]]);export{N as default};
|
||||
|
|
@ -1 +0,0 @@
|
|||
.dot.inactive[data-v-ed7817c7]{background-color:#dc3545;box-shadow:0 0 0 .2rem #dc354545}.spin[data-v-ed7817c7]{animation:spin-ed7817c7 1s infinite cubic-bezier(.82,.58,.17,.9)}@keyframes spin-ed7817c7{0%{transform:rotate(0)}to{transform:rotate(360deg)}}@media screen and (max-width: 768px){.remoteServerContainer[data-v-ed7817c7]{flex-direction:column}.remoteServerContainer .button-group button[data-v-ed7817c7]{width:100%}}@media screen and (max-width: 768px){.login-box[data-v-80e20da4]{width:100%!important}.login-box div[data-v-80e20da4]{width:auto!important}}
|
||||
File diff suppressed because one or more lines are too long
|
|
@ -1 +0,0 @@
|
|||
import{_ as b,p as m,r as p,q as v,a as t,c as r,d as g,w as x,s as n,n as f,b as l,t as d,e as C,j as w}from"./index-oBQzjt8-.js";const y={class:"text-muted me-2"},_={class:"fw-bold"},k={__name:"cpuCore",props:{core_number:Number,percentage:Number,align:Boolean,square:Boolean},setup(e){m(c=>({"2ec4d3bc":o.value}));const u=e,s=p(!1),o=v(()=>u.square?"40px":"25px");return(c,a)=>(t(),r("div",{class:"flex-grow-1 square rounded-3 border position-relative p-2",onMouseenter:a[0]||(a[0]=i=>s.value=!0),onMouseleave:a[1]||(a[1]=i=>s.value=!1),style:n({"background-color":`rgb(13 110 253 / ${e.percentage*10}%)`})},[g(w,{name:"zoomReversed"},{default:x(()=>[s.value?(t(),r("div",{key:0,style:n([{"white-space":"nowrap"},{top:o.value}]),class:f(["floatingLabel z-3 border position-absolute d-block p-1 px-2 bg-body text-body rounded-3 border shadow d-flex",[e.align?"end-0":"start-0"]])},[l("small",y," Core #"+d(e.core_number+1),1),l("small",_,d(e.percentage)+"% ",1)],6)):C("",!0)]),_:1})],36))}},B=b(k,[["__scopeId","data-v-2ad535bb"]]);export{B as C};
|
||||
|
|
@ -1 +0,0 @@
|
|||
.title[data-v-ffe5ad8f]{height:18px;text-overflow:ellipsis;overflow:hidden;white-space:nowrap}.process-move[data-v-977dc46d],.process-enter-active[data-v-977dc46d],.process-leave-active[data-v-977dc46d]{transition:all .5s cubic-bezier(.42,0,.22,1)}.process-enter-from[data-v-977dc46d],.process-leave-to[data-v-977dc46d]{opacity:0;transform:scale(.9)}.process-leave-active[data-v-977dc46d]{position:absolute;width:100%}.progress-bar[data-v-977dc46d]{width:0;transition:all 1s cubic-bezier(.42,0,.22,1)}.fadeIn[data-v-977dc46d]{opacity:0;animation:fadeIn-977dc46d .5s forwards cubic-bezier(.42,0,.22,1)}@keyframes fadeIn-977dc46d{0%{opacity:0;transform:translateY(30px)}to{opacity:1;transform:translateY(0)}}
|
||||
File diff suppressed because one or more lines are too long
|
|
@ -1 +0,0 @@
|
|||
import{_ as h,D as m,g as p,A as f,c as b,b as t,d as i,t as _,m as v,y as g,i as d,w as r,k as c,a as n}from"./index-oBQzjt8-.js";import{b as x}from"./browser-CjSdxGTc.js";import{L as y}from"./localeText-CuybU_0U.js";const T={name:"totp",components:{LocaleText:y},async setup(){const s=m();let e="";return await p("/api/Welcome_GetTotpLink",{},a=>{a.status&&(e=a.data)}),{l:e,store:s}},mounted(){this.l&&x.toCanvas(document.getElementById("qrcode"),this.l,function(s){})},data(){return{totp:"",totpInvalidMessage:"",verified:!1}},methods:{validateTotp(){}},watch:{totp(s){const e=document.querySelector("#totp");e.classList.remove("is-invalid","is-valid"),s.length===6&&(console.log(s),/[0-9]{6}/.test(s)?f("/api/Welcome_VerifyTotpLink",{totp:s},a=>{a.status?(this.verified=!0,e.classList.add("is-valid"),this.$emit("verified")):(e.classList.add("is-invalid"),this.totpInvalidMessage="TOTP does not match.")}):(e.classList.add("is-invalid"),this.totpInvalidMessage="TOTP can only contain numbers"))}}},k=["data-bs-theme"],w={class:"m-auto text-body",style:{width:"500px"}},L={class:"d-flex flex-column"},M={class:"dashboardLogo display-4"},C={class:"mb-2"},P={class:"text-muted"},I={class:"p-3 bg-body-secondary rounded-3 border mb-3"},O={class:"text-muted mb-0"},B=["href"],$={style:{"line-break":"anywhere"}},A={for:"totp",class:"mb-2"},D={class:"text-muted"},S={class:"form-group mb-2"},q=["disabled"],E={class:"invalid-feedback"},F={class:"valid-feedback"},R={class:"d-flex gap-3 mt-5 flex-column"};function G(s,e,a,N,W,Q){const o=c("LocaleText"),l=c("RouterLink");return n(),b("div",{class:"container-fluid login-container-fluid d-flex main pt-5 overflow-scroll","data-bs-theme":this.store.Configuration.Server.dashboard_theme},[t("div",w,[t("div",L,[t("div",null,[t("h1",M,[i(o,{t:"Multi-Factor Authentication (MFA)"})]),t("p",C,[t("small",P,[i(o,{t:"1. Please scan the following QR Code to generate TOTP with your choice of authenticator"})])]),e[1]||(e[1]=t("canvas",{id:"qrcode",class:"rounded-3 mb-2"},null,-1)),t("div",I,[t("p",O,[t("small",null,[i(o,{t:"Or you can click the link below:"})])]),t("a",{href:this.l},[t("code",$,_(this.l),1)],8,B)]),t("label",A,[t("small",D,[i(o,{t:"2. Enter the TOTP generated by your authenticator to verify"})])]),t("div",S,[v(t("input",{class:"form-control text-center totp",id:"totp",maxlength:"6",type:"text",inputmode:"numeric",autocomplete:"one-time-code","onUpdate:modelValue":e[0]||(e[0]=u=>this.totp=u),disabled:this.verified},null,8,q),[[g,this.totp]]),t("div",E,[i(o,{t:this.totpInvalidMessage},null,8,["t"])]),t("div",F,[i(o,{t:"TOTP verified!"})])])]),e[4]||(e[4]=t("hr",null,null,-1)),t("div",R,[this.verified?(n(),d(l,{key:1,to:"/",class:"btn btn-dark btn-lg d-flex btn-brand shadow align-items-center flex-grow-1 rounded-3"},{default:r(()=>[i(o,{t:"Complete"}),e[3]||(e[3]=t("i",{class:"bi bi-chevron-right ms-auto"},null,-1))]),_:1})):(n(),d(l,{key:0,to:"/",class:"btn bg-secondary-subtle text-secondary-emphasis rounded-3 flex-grow-1 btn-lg border-1 border-secondary-subtle shadow d-flex"},{default:r(()=>[i(o,{t:"I don't need MFA"}),e[2]||(e[2]=t("i",{class:"bi bi-chevron-right ms-auto"},null,-1))]),_:1}))])])])],8,k)}const z=h(T,[["render",G]]);export{z as default};
|
||||
|
|
@ -1 +0,0 @@
|
|||
.pingPlaceholder[data-v-3e75b4d4]{width:100%;height:40px}.ping-move[data-v-3e75b4d4],.ping-enter-active[data-v-3e75b4d4],.ping-leave-active[data-v-3e75b4d4]{transition:all .4s cubic-bezier(.82,.58,.17,.9)}.ping-leave-active[data-v-3e75b4d4]{position:absolute;width:100%}.ping-enter-from[data-v-3e75b4d4],.ping-leave-to[data-v-3e75b4d4]{opacity:0;filter:blur(3px)}.ping-leave-active[data-v-3e75b4d4]{position:absolute}table th[data-v-3e75b4d4],table td[data-v-3e75b4d4]{padding:.5rem}.table[data-v-3e75b4d4]>:not(caption)>*>*{background-color:transparent!important}
|
||||
|
|
@ -1 +0,0 @@
|
|||
import{_ as h,W as g,g as b,c as o,b as t,d as n,m as y,y as f,C as x,w as r,j as c,a as l,f as v,F as u,h as m,n as k,s as T,t as i,k as _}from"./index-oBQzjt8-.js";import{O as A}from"./osmap-DL0KYfmh.js";import{L as w}from"./localeText-CuybU_0U.js";const R={name:"traceroute",components:{LocaleText:w,OSMap:A},data(){return{tracing:!1,ipAddress:void 0,tracerouteResult:void 0}},setup(){return{store:g()}},methods:{execute(){this.ipAddress&&(this.tracing=!0,this.tracerouteResult=void 0,b("/api/traceroute/execute",{ipAddress:this.ipAddress},d=>{d.status?this.tracerouteResult=d.data:this.store.newMessage("Server",d.message,"danger"),this.tracing=!1}))}}},M={class:"mt-md-5 mt-3 text-body"},S={class:"container-md"},$={class:"mb-3 text-body"},C={class:"d-flex gap-2 mb-3 flex-column"},L={class:"flex-grow-1"},P={class:"mb-1 text-muted",for:"ipAddress"},O=["disabled"],V=["disabled"],B={key:0,class:"d-block"},I={key:1,class:"d-block"},N={class:"position-relative"},z={key:"pingPlaceholder"},D={key:1},E={key:"table",class:"w-100 mt-2"},F={class:"table table-sm rounded-3 w-100"},G={scope:"col"},H={scope:"col"},K={scope:"col"},W={scope:"col"},j={scope:"col"},U={scope:"col"},q={key:0},J={key:1};function Q(d,s,X,Y,Z,tt){const a=_("LocaleText"),p=_("OSMap");return l(),o("div",M,[t("div",S,[t("h3",$,[n(a,{t:"Traceroute"})]),t("div",C,[t("div",L,[t("label",P,[t("small",null,[n(a,{t:"Enter IP Address / Hostname"})])]),y(t("input",{disabled:this.tracing,id:"ipAddress",class:"form-control rounded-3","onUpdate:modelValue":s[0]||(s[0]=e=>this.ipAddress=e),onKeyup:s[1]||(s[1]=x(e=>this.execute(),["enter"])),type:"text"},null,40,O),[[f,this.ipAddress]])]),t("button",{class:"btn btn-primary rounded-3 position-relative flex-grow-1",disabled:this.tracing||!this.ipAddress,onClick:s[2]||(s[2]=e=>this.execute())},[n(c,{name:"slide"},{default:r(()=>[this.tracing?(l(),o("span",I,s[4]||(s[4]=[t("span",{class:"spinner-border spinner-border-sm","aria-hidden":"true"},null,-1),t("span",{class:"visually-hidden",role:"status"},"Loading...",-1)]))):(l(),o("span",B,s[3]||(s[3]=[t("i",{class:"bi bi-person-walking me-2"},null,-1),v("Trace! 
")])))]),_:1})],8,V)]),t("div",N,[n(c,{name:"ping"},{default:r(()=>[this.tracerouteResult?(l(),o("div",D,[n(p,{d:this.tracerouteResult,type:"traceroute"},null,8,["d"]),t("div",E,[t("table",F,[t("thead",null,[t("tr",null,[t("th",G,[n(a,{t:"Hop"})]),t("th",H,[n(a,{t:"IP Address"})]),t("th",K,[n(a,{t:"Average RTT (ms)"})]),t("th",W,[n(a,{t:"Min RTT (ms)"})]),t("th",j,[n(a,{t:"Max RTT (ms)"})]),t("th",U,[n(a,{t:"Geolocation"})])])]),t("tbody",null,[(l(!0),o(u,null,m(this.tracerouteResult,(e,et)=>(l(),o("tr",null,[t("td",null,[t("small",null,i(e.hop),1)]),t("td",null,[t("small",null,[t("samp",null,i(e.ip),1)])]),t("td",null,[t("small",null,[t("samp",null,i(e.avg_rtt),1)])]),t("td",null,[t("small",null,[t("samp",null,i(e.min_rtt),1)])]),t("td",null,[t("small",null,[t("samp",null,i(e.max_rtt),1)])]),t("td",null,[e.geo.city&&e.geo.country?(l(),o("span",q,[t("small",null,i(e.geo.city)+", "+i(e.geo.country),1)])):(l(),o("span",J," - "))])]))),256))])])])])):(l(),o("div",z,[s[5]||(s[5]=t("div",{class:"pingPlaceholder bg-body-secondary rounded-3 mb-3",style:{height:"300px !important"}},null,-1)),(l(),o(u,null,m(5,e=>t("div",{class:k(["pingPlaceholder bg-body-secondary rounded-3 mb-3",{"animate__animated animate__flash animate__slower animate__infinite":this.tracing}]),style:T({"animation-delay":`${e*.05}s`})},null,6)),64))]))]),_:1})])])])}const lt=h(R,[["render",Q],["__scopeId","data-v-3e75b4d4"]]);export{lt as default};
|
||||
File diff suppressed because one or more lines are too long
|
|
@ -13,6 +13,7 @@
|
|||
</head>
|
||||
<body>
|
||||
<div id="app"></div>
|
||||
|
||||
<script type="module" src="./src/main.js"></script>
|
||||
</body>
|
||||
</html>
|
||||
|
|
|
|||
File diff suppressed because it is too large
|
|
@ -1,42 +1,46 @@
|
|||
{
|
||||
"name": "app",
|
||||
"version": "4.2.5",
|
||||
"version": "4.3.0",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"module": "es2022",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
"build": "vite build",
|
||||
"build": "vite build --emptyOutDir",
|
||||
"buildcommitpush": "./build.sh",
|
||||
"build electron": "vite build && vite build --mode electron && cd ../../../../WGDashboard-Desktop && /opt/homebrew/bin/npm run \"electron dist\"",
|
||||
"build electron": "vite build --emptyOutDir && vite build --mode electron && cd ../../../../WGDashboard-Desktop && /opt/homebrew/bin/npm run \"electron dist\"",
|
||||
"preview": "vite preview"
|
||||
},
|
||||
"dependencies": {
|
||||
"@vue/language-server": "^2.1.10",
|
||||
"@vuepic/vue-datepicker": "^9.0.1",
|
||||
"@vueuse/core": "^10.9.0",
|
||||
"@vueuse/shared": "^10.9.0",
|
||||
"@volar/language-server": "2.4.23",
|
||||
"@vue/language-server": "3.0.5",
|
||||
"@vuepic/vue-datepicker": "^11.0.2",
|
||||
"@vueuse/core": "^13.5.0",
|
||||
"@vueuse/shared": "^13.5.0",
|
||||
"animate.css": "^4.1.1",
|
||||
"bootstrap": "^5.3.2",
|
||||
"bootstrap-icons": "^1.11.3",
|
||||
"cidr-tools": "^7.0.4",
|
||||
"cidr-tools": "^11.0.3",
|
||||
"css-color-converter": "^2.0.0",
|
||||
"dayjs": "^1.11.12",
|
||||
"electron-builder": "^24.13.3",
|
||||
"electron-builder": "^26.0.12",
|
||||
"fuse.js": "^7.0.0",
|
||||
"i": "^0.3.7",
|
||||
"is-cidr": "^5.0.3",
|
||||
"npm": "^10.5.0",
|
||||
"ol": "^10.2.1",
|
||||
"pinia": "^2.1.7",
|
||||
"pinia": "^3.0.3",
|
||||
"pinia-plugin-persistedstate": "^4.5.0",
|
||||
"qrcode": "^1.5.3",
|
||||
"qrcodejs": "^1.0.0",
|
||||
"simple-code-editor": "^2.0.9",
|
||||
"uuid": "^9.0.1",
|
||||
"vue": "^3.4.29",
|
||||
"uuid": "^11.1.0",
|
||||
"vue": "^3.5.17",
|
||||
"vue-chartjs": "^5.3.0",
|
||||
"vue-router": "^4.2.5"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@vitejs/plugin-vue": "^5.0.0",
|
||||
"vite": "^5.0.10"
|
||||
"@vitejs/plugin-vue": "^6.0.0",
|
||||
"vite": "^7.0.5"
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -8,13 +8,13 @@
|
|||
"short_name": "WGDashboard",
|
||||
"screenshots": [
|
||||
{
|
||||
"src": "https://donaldzou.github.io/WGDashboard-Documentation/images/sign-in.png",
|
||||
"src": "https://wgdashboard-resources.tor1.cdn.digitaloceanspaces.com/Documentation%20Images/sign-in.png",
|
||||
"sizes": "2880x1826",
|
||||
"type": "image/png",
|
||||
"form_factor": "wide"
|
||||
},
|
||||
{
|
||||
"src": "https://donaldzou.github.io/WGDashboard-Documentation/images/sign-in.png",
|
||||
"src": "https://wgdashboard-resources.tor1.cdn.digitaloceanspaces.com/Documentation%20Images/index.png",
|
||||
"sizes": "2880x1826",
|
||||
"type": "image/png"
|
||||
}
|
||||
|
|
|
|||
|
|
@ -4,10 +4,20 @@ import {DashboardConfigurationStore} from "@/stores/DashboardConfigurationStore.
|
|||
import {computed, watch} from "vue";
|
||||
const store = DashboardConfigurationStore();
|
||||
import "@/utilities/wireguard.js"
|
||||
import {fetchGet} from "@/utilities/fetch.js";
|
||||
store.initCrossServerConfiguration();
|
||||
if (window.IS_WGDASHBOARD_DESKTOP){
|
||||
store.IsElectronApp = true;
|
||||
store.CrossServerConfiguration.Enable = true;
|
||||
if (store.ActiveServerConfiguration){
|
||||
fetchGet("/api/locale", {}, (res) => {
|
||||
store.Locale = res.data
|
||||
})
|
||||
}
|
||||
}else{
|
||||
fetchGet("/api/locale", {}, (res) => {
|
||||
store.Locale = res.data
|
||||
})
|
||||
}
|
||||
watch(store.CrossServerConfiguration, () => {
|
||||
store.syncCrossServerConfiguration()
|
||||
|
|
@ -19,29 +29,31 @@ const route = useRoute()
|
|||
</script>
|
||||
|
||||
<template>
|
||||
<div style="z-index: 9999; height: 5px" class="position-absolute loadingBar top-0 start-0"></div>
|
||||
<nav class="navbar bg-dark sticky-top" data-bs-theme="dark" v-if="!route.meta.hideTopNav">
|
||||
<div class="container-fluid d-flex text-body align-items-center">
|
||||
<RouterLink to="/" class="navbar-brand mb-0 h1">
|
||||
<img src="/img/Logo-2-Rounded-512x512.png" alt="WGDashboard Logo" style="width: 32px">
|
||||
</RouterLink>
|
||||
<a role="button" class="navbarBtn text-body"
|
||||
@click="store.ShowNavBar = !store.ShowNavBar"
|
||||
style="line-height: 0; font-size: 2rem">
|
||||
<Transition name="fade2" mode="out-in">
|
||||
<i class="bi bi-list" v-if="!store.ShowNavBar"></i>
|
||||
<i class="bi bi-x-lg" v-else></i>
|
||||
<div class="h-100 bg-body" :data-bs-theme="store.Configuration?.Server.dashboard_theme">
|
||||
<div style="z-index: 9999; height: 5px" class="position-absolute loadingBar top-0 start-0"></div>
|
||||
<nav class="navbar bg-dark sticky-top" data-bs-theme="dark" v-if="!route.meta.hideTopNav">
|
||||
<div class="container-fluid d-flex text-body align-items-center">
|
||||
<RouterLink to="/" class="navbar-brand mb-0 h1">
|
||||
<img src="/img/Logo-2-Rounded-512x512.png" alt="WGDashboard Logo" style="width: 32px">
|
||||
</RouterLink>
|
||||
<a role="button" class="navbarBtn text-body"
|
||||
@click="store.ShowNavBar = !store.ShowNavBar"
|
||||
style="line-height: 0; font-size: 2rem">
|
||||
<Transition name="fade2" mode="out-in">
|
||||
<i class="bi bi-list" v-if="!store.ShowNavBar"></i>
|
||||
<i class="bi bi-x-lg" v-else></i>
|
||||
</Transition>
|
||||
</a>
|
||||
</div>
|
||||
</nav>
|
||||
<Suspense>
|
||||
<RouterView v-slot="{ Component }">
|
||||
<Transition name="app" mode="out-in" type="transition" appear>
|
||||
<Component :is="Component"></Component>
|
||||
</Transition>
|
||||
</a>
|
||||
</div>
|
||||
</nav>
|
||||
<Suspense>
|
||||
<RouterView v-slot="{ Component }">
|
||||
<Transition name="app" mode="out-in" type="transition" appear>
|
||||
<Component :is="Component"></Component>
|
||||
</Transition>
|
||||
</RouterView>
|
||||
</Suspense>
|
||||
</RouterView>
|
||||
</Suspense>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<style scoped>
|
||||
|
|
@ -52,7 +64,8 @@ const route = useRoute()
|
|||
.app-enter-from,
|
||||
.app-leave-to{
|
||||
opacity: 0;
|
||||
transform: scale(1.1);
|
||||
transform: scale(1.05);
|
||||
filter: blur(8px);
|
||||
}
|
||||
@media screen and (min-width: 768px) {
|
||||
.navbar{
|
||||
|
|
|
|||
|
|
@ -0,0 +1,96 @@
|
|||
<script setup lang="ts">
|
||||
import {computed, ref} from "vue";
|
||||
import {DashboardClientAssignmentStore} from "@/stores/DashboardClientAssignmentStore.js";
|
||||
import LocaleText from "@/components/text/localeText.vue";
|
||||
|
||||
const props = defineProps(['configuration', 'peers', 'clientAssignedPeers', 'availablePeerSearchString'])
|
||||
const emits = defineEmits(['assign', 'unassign'])
|
||||
const assignmentStore = DashboardClientAssignmentStore()
|
||||
const available = computed(() => {
|
||||
if (props.clientAssignedPeers){
|
||||
if (Object.keys(props.clientAssignedPeers).includes(props.configuration)){
|
||||
return props.peers.filter(
|
||||
x => {
|
||||
return !props.clientAssignedPeers[props.configuration].map(
|
||||
x => x.id
|
||||
).includes(x.id) &&
|
||||
(!props.availablePeerSearchString ||
|
||||
(props.availablePeerSearchString &&
|
||||
(x.id.includes(props.availablePeerSearchString) || x.name.includes(props.availablePeerSearchString))))
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
return props.availablePeerSearchString ? props.peers.filter(
|
||||
x => x.id.includes(props.availablePeerSearchString) || x.name.includes(props.availablePeerSearchString)
|
||||
) : props.peers
|
||||
})
|
||||
const confirmDelete = ref(false)
|
||||
const collapse = ref(false)
|
||||
</script>
|
||||
|
||||
<template>
|
||||
<div class="card rounded-0 border-0">
|
||||
<div
|
||||
@click="collapse = !collapse"
|
||||
role="button"
|
||||
class="card-header rounded-0 sticky-top z-5 bg-body-secondary border-0 border-bottom text-white d-flex">
|
||||
<small><samp>{{ configuration }}</samp></small>
|
||||
<a role="button" class="ms-auto text-white" >
|
||||
<i class="bi bi-chevron-compact-down" v-if="collapse"></i>
|
||||
<i class="bi bi-chevron-compact-up" v-else></i>
|
||||
</a>
|
||||
</div>
|
||||
<div class="card-body p-0" v-if="!collapse">
|
||||
<div class="list-group list-group-flush" >
|
||||
<div
|
||||
class="list-group-item d-flex border-bottom list-group-item-action d-flex align-items-center gap-3"
|
||||
:key="peer.id"
|
||||
v-for="peer in available" >
|
||||
<div v-if="!confirmDelete">
|
||||
<small class="text-body">
|
||||
<RouterLink
|
||||
class="text-decoration-none"
|
||||
target="_blank"
|
||||
:to="'/configuration/' + configuration +'/peers?id=' + encodeURIComponent(peer.id)">
|
||||
<samp>{{ peer.id }}</samp>
|
||||
</RouterLink>
|
||||
</small><br>
|
||||
<small class="text-muted">
|
||||
{{ peer.name ? peer.name : 'Untitled Peer'}}
|
||||
</small>
|
||||
</div>
|
||||
<div v-else>
|
||||
<small class="text-body">
|
||||
<LocaleText t="Are you sure to remove this peer?"></LocaleText>
|
||||
</small><br>
|
||||
<small class="text-muted">
|
||||
<samp>{{ peer.id }}</samp>
|
||||
</small>
|
||||
</div>
|
||||
<template v-if="clientAssignedPeers">
|
||||
<button
|
||||
@click="emits('assign', peer.id)"
|
||||
:class="{disabled: assignmentStore.assigning}"
|
||||
class="btn bg-success-subtle text-success-emphasis ms-auto">
|
||||
<i class="bi bi-plus-circle-fill" ></i>
|
||||
</button>
|
||||
</template>
|
||||
<button
|
||||
v-else
|
||||
@click="emits('unassign', peer.assignment_id)"
|
||||
:class="{disabled: assignmentStore.unassigning}"
|
||||
aria-label="Delete Assignment"
|
||||
class="btn bg-danger-subtle text-danger-emphasis ms-auto">
|
||||
<i class="bi bi-trash-fill"></i>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<style scoped>
|
||||
|
||||
</style>
|
||||
|
|
@ -0,0 +1,109 @@
|
|||
<script setup lang="ts" async>
|
||||
import {onMounted, ref, watch, watchEffect} from "vue";
|
||||
import { fetchGet } from "@/utilities/fetch.js"
|
||||
import {DashboardClientAssignmentStore} from "@/stores/DashboardClientAssignmentStore.js";
|
||||
import AvailablePeersGroup from "@/components/clientComponents/availablePeersGroup.vue";
|
||||
import LocaleText from "@/components/text/localeText.vue";
|
||||
const props = defineProps(['client', 'clientAssignedPeers'])
|
||||
const loading = ref(false)
|
||||
const assignmentStore = DashboardClientAssignmentStore()
|
||||
const manage = ref(false)
|
||||
const emits = defineEmits(['refresh'])
|
||||
|
||||
const assign = async (ConfigurationName, Peer, ClientID) => {
|
||||
await assignmentStore.assignClient(ConfigurationName, Peer, ClientID, false)
|
||||
emits('refresh')
|
||||
}
|
||||
|
||||
const unassign = async (AssignmentID) => {
|
||||
await assignmentStore.unassignClient(undefined, undefined, AssignmentID)
|
||||
emits('refresh')
|
||||
}
|
||||
|
||||
const availablePeerSearchString = ref("")
|
||||
|
||||
</script>
|
||||
|
||||
<template>
|
||||
<div>
|
||||
<div class="d-flex rounded-0 border-0 flex-column d-flex flex-column border-bottom pb-1" v-if="!loading">
|
||||
<div class="d-flex flex-column p-3 gap-3">
|
||||
<div class="d-flex align-items-center">
|
||||
<h6 class="mb-0">
|
||||
<LocaleText t="Assigned Peers"></LocaleText>
|
||||
<span class="text-bg-primary badge ms-2">
|
||||
{{ Object.keys(clientAssignedPeers).length }} <LocaleText :t="Object.keys(clientAssignedPeers).length > 1 ? 'Configurations' : 'Configuration'"></LocaleText>
|
||||
</span>
|
||||
<span class="text-bg-info badge ms-2">
|
||||
{{ Object.values(clientAssignedPeers).flat().length }} <LocaleText :t="Object.values(clientAssignedPeers).flat().length > 1 ? 'Peers' : 'Peer'"></LocaleText>
|
||||
</span>
|
||||
</h6>
|
||||
<button class="btn btn-sm bg-primary-subtle text-primary-emphasis rounded-3 ms-auto"
|
||||
@click="manage = !manage">
|
||||
<template v-if="!manage">
|
||||
<i class="bi bi-list-check me-2"></i>
|
||||
<LocaleText t="Manage"></LocaleText>
|
||||
</template>
|
||||
<template v-else>
|
||||
<i class="bi bi-check me-2"></i>
|
||||
<LocaleText t="Done"></LocaleText>
|
||||
</template>
|
||||
</button>
|
||||
</div>
|
||||
<div class="rounded-3 availablePeers border h-100 overflow-scroll flex-grow-1 d-flex flex-column">
|
||||
<AvailablePeersGroup
|
||||
:configuration="configuration"
|
||||
:peers="peers"
|
||||
@unassign="async (id) => await unassign(id)"
|
||||
v-for="(peers, configuration) in clientAssignedPeers">
|
||||
</AvailablePeersGroup>
|
||||
<h6 class="text-muted m-auto p-3" v-if="Object.keys(clientAssignedPeers).length === 0">
|
||||
<LocaleText t="No peer assigned to this client"></LocaleText>
|
||||
</h6>
|
||||
</div>
|
||||
</div>
|
||||
<div style="height: 500px" class="d-flex flex-column p-3" v-if="manage">
|
||||
<div class="availablePeers border h-100 card rounded-3">
|
||||
<div class="card-header sticky-top p-3">
|
||||
<h6 class="mb-0 d-flex align-items-center">
|
||||
<LocaleText t="Available Peers"></LocaleText>
|
||||
</h6>
|
||||
</div>
|
||||
<div class="card-body p-0 overflow-scroll">
|
||||
<AvailablePeersGroup
|
||||
:availablePeerSearchString="availablePeerSearchString"
|
||||
:configuration="configuration"
|
||||
:clientAssignedPeers="clientAssignedPeers"
|
||||
:peers="peers"
|
||||
:key="configuration"
|
||||
@assign="async (id) => await assign(configuration, id, props.client.ClientID)"
|
||||
v-for="(peers, configuration) in assignmentStore.allConfigurationsPeers">
|
||||
</AvailablePeersGroup>
|
||||
<h6 class="text-muted m-auto" v-if="Object.keys(assignmentStore.allConfigurationsPeers).length === 0">
|
||||
<LocaleText t="No peer is available to assign"></LocaleText>
|
||||
</h6>
|
||||
</div>
|
||||
<div class="card-footer d-flex gap-2 p-3 align-items-center justify-content-end">
|
||||
<label for="availablePeerSearchString">
|
||||
<i class="bi bi-search me-2"></i>
|
||||
</label>
|
||||
<input
|
||||
id="availablePeerSearchString"
|
||||
v-model="availablePeerSearchString"
|
||||
class="form-control form-control-sm rounded-3 w-auto" type="text">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div v-else>
|
||||
<div class="p-3 placeholder-glow border-bottom">
|
||||
<h6 class="placeholder w-100 rounded-3"></h6>
|
||||
<div class="placeholder w-100 rounded-3" style="height: 400px"></div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<style scoped>
|
||||
|
||||
</style>
|
||||
|
|
@ -0,0 +1,62 @@
|
|||
<script setup lang="ts">
|
||||
import LocaleText from "@/components/text/localeText.vue";
|
||||
import { fetchPost } from "@/utilities/fetch"
|
||||
import {ref} from "vue";
|
||||
import { DashboardConfigurationStore } from "@/stores/DashboardConfigurationStore.js"
|
||||
|
||||
const props = defineProps(['client'])
|
||||
const deleting = ref(false)
|
||||
const confirmDelete = ref(false)
|
||||
const emits = defineEmits(['deleteSuccess'])
|
||||
const dashboardConfigurationStore = DashboardConfigurationStore()
|
||||
const deleteClient = async () => {
|
||||
deleting.value = true
|
||||
await fetchPost("/api/clients/deleteClient", {
|
||||
ClientID: props.client.ClientID
|
||||
}, (res) => {
|
||||
deleting.value = false
|
||||
if (res.status){
|
||||
emits("deleteSuccess")
|
||||
dashboardConfigurationStore.newMessage("Server", "Delete client successfully", "success")
|
||||
}else {
|
||||
dashboardConfigurationStore.newMessage("Server", "Failed to delete client", "danger")
|
||||
}
|
||||
})
|
||||
}
|
||||
</script>
|
||||
|
||||
<template>
|
||||
<div class="p-3 d-flex gap-3 flex-column border-bottom">
|
||||
<div class="d-flex align-items-center gap-2">
|
||||
<h6 class="mb-0">
|
||||
<LocaleText t="Delete Client" v-if="!confirmDelete"></LocaleText>
|
||||
<LocaleText t="Are you sure to delete this client?" v-else></LocaleText>
|
||||
</h6>
|
||||
<button class="btn btn-sm bg-danger-subtle text-danger-emphasis rounded-3 ms-auto"
|
||||
v-if="!confirmDelete"
|
||||
@click="confirmDelete = true"
|
||||
>
|
||||
<i class="bi bi-trash-fill me-2"></i>
|
||||
<LocaleText t="Delete"></LocaleText>
|
||||
</button>
|
||||
|
||||
<template v-if="confirmDelete">
|
||||
<button
|
||||
@click="deleteClient"
|
||||
class="btn btn-sm bg-danger-subtle text-danger-emphasis rounded-3 ms-auto">
|
||||
<i class="bi bi-trash-fill me-2"></i>
|
||||
<LocaleText t="Yes"></LocaleText>
|
||||
</button>
|
||||
<button class="btn btn-sm bg-secondary-subtle text-secondary-emphasis rounded-3"
|
||||
v-if="confirmDelete" @click="confirmDelete = false">
|
||||
<i class="bi bi-x-lg me-2"></i>
|
||||
<LocaleText t="No"></LocaleText>
|
||||
</button>
|
||||
</template>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<style scoped>
|
||||
|
||||
</style>
|
||||
|
|
@ -0,0 +1,57 @@
|
|||
<script setup lang="ts">
|
||||
import {computed, onMounted} from "vue";
|
||||
import LocaleText from "@/components/text/localeText.vue";
|
||||
import {useRoute} from "vue-router";
|
||||
|
||||
const props = defineProps(['groupName', 'clients', 'searchString'])
|
||||
|
||||
const getClients = computed(() => {
|
||||
    const s = (props.searchString || "").toLowerCase()
|
||||
if (!props.searchString){
|
||||
return props.clients
|
||||
}
|
||||
return props.clients.filter(
|
||||
x =>
|
||||
(x.ClientID && x.ClientID.toLowerCase().includes(s)) ||
|
||||
(x.Email && x.Email.toLowerCase().includes(s) ||
|
||||
(x.Name && x.Name.toLowerCase().includes(s)))
|
||||
)
|
||||
})
|
||||
const route = useRoute()
|
||||
onMounted(() => {
|
||||
document.querySelector(".clientList .active")?.scrollIntoView()
|
||||
})
|
||||
</script>
|
||||
|
||||
<template>
|
||||
<div class="card rounded-0 border-0">
|
||||
<div class="card-header d-flex align-items-center rounded-0">
|
||||
<h6 class="my-2">{{ groupName }}</h6>
|
||||
<span class="badge text-bg-primary ms-auto">
|
||||
<LocaleText :t="getClients.length + ' Client' + (getClients.length > 1 ? 's': '')"></LocaleText>
|
||||
</span>
|
||||
</div>
|
||||
<div class="card-body p-0">
|
||||
<div class="list-group list-group-flush clientList">
|
||||
<RouterLink
|
||||
:key="client.ClientID"
|
||||
:id="'client_' + client.ClientID"
|
||||
active-class="active"
|
||||
:to="{ name: 'Client Viewer', params: { id: client.ClientID } }"
|
||||
class="list-group-item d-flex flex-column border-bottom list-group-item-action client"
|
||||
v-for="client in getClients" >
|
||||
<small class="text-body">
|
||||
{{ client.Email }}
|
||||
</small>
|
||||
<small class="text-muted">
|
||||
{{ client.Name ? client.Name : 'No Name'}}
|
||||
</small>
|
||||
</RouterLink>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<style scoped>
|
||||
|
||||
</style>
|
||||
|
|
@ -0,0 +1,11 @@
|
|||
<script setup lang="ts">
|
||||
|
||||
</script>
|
||||
|
||||
<template>
|
||||
|
||||
</template>
|
||||
|
||||
<style scoped>
|
||||
|
||||
</style>
|
||||
|
|
@ -0,0 +1,96 @@
|
|||
<script setup lang="ts">
|
||||
import LocaleText from "@/components/text/localeText.vue";
|
||||
import { fetchGet, fetchPost } from "@/utilities/fetch.js"
|
||||
import {ref} from "vue";
|
||||
import {DashboardConfigurationStore } from "@/stores/DashboardConfigurationStore.js"
|
||||
import {useRouter} from "vue-router";
|
||||
const props = defineProps(['client'])
|
||||
|
||||
|
||||
const alert = ref(false)
|
||||
const alertStatus = ref(false)
|
||||
const alertMessage = ref("")
|
||||
const resetting = ref(false)
|
||||
const store = DashboardConfigurationStore();
|
||||
const router = useRouter()
|
||||
|
||||
const getUrl = (token) => {
|
||||
const crossServer = store.getActiveCrossServer();
|
||||
if(crossServer){
|
||||
return new URL('/client/#/reset_password?token=' + token, crossServer.host).href
|
||||
}
|
||||
return new URL('/client/#/reset_password?token=' + token, window.location.href).href
|
||||
}
|
||||
|
||||
const sendResetLink = async () => {
|
||||
resetting.value = true
|
||||
let smtpReady = false;
|
||||
let token = undefined;
|
||||
await fetchPost('/api/clients/generatePasswordResetLink', {
|
||||
ClientID: props.client.ClientID
|
||||
},async (res) => {
|
||||
if (res.status){
|
||||
token = res.data
|
||||
alertStatus.value = true
|
||||
await fetchGet('/api/email/ready', {}, (res) => {
|
||||
smtpReady = res.status
|
||||
});
|
||||
if (smtpReady){
|
||||
let body = {
|
||||
"Receiver": props.client.Email,
|
||||
"Subject": "[WGDashboard | Client] Reset Password",
|
||||
"Body":
|
||||
`Hi${props.client.Name ? ' ' + props.client.Name: ''},\n\nWe received a request to reset the password for your account. You can reset your password by visiting the link below:\n\n${getUrl(token)}\n\nThis link will expire in 30 minutes for your security. If you didn’t request a password reset, you can safely ignore this email—your current password will remain unchanged.\n\nIf you need help, feel free to contact support.\n\nBest regards,\nWGDashboard`
|
||||
}
|
||||
await fetchPost('/api/email/send', body, (res) => {
|
||||
if (res.status){
|
||||
alertMessage.value = `Send email success.`
|
||||
alert.value = true;
|
||||
}else{
|
||||
alertMessage.value = `Send email failed.`
|
||||
alertStatus.value = false;
|
||||
alert.value = true;
|
||||
}
|
||||
});
|
||||
}else{
|
||||
alertMessage.value = `Please share this URL to your client to reset the password: ${getUrl(token)}`
|
||||
alert.value = true;
|
||||
|
||||
}
|
||||
}else{
|
||||
alertStatus.value = false
|
||||
alertMessage.value = res.message
|
||||
alert.value = true
|
||||
}
|
||||
})
|
||||
resetting.value = false;
|
||||
}
|
||||
|
||||
</script>
|
||||
|
||||
<template>
|
||||
<div class="p-3 d-flex gap-3 flex-column border-bottom">
|
||||
<div class="d-flex align-items-center">
|
||||
<h6 class="mb-0">
|
||||
<LocaleText t="Reset Password"></LocaleText>
|
||||
</h6>
|
||||
<button class="btn btn-sm bg-primary-subtle text-primary-emphasis rounded-3 ms-auto"
|
||||
@click="sendResetLink()"
|
||||
:class="{disabled: resetting}"
|
||||
>
|
||||
<i class="bi bi-send me-2"></i>
|
||||
<LocaleText t="Send Password Reset Link" v-if="!resetting"></LocaleText>
|
||||
<LocaleText t="Sending..." v-else></LocaleText>
|
||||
</button>
|
||||
</div>
|
||||
<div class="alert rounded-3 mb-0"
|
||||
:class="[alertStatus ? 'alert-success' : 'alert-danger']"
|
||||
v-if="alert">
|
||||
{{ alertMessage }}
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<style scoped>
|
||||
|
||||
</style>
|
||||
|
|
@ -0,0 +1,63 @@
|
|||
<script setup lang="ts">
|
||||
import {ref} from "vue"
|
||||
import LocaleText from "@/components/text/localeText.vue";
|
||||
import { fetchGet } from "@/utilities/fetch.js"
|
||||
import { DashboardConfigurationStore } from "@/stores/DashboardConfigurationStore"
|
||||
|
||||
const props = defineProps(['mode'])
|
||||
const dashboardConfigurationStore = DashboardConfigurationStore()
|
||||
const oidcStatus = ref(false)
|
||||
const oidcStatusLoading = ref(false)
|
||||
|
||||
const getStatus = async () => {
|
||||
await fetchGet("/api/oidc/status", {
|
||||
mode: props.mode
|
||||
}, (res) => {
|
||||
oidcStatus.value = res.data
|
||||
oidcStatusLoading.value = false
|
||||
})
|
||||
}
|
||||
await getStatus()
|
||||
const toggle = async () => {
|
||||
oidcStatusLoading.value = true
|
||||
await fetchGet('/api/oidc/toggle', {
|
||||
mode: props.mode
|
||||
}, (res) => {
|
||||
if (!res.status){
|
||||
oidcStatus.value = !oidcStatus.value
|
||||
dashboardConfigurationStore.newMessage("Server", res.message, "danger")
|
||||
}
|
||||
oidcStatusLoading.value = false
|
||||
})
|
||||
}
|
||||
</script>
|
||||
|
||||
<template>
|
||||
<div class="d-flex flex-column gap-2">
|
||||
<div class="d-flex align-items-center">
|
||||
<h6 class="mb-0">
|
||||
<LocaleText t="OpenID Connect (OIDC)"></LocaleText>
|
||||
</h6>
|
||||
<div class="form-check form-switch ms-auto">
|
||||
<label class="form-check-label" for="oidc_switch">
|
||||
<LocaleText :t="oidcStatus ? 'Enabled':'Disabled'"></LocaleText>
|
||||
</label>
|
||||
<input
|
||||
:disabled="oidcStatusLoading"
|
||||
v-model="oidcStatus"
|
||||
@change="toggle()"
|
||||
class="form-check-input" type="checkbox" role="switch" id="oidc_switch">
|
||||
</div>
|
||||
</div>
|
||||
<!-- <div>-->
|
||||
<!-- <div class="alert alert-dark rounded-3 mb-0">-->
|
||||
<!-- <LocaleText t="Due to security reason, in order to edit OIDC configuration, you will need to edit "></LocaleText>-->
|
||||
<!-- <code>wg-dashboard-oidc-providers.json</code> <LocaleText t="directly, then restart WGDashboard to apply the latest settings."></LocaleText>-->
|
||||
<!-- </div>-->
|
||||
<!-- </div>-->
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<style scoped>
|
||||
|
||||
</style>
|
||||
|
|
@ -0,0 +1,57 @@
|
|||
<script setup lang="ts">
|
||||
import { ref, reactive } from "vue"
|
||||
import LocaleText from "@/components/text/localeText.vue";
|
||||
import OidcSettings from "@/components/clientComponents/clientSettingComponents/oidcSettings.vue";
|
||||
import { fetchGet } from "@/utilities/fetch.js"
|
||||
const emits = defineEmits(['close'])
|
||||
import { DashboardConfigurationStore } from "@/stores/DashboardConfigurationStore"
|
||||
const dashboardConfigurationStore = DashboardConfigurationStore()
|
||||
const loading = ref(false)
|
||||
const values = reactive({
|
||||
enableClients: dashboardConfigurationStore.Configuration.Clients.enable
|
||||
})
|
||||
|
||||
const toggling = ref(false)
|
||||
const toggleClientSideApp = async () => {
|
||||
toggling.value = true
|
||||
await fetchGet("/api/clients/toggleStatus", {}, (res) => {
|
||||
values.enableClients = res.data
|
||||
})
|
||||
toggling.value = false
|
||||
}
|
||||
</script>
|
||||
|
||||
<template>
|
||||
<div class="position-absolute w-100 h-100 top-0 start-0 z-1 rounded-3 d-flex p-2" style="background-color: #00000070; z-index: 9999">
|
||||
<div class="card m-auto rounded-3" style="width: 700px">
|
||||
<div class="card-header bg-transparent d-flex align-items-center gap-2 border-0 p-4 pb-2">
|
||||
<h4 class="mb-0">
|
||||
<LocaleText t="Clients Settings"></LocaleText>
|
||||
</h4>
|
||||
<button type="button" class="btn-close ms-auto" @click="emits('close')"></button>
|
||||
</div>
|
||||
<div class="card-body px-4 d-flex gap-3 flex-column">
|
||||
<div class="d-flex align-items-center">
|
||||
<h6 class="mb-0">
|
||||
<LocaleText t="Client Side App"></LocaleText>
|
||||
</h6>
|
||||
<div class="form-check form-switch ms-auto">
|
||||
<label class="form-check-label" for="oidc_switch">
|
||||
<LocaleText :t="values.enableClients ? 'Enabled':'Disabled'"></LocaleText>
|
||||
</label>
|
||||
<input
|
||||
:disabled="oidcStatusLoading"
|
||||
v-model="values.enableClients"
|
||||
@change="toggleClientSideApp()"
|
||||
class="form-check-input" type="checkbox" role="switch" id="oidc_switch">
|
||||
</div>
|
||||
</div>
|
||||
<OidcSettings mode="Client"></OidcSettings>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<style scoped>
|
||||
|
||||
</style>
|
||||
|
|
@ -0,0 +1,143 @@
|
|||
<script setup lang="ts" async>
|
||||
import {useRoute, useRouter} from "vue-router";
|
||||
import { fetchGet, fetchPost } from "@/utilities/fetch.js"
|
||||
|
||||
|
||||
import {DashboardClientAssignmentStore} from "@/stores/DashboardClientAssignmentStore.js";
|
||||
import { DashboardConfigurationStore } from "@/stores/DashboardConfigurationStore.js"
|
||||
|
||||
import {computed, reactive, ref, watch} from "vue";
|
||||
import LocaleText from "@/components/text/localeText.vue";
|
||||
import ClientAssignedPeers from "@/components/clientComponents/clientAssignedPeers.vue";
|
||||
import ClientResetPassword from "@/components/clientComponents/clientResetPassword.vue";
|
||||
import ClientDelete from "@/components/clientComponents/clientDelete.vue";
|
||||
const assignmentStore = DashboardClientAssignmentStore()
|
||||
const dashboardConfigurationStore = DashboardConfigurationStore()
|
||||
|
||||
const route = useRoute()
|
||||
const router = useRouter()
|
||||
const client = computed(() => {
|
||||
return assignmentStore.getClientById(route.params.id)
|
||||
})
|
||||
const clientAssignedPeers = ref({})
|
||||
const getAssignedPeers = async () => {
|
||||
await fetchGet('/api/clients/assignedPeers', {
|
||||
ClientID: client.value.ClientID
|
||||
}, (res) => {
|
||||
clientAssignedPeers.value = res.data;
|
||||
})
|
||||
}
|
||||
const emits = defineEmits(['deleteSuccess'])
|
||||
|
||||
const clientProfile = reactive({
|
||||
Name: undefined
|
||||
})
|
||||
|
||||
if (client.value){
|
||||
watch(() => client.value.ClientID, async () => {
|
||||
clientProfile.Name = client.value.Name;
|
||||
await getAssignedPeers()
|
||||
})
|
||||
await getAssignedPeers()
|
||||
clientProfile.Name = client.value.Name
|
||||
}else{
|
||||
router.push('/clients')
|
||||
dashboardConfigurationStore.newMessage("WGDashboard", "Client does not exist", "danger")
|
||||
}
|
||||
|
||||
|
||||
|
||||
const updatingProfile = ref(false)
|
||||
const updateProfile = async () => {
|
||||
updatingProfile.value = true
|
||||
await fetchPost("/api/clients/updateProfileName", {
|
||||
ClientID: client.value.ClientID,
|
||||
Name: clientProfile.Name
|
||||
}, (res) => {
|
||||
if (res.status){
|
||||
client.value.Name = clientProfile.Name;
|
||||
dashboardConfigurationStore.newMessage("Server", "Client name update success", "success")
|
||||
}else{
|
||||
clientProfile.Name = client.value.Name;
|
||||
dashboardConfigurationStore.newMessage("Server", "Client name update failed", "danger")
|
||||
}
|
||||
updatingProfile.value = false
|
||||
})
|
||||
}
|
||||
const deleteSuccess = async () => {
|
||||
await router.push('/clients')
|
||||
await assignmentStore.getClients()
|
||||
}
|
||||
|
||||
</script>
|
||||
|
||||
<template>
|
||||
<div class="text-body d-flex flex-column overflow-y-scroll h-100" v-if="client" :key="client.ClientID">
|
||||
<div class="p-4 border-bottom bg-body-tertiary z-0">
|
||||
<div class="mb-3 backLink">
|
||||
<RouterLink to="/clients" class="text-body text-decoration-none">
|
||||
<i class="bi bi-arrow-left me-2"></i>
|
||||
Back</RouterLink>
|
||||
</div>
|
||||
<small class="text-muted">
|
||||
<LocaleText t="Email"></LocaleText>
|
||||
</small>
|
||||
<h1>
|
||||
{{ client.Email }}
|
||||
</h1>
|
||||
<div class="d-flex flex-column gap-2">
|
||||
<div class="d-flex align-items-center">
|
||||
<small class="text-muted">
|
||||
<LocaleText t="Client ID"></LocaleText>
|
||||
</small>
|
||||
<small class="ms-auto">
|
||||
<samp>{{ client.ClientID }}</samp>
|
||||
</small>
|
||||
</div>
|
||||
<div class="d-flex align-items-center gap-2">
|
||||
<small class="text-muted">
|
||||
<LocaleText t="Client Name"></LocaleText>
|
||||
</small>
|
||||
<input class="form-control form-control-sm rounded-3 ms-auto"
|
||||
style="width: 300px"
|
||||
type="text" v-model="clientProfile.Name">
|
||||
<button
|
||||
@click="updateProfile()"
|
||||
aria-label="Save Client Name"
|
||||
class="btn btn-sm rounded-3 bg-success-subtle border-success-subtle text-success-emphasis">
|
||||
<i class="bi bi-save-fill"></i>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div style="flex: 1 0 0; overflow-y: scroll;">
|
||||
<ClientAssignedPeers
|
||||
@refresh="getAssignedPeers()"
|
||||
:clientAssignedPeers="clientAssignedPeers"
|
||||
:client="client"></ClientAssignedPeers>
|
||||
<!-- <ClientResetPassword-->
|
||||
<!-- :client="client" v-if="client.ClientGroup === 'Local'"></ClientResetPassword>-->
|
||||
<ClientDelete
|
||||
@deleteSuccess="deleteSuccess()"
|
||||
:client="client"></ClientDelete>
|
||||
</div>
|
||||
</div>
|
||||
<div v-else class="d-flex w-100 h-100 text-muted">
|
||||
<div class="m-auto text-center">
|
||||
<h1>
|
||||
<i class="bi bi-person-x"></i>
|
||||
</h1>
|
||||
<p>
|
||||
<LocaleText t="Client does not exist"></LocaleText>
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<style scoped>
|
||||
@media screen and (min-width: 576px) {
|
||||
.backLink{
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
|
|
@ -1,7 +1,7 @@
|
|||
<script setup>
|
||||
import dayjs from "dayjs";
|
||||
import {computed, ref} from "vue";
|
||||
import {fetchGet, fetchPost} from "@/utilities/fetch.js";
|
||||
import {fetchGet, fetchPost, getUrl} from "@/utilities/fetch.js";
|
||||
import {useRoute} from "vue-router";
|
||||
import {DashboardConfigurationStore} from "@/stores/DashboardConfigurationStore.js";
|
||||
import LocaleText from "@/components/text/localeText.vue";
|
||||
|
|
@ -51,7 +51,7 @@ const downloadBackup = () => {
|
|||
backupFileName: props.b.filename
|
||||
}, (res) => {
|
||||
if (res.status){
|
||||
window.open(`/fileDownload?file=${res.data}`, '_blank')
|
||||
window.open(getUrl(`/fileDownload?file=${res.data}`), '_blank')
|
||||
}
|
||||
})
|
||||
}
|
||||
|
|
|
|||
|
|
@ -0,0 +1,57 @@
|
|||
<script setup lang="ts">
|
||||
import {ref} from "vue";
|
||||
import { fetchPost } from "@/utilities/fetch.js"
|
||||
|
||||
const props = defineProps(['configuration'])
|
||||
const description = ref(props.configuration.Info.Description)
|
||||
const showStatus = ref(false)
|
||||
const status = ref(false)
|
||||
|
||||
const updateDescription = async () => {
|
||||
await fetchPost("/api/updateWireguardConfigurationInfo", {
|
||||
Name: props.configuration.Name,
|
||||
Key: "Description",
|
||||
Value: description.value
|
||||
}, (res) => {
|
||||
status.value = res.status
|
||||
toggleStatus()
|
||||
})
|
||||
}
|
||||
|
||||
const toggleSuccess = () => {
|
||||
status.value = true
|
||||
toggleStatus()
|
||||
}
|
||||
|
||||
const toggleFail = () => {
|
||||
status.value = false
|
||||
toggleStatus()
|
||||
}
|
||||
|
||||
const toggleStatus = () => {
|
||||
showStatus.value = true
|
||||
setTimeout(() => {
|
||||
showStatus.value = false
|
||||
}, 3000)
|
||||
}
|
||||
</script>
|
||||
|
||||
<template>
|
||||
<div class="d-flex gap-1 flex-column">
|
||||
<label for="configurationDescription">
|
||||
<small style="white-space: nowrap" class="text-muted">
|
||||
<i class="bi bi-pencil-fill me-2"></i>Notes
|
||||
</small>
|
||||
</label>
|
||||
<input type="text"
|
||||
:class="[showStatus ? [status ? 'is-valid':'is-invalid'] : undefined]"
|
||||
id="configurationDescription"
|
||||
v-model="description"
|
||||
@change="updateDescription()"
|
||||
class="form-control rounded-3 bg-transparent form-control-sm">
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<style scoped>
|
||||
|
||||
</style>
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
<script setup>
|
||||
import LocaleText from "@/components/text/localeText.vue";
|
||||
import {onMounted, reactive, ref, useTemplateRef, watch} from "vue";
|
||||
import {reactive, ref, watch} from "vue";
|
||||
import {WireguardConfigurationsStore} from "@/stores/WireguardConfigurationsStore.js";
|
||||
import {fetchPost} from "@/utilities/fetch.js";
|
||||
import {DashboardConfigurationStore} from "@/stores/DashboardConfigurationStore.js";
|
||||
|
|
@ -10,6 +10,8 @@ import EditRawConfigurationFile
|
|||
from "@/components/configurationComponents/editConfigurationComponents/editRawConfigurationFile.vue";
|
||||
import DeleteConfiguration from "@/components/configurationComponents/deleteConfiguration.vue";
|
||||
import ConfigurationBackupRestore from "@/components/configurationComponents/configurationBackupRestore.vue";
|
||||
import EditPeerSettingsOverride
|
||||
from "@/components/configurationComponents/editConfigurationComponents/editPeerSettingsOverride.vue";
|
||||
const props = defineProps({
|
||||
configurationInfo: Object
|
||||
})
|
||||
|
|
@ -114,7 +116,6 @@ const deleteConfigurationModal = ref(false)
|
|||
@close="updateConfigurationName = false"
|
||||
:configuration-name="data.Name"
|
||||
v-if="updateConfigurationName"></UpdateConfigurationName>
|
||||
|
||||
<template v-else>
|
||||
<hr>
|
||||
<div class="d-flex align-items-center gap-3">
|
||||
|
|
@ -211,18 +212,15 @@ const deleteConfigurationModal = ref(false)
|
|||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
|
||||
|
||||
<div class="d-flex align-items-center gap-2 mt-4">
|
||||
<button class="btn bg-secondary-subtle border-secondary-subtle text-secondary-emphasis rounded-3 shadow ms-auto"
|
||||
<div class="d-flex align-items-center gap-2 mt-1">
|
||||
<button class="btn btn-sm bg-secondary-subtle border-secondary-subtle text-secondary-emphasis rounded-3 shadow ms-auto"
|
||||
@click="resetForm()"
|
||||
:disabled="!dataChanged || saving">
|
||||
<i class="bi bi-arrow-clockwise me-2"></i>
|
||||
<LocaleText t="Reset"></LocaleText>
|
||||
</button>
|
||||
<button class="btn bg-primary-subtle border-primary-subtle text-primary-emphasis rounded-3 shadow"
|
||||
<button class="btn btn-sm bg-primary-subtle border-primary-subtle text-primary-emphasis rounded-3 shadow"
|
||||
:disabled="!dataChanged || saving"
|
||||
@click="saveForm()"
|
||||
>
|
||||
|
|
@ -231,6 +229,8 @@ const deleteConfigurationModal = ref(false)
|
|||
</button>
|
||||
</div>
|
||||
<hr>
|
||||
<EditPeerSettingsOverride :configuration="configurationInfo"></EditPeerSettingsOverride>
|
||||
<hr>
|
||||
<h5 class="mb-3">
|
||||
<LocaleText t="Danger Zone"></LocaleText>
|
||||
</h5>
|
||||
|
|
@ -255,7 +255,6 @@ const deleteConfigurationModal = ref(false)
|
|||
<LocaleText t="Delete Configuration"></LocaleText>
|
||||
</button>
|
||||
</div>
|
||||
|
||||
</template>
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
|||
|
|
@ -0,0 +1,152 @@
|
|||
<script setup lang="ts">
|
||||
import LocaleText from "@/components/text/localeText.vue";
|
||||
import { fetchPost } from "@/utilities/fetch.js"
|
||||
import {onMounted, reactive, ref} from "vue";
|
||||
const props = defineProps(['configuration'])
|
||||
const saving = ref(false)
|
||||
const overridePeerSettings = ref({...props.configuration.Info.OverridePeerSettings})
|
||||
const edited = ref(false)
|
||||
const errorMsg = ref("")
|
||||
|
||||
onMounted(() => {
|
||||
document.querySelectorAll("#editPeerSettingsOverride input").forEach(
|
||||
x => x.addEventListener("change", () => {
|
||||
edited.value = true
|
||||
})
|
||||
)
|
||||
})
|
||||
|
||||
const resetForm = () => {
|
||||
overridePeerSettings.value = props.configuration.Info.OverridePeerSettings
|
||||
edited.value = false
|
||||
}
|
||||
|
||||
const submitForm = async () => {
|
||||
document.querySelectorAll("#editPeerSettingsOverride input").forEach(
|
||||
x => x.classList.remove("is-invalid", "is-valid")
|
||||
)
|
||||
await fetchPost("/api/updateWireguardConfigurationInfo", {
|
||||
Name: props.configuration.Name,
|
||||
Key: "OverridePeerSettings",
|
||||
Value: overridePeerSettings.value
|
||||
}, (res) => {
|
||||
if (res.status){
|
||||
edited.value = false
|
||||
props.configuration.Info.OverridePeerSettings = overridePeerSettings.value
|
||||
document.querySelectorAll("#editPeerSettingsOverride input").forEach(
|
||||
x => x.classList.add("is-valid")
|
||||
)
|
||||
}else{
|
||||
errorMsg.value = res.message
|
||||
document.querySelector(`#override_${res.data}`).classList.add("is-invalid")
|
||||
}
|
||||
})
|
||||
}
|
||||
</script>
|
||||
|
||||
<template>
|
||||
<div id="editPeerSettingsOverride">
|
||||
<h5 class="mb-0">
|
||||
<LocaleText t="Override Peer Settings"></LocaleText>
|
||||
</h5>
|
||||
<h6 class="mb-3 text-muted">
|
||||
<small>
|
||||
<LocaleText t="Only apply to peers in this configuration"></LocaleText>
|
||||
</small>
|
||||
</h6>
|
||||
<div class="d-flex gap-2 flex-column">
|
||||
<div>
|
||||
<label for="override_DNS" class="form-label">
|
||||
<small class="text-muted">
|
||||
<LocaleText t="DNS"></LocaleText>
|
||||
</small>
|
||||
</label>
|
||||
<input type="text" class="form-control form-control-sm rounded-3"
|
||||
:disabled="saving"
|
||||
v-model="overridePeerSettings.DNS"
|
||||
id="override_DNS">
|
||||
<div class="invalid-feedback">{{ errorMsg }}</div>
|
||||
</div>
|
||||
<div>
|
||||
<label for="override_EndpointAllowedIPs" class="form-label">
|
||||
<small class="text-muted">
|
||||
<LocaleText t="Endpoint Allowed IPs"></LocaleText>
|
||||
</small>
|
||||
</label>
|
||||
<input type="text" class="form-control form-control-sm rounded-3"
|
||||
:disabled="saving"
|
||||
v-model="overridePeerSettings.EndpointAllowedIPs"
|
||||
id="override_EndpointAllowedIPs">
|
||||
<div class="invalid-feedback">{{ errorMsg }}</div>
|
||||
</div>
|
||||
<div>
|
||||
<label for="override_ListenPort" class="form-label">
|
||||
<small class="text-muted">
|
||||
<LocaleText t="Listen Port"></LocaleText>
|
||||
</small>
|
||||
</label>
|
||||
<input type="text" class="form-control form-control-sm rounded-3"
|
||||
:disabled="saving"
|
||||
v-model="overridePeerSettings.ListenPort"
|
||||
id="override_ListenPort">
|
||||
<div class="invalid-feedback">{{ errorMsg }}</div>
|
||||
</div>
|
||||
<div>
|
||||
<label for="override_MTU" class="form-label">
|
||||
<small class="text-muted">
|
||||
<LocaleText t="MTU"></LocaleText>
|
||||
</small>
|
||||
</label>
|
||||
<input type="text"
|
||||
class="form-control form-control-sm rounded-3"
|
||||
:disabled="saving"
|
||||
v-model="overridePeerSettings.MTU"
|
||||
id="override_MTU">
|
||||
<div class="invalid-feedback">{{ errorMsg }}</div>
|
||||
</div>
|
||||
<div>
|
||||
<label for="override_PeerRemoteEndpoint" class="form-label">
|
||||
<small class="text-muted">
|
||||
<LocaleText t="Peer Remote Endpoint"></LocaleText>
|
||||
</small>
|
||||
</label>
|
||||
<input type="text" class="form-control form-control-sm rounded-3"
|
||||
:disabled="saving"
|
||||
v-model="overridePeerSettings.PeerRemoteEndpoint"
|
||||
id="override_PeerRemoteEndpoint">
|
||||
</div>
|
||||
<div>
|
||||
<label for="override_persistent_keepalive" class="form-label">
|
||||
<small class="text-muted">
|
||||
<LocaleText t="Persistent Keepalive"></LocaleText>
|
||||
</small>
|
||||
</label>
|
||||
<input type="text" class="form-control form-control-sm rounded-3"
|
||||
:disabled="saving"
|
||||
v-model="overridePeerSettings.PersistentKeepalive"
|
||||
id="override_PersistentKeepalive">
|
||||
<div class="invalid-feedback">{{ errorMsg }}</div>
|
||||
</div>
|
||||
<div class="d-flex mt-1 gap-2">
|
||||
<button
|
||||
:class="{disabled: !edited}"
|
||||
@click="resetForm()"
|
||||
class="btn btn-sm bg-secondary-subtle border-secondary-subtle text-secondary-emphasis rounded-3 shadow ms-auto">
|
||||
<i class="bi bi-arrow-clockwise me-2"></i>
|
||||
<LocaleText t="Reset"></LocaleText>
|
||||
</button>
|
||||
<button
|
||||
:class="{disabled: !edited}"
|
||||
@click="submitForm()"
|
||||
class="btn btn-sm bg-primary-subtle border-primary-subtle text-primary-emphasis rounded-3 shadow">
|
||||
<i class="bi bi-save-fill me-2"></i>
|
||||
<LocaleText t="Save"></LocaleText>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<style scoped>
|
||||
|
||||
</style>
|
||||
Some files were not shown because too many files have changed in this diff