mirror of
https://github.com/donaldzou/WGDashboard.git
synced 2025-10-03 15:56:17 +00:00
Compare commits
484 Commits
v4.2.3.0.1
...
docker-ent
Author | SHA1 | Date | |
---|---|---|---|
|
798580be5a | ||
|
602238d794 | ||
|
4d4a15740b | ||
|
524d50ee07 | ||
|
fc591b7fe8 | ||
|
c2f06193d0 | ||
|
f2ead12315 | ||
|
ca8700ac2a | ||
|
10a8d22efd | ||
|
fc3ec61373 | ||
|
094d1c0718 | ||
|
0d814ec03c | ||
|
5ccfe07e12 | ||
|
101ac5e985 | ||
|
113a780eec | ||
|
cf77610a56 | ||
|
84675fe521 | ||
|
5db5b35311 | ||
|
ff345c9609 | ||
|
6cccfec923 | ||
|
8231dd1463 | ||
|
d8ff020d8c | ||
|
238fb91360 | ||
|
9ecc16fcc1 | ||
|
7d9f60cf9b | ||
|
8ee16b4173 | ||
|
74861fa96b | ||
|
442a658487 | ||
|
3706b91b26 | ||
|
48cb54156b | ||
|
f3104c29ea | ||
|
689aee34ec | ||
|
3862ea4d28 | ||
|
a7e0eb52c2 | ||
|
537a88f618 | ||
|
93a5624294 | ||
|
37539990dd | ||
|
5cd8a80118 | ||
|
c74ecc3d75 | ||
|
404bccfb64 | ||
|
335c69f517 | ||
|
0d118f4d32 | ||
|
8f58a06731 | ||
|
4bfcc1abde | ||
|
25f39db690 | ||
|
da5ce7da9e | ||
|
e8126f3630 | ||
|
f842a7cc62 | ||
|
7250422aaa | ||
|
9661ff0b78 | ||
|
562201a342 | ||
|
1ee9f7bc68 | ||
|
18daa74ecd | ||
|
eadae9373f | ||
|
fb645dd84c | ||
|
0cffed3037 | ||
|
3bd5e02118 | ||
|
c4fe81fcbf | ||
|
40976870ee | ||
|
d526deb826 | ||
|
4ea3aa0a58 | ||
|
569ee8ac58 | ||
|
c42776a6d7 | ||
|
22af38579b | ||
|
a9ecf6c850 | ||
|
77112675ae | ||
|
0b054ae668 | ||
|
51ba9a86fa | ||
|
83eeaa0d73 | ||
|
a2316c8641 | ||
|
f231e9214c | ||
|
3673813e6a | ||
|
feb3c38113 | ||
|
73a969a6bf | ||
|
7ac6d6c498 | ||
|
b2f306789c | ||
|
1a26f757a8 | ||
|
1d66cda277 | ||
|
b52dad961b | ||
|
5a84136d87 | ||
|
73d701eb08 | ||
|
636ba5ebc8 | ||
|
627065e793 | ||
|
c9f395acbd | ||
|
919a5e9077 | ||
|
b5986fde82 | ||
|
cf95aded77 | ||
|
694a06ddb6 | ||
|
2eb3a17775 | ||
|
628464d2e1 | ||
|
ccaaa4bd21 | ||
|
1b285537ad | ||
|
eab31ba5d0 | ||
|
acc1233b11 | ||
|
91a3b52a4a | ||
|
b2532e305e | ||
|
06f7e7f74b | ||
|
a517867cdf | ||
|
e1d3ad11cc | ||
|
41dbdc8ccd | ||
|
df98ee8738 | ||
|
3be1cb6702 | ||
|
4644840009 | ||
|
4e75a95a73 | ||
|
92a2e26755 | ||
|
8ebd65cc0b | ||
|
15d51735d2 | ||
|
ee8afbd357 | ||
|
b3889bb1e3 | ||
|
8bbb4a48f7 | ||
|
2dce79bb85 | ||
|
1319c28f90 | ||
|
f95c6beeba | ||
|
6cf1eb6140 | ||
|
4b713ab66e | ||
|
43d055a8b4 | ||
|
28ce4bb1d6 | ||
|
604d53d2f0 | ||
|
62d3332522 | ||
|
9caed31185 | ||
|
030c1bdcba | ||
|
44af7eba11 | ||
|
41975973dc | ||
|
572c223854 | ||
|
8191668d60 | ||
|
632c4f3dc7 | ||
|
caacbfae03 | ||
|
3b3c071402 | ||
|
85fa427134 | ||
|
c3c7e50f08 | ||
|
45524eaee5 | ||
|
48ec4c7f6f | ||
|
593417a1fd | ||
|
e2c9941651 | ||
|
f865317600 | ||
|
c83a075886 | ||
|
56d894a8d1 | ||
|
bba7817c9b | ||
|
f360ef5d2f | ||
|
f6e625c5f8 | ||
|
e26639cdc4 | ||
|
38e0a939c2 | ||
|
2ec190ecfd | ||
|
549982db7f | ||
|
eab2c9d358 | ||
|
c5b72cb6d8 | ||
|
282a83829f | ||
|
20edd7cbcd | ||
|
a6311b4f63 | ||
|
b8792200c6 | ||
|
ba85879151 | ||
|
86bb847374 | ||
|
e61afba608 | ||
|
f418520f66 | ||
|
af3aebe34c | ||
|
8a568d787a | ||
|
0d943cb06f | ||
|
f9e9d32c52 | ||
|
6bf38d4346 | ||
|
2cd59fc310 | ||
|
1dfa1d62e1 | ||
|
854b9d252f | ||
|
0c0cf3a378 | ||
|
b4f29e63b4 | ||
|
afbb3df571 | ||
|
01caca707b | ||
|
8ed5826e6c | ||
|
2c1e36e54d | ||
|
1ed8b9f2d5 | ||
|
0ff8e9ed86 | ||
|
029081c610 | ||
|
63b9d15d34 | ||
|
7dcc3e7589 | ||
|
81e394a436 | ||
|
288068bf70 | ||
|
37546515be | ||
|
c32787ccd3 | ||
|
c9c7084db5 | ||
|
7b99441602 | ||
|
24940886f6 | ||
|
f777ac5f75 | ||
|
a511eb21fc | ||
|
f11a3c8c3b | ||
|
430a6053ef | ||
|
1867db5c99 | ||
|
f661cf0f83 | ||
|
b4814e281f | ||
|
9d4e5d8cb5 | ||
|
88f40b244a | ||
|
3e3047f23e | ||
|
ab4876e066 | ||
|
00d11880e8 | ||
|
c757cee988 | ||
|
ebbb681dd8 | ||
|
feff6ce027 | ||
|
39d01015e5 | ||
|
2aa2b15234 | ||
|
21c6d0b8f9 | ||
|
ffc9176225 | ||
|
d54609cc29 | ||
|
0a87453961 | ||
|
12d9058f1e | ||
|
f55c961e91 | ||
|
fbe4e7dc4c | ||
|
53079497a1 | ||
|
2d08171e7c | ||
|
e879ceb1bc | ||
|
37e2985b9a | ||
|
aaca74874d | ||
|
41c5b4bd64 | ||
|
71e43eb503 | ||
|
b52bb83c67 | ||
|
7322b7cbf0 | ||
|
62ffd97808 | ||
|
a4ee56648e | ||
|
f62e481fa0 | ||
|
fa26fce0cc | ||
|
2fbee4aacc | ||
|
530a7ef393 | ||
|
99cb546b59 | ||
|
084bec0f07 | ||
|
c199413d49 | ||
|
24eada4432 | ||
|
4df4aa07f4 | ||
|
91fd0f0e9a | ||
|
12f6244930 | ||
|
327ecbe34c | ||
|
2ca62293a9 | ||
|
f3cae0b005 | ||
|
8f15d5dcdd | ||
|
c8348f7be8 | ||
|
1839645360 | ||
|
2c73dc1df8 | ||
|
fefabe073f | ||
|
77b156c7f5 | ||
|
5ac84e109d | ||
|
83d105facd | ||
|
b7af06d59d | ||
|
4da32690a9 | ||
|
a49c2a1cc0 | ||
|
f633a9654a | ||
|
0d58a172a9 | ||
|
651784b1d1 | ||
|
68abc7ec1b | ||
|
9745e8b034 | ||
|
d946c108a3 | ||
|
2b66f9a5c4 | ||
|
4d321cf3f6 | ||
|
299d84b16a | ||
|
3d75f6bbbd | ||
|
6b194bba15 | ||
|
e63bccf274 | ||
|
4ca79ac1c9 | ||
|
aafef538f1 | ||
|
68d8546383 | ||
|
c43b3926b8 | ||
|
7e9cfc2872 | ||
|
e88936c05a | ||
|
9c1b4222d0 | ||
|
69ec55b638 | ||
|
67a455c403 | ||
|
5bf4df2d27 | ||
|
7797cc06d0 | ||
|
541d89e170 | ||
|
d775fb69e3 | ||
|
2a1a885056 | ||
|
a334ce1527 | ||
|
447cb5ccdc | ||
|
bca20e5b02 | ||
|
90675dcc2e | ||
|
e8deadaaff | ||
|
ecc4cc7670 | ||
|
4a5de5efd4 | ||
|
bdf557fde3 | ||
|
29600cb54c | ||
|
9f43fd7c92 | ||
|
6a6c1aa527 | ||
|
df7f9f2b14 | ||
|
568da8cc64 | ||
|
e16435f4fc | ||
|
76e9f3fd29 | ||
|
d0e46a517b | ||
|
c94345cb2f | ||
|
e2882acec1 | ||
|
3c2362177f | ||
|
5e92931108 | ||
|
d54e388b58 | ||
|
390cfa0cdf | ||
|
c6fc741aa8 | ||
|
a0e15e1671 | ||
|
8367cba259 | ||
|
f7bf709295 | ||
|
ab802ea5cf | ||
|
922d8eab58 | ||
|
409acc9f1a | ||
|
196dc78b4f | ||
|
61404d9c12 | ||
|
ceab5ead8c | ||
|
9b60acf3db | ||
|
90bb321a07 | ||
|
e56fa24a38 | ||
|
574aff605f | ||
|
9c6d0b56c3 | ||
|
e0761396b8 | ||
|
4b44eb5c80 | ||
|
eb66a44edf | ||
|
f8708b84e6 | ||
|
cc29091116 | ||
|
2f860772d2 | ||
|
2f5d1c0966 | ||
|
39c6817e65 | ||
|
2d63f56d64 | ||
|
a4a158a9e9 | ||
|
be78cb5321 | ||
|
1e483dc34d | ||
|
8ddf77973d | ||
|
d9a4858c4f | ||
|
c3e5406218 | ||
|
b92c345b3a | ||
|
ac9fd8f2ca | ||
|
20aae4769d | ||
|
1052c72863 | ||
|
4beb61c3af | ||
|
4b6c5db904 | ||
|
18493bb9b0 | ||
|
13a4bee725 | ||
|
7db0f7ec35 | ||
|
9936038603 | ||
|
ae9fb91c72 | ||
|
dcf7126f51 | ||
|
d3a512bf9e | ||
|
6809d97dd6 | ||
|
b89919546c | ||
|
cfa1c23506 | ||
|
e61b5d2a3f | ||
|
9089fd37e0 | ||
|
4eab083a30 | ||
|
11a07758aa | ||
|
ae712c1c98 | ||
|
9b2415f0f1 | ||
|
f130098937 | ||
|
e280a2e4a9 | ||
|
60bd4bc91b | ||
|
145c3d8f96 | ||
|
663c134e60 | ||
|
1e264ca4a1 | ||
|
207e9f7afd | ||
|
6e4c144af6 | ||
|
325c97cfe6 | ||
|
4d07845c7f | ||
|
93baa505c7 | ||
|
b81d4667b2 | ||
|
3dd065dd7b | ||
|
0a8692dcc0 | ||
|
26cc295167 | ||
|
48d9800b71 | ||
|
9424ad1f13 | ||
|
5c58f548c0 | ||
|
f59111025b | ||
|
e313776982 | ||
|
faa0bc952f | ||
|
87f8c60e2f | ||
|
e551c499db | ||
|
9aaa1edad6 | ||
|
d96b178a9c | ||
|
1c857c0781 | ||
|
ae160aef23 | ||
|
2ccce69180 | ||
|
a9f618891b | ||
|
85d1cc8be4 | ||
|
6315112b3b | ||
|
8cff8d85f6 | ||
|
d7a1007f41 | ||
|
0cd2c6864e | ||
|
0cb46e1444 | ||
|
cb9dfa1321 | ||
|
674fea7063 | ||
|
722cbb6054 | ||
|
042160e6bd | ||
|
06c44fe91f | ||
|
d92c636b69 | ||
|
0599503779 | ||
|
2cf337a606 | ||
|
b0bb320fb6 | ||
|
7a2a2846e1 | ||
|
90c35b67bd | ||
|
65287ba800 | ||
|
14af465aa3 | ||
|
95f0b60cac | ||
|
d69044231b | ||
|
72fde9860b | ||
|
481ada43d6 | ||
|
a9d74e834d | ||
|
43fd2fff2b | ||
|
b37c64f5a5 | ||
|
f1aa064b2d | ||
|
8abadd1070 | ||
|
3f9d9732a0 | ||
|
b9dc3c44a8 | ||
|
68e757aafc | ||
|
af045447e6 | ||
|
db6976a06a | ||
|
aa66a5ffb2 | ||
|
bf74150f62 | ||
|
2987216169 | ||
|
08a41f8f68 | ||
|
714a824823 | ||
|
2242dca27d | ||
|
681558126d | ||
|
927e637d88 | ||
|
3b97cb420d | ||
|
241fbd6be5 | ||
|
a619e7f571 | ||
|
491119d676 | ||
|
29a8c15d62 | ||
|
26741512ea | ||
|
a987d91ae1 | ||
|
380b9a73ab | ||
|
66bd1da571 | ||
|
79ad3c0a84 | ||
|
e69e7ff3c1 | ||
|
1483ef83d9 | ||
|
dbed799e20 | ||
|
4602b68425 | ||
|
2d3eaedaa7 | ||
|
4d60b21a5f | ||
|
50ee8374ee | ||
|
65eb23e8ce | ||
|
5c76b18ddd | ||
|
fc6f5d2535 | ||
|
6a0348e9dc | ||
|
40d3548c82 | ||
|
fc7bbf89c6 | ||
|
6f848e3df8 | ||
|
d80eb03707 | ||
|
85d4b8c487 | ||
|
90e6409b1e | ||
|
96b28a8e9b | ||
|
a818e87e96 | ||
|
dc715758a6 | ||
|
87069329d8 | ||
|
8a380a4545 | ||
|
a5e18cb761 | ||
|
e9da3e7b6a | ||
|
289fa23728 | ||
|
c6a44bfe09 | ||
|
c6af129960 | ||
|
6cb30bcd7f | ||
|
55027fd3cd | ||
|
41bf9b8baa | ||
|
68fae3b23c | ||
|
3525cd1083 | ||
|
e9730f24a0 | ||
|
c35d22a82f | ||
|
b76d92bfeb | ||
|
afa578aa34 | ||
|
519ccda5ed | ||
|
832513a7fc | ||
|
0300c26952 | ||
|
243071d4cc | ||
|
c95937d08b | ||
|
58f944c72e | ||
|
173cc57490 | ||
|
4c8ba6b0a8 | ||
|
be2ea8c6d5 | ||
|
16ec9d2938 | ||
|
93a23671e4 | ||
|
8a77fbfefd | ||
|
9ffb7f54c7 | ||
|
be10a644a0 | ||
|
a5a64eadc7 | ||
|
2cee252b14 | ||
|
050b4a5c9d | ||
|
6d4b5d4484 | ||
|
964a6c2e3e | ||
|
14336529d9 | ||
|
2e57285120 | ||
|
2784059a0f | ||
|
04e78f4de7 | ||
|
c051ab56b4 | ||
|
17004f704c | ||
|
fc34c1fc35 | ||
|
6e6cd9a7e5 | ||
|
16051981d7 |
5
.dockerignore
Normal file
5
.dockerignore
Normal file
@@ -0,0 +1,5 @@
|
||||
.git
|
||||
.github
|
||||
*.md
|
||||
tests/
|
||||
docs/
|
31
.github/dependabot.yml
vendored
Normal file
31
.github/dependabot.yml
vendored
Normal file
@@ -0,0 +1,31 @@
|
||||
# To get started with Dependabot version updates, you'll need to specify which
|
||||
# package ecosystems to update and where the package manifests are located.
|
||||
# Please see the documentation for all configuration options:
|
||||
# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
|
||||
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: "pip"
|
||||
directory: "/src"
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
|
||||
- package-ecosystem: "npm"
|
||||
directory: "/src/static/app"
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/.github"
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
|
||||
- package-ecosystem: "docker"
|
||||
directory: "/docker"
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
|
||||
- package-ecosystem: "docker-compose"
|
||||
directory: "/docker"
|
||||
schedule:
|
||||
interval: "weekly"
|
32
.github/workflows/docker.yml
vendored
32
.github/workflows/docker.yml
vendored
@@ -9,23 +9,21 @@ on:
|
||||
- '*'
|
||||
release:
|
||||
types: [ published ]
|
||||
pull_request:
|
||||
|
||||
env:
|
||||
DOCKERHUB_PREFIX: docker.io
|
||||
GITHUB_CONTAINER_PREFIX: ghcr.io
|
||||
DOCKER_IMAGE: donaldzou/wgdashboard
|
||||
DOCKER_IMAGE: WGDashboard
|
||||
|
||||
jobs:
|
||||
docker_build:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
strategy:
|
||||
fail-fast: false
|
||||
steps:
|
||||
- name: Generate a shorter Git commit sha.
|
||||
id: gen_short_sha
|
||||
run: echo "SHORT_SHA=${GITHUB_SHA::8}" >> $GITHUB_ENV
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
@@ -59,15 +57,12 @@ jobs:
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: |
|
||||
${{ env.DOCKERHUB_PREFIX }}/${{ env.DOCKER_IMAGE }}
|
||||
${{ env.GITHUB_CONTAINER_PREFIX }}/${{ env.DOCKER_IMAGE }}
|
||||
${{ env.DOCKERHUB_PREFIX }}/donaldzou/${{ env.DOCKER_IMAGE }}
|
||||
${{ env.GITHUB_CONTAINER_PREFIX }}/${{ github.repository_owner }}/${{ env.DOCKER_IMAGE }}
|
||||
tags: |
|
||||
type=ref,event=branch
|
||||
type=ref,event=tag
|
||||
type=raw,value=${{ env.SHORT_SHA }}
|
||||
|
||||
- name: Print a message
|
||||
run: echo "${{ steps.meta.outputs.tags }}"
|
||||
type=sha,format=short,prefix=
|
||||
|
||||
- name: Build and export (multi-arch)
|
||||
uses: docker/build-push-action@v6
|
||||
@@ -90,12 +85,19 @@ jobs:
|
||||
registry: ${{ env.DOCKERHUB_PREFIX }}
|
||||
username: ${{ secrets.DOCKER_HUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_HUB_PASSWORD }}
|
||||
|
||||
- name: Log in to GitHub Container Registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ env.GITHUB_CONTAINER_PREFIX }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Docker Scout CVEs
|
||||
uses: docker/scout-action@v1
|
||||
with:
|
||||
command: cves
|
||||
image: ${{ env.GITHUB_CONTAINER_PREFIX }}/${{ env.DOCKER_IMAGE }}:main
|
||||
image: ${{ env.GITHUB_CONTAINER_PREFIX }}/${{ github.repository_owner }}/${{ env.DOCKER_IMAGE }}:main
|
||||
only-severities: critical,high
|
||||
only-fixed: true
|
||||
write-comment: true
|
||||
@@ -107,8 +109,8 @@ jobs:
|
||||
with:
|
||||
command: compare
|
||||
# Set to Github for maximum compat
|
||||
image: ${{ env.GITHUB_CONTAINER_PREFIX }}/${{ env.DOCKER_IMAGE }}:main
|
||||
to: ${{ env.GITHUB_CONTAINER_PREFIX }}/${{ env.DOCKER_IMAGE }}:latest
|
||||
image: ${{ env.GITHUB_CONTAINER_PREFIX }}/${{ github.repository_owner }}/${{ env.DOCKER_IMAGE }}:main
|
||||
to: ${{ env.GITHUB_CONTAINER_PREFIX }}/${{ github.repository_owner }}/${{ env.DOCKER_IMAGE }}:latest
|
||||
only-severities: critical,high
|
||||
ignore-unchanged: true
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
26
.github/workflows/stale.yml
vendored
Normal file
26
.github/workflows/stale.yml
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
# This workflow warns and then closes issues and PRs that have had no activity for a specified amount of time.
|
||||
#
|
||||
# You can adjust the behavior by modifying this file.
|
||||
# For more information, see:
|
||||
# https://github.com/actions/stale
|
||||
name: Mark stale issues and pull requests
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: '00 08 * * *'
|
||||
|
||||
jobs:
|
||||
stale:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
issues: write
|
||||
steps:
|
||||
- uses: actions/stale@v9
|
||||
with:
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
stale-issue-message: 'This issue has not been updated for 20 days'
|
||||
stale-pr-message: 'This pull request has not been updated for 20 days'
|
||||
stale-issue-label: 'stale'
|
||||
exempt-issue-labels: 'enhancement,ongoing'
|
||||
days-before-stale: 20
|
3
.gitignore
vendored
3
.gitignore
vendored
@@ -49,5 +49,4 @@ coverage
|
||||
*.sw?
|
||||
|
||||
*.tsbuildinfo
|
||||
.vite/*
|
||||
|
||||
.vite/*
|
15
README.md
15
README.md
@@ -1,8 +1,8 @@
|
||||
> [!TIP]
|
||||
> 🎉 I'm excited to announce that WGDashboard is officially listed on DigitalOcean's Marketplace! For more information, please visit [Host WGDashboard & WireGuard with DigitalOcean](https://donaldzou.dev/WGDashboard-Documentation/host-wgdashboard-wireguard-with-digitalocean.html) for more information!
|
||||
> 🎉 I'm excited to announce that WGDashboard is officially listed on DigitalOcean's Marketplace! For more information, please visit [Host WGDashboard & WireGuard with DigitalOcean](https://docs.wgdashboard.dev/host-wgdashboard-wireguard-with-digitalocean.html) for more information!
|
||||
|
||||
> [!NOTE]
|
||||
> **Help Wanted 🎉**: Localizing WGDashboard to other languages! If you're willing to help, please visit https://github.com/donaldzou/WGDashboard/issues/397. Many thanks!
|
||||
> **Help Wanted 🎉**: Localizing WGDashboard to other languages! If you're willing to help, please visit https://github.com/WGDashboard/WGDashboard/issues/397. Many thanks!
|
||||
|
||||
|
||||
|
||||
@@ -22,10 +22,12 @@
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://github.com/donaldzou/wireguard-dashboard/releases/latest"><img src="https://img.shields.io/github/v/release/donaldzou/wireguard-dashboard"></a>
|
||||
<a href="https://wakatime.com/badge/github/donaldzou/WGDashboard"><img src="https://wakatime.com/badge/github/donaldzou/WGDashboard.svg" alt="wakatime"></a>
|
||||
<a href="https://hitscounter.dev"><img src="https://hitscounter.dev/api/hit?url=https%3A%2F%2Fgithub.com%2Fdonaldzou%2FWGDashboard&label=Visitor&icon=github&color=%230a58ca"></a>
|
||||
<img src="https://img.shields.io/docker/pulls/donaldzou/wgdashboard?logo=docker&label=Docker%20Image%20Pulls&labelColor=ffffff">
|
||||
<a href="https://github.com/WGDashboard/WGDashboard/releases/latest"><img src="https://img.shields.io/github/v/release/donaldzou/wireguard-dashboard?style=for-the-badge"></a>
|
||||
<a href="https://wakatime.com/badge/github/donaldzou/WGDashboard"><img src="https://wakatime.com/badge/github/donaldzou/WGDashboard.svg?style=for-the-badge" alt="wakatime"></a>
|
||||
<a href="https://hitscounter.dev"><img src="https://hitscounter.dev/api/hit?url=https%3A%2F%2Fgithub.com%2Fdonaldzou%2FWGDashboard&label=Visitor&icon=github&color=%230a58ca&style=for-the-badge"></a>
|
||||
<img src="https://img.shields.io/docker/pulls/donaldzou/wgdashboard?logo=docker&label=Docker%20Image%20Pulls&labelColor=ffffff&style=for-the-badge">
|
||||
<img src="https://github.com/WGDashboard/WGDashboard/actions/workflows/docker.yml/badge.svg?style=for-the-badge">
|
||||
<img src="https://github.com/WGDashboard/WGDashboard/actions/workflows/codeql-analyze.yaml/badge.svg">
|
||||
</p>
|
||||
<p align="center"><b>This project is supported by</b></p>
|
||||
<p align="center">
|
||||
@@ -74,7 +76,6 @@
|
||||
|
||||
|
||||
# Screenshots
|
||||
|
||||
<img src="https://wgdashboard-resources.tor1.cdn.digitaloceanspaces.com/Documentation%20Images/sign-in.png" alt=""/>
|
||||
<img src="https://wgdashboard-resources.tor1.cdn.digitaloceanspaces.com/Documentation%20Images/cross-server.png" alt=""/>
|
||||
<img src="https://wgdashboard-resources.tor1.cdn.digitaloceanspaces.com/Documentation%20Images/index.png" alt=""/>
|
||||
|
76
assets/legacy/Dockerfile-alpine-old
Normal file
76
assets/legacy/Dockerfile-alpine-old
Normal file
@@ -0,0 +1,76 @@
|
||||
FROM golang:1.24 AS awg-go
|
||||
|
||||
RUN git clone https://github.com/WGDashboard/amneziawg-go /awg
|
||||
WORKDIR /awg
|
||||
RUN go mod download && \
|
||||
go mod verify && \
|
||||
go build -ldflags '-linkmode external -extldflags "-fno-PIC -static"' -v -o /usr/bin
|
||||
|
||||
FROM alpine:latest AS awg-tools
|
||||
|
||||
RUN apk update && apk add --no-cache \
|
||||
make git build-base linux-headers \
|
||||
&& git clone https://github.com/WGDashboard/amneziawg-tools \
|
||||
&& cd amneziawg-tools/src \
|
||||
&& make \
|
||||
&& chmod +x wg*
|
||||
|
||||
FROM alpine:latest
|
||||
LABEL maintainer="dselen@nerthus.nl"
|
||||
|
||||
RUN apk update && apk add --no-cache \
|
||||
iproute2 iptables bash curl wget unzip procps sudo \
|
||||
tzdata wireguard-tools python3 py3-psutil py3-bcrypt openresolv
|
||||
|
||||
COPY --from=awg-go /usr/bin/amneziawg-go /usr/bin/amneziawg-go
|
||||
COPY --from=awg-tools /amneziawg-tools/src/wg /usr/bin/awg
|
||||
COPY --from=awg-tools /amneziawg-tools/src/wg-quick/linux.bash /usr/bin/awg-quick
|
||||
|
||||
# Declaring environment variables, change Peernet to an address you like, standard is a 24 bit subnet.
|
||||
ARG wg_net="10.0.0.1" \
|
||||
wg_port="51820"
|
||||
|
||||
# Following ENV variables are changable on container runtime because /entrypoint.sh handles that. See compose.yaml for more info.
|
||||
ENV TZ="Europe/Amsterdam" \
|
||||
global_dns="9.9.9.9" \
|
||||
wgd_port="10086" \
|
||||
public_ip=""
|
||||
|
||||
# Using WGDASH -- like wg_net functionally as a ARG command. But it is needed in entrypoint.sh so it needs to be exported as environment variable.
|
||||
ENV WGDASH=/opt/wgdashboard
|
||||
|
||||
# Doing WireGuard Dashboard installation measures. Modify the git clone command to get the preferred version, with a specific branch for example.
|
||||
RUN mkdir /data \
|
||||
&& mkdir /configs \
|
||||
&& mkdir -p ${WGDASH}/src \
|
||||
&& mkdir -p /etc/amnezia/amneziawg
|
||||
COPY ./src ${WGDASH}/src
|
||||
|
||||
# Generate basic WireGuard interface. Echoing the WireGuard interface config for readability, adjust if you want it for efficiency.
|
||||
# Also setting the pipefail option, verbose: https://github.com/hadolint/hadolint/wiki/DL4006.
|
||||
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
|
||||
RUN out_adapt=$(ip -o -4 route show to default | awk '{print $NF}') \
|
||||
&& echo -e "[Interface]\n\
|
||||
Address = ${wg_net}/24\n\
|
||||
PrivateKey =\n\
|
||||
PostUp = iptables -t nat -I POSTROUTING 1 -s ${wg_net}/24 -o ${out_adapt} -j MASQUERADE\n\
|
||||
PostUp = iptables -I FORWARD -i wg0 -o wg0 -j DROP\n\
|
||||
PreDown = iptables -t nat -D POSTROUTING -s ${wg_net}/24 -o ${out_adapt} -j MASQUERADE\n\
|
||||
PreDown = iptables -D FORWARD -i wg0 -o wg0 -j DROP\n\
|
||||
ListenPort = ${wg_port}\n\
|
||||
SaveConfig = true\n\
|
||||
DNS = ${global_dns}" > /configs/wg0.conf.template \
|
||||
&& chmod 600 /configs/wg0.conf.template
|
||||
|
||||
# Defining a way for Docker to check the health of the container. In this case: checking the gunicorn process.
|
||||
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
|
||||
CMD sh -c 'pgrep gunicorn > /dev/null && pgrep tail > /dev/null' || exit 1
|
||||
|
||||
# Copy the basic entrypoint.sh script.
|
||||
COPY ./docker/entrypoint.sh /entrypoint.sh
|
||||
|
||||
# Exposing the default WireGuard Dashboard port for web access.
|
||||
EXPOSE 10086
|
||||
WORKDIR $WGDASH
|
||||
|
||||
ENTRYPOINT ["/bin/bash", "/entrypoint.sh"]
|
@@ -1,47 +1,120 @@
|
||||
FROM golang:1.24 AS awg
|
||||
#
|
||||
# AWG GOLANG BUILDING STAGE
|
||||
# Base: Alpine
|
||||
#
|
||||
|
||||
RUN git clone https://github.com/amnezia-vpn/amneziawg-go /awg
|
||||
WORKDIR /awg
|
||||
# Pull the current golang-alpine image.
|
||||
FROM golang:1.25-alpine AS awg-go
|
||||
|
||||
# Install build-dependencies.
|
||||
RUN apk add --no-cache \
|
||||
git \
|
||||
gcc \
|
||||
musl-dev
|
||||
|
||||
# Standard working directory for WGDashboard
|
||||
RUN mkdir -p /workspace && \
|
||||
git clone https://github.com/WGDashboard/amneziawg-go /workspace/awg
|
||||
|
||||
# Enable CGO compilation for AmneziaWG
|
||||
ENV CGO_ENABLED=1
|
||||
|
||||
# Change directory
|
||||
WORKDIR /workspace/awg
|
||||
# Compile the binaries
|
||||
RUN go mod download && \
|
||||
go mod verify && \
|
||||
go build -ldflags '-linkmode external -extldflags "-fno-PIC -static"' -v -o /usr/bin
|
||||
#
|
||||
# AWG TOOLS BUILDING STAGE
|
||||
# Base: Debian
|
||||
#
|
||||
FROM alpine:latest AS awg-tools
|
||||
|
||||
FROM alpine:latest
|
||||
# Install needed dependencies.
|
||||
RUN apk add --no-cache \
|
||||
make \
|
||||
git \
|
||||
build-base \
|
||||
linux-headers \
|
||||
ca-certificates
|
||||
|
||||
# Get the workspace ready
|
||||
RUN mkdir -p /workspace && \
|
||||
git clone https://github.com/WGDashboard/amneziawg-tools /workspace/awg-tools
|
||||
|
||||
# Change directory
|
||||
WORKDIR /workspace/awg-tools/src
|
||||
# Compile and change permissions
|
||||
RUN make && chmod +x wg*
|
||||
|
||||
#
|
||||
# PIP DEPENDENCY BUILDING
|
||||
# Base: Alpine
|
||||
#
|
||||
|
||||
# Use the python-alpine image for building pip dependencies
|
||||
FROM python:3.13-alpine AS pip-builder
|
||||
|
||||
# Add the build dependencies and create a Python virtual environment.
|
||||
RUN apk add --no-cache \
|
||||
build-base \
|
||||
pkgconfig \
|
||||
python3-dev \
|
||||
libffi-dev \
|
||||
linux-headers \
|
||||
rust \
|
||||
cargo \
|
||||
&& mkdir -p /opt/wgdashboard/src \
|
||||
&& python3 -m venv /opt/wgdashboard/src/venv
|
||||
|
||||
# Copy the requirements file into the build layer.
|
||||
COPY ./src/requirements.txt /opt/wgdashboard/src
|
||||
# Install the pip packages
|
||||
RUN . /opt/wgdashboard/src/venv/bin/activate && \
|
||||
pip3 install --upgrade pip && \
|
||||
pip3 install -r /opt/wgdashboard/src/requirements.txt
|
||||
|
||||
#
|
||||
# WGDashboard RUNNING STAGE
|
||||
# Base: Alpine
|
||||
#
|
||||
|
||||
# Running with the python-alpine image.
|
||||
FROM python:3.13-alpine AS final
|
||||
LABEL maintainer="dselen@nerthus.nl"
|
||||
|
||||
RUN apk update && apk add \
|
||||
iproute2 iptables bash curl wget unzip procps sudo \
|
||||
tzdata wireguard-tools python3 py3-psutil py3-bcrypt openresolv \
|
||||
&& cd /usr/bin/ \
|
||||
&& wget $(curl -s https://api.github.com/repos/amnezia-vpn/amneziawg-tools/releases/latest | grep 'alpine' | cut -d : -f 2,3 | tr -d '", ' | tail -n 1) \
|
||||
&& unzip -j alpine-3.19-amneziawg-tools.zip \
|
||||
&& chmod +x /usr/bin/awg /usr/bin/awg-quick \
|
||||
&& rm alpine-3.19-amneziawg-tools.zip
|
||||
# Install only the runtime dependencies
|
||||
RUN apk add --no-cache \
|
||||
iproute2 iptables \
|
||||
bash curl \
|
||||
wget unzip \
|
||||
procps sudo \
|
||||
tzdata wireguard-tools \
|
||||
openresolv openrc
|
||||
|
||||
COPY --from=awg /usr/bin/amneziawg-go /usr/bin/amneziawg-go
|
||||
# Copy only the final binaries from the AWG builder stages
|
||||
COPY --from=awg-go /usr/bin/amneziawg-go /usr/bin/amneziawg-go
|
||||
COPY --from=awg-tools /workspace/awg-tools/src/wg /usr/bin/awg
|
||||
COPY --from=awg-tools /workspace/awg-tools/src/wg-quick/linux.bash /usr/bin/awg-quick
|
||||
|
||||
# Declaring environment variables, change Peernet to an address you like, standard is a 24 bit subnet.
|
||||
ARG wg_net="10.0.0.1" \
|
||||
wg_port="51820"
|
||||
|
||||
# Following ENV variables are changable on container runtime because /entrypoint.sh handles that. See compose.yaml for more info.
|
||||
# Environment variables
|
||||
ARG wg_net="10.0.0.1"
|
||||
ARG wg_port="51820"
|
||||
ENV TZ="Europe/Amsterdam" \
|
||||
global_dns="9.9.9.9" \
|
||||
wgd_port="10086" \
|
||||
public_ip=""
|
||||
public_ip="" \
|
||||
WGDASH=/opt/wgdashboard
|
||||
|
||||
# Using WGDASH -- like wg_net functionally as a ARG command. But it is needed in entrypoint.sh so it needs to be exported as environment variable.
|
||||
ENV WGDASH=/opt/wgdashboard
|
||||
# Create directories needed for operation
|
||||
RUN mkdir /data /configs -p ${WGDASH}/src /etc/amnezia/amneziawg
|
||||
|
||||
# Doing WireGuard Dashboard installation measures. Modify the git clone command to get the preferred version, with a specific branch for example.
|
||||
RUN mkdir /data \
|
||||
&& mkdir /configs \
|
||||
&& mkdir -p ${WGDASH}/src \
|
||||
&& mkdir -p /etc/amnezia/amneziawg
|
||||
# Copy the python virtual environment from the pip-builder stage
|
||||
COPY ./src ${WGDASH}/src
|
||||
COPY --from=pip-builder /opt/wgdashboard/src/venv /opt/wgdashboard/src/venv
|
||||
|
||||
# Generate basic WireGuard interface. Echoing the WireGuard interface config for readability, adjust if you want it for efficiency.
|
||||
# Also setting the pipefail option, verbose: https://github.com/hadolint/hadolint/wiki/DL4006.
|
||||
# First WireGuard interface template
|
||||
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
|
||||
RUN out_adapt=$(ip -o -4 route show to default | awk '{print $NF}') \
|
||||
&& echo -e "[Interface]\n\
|
||||
@@ -56,15 +129,15 @@ SaveConfig = true\n\
|
||||
DNS = ${global_dns}" > /configs/wg0.conf.template \
|
||||
&& chmod 600 /configs/wg0.conf.template
|
||||
|
||||
# Defining a way for Docker to check the health of the container. In this case: checking the gunicorn process.
|
||||
# Set a healthcheck to determine the container its health
|
||||
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
|
||||
CMD sh -c 'pgrep gunicorn > /dev/null && pgrep tail > /dev/null' || exit 1
|
||||
|
||||
# Copy the basic entrypoint.sh script.
|
||||
# Copy in the runtime script, essential.
|
||||
COPY ./docker/entrypoint.sh /entrypoint.sh
|
||||
|
||||
# Exposing the default WireGuard Dashboard port for web access.
|
||||
#
|
||||
EXPOSE 10086
|
||||
WORKDIR $WGDASH
|
||||
WORKDIR $WGDASH/src
|
||||
|
||||
ENTRYPOINT ["/bin/bash", "/entrypoint.sh"]
|
||||
|
@@ -3,8 +3,8 @@ Author: @DaanSelen<br>
|
||||
|
||||
This document delves into how the WGDashboard Docker container has been built.<br>
|
||||
Of course there are two stages (simply said), one before run-time and one at/after run-time.<br>
|
||||
The `Dockerfile` describes how the container image is made, and the `entrypoint.sh` is executed after running the container. <br>
|
||||
In this example, WireGuard is integrated into the container itself, so it should be a run-and-go(/out-of-the-box).<br>
|
||||
The `Dockerfile` describes how the container image is made, and the `entrypoint.sh` is executed after the container is started. <br>
|
||||
In this example, [WireGuard](https://www.wireguard.com/) is integrated into the container itself, so it should be a run-and-go(/out-of-the-box) experience.<br>
|
||||
For more details on the source-code specific to this Docker image, refer to the source files, they have lots of comments.
|
||||
|
||||
<br>
|
||||
@@ -18,20 +18,24 @@ For more details on the source-code specific to this Docker image, refer to the
|
||||
/>
|
||||
<br>
|
||||
|
||||
To get the container running you either pull the image from the repository, (docker.io)`donaldzou/wgdashboard:latest`.<br>
|
||||
From there either use the environment variables describe below as parameters or use the Docker Compose file: `compose.yaml`.<br>
|
||||
Be careful, the default generated WireGuard configuration file uses port 51820/udp. So use this port if you want to use it out of the box.<br>
|
||||
Otherwise edit the configuration file in `/etc/wireguard/wg0.conf`.
|
||||
To get the container running you either pull the pre-made image from a remote repository, there are 2 official options.<br>
|
||||
|
||||
- ghcr.io/wgdashboard/wgdashboard:<tag>
|
||||
- docker.io/donaldzou/wgdashboard:<tag>
|
||||
|
||||
> tags should be either: latest, main, <version> or <commit-sha>.
|
||||
|
||||
From there either use the environment variables described below as parameters or use the Docker Compose file: `compose.yaml`.<br>
|
||||
Be careful, the default generated WireGuard configuration file uses port 51820/udp. So make sure to use this port if you want to use it out of the box.<br>
|
||||
Otherwise edit the configuration file in WGDashboard under `Configuration Settings` -> `Edit Raw Configuration File`.
|
||||
|
||||
> Otherwise you need to enter the container and edit: `/etc/wireguard/wg0.conf`.
|
||||
|
||||
# WGDashboard: 🐳 Docker Deployment Guide
|
||||
|
||||
To run the container, you can either pull the image from Docker Hub or build it yourself. The image is available at:
|
||||
To run the container, you can either pull the image from the Github Container Registry (ghcr.io), Docker Hub (docker.io) or build it yourself. The image is available at:
|
||||
|
||||
```
|
||||
docker.io/donaldzou/wgdashboard:latest
|
||||
```
|
||||
|
||||
> `docker.io` is in most cases automatically resolved by the Docker application.
|
||||
> `docker.io` is in most cases automatically resolved by the Docker application. Therefor you can ofter specify: `donaldzou/wgdashboard:latest`
|
||||
|
||||
### 🔧 Quick Docker Run Command
|
||||
|
||||
@@ -44,7 +48,7 @@ docker run -d \
|
||||
-p 10086:10086/tcp \
|
||||
-p 51820:51820/udp \
|
||||
--cap-add NET_ADMIN \
|
||||
donaldzou/wgdashboard:latest
|
||||
ghcr.io/wgdashboard/wgdashboard:latest
|
||||
```
|
||||
|
||||
> ⚠️ The default WireGuard port is `51820/udp`. If you change this, update the `/etc/wireguard/wg0.conf` accordingly.
|
||||
@@ -58,23 +62,24 @@ You can also use Docker Compose for easier configuration:
|
||||
```yaml
|
||||
services:
|
||||
wgdashboard:
|
||||
image: donaldzou/wgdashboard:latest
|
||||
image: ghcr.io/wgdashboard/wgdashboard:latest
|
||||
restart: unless-stopped
|
||||
container_name: wgdashboard
|
||||
environment:
|
||||
# - tz=Europe/Amsterdam
|
||||
# - global_dns=1.1.1.1
|
||||
# - public_ip=YOUR_PUBLIC_IP
|
||||
|
||||
ports:
|
||||
- 10086:10086/tcp
|
||||
- 51820:51820/udp
|
||||
|
||||
volumes:
|
||||
- aconf:/etc/amnezia/amneziawg
|
||||
- conf:/etc/wireguard
|
||||
- data:/data
|
||||
|
||||
cap_add:
|
||||
- NET_ADMIN
|
||||
|
||||
volumes:
|
||||
aconf:
|
||||
conf:
|
||||
data:
|
||||
```
|
||||
@@ -85,18 +90,29 @@ volumes:
|
||||
|
||||
## 🔄 Updating the Container
|
||||
|
||||
Updating WGDashboard is currently in **alpha** stage. While the update process may work, it's still under testing.
|
||||
Updating the WGDashboard container should be done 'The Docker Way' - by pulling the newest/newer image and replacing the old one.
|
||||
|
||||
---
|
||||
|
||||
## ⚙️ Environment Variables
|
||||
|
||||
| Variable | Accepted Values | Default | Example | Description |
|
||||
|---------------|------------------------------------------|-------------------------|------------------------|-----------------------------------------------------------------------------|
|
||||
| `tz` | Timezone | `Europe/Amsterdam` | `America/New_York` | Sets the container's timezone. Useful for accurate logs and scheduling. |
|
||||
| `global_dns` | IPv4 and IPv6 addresses | `9.9.9.9` | `8.8.8.8`, `1.1.1.1` | Default DNS for WireGuard clients. |
|
||||
| `public_ip` | Public IP address | Retrieved automatically | `253.162.134.73` | Used to generate accurate client configs. Needed if container is NAT’d. |
|
||||
| `wgd_port` | Any port that is allowed for the process | `10086` | `443` | This port is used to set the WGDashboard web port. |
|
||||
| Variable | Accepted Values | Default | Example | Description |
|
||||
| ------------------ | ---------------------------------------- | ----------------------- | --------------------- | ----------------------------------------------------------------------- |
|
||||
| `tz` | Timezone | `Europe/Amsterdam` | `America/New_York` | Sets the container's timezone. Useful for accurate logs and scheduling. |
|
||||
| `global_dns` | IPv4 and IPv6 addresses | `9.9.9.9` | `8.8.8.8`, `1.1.1.1` | Default DNS for WireGuard clients. |
|
||||
| `public_ip` | Public IP address | Retrieved automatically | `253.162.134.73` | Used to generate accurate client configs. Needed if container is NAT’d. |
|
||||
| `wgd_port` | Any port that is allowed for the process | `10086` | `443` | This port is used to set the WGDashboard web port. |
|
||||
| `username` | Any non‐empty string | `-` | `admin` | Username for the WGDashboard web interface account. |
|
||||
| `password` | Any non‐empty string | `-` | `s3cr3tP@ss` | Password for the WGDashboard web interface account (stored hashed). |
|
||||
| `enable_totp` | `true`, `false` | `true` | `false` | Enable TOTP‐based two‐factor authentication for the account. |
|
||||
| `wg_autostart` | Wireguard interface name | `false` | `true` | Auto‐start the WireGuard client when the container launches. |
|
||||
| `email_server` | SMTP server address | `-` | `smtp.gmail.com` | SMTP server for sending email notifications. |
|
||||
| `email_port` | SMTP port number | `-` | `587` | Port for connecting to the SMTP server. |
|
||||
| `email_encryption` | `TLS`, `SSL`, etc. | `-` | `TLS` | Encryption method for email communication. |
|
||||
| `email_username` | Any non-empty string | `-` | `user@example.com` | Username for SMTP authentication. |
|
||||
| `email_password` | Any non-empty string | `-` | `app_password` | Password for SMTP authentication. |
|
||||
| `email_from` | Valid email address | `-` | `noreply@example.com` | Email address used as the sender for notifications. |
|
||||
| `email_template` | Path to template file | `-` | `your-template` | Custom template for email notifications. |
|
||||
|
||||
---
|
||||
|
||||
@@ -122,7 +138,7 @@ Examples:
|
||||
To build from source:
|
||||
|
||||
```bash
|
||||
git clone https://github.com/donaldzou/WGDashboard.git
|
||||
git clone https://github.com/WGDashboard/WGDashboard.git
|
||||
cd WGDashboard
|
||||
docker build . -f docker/Dockerfile -t yourname/wgdashboard:latest
|
||||
```
|
||||
@@ -194,4 +210,4 @@ ENTRYPOINT ["/bin/bash", "/entrypoint.sh"]
|
||||
|
||||
## Closing remarks:
|
||||
|
||||
For feedback please submit an issue to the repository. Or message dselen@nerthus.nl.
|
||||
For feedback please submit an issue to the repository. Or message dselen@nerthus.nl.
|
||||
|
@@ -1,22 +1,41 @@
|
||||
services:
|
||||
wireguard-dashboard:
|
||||
image: donaldzou/wgdashboard:latest
|
||||
wgdashboard:
|
||||
# Since the move to the GitHub organisation, we recommend using ghcr.io.
|
||||
# Alternatively we also still push to docker.io under donaldzou/wgdashboard.
|
||||
# Both share the exact same tags, so they should be interchangeable.
|
||||
image: ghcr.io/wgdashboard/wgdashboard:latest
|
||||
|
||||
# Make sure to set the restart policy, because for a VPN it's important to come back up IF it crashes.
|
||||
restart: unless-stopped
|
||||
container_name: wgdashboard
|
||||
|
||||
# Environment variables can be used to configure certain values at startup. Without having to configure it from the dashboard.
|
||||
# By default it's all disabled, but uncomment the following lines to apply these. (uncommenting is removing the # character)
|
||||
# Refer to the documentation on https://wgdashboard.dev/ for more info on what everything means.
|
||||
#environment:
|
||||
#- tz= # <--- Set container timezone, default: Europe/Amsterdam.
|
||||
#- public_ip= # <--- Set public IP to ensure the correct one is chosen, defaulting to the IP given by ifconfig.me.
|
||||
#- wgd_port= # <--- Set the port WGDashboard will use for its web-server.
|
||||
|
||||
# The following section, ports is very important for exposing more than one Wireguard/AmneziaWireguard interfaces.
|
||||
# Once you create a new configuration and assign a port in the dashboard, don't forget to add it to the ports as well.
|
||||
# Quick-tip: most Wireguard VPN tunnels use UDP. WGDashboard uses HTTP, so tcp.
|
||||
ports:
|
||||
- 10086:10086/tcp
|
||||
- 51820:51820/udp
|
||||
|
||||
# Volumes can be configured however you'd like. The default is using docker volumes.
|
||||
# If you want to use local paths, replace the path before the : with your path.
|
||||
volumes:
|
||||
- aconf:/etc/amnezia/amneziawg
|
||||
- conf:/etc/wireguard
|
||||
- data:/data
|
||||
|
||||
# Needed for network administration.
|
||||
cap_add:
|
||||
- NET_ADMIN
|
||||
|
||||
# The following configuration is linked to the above default volumes.
|
||||
volumes:
|
||||
aconf:
|
||||
conf:
|
||||
|
@@ -1,10 +1,55 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Path to the configuration file (exists because of previous function).
|
||||
config_file="/data/wg-dashboard.ini"
|
||||
|
||||
trap 'stop_service' SIGTERM
|
||||
|
||||
# Hash password with bcrypt
|
||||
hash_password() {
|
||||
python3 -c "import bcrypt; print(bcrypt.hashpw('$1'.encode(), bcrypt.gensalt(12)).decode())"
|
||||
}
|
||||
|
||||
# Function to set or update section/key/value in the INI file
|
||||
set_ini() {
|
||||
local section="$1" key="$2" value="$3"
|
||||
local current_value
|
||||
|
||||
# Add section if it doesn't exist
|
||||
grep -q "^\[${section}\]" "$config_file" \
|
||||
|| printf "\n[%s]\n" "${section}" >> "$config_file"
|
||||
|
||||
# Check current value if key exists
|
||||
if grep -q "^[[:space:]]*${key}[[:space:]]*=" "$config_file"; then
|
||||
current_value=$(grep "^[[:space:]]*${key}[[:space:]]*=" "$config_file" | cut -d= -f2- | xargs)
|
||||
|
||||
# Don't display actual value if it's a password field
|
||||
if [[ "$key" == *"password"* ]]; then
|
||||
if [ "$current_value" = "$value" ]; then
|
||||
echo "- $key is already set correctly (value hidden)"
|
||||
return 0
|
||||
fi
|
||||
sed -i "/^\[${section}\]/,/^\[/{s|^[[:space:]]*${key}[[:space:]]*=.*|${key} = ${value}|}" "$config_file"
|
||||
echo "- Updated $key (value hidden)"
|
||||
else
|
||||
if [ "$current_value" = "$value" ]; then
|
||||
echo "- $key is already set correctly ($value)"
|
||||
return 0
|
||||
fi
|
||||
sed -i "/^\[${section}\]/,/^\[/{s|^[[:space:]]*${key}[[:space:]]*=.*|${key} = ${value}|}" "$config_file"
|
||||
echo "- Updated $key to: $value"
|
||||
fi
|
||||
else
|
||||
sed -i "/^\[${section}\]/a ${key} = ${value}" "$config_file"
|
||||
|
||||
# Don't display actual value if it's a password field
|
||||
if [[ "$key" == *"password"* ]]; then
|
||||
echo "- Added new setting $key (value hidden)"
|
||||
else
|
||||
echo "- Added new setting $key: $value"
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
stop_service() {
|
||||
echo "[WGDashboard] Stopping WGDashboard..."
|
||||
/bin/bash ./wgd.sh stop
|
||||
@@ -12,10 +57,9 @@ stop_service() {
|
||||
}
|
||||
|
||||
echo "------------------------- START ----------------------------"
|
||||
echo "Starting the WireGuard Dashboard Docker container."
|
||||
echo "Starting the WGDashboard Docker container."
|
||||
|
||||
ensure_installation() {
|
||||
# When using a custom directory to store the files, this part moves over and makes sure the installation continues.
|
||||
echo "Quick-installing..."
|
||||
|
||||
# Make the wgd.sh script executable.
|
||||
@@ -33,54 +77,39 @@ ensure_installation() {
|
||||
echo "Removing clear command from wgd.sh for better Docker logging."
|
||||
sed -i '/clear/d' ./wgd.sh
|
||||
|
||||
# Create the databases directory if it does not exist yet.
|
||||
# Create required directories and links
|
||||
if [ ! -d "/data/db" ]; then
|
||||
echo "Creating database dir"
|
||||
mkdir /data/db
|
||||
mkdir -p /data/db
|
||||
fi
|
||||
|
||||
# Linking the database on the persistent directory location to where WGDashboard expects.
|
||||
mkdir "${WGDASH}/src/log"
|
||||
|
||||
if [ ! -d "${WGDASH}/src/db" ]; then
|
||||
ln -s /data/db "${WGDASH}/src/db"
|
||||
fi
|
||||
|
||||
# Create the wg-dashboard.ini file if it does not exist yet.
|
||||
if [ ! -f "${config_file}" ]; then
|
||||
echo "Creating wg-dashboard.ini file"
|
||||
touch "${config_file}"
|
||||
fi
|
||||
|
||||
# Link the wg-dashboard.ini file from the persistent directory to where WGDashboard expects it.
|
||||
if [ ! -f "${WGDASH}/src/wg-dashboard.ini" ]; then
|
||||
ln -s "${config_file}" "${WGDASH}/src/wg-dashboard.ini"
|
||||
fi
|
||||
|
||||
# Create the Python virtual environment.
|
||||
python3 -m venv "${WGDASH}"/src/venv
|
||||
. "${WGDASH}/src/venv/bin/activate"
|
||||
|
||||
# Due to this pip dependency being available as a system package we can just move it to the venv.
|
||||
echo "Moving PIP dependency from ephemerality to runtime environment: psutil"
|
||||
mv /usr/lib/python3.12/site-packages/psutil* "${WGDASH}"/src/venv/lib/python3.12/site-packages
|
||||
|
||||
# Due to this pip dependency being available as a system package we can just move it to the venv.
|
||||
echo "Moving PIP dependency from ephemerality to runtime environment: bcrypt"
|
||||
mv /usr/lib/python3.12/site-packages/bcrypt* "${WGDASH}"/src/venv/lib/python3.12/site-packages
|
||||
|
||||
# Use the bash interpreter to install WGDashboard according to the wgd.sh script.
|
||||
/bin/bash ./wgd.sh install
|
||||
|
||||
# /bin/bash ./wgd.sh install
|
||||
echo "Looks like the installation succeeded. Moving on."
|
||||
|
||||
# This first step is to ensure the wg0.conf file exists, and if not, then its copied over from the ephemeral container storage.
|
||||
# This is done so WGDashboard works out of the box; it also sets a randomly generated private key.
|
||||
|
||||
if [ ! -f "/etc/wireguard/wg0.conf" ]; then
|
||||
echo "Standard wg0 Configuration file not found, grabbing template."
|
||||
# Setup WireGuard if needed
|
||||
if [ -z "$(ls -A /etc/wireguard)" ]; then
|
||||
cp -a "/configs/wg0.conf.template" "/etc/wireguard/wg0.conf"
|
||||
|
||||
echo "Setting a secure private key."
|
||||
|
||||
local privateKey
|
||||
privateKey=$(wg genkey)
|
||||
sed -i "s|^PrivateKey *=.*$|PrivateKey = ${privateKey}|g" /etc/wireguard/wg0.conf
|
||||
@@ -94,60 +123,72 @@ ensure_installation() {
|
||||
set_envvars() {
|
||||
printf "\n------------- SETTING ENVIRONMENT VARIABLES ----------------\n"
|
||||
|
||||
# Check if the file is empty
|
||||
# Check if config file is empty
|
||||
if [ ! -s "${config_file}" ]; then
|
||||
echo "Config file is empty. Creating [Peers] section."
|
||||
|
||||
# Create [Peers] section with initial values
|
||||
{
|
||||
echo "[Peers]"
|
||||
echo "peer_global_dns = ${global_dns}"
|
||||
echo "remote_endpoint = ${public_ip}"
|
||||
echo -e "\n[Server]"
|
||||
echo "app_port = ${wgd_port}"
|
||||
} > "${config_file}"
|
||||
|
||||
else
|
||||
echo "Config file is not empty, using pre-existing."
|
||||
echo "Config file is empty. Creating initial structure."
|
||||
fi
|
||||
|
||||
echo "Verifying current variables..."
|
||||
echo "Checking basic configuration:"
|
||||
set_ini Peers peer_global_dns "${global_dns}"
|
||||
|
||||
# Check and update the DNS if it has changed
|
||||
current_dns=$(grep "peer_global_dns = " "${config_file}" | awk '{print $NF}')
|
||||
if [ "${global_dns}" == "$current_dns" ]; then
|
||||
echo "DNS is set correctly, moving on."
|
||||
|
||||
else
|
||||
echo "Changing default DNS..."
|
||||
sed -i "s/^peer_global_dns = .*/peer_global_dns = ${global_dns}/" "${config_file}"
|
||||
if [ -z "${public_ip}" ]; then
|
||||
public_ip=$(curl -s ifconfig.me)
|
||||
echo "Automatically detected public IP: ${public_ip}"
|
||||
fi
|
||||
|
||||
# Checking the current set public IP and changing it if it has changed.
|
||||
current_public_ip=$(grep "remote_endpoint = " "${config_file}" | awk '{print $NF}')
|
||||
if [ "${public_ip}" == "" ]; then
|
||||
default_ip=$(curl -s ifconfig.me)
|
||||
set_ini Peers remote_endpoint "${public_ip}"
|
||||
set_ini Server app_port "${wgd_port}"
|
||||
|
||||
echo "Trying to fetch the Public-IP using ifconfig.me: ${default_ip}"
|
||||
sed -i "s/^remote_endpoint = .*/remote_endpoint = ${default_ip}/" "${config_file}"
|
||||
elif [ "${current_public_ip}" != "${public_ip}" ]; then
|
||||
sed -i "s/^remote_endpoint = .*/remote_endpoint = ${public_ip}/" "${config_file}"
|
||||
else
|
||||
echo "Public-IP is correct, moving on."
|
||||
# Account settings - process all parameters
|
||||
[[ -n "$username" ]] && echo "Configuring user account:"
|
||||
# Basic account variables
|
||||
[[ -n "$username" ]] && set_ini Account username "${username}"
|
||||
|
||||
if [[ -n "$password" ]]; then
|
||||
echo "- Setting password"
|
||||
set_ini Account password "$(hash_password "${password}")"
|
||||
fi
|
||||
|
||||
# Checking the current WGDashboard web port and changing if needed.
|
||||
current_wgd_port=$(grep "app_port = " "${config_file}" | awk '{print $NF}')
|
||||
if [ "${current_wgd_port}" == "${wgd_port}" ]; then
|
||||
echo "Current WGD port is set correctly, moving on."
|
||||
else
|
||||
echo "Changing default WGD port..."
|
||||
sed -i "s/^app_port = .*/app_port = ${wgd_port}/" "${config_file}"
|
||||
# Additional account variables
|
||||
[[ -n "$enable_totp" ]] && set_ini Account enable_totp "${enable_totp}"
|
||||
[[ -n "$totp_verified" ]] && set_ini Account totp_verified "${totp_verified}"
|
||||
[[ -n "$totp_key" ]] && set_ini Account totp_key "${totp_key}"
|
||||
|
||||
# Welcome session
|
||||
[[ -n "$welcome_session" ]] && set_ini Other welcome_session "${welcome_session}"
|
||||
# If username and password are set but welcome_session isn't, disable it
|
||||
if [[ -n "$username" && -n "$password" && -z "$welcome_session" ]]; then
|
||||
set_ini Other welcome_session "false"
|
||||
fi
|
||||
|
||||
# Autostart WireGuard
|
||||
if [[ -n "$wg_autostart" ]]; then
|
||||
echo "Configuring WireGuard autostart:"
|
||||
set_ini WireGuardConfiguration autostart "${wg_autostart}"
|
||||
fi
|
||||
|
||||
# Email (check if any settings need to be configured)
|
||||
email_vars=("email_server" "email_port" "email_encryption" "email_username" "email_password" "email_from" "email_template")
|
||||
for var in "${email_vars[@]}"; do
|
||||
if [ -n "${!var}" ]; then
|
||||
echo "Configuring email settings:"
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
# Email (iterate through all possible fields)
|
||||
email_fields=("server:email_server" "port:email_port" "encryption:email_encryption"
|
||||
"username:email_username" "email_password:email_password"
|
||||
"send_from:email_from" "email_template:email_template")
|
||||
|
||||
for field_pair in "${email_fields[@]}"; do
|
||||
IFS=: read -r field var <<< "$field_pair"
|
||||
[[ -n "${!var}" ]] && set_ini Email "$field" "${!var}"
|
||||
done
|
||||
}
|
||||
|
||||
# === CORE SERVICES ===
|
||||
start_core() {
|
||||
# Start service and monitor logs
|
||||
start_and_monitor() {
|
||||
printf "\n---------------------- STARTING CORE -----------------------\n"
|
||||
|
||||
# Due to some instances complaining about this, making sure it's there every time.
|
||||
@@ -157,23 +198,19 @@ start_core() {
|
||||
|
||||
# Actually starting WGDashboard
|
||||
echo "Activating Python venv and executing the WireGuard Dashboard service."
|
||||
/bin/bash ./wgd.sh start
|
||||
}
|
||||
bash ./wgd.sh start
|
||||
|
||||
ensure_blocking() {
|
||||
# Wait a second before continuing, to give the python program some time to get ready.
|
||||
sleep 1s
|
||||
sleep 1
|
||||
echo -e "\nEnsuring container continuation."
|
||||
|
||||
# Find and tail the latest error and access logs if they exist
|
||||
# Find and monitor log file
|
||||
local logdir="${WGDASH}/src/log"
|
||||
|
||||
latestErrLog=$(find "$logdir" -name "error_*.log" -type f -print | sort -r | head -n 1)
|
||||
|
||||
# Only tail the logs if they are found
|
||||
if [ -n "$latestErrLog" ]; then
|
||||
tail -f "$latestErrLog" &
|
||||
|
||||
# Wait for the tail process to end.
|
||||
wait $!
|
||||
else
|
||||
@@ -182,8 +219,7 @@ ensure_blocking() {
|
||||
fi
|
||||
}
|
||||
|
||||
# Execute functions for the WireGuard Dashboard services, then set the environment variables
|
||||
# Main execution flow
|
||||
ensure_installation
|
||||
set_envvars
|
||||
start_core
|
||||
ensure_blocking
|
||||
start_and_monitor
|
1369
package-lock.json
generated
1369
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -1,7 +0,0 @@
|
||||
{
|
||||
"dependencies": {
|
||||
"marked": "^15.0.7",
|
||||
"openai": "^4.89.0",
|
||||
"pinia-plugin-persistedstate": "^4.2.0"
|
||||
}
|
||||
}
|
232
src/client.py
Normal file
232
src/client.py
Normal file
@@ -0,0 +1,232 @@
|
||||
import datetime
|
||||
|
||||
from tzlocal import get_localzone
|
||||
|
||||
from functools import wraps
|
||||
|
||||
from flask import Blueprint, render_template, abort, request, Flask, current_app, session, redirect, url_for
|
||||
import os
|
||||
|
||||
from modules.WireguardConfiguration import WireguardConfiguration
|
||||
from modules.DashboardConfig import DashboardConfig
|
||||
from modules.Email import EmailSender
|
||||
|
||||
|
||||
def ResponseObject(status=True, message=None, data=None, status_code = 200) -> Flask.response_class:
|
||||
response = Flask.make_response(current_app, {
|
||||
"status": status,
|
||||
"message": message,
|
||||
"data": data
|
||||
})
|
||||
response.status_code = status_code
|
||||
response.content_type = "application/json"
|
||||
return response
|
||||
|
||||
|
||||
|
||||
from modules.DashboardClients import DashboardClients
|
||||
def createClientBlueprint(wireguardConfigurations: dict[WireguardConfiguration], dashboardConfig: DashboardConfig, dashboardClients: DashboardClients):
|
||||
|
||||
client = Blueprint('client', __name__, template_folder=os.path.abspath("./static/dist/WGDashboardClient"))
|
||||
prefix = f'{dashboardConfig.GetConfig("Server", "app_prefix")[1]}/client'
|
||||
|
||||
def login_required(f):
|
||||
@wraps(f)
|
||||
def func(*args, **kwargs):
|
||||
if session.get("Email") is None or session.get("TotpVerified") is None or not session.get("TotpVerified") or session.get("Role") != "client":
|
||||
return ResponseObject(False, "Unauthorized access.", data=None, status_code=401)
|
||||
|
||||
if not dashboardClients.GetClient(session.get("ClientID")):
|
||||
session.clear()
|
||||
return ResponseObject(False, "Unauthorized access.", data=None, status_code=401)
|
||||
|
||||
return f(*args, **kwargs)
|
||||
return func
|
||||
|
||||
@client.before_request
|
||||
def clientBeforeRequest():
|
||||
if not dashboardConfig.GetConfig("Clients", "enable")[1]:
|
||||
abort(404)
|
||||
|
||||
if request.method.lower() == 'options':
|
||||
return ResponseObject(True)
|
||||
|
||||
@client.post(f'{prefix}/api/signup')
|
||||
def ClientAPI_SignUp():
|
||||
data = request.get_json()
|
||||
status, msg = dashboardClients.SignUp(**data)
|
||||
return ResponseObject(status, msg)
|
||||
|
||||
@client.get(f'{prefix}/api/signin/oidc/providers')
|
||||
def ClientAPI_SignIn_OIDC_GetProviders():
|
||||
_, oidc = dashboardConfig.GetConfig("OIDC", "client_enable")
|
||||
if not oidc:
|
||||
return ResponseObject(status=False, message="OIDC is disabled")
|
||||
|
||||
return ResponseObject(data=dashboardClients.OIDC.GetProviders())
|
||||
|
||||
@client.post(f'{prefix}/api/signin/oidc')
|
||||
def ClientAPI_SignIn_OIDC():
|
||||
_, oidc = dashboardConfig.GetConfig("OIDC", "client_enable")
|
||||
if not oidc:
|
||||
return ResponseObject(status=False, message="OIDC is disabled")
|
||||
|
||||
data = request.get_json()
|
||||
status, oidcData = dashboardClients.SignIn_OIDC(**data)
|
||||
if not status:
|
||||
return ResponseObject(status, oidcData)
|
||||
|
||||
session['Email'] = oidcData.get('email')
|
||||
session['Role'] = 'client'
|
||||
session['TotpVerified'] = True
|
||||
|
||||
return ResponseObject()
|
||||
|
||||
@client.post(f'{prefix}/api/signin')
|
||||
def ClientAPI_SignIn():
|
||||
data = request.get_json()
|
||||
status, msg = dashboardClients.SignIn(**data)
|
||||
if status:
|
||||
session['Email'] = data.get('Email')
|
||||
session['Role'] = 'client'
|
||||
session['TotpVerified'] = False
|
||||
return ResponseObject(status, msg)
|
||||
|
||||
@client.post(f'{prefix}/api/resetPassword/generateResetToken')
|
||||
def ClientAPI_ResetPassword_GenerateResetToken():
|
||||
date = datetime.datetime.now(tz=datetime.timezone.utc).strftime('%Y-%m-%d %H:%M:%S UTC')
|
||||
|
||||
emailSender = EmailSender(dashboardConfig)
|
||||
if not emailSender.ready():
|
||||
return ResponseObject(False, "We can't send you an email due to your Administrator has not setup email service. Please contact your administrator.")
|
||||
|
||||
data = request.get_json()
|
||||
email = data.get('Email', None)
|
||||
if not email:
|
||||
return ResponseObject(False, "Please provide a valid Email")
|
||||
|
||||
u = dashboardClients.SignIn_UserExistence(email)
|
||||
if not u:
|
||||
return ResponseObject(False, "Please provide a valid Email")
|
||||
|
||||
token = dashboardClients.GenerateClientPasswordResetToken(u.get('ClientID'))
|
||||
|
||||
status, msg = emailSender.send(
|
||||
email, "[WGDashboard | Client] Reset Password",
|
||||
f"Hi {email}, \n\nIt looks like you're trying to reset your password at {date} \n\nEnter this 6 digits code on the Forgot Password to continue:\n\n{token}\n\nThis code will expire in 30 minutes for your security. If you didn’t request a password reset, you can safely ignore this email—your current password will remain unchanged.\n\nIf you need help, feel free to contact support.\n\nBest regards,\n\nWGDashboard"
|
||||
)
|
||||
|
||||
return ResponseObject(status, msg)
|
||||
|
||||
@client.post(f'{prefix}/api/resetPassword/validateResetToken')
|
||||
def ClientAPI_ResetPassword_ValidateResetToken():
|
||||
data = request.get_json()
|
||||
email = data.get('Email', None)
|
||||
token = data.get('Token', None)
|
||||
if not all([email, token]):
|
||||
return ResponseObject(False, "Please provide a valid Email")
|
||||
|
||||
u = dashboardClients.SignIn_UserExistence(email)
|
||||
if not u:
|
||||
return ResponseObject(False, "Please provide a valid Email")
|
||||
|
||||
return ResponseObject(status=dashboardClients.ValidateClientPasswordResetToken(u.get('ClientID'), token))
|
||||
|
||||
@client.post(f'{prefix}/api/resetPassword')
|
||||
def ClientAPI_ResetPassword():
|
||||
data = request.get_json()
|
||||
email = data.get('Email', None)
|
||||
token = data.get('Token', None)
|
||||
password = data.get('Password', None)
|
||||
confirmPassword = data.get('ConfirmPassword', None)
|
||||
if not all([email, token, password, confirmPassword]):
|
||||
return ResponseObject(False, "Please provide a valid Email")
|
||||
|
||||
u = dashboardClients.SignIn_UserExistence(email)
|
||||
if not u:
|
||||
return ResponseObject(False, "Please provide a valid Email")
|
||||
|
||||
if not dashboardClients.ValidateClientPasswordResetToken(u.get('ClientID'), token):
|
||||
return ResponseObject(False, "Verification code is either invalid or expired")
|
||||
|
||||
status, msg = dashboardClients.ResetClientPassword(u.get('ClientID'), password, confirmPassword)
|
||||
|
||||
dashboardClients.RevokeClientPasswordResetToken(u.get('ClientID'), token)
|
||||
|
||||
return ResponseObject(status, msg)
|
||||
|
||||
|
||||
@client.get(f'{prefix}/api/signout')
|
||||
def ClientAPI_SignOut():
|
||||
if session.get("SignInMethod") == "OIDC":
|
||||
dashboardClients.SignOut_OIDC()
|
||||
session.clear()
|
||||
return ResponseObject(True)
|
||||
|
||||
@client.get(f'{prefix}/api/signin/totp')
|
||||
def ClientAPI_SignIn_TOTP():
|
||||
token = request.args.get('Token', None)
|
||||
if not token:
|
||||
return ResponseObject(False, "Please provide TOTP token")
|
||||
|
||||
status, msg = dashboardClients.SignIn_GetTotp(token)
|
||||
return ResponseObject(status, msg)
|
||||
|
||||
@client.post(f'{prefix}/api/signin/totp')
|
||||
def ClientAPI_SignIn_ValidateTOTP():
|
||||
data = request.get_json()
|
||||
token = data.get('Token', None)
|
||||
userProvidedTotp = data.get('UserProvidedTOTP', None)
|
||||
if not all([token, userProvidedTotp]):
|
||||
return ResponseObject(False, "Please fill in all fields")
|
||||
status, msg = dashboardClients.SignIn_GetTotp(token, userProvidedTotp)
|
||||
if status:
|
||||
if session.get('Email') is None:
|
||||
return ResponseObject(False, "Sign in status is invalid", status_code=401)
|
||||
session['TotpVerified'] = True
|
||||
profile = dashboardClients.GetClientProfile(session.get("ClientID"))
|
||||
|
||||
return ResponseObject(True, data={
|
||||
"Email": session.get('Email'),
|
||||
"Profile": profile
|
||||
})
|
||||
return ResponseObject(status, msg)
|
||||
|
||||
@client.get(prefix)
|
||||
def ClientIndex():
|
||||
return render_template('client.html')
|
||||
|
||||
@client.get(f'{prefix}/api/serverInformation')
|
||||
def ClientAPI_ServerInformation():
|
||||
return ResponseObject(data={
|
||||
"ServerTimezone": str(get_localzone())
|
||||
})
|
||||
|
||||
@client.get(f'{prefix}/api/validateAuthentication')
|
||||
@login_required
|
||||
def ClientAPI_ValidateAuthentication():
|
||||
return ResponseObject(True)
|
||||
|
||||
@client.get(f'{prefix}/api/configurations')
|
||||
@login_required
|
||||
def ClientAPI_Configurations():
|
||||
return ResponseObject(True, data=dashboardClients.GetClientAssignedPeers(session['ClientID']))
|
||||
|
||||
@client.get(f'{prefix}/api/settings/getClientProfile')
|
||||
@login_required
|
||||
def ClientAPI_Settings_GetClientProfile():
|
||||
return ResponseObject(data={
|
||||
"Email": session.get("Email"),
|
||||
"SignInMethod": session.get("SignInMethod"),
|
||||
"Profile": dashboardClients.GetClientProfile(session.get("ClientID"))
|
||||
})
|
||||
|
||||
@client.post(f'{prefix}/api/settings/updatePassword')
|
||||
@login_required
|
||||
def ClientAPI_Settings_UpdatePassword():
|
||||
data = request.get_json()
|
||||
status, message = dashboardClients.UpdateClientPassword(session['ClientID'], **data)
|
||||
|
||||
return ResponseObject(status, message)
|
||||
|
||||
return client
|
2722
src/dashboard.py
2722
src/dashboard.py
File diff suppressed because it is too large
Load Diff
@@ -1,26 +1,26 @@
|
||||
import os.path
|
||||
import dashboard, configparser
|
||||
import dashboard
|
||||
from datetime import datetime
|
||||
global sqldb, cursor, DashboardConfig, WireguardConfigurations, AllPeerJobs, JobLogger
|
||||
global sqldb, cursor, DashboardConfig, WireguardConfigurations, AllPeerJobs, JobLogger, Dash
|
||||
app_host, app_port = dashboard.gunicornConfig()
|
||||
date = datetime.today().strftime('%Y_%m_%d_%H_%M_%S')
|
||||
|
||||
def post_worker_init(worker):
|
||||
dashboard.startThreads()
|
||||
dashboard.DashboardPlugins.startThreads()
|
||||
|
||||
worker_class = 'gthread'
|
||||
workers = 1
|
||||
threads = 1
|
||||
threads = 2
|
||||
bind = f"{app_host}:{app_port}"
|
||||
daemon = True
|
||||
pidfile = './gunicorn.pid'
|
||||
wsgi_app = "dashboard:app"
|
||||
accesslog = f"./log/access_{date}.log"
|
||||
log_level = "debug"
|
||||
loglevel = "info"
|
||||
capture_output = True
|
||||
errorlog = f"./log/error_{date}.log"
|
||||
pythonpath = "., ./modules"
|
||||
|
||||
print(f"[Gunicorn] WGDashboard w/ Gunicorn will be running on {bind}", flush=True)
|
||||
print(f"[Gunicorn] Access log file is at {accesslog}", flush=True)
|
||||
print(f"[Gunicorn] Error log file is at {errorlog}", flush=True)
|
||||
print(f"[Gunicorn] Error log file is at {errorlog}", flush=True)
|
92
src/modules/AmneziaWGPeer.py
Normal file
92
src/modules/AmneziaWGPeer.py
Normal file
@@ -0,0 +1,92 @@
|
||||
import os
|
||||
import random
|
||||
import re
|
||||
import subprocess
|
||||
import uuid
|
||||
|
||||
from .Peer import Peer
|
||||
from .Utilities import ValidateIPAddressesWithRange, ValidateDNSAddress, GenerateWireguardPublicKey
|
||||
|
||||
|
||||
class AmneziaWGPeer(Peer):
|
||||
def __init__(self, tableData, configuration):
|
||||
self.advanced_security = tableData["advanced_security"]
|
||||
super().__init__(tableData, configuration)
|
||||
|
||||
|
||||
def updatePeer(self, name: str, private_key: str,
|
||||
preshared_key: str,
|
||||
dns_addresses: str, allowed_ip: str, endpoint_allowed_ip: str, mtu: int,
|
||||
keepalive: int, advanced_security: str) -> tuple[bool, str] or tuple[bool, None]:
|
||||
if not self.configuration.getStatus():
|
||||
self.configuration.toggleConfiguration()
|
||||
|
||||
existingAllowedIps = [item for row in list(
|
||||
map(lambda x: [q.strip() for q in x.split(',')],
|
||||
map(lambda y: y.allowed_ip,
|
||||
list(filter(lambda k: k.id != self.id, self.configuration.getPeersList()))))) for item in row]
|
||||
|
||||
if allowed_ip in existingAllowedIps:
|
||||
return False, "Allowed IP already taken by another peer"
|
||||
if not ValidateIPAddressesWithRange(endpoint_allowed_ip):
|
||||
return False, f"Endpoint Allowed IPs format is incorrect"
|
||||
if len(dns_addresses) > 0 and not ValidateDNSAddress(dns_addresses):
|
||||
return False, f"DNS format is incorrect"
|
||||
|
||||
if type(mtu) is str:
|
||||
mtu = 0
|
||||
|
||||
if type(keepalive) is str:
|
||||
keepalive = 0
|
||||
|
||||
if mtu < 0 or mtu > 1460:
|
||||
return False, "MTU format is not correct"
|
||||
if keepalive < 0:
|
||||
return False, "Persistent Keepalive format is not correct"
|
||||
if advanced_security != "on" and advanced_security != "off":
|
||||
return False, "Advanced Security can only be on or off"
|
||||
if len(private_key) > 0:
|
||||
pubKey = GenerateWireguardPublicKey(private_key)
|
||||
if not pubKey[0] or pubKey[1] != self.id:
|
||||
return False, "Private key does not match with the public key"
|
||||
try:
|
||||
rd = random.Random()
|
||||
uid = str(uuid.UUID(int=rd.getrandbits(128), version=4))
|
||||
pskExist = len(preshared_key) > 0
|
||||
|
||||
if pskExist:
|
||||
with open(uid, "w+") as f:
|
||||
f.write(preshared_key)
|
||||
newAllowedIPs = allowed_ip.replace(" ", "")
|
||||
updateAllowedIp = subprocess.check_output(
|
||||
f"{self.configuration.Protocol} set {self.configuration.Name} peer {self.id} allowed-ips {newAllowedIPs} {f'preshared-key {uid}' if pskExist else 'preshared-key /dev/null'}",
|
||||
shell=True, stderr=subprocess.STDOUT)
|
||||
|
||||
if pskExist: os.remove(uid)
|
||||
|
||||
if len(updateAllowedIp.decode().strip("\n")) != 0:
|
||||
return False, "Update peer failed when updating Allowed IPs"
|
||||
saveConfig = subprocess.check_output(f"{self.configuration.Protocol}-quick save {self.configuration.Name}",
|
||||
shell=True, stderr=subprocess.STDOUT)
|
||||
if f"wg showconf {self.configuration.Name}" not in saveConfig.decode().strip('\n'):
|
||||
return False, "Update peer failed when saving the configuration"
|
||||
|
||||
with self.configuration.engine.begin() as conn:
|
||||
conn.execute(
|
||||
self.configuration.peersTable.update().values({
|
||||
"name": name,
|
||||
"private_key": private_key,
|
||||
"DNS": dns_addresses,
|
||||
"endpoint_allowed_ip": endpoint_allowed_ip,
|
||||
"mtu": mtu,
|
||||
"keepalive": keepalive,
|
||||
"preshared_key": preshared_key,
|
||||
"advanced_security": advanced_security
|
||||
}).where(
|
||||
self.configuration.peersTable.c.id == self.id
|
||||
)
|
||||
)
|
||||
self.configuration.getPeers()
|
||||
return True, None
|
||||
except subprocess.CalledProcessError as exc:
|
||||
return False, exc.output.decode("UTF-8").strip()
|
324
src/modules/AmneziaWireguardConfiguration.py
Normal file
324
src/modules/AmneziaWireguardConfiguration.py
Normal file
@@ -0,0 +1,324 @@
|
||||
"""
|
||||
AmneziaWG Configuration
|
||||
"""
|
||||
import random, sqlalchemy, os, subprocess, re, uuid
|
||||
from flask import current_app
|
||||
from .PeerJobs import PeerJobs
|
||||
from .AmneziaWGPeer import AmneziaWGPeer
|
||||
from .PeerShareLinks import PeerShareLinks
|
||||
from .Utilities import RegexMatch
|
||||
from .WireguardConfiguration import WireguardConfiguration
|
||||
from .DashboardWebHooks import DashboardWebHooks
|
||||
|
||||
|
||||
class AmneziaWireguardConfiguration(WireguardConfiguration):
    """AmneziaWG flavour of a WireGuard configuration.

    Extends WireguardConfiguration with the AmneziaWG obfuscation
    parameters (Jc/Jmin/Jmax, S1/S2, H1-H4) and with a peer schema that
    carries the extra `advanced_security` column used by AmneziaWG peers.
    """

    def __init__(self, DashboardConfig,
                 AllPeerJobs: PeerJobs,
                 AllPeerShareLinks: PeerShareLinks,
                 DashboardWebHooks: DashboardWebHooks,
                 name: str = None, data: dict = None, backup: dict = None, startup: bool = False):
        # AmneziaWG obfuscation defaults; the real values are populated when
        # the base-class initializer parses the configuration file.
        self.Jc = 0
        self.Jmin = 0
        self.Jmax = 0
        self.S1 = 0
        self.S2 = 0
        self.H1 = 1
        self.H2 = 2
        self.H3 = 3
        self.H4 = 4

        # wg=False marks this configuration as AmneziaWG rather than plain WireGuard.
        super().__init__(DashboardConfig, AllPeerJobs, AllPeerShareLinks, DashboardWebHooks, name, data, backup, startup, wg=False)

    def toJson(self):
        """Serialize the configuration (including AmneziaWG parameters and
        aggregated per-peer data usage) for API responses."""
        self.Status = self.getStatus()
        return {
            "Status": self.Status,
            "Name": self.Name,
            "PrivateKey": self.PrivateKey,
            "PublicKey": self.PublicKey,
            "Address": self.Address,
            "ListenPort": self.ListenPort,
            "PreUp": self.PreUp,
            "PreDown": self.PreDown,
            "PostUp": self.PostUp,
            "PostDown": self.PostDown,
            "SaveConfig": self.SaveConfig,
            "Info": self.configurationInfo.model_dump(),
            "DataUsage": {
                "Total": sum(list(map(lambda x: x.cumu_data + x.total_data, self.Peers))),
                "Sent": sum(list(map(lambda x: x.cumu_sent + x.total_sent, self.Peers))),
                "Receive": sum(list(map(lambda x: x.cumu_receive + x.total_receive, self.Peers)))
            },
            "ConnectedPeers": len(list(filter(lambda x: x.status == "running", self.Peers))),
            "TotalPeers": len(self.Peers),
            "Protocol": self.Protocol,
            "Table": self.Table,
            "Jc": self.Jc,
            "Jmin": self.Jmin,
            "Jmax": self.Jmax,
            "S1": self.S1,
            "S2": self.S2,
            "H1": self.H1,
            "H2": self.H2,
            "H3": self.H3,
            "H4": self.H4
        }

    def _peerColumns(self, primaryKey: bool = True) -> list:
        """Build a fresh list of peer-table columns.

        SQLAlchemy Column objects cannot be shared between tables, so every
        call returns new instances. `primaryKey` controls whether `id` is a
        primary key (the `_deleted` table keeps duplicates, so it is not).
        """
        return [
            sqlalchemy.Column('id', sqlalchemy.String(255), nullable=False, primary_key=primaryKey),
            sqlalchemy.Column('private_key', sqlalchemy.String(255)),
            sqlalchemy.Column('DNS', sqlalchemy.Text),
            sqlalchemy.Column('advanced_security', sqlalchemy.String(255)),
            sqlalchemy.Column('endpoint_allowed_ip', sqlalchemy.Text),
            sqlalchemy.Column('name', sqlalchemy.Text),
            sqlalchemy.Column('total_receive', sqlalchemy.Float),
            sqlalchemy.Column('total_sent', sqlalchemy.Float),
            sqlalchemy.Column('total_data', sqlalchemy.Float),
            sqlalchemy.Column('endpoint', sqlalchemy.String(255)),
            sqlalchemy.Column('status', sqlalchemy.String(255)),
            sqlalchemy.Column('latest_handshake', sqlalchemy.String(255)),
            sqlalchemy.Column('allowed_ip', sqlalchemy.String(255)),
            sqlalchemy.Column('cumu_receive', sqlalchemy.Float),
            sqlalchemy.Column('cumu_sent', sqlalchemy.Float),
            sqlalchemy.Column('cumu_data', sqlalchemy.Float),
            sqlalchemy.Column('mtu', sqlalchemy.Integer),
            sqlalchemy.Column('keepalive', sqlalchemy.Integer),
            sqlalchemy.Column('remote_endpoint', sqlalchemy.String(255)),
            sqlalchemy.Column('preshared_key', sqlalchemy.String(255)),
        ]

    def createDatabase(self, dbName = None):
        """Declare (and create, if missing) every table backing this
        configuration: live peers, restricted peers, transfer history,
        deleted peers, configuration info and endpoint history."""
        if dbName is None:
            dbName = self.Name

        # SQLite uses DATETIME; other backends use TIMESTAMP.
        timeType = (sqlalchemy.DATETIME
                    if self.DashboardConfig.GetConfig("Database", "type")[1] == 'sqlite'
                    else sqlalchemy.TIMESTAMP)

        self.peersTable = sqlalchemy.Table(
            dbName, self.metadata,
            *self._peerColumns(primaryKey=True),
            extend_existing=True
        )
        self.peersRestrictedTable = sqlalchemy.Table(
            f'{dbName}_restrict_access', self.metadata,
            *self._peerColumns(primaryKey=True),
            extend_existing=True
        )
        self.peersTransferTable = sqlalchemy.Table(
            f'{dbName}_transfer', self.metadata,
            sqlalchemy.Column('id', sqlalchemy.String(255), nullable=False),
            sqlalchemy.Column('total_receive', sqlalchemy.Float),
            sqlalchemy.Column('total_sent', sqlalchemy.Float),
            sqlalchemy.Column('total_data', sqlalchemy.Float),
            sqlalchemy.Column('cumu_receive', sqlalchemy.Float),
            sqlalchemy.Column('cumu_sent', sqlalchemy.Float),
            sqlalchemy.Column('cumu_data', sqlalchemy.Float),
            sqlalchemy.Column('time', timeType, server_default=sqlalchemy.func.now()),
            extend_existing=True
        )
        # Historical copies of deleted peers; `id` may repeat, hence no primary key.
        self.peersDeletedTable = sqlalchemy.Table(
            f'{dbName}_deleted', self.metadata,
            *self._peerColumns(primaryKey=False),
            extend_existing=True
        )
        self.infoTable = sqlalchemy.Table(
            'ConfigurationsInfo', self.metadata,
            sqlalchemy.Column('ID', sqlalchemy.String(255), primary_key=True),
            sqlalchemy.Column('Info', sqlalchemy.Text),
            extend_existing=True
        )

        self.peersHistoryEndpointTable = sqlalchemy.Table(
            f'{dbName}_history_endpoint', self.metadata,
            sqlalchemy.Column('id', sqlalchemy.String(255), nullable=False),
            sqlalchemy.Column('endpoint', sqlalchemy.String(255), nullable=False),
            sqlalchemy.Column('time', timeType),
            extend_existing=True
        )

        self.metadata.create_all(self.engine)

    def getPeers(self):
        """Reload self.Peers: re-parse the configuration file if it changed
        on disk, otherwise read the cached rows from the database."""
        self.Peers.clear()
        current_app.logger.info(f"Refreshing {self.Name} peer list")

        if self.configurationFileChanged():
            with open(self.configPath, 'r') as configFile:
                p = []
                pCounter = -1
                content = configFile.read().split('\n')
                try:
                    if "[Peer]" not in content:
                        current_app.logger.info(f"{self.Name} config has no [Peer] section")
                        return

                    peerStarts = content.index("[Peer]")
                    content = content[peerStarts:]
                    for i in content:
                        # Skip comment lines; the "#Name# = ..." marker is
                        # handled separately below.
                        if not RegexMatch("#(.*)", i) and not RegexMatch(";(.*)", i):
                            if i == "[Peer]":
                                pCounter += 1
                                p.append({})
                                p[pCounter]["name"] = ""
                            else:
                                if len(i) > 0:
                                    # maxsplit must be a keyword argument on Python 3.13+
                                    split = re.split(r'\s*=\s*', i, maxsplit=1)
                                    if len(split) == 2:
                                        p[pCounter][split[0]] = split[1]

                        if RegexMatch("#Name# = (.*)", i):
                            split = re.split(r'\s*=\s*', i, maxsplit=1)
                            if len(split) == 2:
                                p[pCounter]["name"] = split[1]
                    with self.engine.begin() as conn:
                        for i in p:
                            if "PublicKey" in i.keys():
                                tempPeer = conn.execute(self.peersTable.select().where(
                                    self.peersTable.columns.id == i['PublicKey']
                                )).mappings().fetchone()
                                if tempPeer is None:
                                    # First time this peer is seen: seed a row
                                    # with the dashboard's global defaults.
                                    tempPeer = {
                                        "id": i['PublicKey'],
                                        "advanced_security": i.get('AdvancedSecurity', 'off'),
                                        "private_key": "",
                                        "DNS": self.DashboardConfig.GetConfig("Peers", "peer_global_DNS")[1],
                                        "endpoint_allowed_ip": self.DashboardConfig.GetConfig("Peers", "peer_endpoint_allowed_ip")[
                                            1],
                                        "name": i.get("name"),
                                        "total_receive": 0,
                                        "total_sent": 0,
                                        "total_data": 0,
                                        "endpoint": "N/A",
                                        "status": "stopped",
                                        "latest_handshake": "N/A",
                                        "allowed_ip": i.get("AllowedIPs", "N/A"),
                                        "cumu_receive": 0,
                                        "cumu_sent": 0,
                                        "cumu_data": 0,
                                        "mtu": self.DashboardConfig.GetConfig("Peers", "peer_mtu")[1],
                                        "keepalive": self.DashboardConfig.GetConfig("Peers", "peer_keep_alive")[1],
                                        "remote_endpoint": self.DashboardConfig.GetConfig("Peers", "remote_endpoint")[1],
                                        "preshared_key": i["PresharedKey"] if "PresharedKey" in i.keys() else ""
                                    }
                                    conn.execute(
                                        self.peersTable.insert().values(tempPeer)
                                    )
                                else:
                                    # Known peer: only its allowed IPs can change in the file.
                                    conn.execute(
                                        self.peersTable.update().values({
                                            "allowed_ip": i.get("AllowedIPs", "N/A")
                                        }).where(
                                            self.peersTable.columns.id == i['PublicKey']
                                        )
                                    )
                                self.Peers.append(AmneziaWGPeer(tempPeer, self))
                except Exception as e:
                    # The original passed `e` as a stray positional logging arg,
                    # which drops the exception text; embed it in the message.
                    current_app.logger.error(f"{self.Name} getPeers() Error: {e}")
        else:
            with self.engine.connect() as conn:
                existingPeers = conn.execute(self.peersTable.select()).mappings().fetchall()
                for i in existingPeers:
                    self.Peers.append(AmneziaWGPeer(i, self))

    def addPeers(self, peers: list) -> tuple[bool, list, str]:
        """Insert peers into the database and register them with the live
        AmneziaWG interface, then persist the interface configuration.

        Returns (success, created peer objects, error message).
        """
        result = {
            "message": None,
            "peers": []
        }
        try:
            with self.engine.begin() as conn:
                for i in peers:
                    newPeer = {
                        "id": i['id'],
                        "private_key": i['private_key'],
                        "DNS": i['DNS'],
                        "endpoint_allowed_ip": i['endpoint_allowed_ip'],
                        "name": i['name'],
                        "total_receive": 0,
                        "total_sent": 0,
                        "total_data": 0,
                        "endpoint": "N/A",
                        "status": "stopped",
                        "latest_handshake": "N/A",
                        "allowed_ip": i.get("allowed_ip", "N/A"),
                        "cumu_receive": 0,
                        "cumu_sent": 0,
                        "cumu_data": 0,
                        "mtu": i['mtu'],
                        "keepalive": i['keepalive'],
                        "remote_endpoint": self.DashboardConfig.GetConfig("Peers", "remote_endpoint")[1],
                        "preshared_key": i["preshared_key"],
                        "advanced_security": i['advanced_security']
                    }
                    conn.execute(
                        self.peersTable.insert().values(newPeer)
                    )
            for p in peers:
                presharedKeyExist = len(p['preshared_key']) > 0
                rd = random.Random()
                uid = str(uuid.UUID(int=rd.getrandbits(128), version=4))
                if presharedKeyExist:
                    # `set` only takes the preshared key from a file, so write
                    # it to a throwaway file named after a random UUID.
                    with open(uid, "w+") as f:
                        f.write(p['preshared_key'])
                try:
                    subprocess.check_output(
                        f"{self.Protocol} set {self.Name} peer {p['id']} allowed-ips {p['allowed_ip'].replace(' ', '')}{f' preshared-key {uid}' if presharedKeyExist else ''}",
                        shell=True, stderr=subprocess.STDOUT)
                finally:
                    # Always remove the temporary preshared-key file, even when
                    # the shell command fails (the original leaked it on error).
                    if presharedKeyExist:
                        os.remove(uid)
            subprocess.check_output(
                f"{self.Protocol}-quick save {self.Name}", shell=True, stderr=subprocess.STDOUT)
            self.getPeers()
            for p in peers:
                p = self.searchPeer(p['id'])
                if p[0]:
                    result['peers'].append(p[1])
            self.DashboardWebHooks.RunWebHook("peer_created", {
                "configuration": self.Name,
                "peers": list(map(lambda k : k['id'], peers))
            })
        except Exception as e:
            current_app.logger.error(f"Add peers error: {e}")
            return False, [], str(e)
        return True, result['peers'], ""

    def getRestrictedPeers(self):
        """Load the peers currently parked in the *_restrict_access table."""
        self.RestrictedPeers = []
        with self.engine.connect() as conn:
            restricted = conn.execute(self.peersRestrictedTable.select()).mappings().fetchall()
            for i in restricted:
                self.RestrictedPeers.append(AmneziaWGPeer(i, self))
|
25
src/modules/ConnectionString.py
Normal file
25
src/modules/ConnectionString.py
Normal file
@@ -0,0 +1,25 @@
|
||||
import configparser
|
||||
import os
|
||||
from sqlalchemy_utils import database_exists, create_database
|
||||
from flask import current_app
|
||||
|
||||
def ConnectionString(database) -> str:
    """Build the SQLAlchemy connection string for `database` from the
    [Database] section of wg-dashboard.ini.

    Ensures the ./db directory exists for SQLite, and creates the target
    database if it does not exist yet. Terminates the process when the
    database cannot be created or reached.
    """
    parser = configparser.ConfigParser(strict=False)
    # Read-only access is sufficient; the original opened the file with "r+"
    # and never closed the handle.
    with open('wg-dashboard.ini', "r") as iniFile:
        parser.read_file(iniFile)
    sqlitePath = os.path.join("db")
    if not os.path.isdir(sqlitePath):
        os.mkdir(sqlitePath)
    dbType = parser.get("Database", "type")
    if dbType == "postgresql":
        cn = f'postgresql+psycopg://{parser.get("Database", "username")}:{parser.get("Database", "password")}@{parser.get("Database", "host")}/{database}'
    elif dbType == "mysql":
        cn = f'mysql+pymysql://{parser.get("Database", "username")}:{parser.get("Database", "password")}@{parser.get("Database", "host")}/{database}'
    else:
        # Anything else falls back to a local SQLite file under ./db.
        cn = f'sqlite:///{os.path.join(sqlitePath, f"{database}.db")}'
    try:
        if not database_exists(cn):
            create_database(cn)
    except Exception as e:
        # Include the exception in the message; the original passed it as a
        # stray positional logging argument, which drops it.
        current_app.logger.error(f"Database error: {e}. Terminating...")
        exit(1)

    return cn
|
11
src/modules/DashboardAPIKey.py
Normal file
11
src/modules/DashboardAPIKey.py
Normal file
@@ -0,0 +1,11 @@
|
||||
"""
|
||||
Dashboard API Key
|
||||
"""
|
||||
class DashboardAPIKey:
    """
    Dashboard API Key

    A small record holding one dashboard API key together with its
    creation and expiry timestamps.
    """
    def __init__(self, Key: str, CreatedAt: str, ExpiredAt: str):
        # Store all three fields with a single unpacking assignment.
        self.Key, self.CreatedAt, self.ExpiredAt = Key, CreatedAt, ExpiredAt

    def toJson(self):
        """Serialize this key record as a plain dict for JSON responses."""
        return vars(self)
|
498
src/modules/DashboardClients.py
Normal file
498
src/modules/DashboardClients.py
Normal file
@@ -0,0 +1,498 @@
|
||||
import datetime
|
||||
import hashlib
|
||||
import random
|
||||
import uuid
|
||||
|
||||
import bcrypt
|
||||
import pyotp
|
||||
import sqlalchemy as db
|
||||
import requests
|
||||
|
||||
from .ConnectionString import ConnectionString
|
||||
from .DashboardClientsPeerAssignment import DashboardClientsPeerAssignment
|
||||
from .DashboardClientsTOTP import DashboardClientsTOTP
|
||||
from .DashboardOIDC import DashboardOIDC
|
||||
from .Utilities import ValidatePasswordStrength
|
||||
from .DashboardLogger import DashboardLogger
|
||||
from flask import session
|
||||
|
||||
|
||||
class DashboardClients:
    """Manages WGDashboard *client* accounts — local email/password and
    OIDC — plus their profile info, TOTP enrolment, password resets and
    peer assignments."""

    def __init__(self, wireguardConfigurations):
        self.logger = DashboardLogger()
        # Resolve the connection string once. The original called
        # ConnectionString() for every timestamp column, and each call may
        # probe/create the database.
        connectionString = ConnectionString("wgdashboard")
        self.engine = db.create_engine(connectionString)
        self.metadata = db.MetaData()
        self.OIDC = DashboardOIDC("Client")

        # SQLite uses DATETIME; other backends use TIMESTAMP.
        timeColumnType = db.DATETIME if 'sqlite:///' in connectionString else db.TIMESTAMP

        self.dashboardClientsTable = db.Table(
            'DashboardClients', self.metadata,
            db.Column('ClientID', db.String(255), nullable=False, primary_key=True),
            db.Column('Email', db.String(255), nullable=False, index=True),
            db.Column('Password', db.String(500)),
            db.Column('TotpKey', db.String(500)),
            db.Column('TotpKeyVerified', db.Integer),
            db.Column('CreatedDate', timeColumnType, server_default=db.func.now()),
            db.Column('DeletedDate', timeColumnType),
            extend_existing=True,
        )

        self.dashboardOIDCClientsTable = db.Table(
            'DashboardOIDCClients', self.metadata,
            db.Column('ClientID', db.String(255), nullable=False, primary_key=True),
            db.Column('Email', db.String(255), nullable=False, index=True),
            db.Column('ProviderIssuer', db.String(500), nullable=False, index=True),
            db.Column('ProviderSubject', db.String(500), nullable=False, index=True),
            db.Column('CreatedDate', timeColumnType, server_default=db.func.now()),
            db.Column('DeletedDate', timeColumnType),
            extend_existing=True,
        )

        self.dashboardClientsInfoTable = db.Table(
            'DashboardClientsInfo', self.metadata,
            db.Column('ClientID', db.String(255), nullable=False, primary_key=True),
            db.Column('Name', db.String(500)),
            extend_existing=True,
        )

        self.dashboardClientsPasswordResetLinkTable = db.Table(
            'DashboardClientsPasswordResetLinks', self.metadata,
            db.Column('ResetToken', db.String(255), nullable=False, primary_key=True),
            db.Column('ClientID', db.String(255), nullable=False),
            db.Column('CreatedDate', timeColumnType, server_default=db.func.now()),
            db.Column('ExpiryDate', timeColumnType),
            extend_existing=True
        )

        self.metadata.create_all(self.engine)
        self.Clients = {}
        self.ClientsRaw = []
        self.__getClients()
        self.DashboardClientsTOTP = DashboardClientsTOTP()
        self.DashboardClientsPeerAssignment = DashboardClientsPeerAssignment(wireguardConfigurations)

    def __getClients(self):
        """Refresh self.ClientsRaw (flat rows) and self.Clients (rows grouped
        by provider, with 'Local' for password accounts)."""
        with self.engine.connect() as conn:
            localClients = db.select(
                self.dashboardClientsTable.c.ClientID,
                self.dashboardClientsTable.c.Email,
                db.literal_column("'Local'").label("ClientGroup")
            ).where(
                self.dashboardClientsTable.c.DeletedDate.is_(None)
            )

            oidcClients = db.select(
                self.dashboardOIDCClientsTable.c.ClientID,
                self.dashboardOIDCClientsTable.c.Email,
                self.dashboardOIDCClientsTable.c.ProviderIssuer.label("ClientGroup"),
            ).where(
                self.dashboardOIDCClientsTable.c.DeletedDate.is_(None)
            )

            union = db.union(localClients, oidcClients).alias("U")

            self.ClientsRaw = conn.execute(
                db.select(
                    union,
                    self.dashboardClientsInfoTable.c.Name
                ).outerjoin(self.dashboardClientsInfoTable,
                            union.c.ClientID == self.dashboardClientsInfoTable.c.ClientID)
            ).mappings().fetchall()

            # Group rows by provider; OIDC issuers are translated to their
            # human-readable provider names.
            groups = set(map(lambda c: c.get('ClientGroup'), self.ClientsRaw))
            gr = {}
            for g in groups:
                gr[(g if g == 'Local' else self.OIDC.GetProviderNameByIssuer(g))] = [
                    dict(x) for x in list(
                        filter(lambda c: c.get('ClientGroup') == g, self.ClientsRaw)
                    )
                ]
            self.Clients = gr

    def GetAllClients(self):
        """Return all clients grouped by provider (refreshes the cache)."""
        self.__getClients()
        return self.Clients

    def GetAllClientsRaw(self):
        """Return all clients as flat rows (refreshes the cache)."""
        self.__getClients()
        return self.ClientsRaw

    def GetClient(self, ClientID) -> dict[str, str] | None:
        """Find a client in the cached rows; returns None if unknown."""
        c = filter(lambda x: x['ClientID'] == ClientID, self.ClientsRaw)
        client = next((dict(client) for client in c), None)
        if client is not None:
            client['ClientGroup'] = self.OIDC.GetProviderNameByIssuer(client['ClientGroup'])
        return client

    def GetClientProfile(self, ClientID):
        """Return the client's profile-info row (minus ClientID), or None if
        missing. The original crashed with TypeError (dict(None)) here."""
        with self.engine.connect() as conn:
            row = conn.execute(
                db.select(
                    *[c for c in self.dashboardClientsInfoTable.c if c.name != 'ClientID']
                ).where(
                    self.dashboardClientsInfoTable.c.ClientID == ClientID
                )
            ).mappings().fetchone()
            return dict(row) if row is not None else None

    def SignIn_ValidatePassword(self, Email, Password) -> bool:
        """Check a plaintext password against the stored bcrypt hash."""
        if not all([Email, Password]):
            return False
        existingClient = self.SignIn_UserExistence(Email)
        if existingClient:
            return bcrypt.checkpw(Password.encode("utf-8"), existingClient.get("Password").encode("utf-8"))
        return False

    def SignIn_UserExistence(self, Email):
        """Return the local-account row for Email, or None."""
        with self.engine.connect() as conn:
            existingClient = conn.execute(
                self.dashboardClientsTable.select().where(
                    self.dashboardClientsTable.c.Email == Email
                )
            ).mappings().fetchone()
            return existingClient

    def SignIn_OIDC_UserExistence(self, data: dict[str, str]):
        """Return the OIDC-account row matching the token's iss/sub pair, or None."""
        with self.engine.connect() as conn:
            existingClient = conn.execute(
                self.dashboardOIDCClientsTable.select().where(
                    db.and_(
                        self.dashboardOIDCClientsTable.c.ProviderIssuer == data.get('iss'),
                        self.dashboardOIDCClientsTable.c.ProviderSubject == data.get('sub'),
                    )
                )
            ).mappings().fetchone()
            return existingClient

    def SignUp_OIDC(self, data: dict[str, str]) -> tuple[bool, str] | tuple[bool, None]:
        """Create accounts rows for a first-time OIDC user.

        Returns (True, new ClientID) on success, (False, reason) otherwise.
        """
        if not self.SignIn_OIDC_UserExistence(data):
            with self.engine.begin() as conn:
                newClientUUID = str(uuid.uuid4())
                conn.execute(
                    self.dashboardOIDCClientsTable.insert().values({
                        "ClientID": newClientUUID,
                        "Email": data.get('email', ''),
                        "ProviderIssuer": data.get('iss', ''),
                        "ProviderSubject": data.get('sub', '')
                    })
                )
                conn.execute(
                    self.dashboardClientsInfoTable.insert().values({
                        "ClientID": newClientUUID,
                        "Name": data.get("name")
                    })
                )
            self.logger.log(Message=f"User {data.get('email', '')} from {data.get('iss', '')} signed up")
            self.__getClients()
            return True, newClientUUID
        return False, "User already signed up"

    def SignOut_OIDC(self):
        """Best-effort sign-out against the provider's end_session_endpoint.
        Always returns True; the provider's response is not inspected."""
        status, oidc_config = self.OIDC.GetProviderConfiguration(session.get('SignInPayload').get("Provider"))
        # NOTE(review): `id_token_hint` is fed the `sid` claim rather than the
        # raw ID token — confirm this matches the provider's expectations.
        requests.get(
            oidc_config.get("end_session_endpoint"),
            params={
                'id_token_hint': session.get('SignInPayload').get("Payload").get('sid')
            }
        )
        return True

    def SignIn_OIDC(self, **kwargs):
        """Verify an OIDC token, auto-provision the account on first sign-in,
        and record the sign-in state in the Flask session."""
        status, data = self.OIDC.VerifyToken(**kwargs)
        if not status:
            return False, "Sign in failed. Reason: " + data
        existingClient = self.SignIn_OIDC_UserExistence(data)
        if not existingClient:
            status, newClientUUID = self.SignUp_OIDC(data)
            if not status:
                # The original stored the error string as the session ClientID.
                return False, newClientUUID
            session['ClientID'] = newClientUUID
        else:
            session['ClientID'] = existingClient.get("ClientID")
        session['SignInMethod'] = 'OIDC'
        session['SignInPayload'] = {
            "Provider": kwargs.get('provider'),
            "Payload": data
        }
        return True, data

    def SignIn(self, Email, Password) -> tuple[bool, str]:
        """Password sign-in step one: on success, record the session and
        return a short-lived token for the TOTP step."""
        if not all([Email, Password]):
            return False, "Please fill in all fields"
        existingClient = self.SignIn_UserExistence(Email)
        if existingClient:
            checkPwd = self.SignIn_ValidatePassword(Email, Password)
            if checkPwd:
                session['SignInMethod'] = 'local'
                session['Email'] = Email
                session['ClientID'] = existingClient.get("ClientID")
                return True, self.DashboardClientsTOTP.GenerateToken(existingClient.get("ClientID"))
        return False, "Email or Password is incorrect"

    def SignIn_GetTotp(self, Token: str, UserProvidedTotp: str = None) -> tuple[bool, str] or tuple[bool, None, str]:
        """TOTP step keyed by the token returned from SignIn().

        Without `UserProvidedTotp`: returns the provisioning URI when the
        client has not verified a TOTP key yet. With it: verifies the code,
        revokes the token, and marks the key verified on first success.
        """
        status, data = self.DashboardClientsTOTP.GetTotp(Token)

        if not status:
            return False, "TOTP Token is invalid"
        if UserProvidedTotp is None:
            if data.get('TotpKeyVerified') is None:
                return True, pyotp.totp.TOTP(data.get('TotpKey')).provisioning_uri(name=data.get('Email'),
                                                                                   issuer_name="WGDashboard Client")
        else:
            totpMatched = pyotp.totp.TOTP(data.get('TotpKey')).verify(UserProvidedTotp)
            if not totpMatched:
                return False, "TOTP does not match"
            else:
                self.DashboardClientsTOTP.RevokeToken(Token)
            if data.get('TotpKeyVerified') is None:
                with self.engine.begin() as conn:
                    conn.execute(
                        self.dashboardClientsTable.update().values({
                            'TotpKeyVerified': 1
                        }).where(
                            self.dashboardClientsTable.c.ClientID == data.get('ClientID')
                        )
                    )

        return True, None

    def SignUp(self, Email, Password, ConfirmPassword) -> tuple[bool, str] or tuple[bool, None]:
        """Create a local account with a bcrypt-hashed password and a fresh
        (unverified) TOTP key. Returns (True, None) or (False, reason)."""
        try:
            if not all([Email, Password, ConfirmPassword]):
                return False, "Please fill in all fields"
            if Password != ConfirmPassword:
                return False, "Passwords does not match"

            existingClient = self.SignIn_UserExistence(Email)
            if existingClient:
                return False, "Email already signed up"

            pwStrength, msg = ValidatePasswordStrength(Password)
            if not pwStrength:
                return pwStrength, msg

            with self.engine.begin() as conn:
                newClientUUID = str(uuid.uuid4())
                totpKey = pyotp.random_base32()
                encodePassword = Password.encode('utf-8')
                conn.execute(
                    self.dashboardClientsTable.insert().values({
                        "ClientID": newClientUUID,
                        "Email": Email,
                        "Password": bcrypt.hashpw(encodePassword, bcrypt.gensalt()).decode("utf-8"),
                        "TotpKey": totpKey
                    })
                )
                conn.execute(
                    self.dashboardClientsInfoTable.insert().values({
                        "ClientID": newClientUUID
                    })
                )
            self.logger.log(Message=f"User {Email} signed up")
            self.__getClients()
        except Exception as e:
            self.logger.log(Status="false", Message=f"Signed up failed, reason: {str(e)}")
            return False, "Sign up failed."

        return True, None

    def GetClientAssignedPeers(self, ClientID):
        """Return the peers assigned to this client."""
        return self.DashboardClientsPeerAssignment.GetAssignedPeers(ClientID)

    def ResetClientPassword(self, ClientID, NewPassword, ConfirmNewPassword) -> tuple[bool, str] | tuple[bool, None]:
        """Admin-driven reset: replace the password AND rotate/unverify the
        TOTP key so the client must re-enrol."""
        c = self.GetClient(ClientID)
        if c is None:
            return False, "Client does not exist"

        if NewPassword != ConfirmNewPassword:
            return False, "New passwords does not match"

        pwStrength, msg = ValidatePasswordStrength(NewPassword)
        if not pwStrength:
            return pwStrength, msg
        try:
            with self.engine.begin() as conn:
                conn.execute(
                    self.dashboardClientsTable.update().values({
                        "TotpKeyVerified": None,
                        "TotpKey": pyotp.random_base32(),
                        "Password": bcrypt.hashpw(NewPassword.encode('utf-8'), bcrypt.gensalt()).decode("utf-8"),
                    }).where(
                        self.dashboardClientsTable.c.ClientID == ClientID
                    )
                )
            self.logger.log(Message=f"User {ClientID} reset password and TOTP")
        except Exception as e:
            self.logger.log(Status="false", Message=f"User {ClientID} reset password failed, reason: {str(e)}")
            return False, "Reset password failed."

        return True, None

    def UpdateClientPassword(self, ClientID, CurrentPassword, NewPassword, ConfirmNewPassword) -> tuple[bool, str] | tuple[bool, None]:
        """Client-driven change: requires the current password; TOTP is untouched."""
        c = self.GetClient(ClientID)
        if c is None:
            return False, "Client does not exist"

        if not all([CurrentPassword, NewPassword, ConfirmNewPassword]):
            return False, "Please fill in all fields"

        if not self.SignIn_ValidatePassword(c.get('Email'), CurrentPassword):
            return False, "Current password does not match"

        if NewPassword != ConfirmNewPassword:
            return False, "New passwords does not match"

        pwStrength, msg = ValidatePasswordStrength(NewPassword)
        if not pwStrength:
            return pwStrength, msg
        try:
            with self.engine.begin() as conn:
                conn.execute(
                    self.dashboardClientsTable.update().values({
                        "Password": bcrypt.hashpw(NewPassword.encode('utf-8'), bcrypt.gensalt()).decode("utf-8"),
                    }).where(
                        self.dashboardClientsTable.c.ClientID == ClientID
                    )
                )
            self.logger.log(Message=f"User {ClientID} updated password")
        except Exception as e:
            self.logger.log(Status="false", Message=f"User {ClientID} update password failed, reason: {str(e)}")
            return False, "Update password failed."
        return True, None

    def UpdateClientProfile(self, ClientID, Name):
        """Update the client's display name. Returns True on success."""
        try:
            with self.engine.begin() as conn:
                conn.execute(
                    self.dashboardClientsInfoTable.update().values({
                        "Name": Name
                    }).where(
                        self.dashboardClientsInfoTable.c.ClientID == ClientID
                    )
                )
            self.logger.log(Message=f"User {ClientID} updated name to {Name}")
        except Exception as e:
            self.logger.log(Status="false", Message=f"User {ClientID} updated name to {Name} failed")
            return False
        return True

    def DeleteClient(self, ClientID):
        """Hard-delete a client (local or OIDC), its profile info, and any
        peer assignments. Returns True on success."""
        try:
            client = self.GetClient(ClientID)
            # The original dereferenced `client` unconditionally and relied on
            # the broad except to report unknown clients as failures.
            if client is None:
                return False
            with self.engine.begin() as conn:
                if client.get("ClientGroup") == "Local":
                    conn.execute(
                        self.dashboardClientsTable.delete().where(
                            self.dashboardClientsTable.c.ClientID == ClientID
                        )
                    )
                else:
                    conn.execute(
                        self.dashboardOIDCClientsTable.delete().where(
                            self.dashboardOIDCClientsTable.c.ClientID == ClientID
                        )
                    )
                conn.execute(
                    self.dashboardClientsInfoTable.delete().where(
                        self.dashboardClientsInfoTable.c.ClientID == ClientID
                    )
                )
            self.DashboardClientsPeerAssignment.UnassignPeers(ClientID)
            self.__getClients()
        except Exception as e:
            self.logger.log(Status="false", Message=f"Failed to delete {ClientID}")
            return False
        return True

    '''
    For WGDashboard Admin to Manage Clients
    '''

    def GenerateClientPasswordResetToken(self, ClientID) -> bool | str:
        """Create a 6-digit password-reset token valid for 30 minutes,
        expiring any still-active tokens for the client first.

        Returns the token, or False if the client does not exist.
        """
        c = self.GetClient(ClientID)
        if c is None:
            return False

        # `random.randint` is not suitable for security tokens; use the
        # cryptographically secure `secrets` module instead.
        import secrets
        newToken = str(secrets.randbelow(1000000)).zfill(6)
        with self.engine.begin() as conn:
            conn.execute(
                self.dashboardClientsPasswordResetLinkTable.update().values({
                    "ExpiryDate": datetime.datetime.now()
                }).where(
                    db.and_(
                        self.dashboardClientsPasswordResetLinkTable.c.ClientID == ClientID,
                        self.dashboardClientsPasswordResetLinkTable.c.ExpiryDate > db.func.now()
                    )
                )
            )
            conn.execute(
                self.dashboardClientsPasswordResetLinkTable.insert().values({
                    "ResetToken": newToken,
                    "ClientID": ClientID,
                    "CreatedDate": datetime.datetime.now(),
                    "ExpiryDate": datetime.datetime.now() + datetime.timedelta(minutes=30)
                })
            )

        return newToken

    def ValidateClientPasswordResetToken(self, ClientID, Token):
        """True when an unexpired token matching (ClientID, Token) exists."""
        c = self.GetClient(ClientID)
        if c is None:
            return False
        with self.engine.connect() as conn:
            t = conn.execute(
                self.dashboardClientsPasswordResetLinkTable.select().where(
                    db.and_(self.dashboardClientsPasswordResetLinkTable.c.ClientID == ClientID,
                            self.dashboardClientsPasswordResetLinkTable.c.ResetToken == Token,
                            self.dashboardClientsPasswordResetLinkTable.c.ExpiryDate > datetime.datetime.now())
                )
            ).mappings().fetchone()
        return t is not None

    def RevokeClientPasswordResetToken(self, ClientID, Token):
        """Expire a reset token immediately. Always returns True."""
        with self.engine.begin() as conn:
            conn.execute(
                self.dashboardClientsPasswordResetLinkTable.update().values({
                    "ExpiryDate": datetime.datetime.now()
                }).where(
                    db.and_(self.dashboardClientsPasswordResetLinkTable.c.ClientID == ClientID,
                            self.dashboardClientsPasswordResetLinkTable.c.ResetToken == Token)
                )
            )
        return True

    def GetAssignedPeerClients(self, ConfigurationName, PeerID):
        """Return the assignments for a peer, each decorated with its client
        record when it can be resolved."""
        c = self.DashboardClientsPeerAssignment.GetAssignedClients(ConfigurationName, PeerID)
        for a in c:
            # Look up once and reuse (the original called GetClient twice).
            client = self.GetClient(a.ClientID)
            if client is not None:
                a.Client = client
        return c

    def GetClientAssignedPeersGrouped(self, ClientID):
        """Return the client's assigned peers grouped by configuration name,
        or None when the client does not exist."""
        client = self.GetClient(ClientID)
        if client is not None:
            p = self.DashboardClientsPeerAssignment.GetAssignedPeers(ClientID)
            configs = set(map(lambda x : x['configuration_name'], p))
            d = {}
            for i in configs:
                d[i] = list(filter(lambda x : x['configuration_name'] == i, p))
            return d
        return None

    def AssignClient(self, ConfigurationName, PeerID, ClientID) -> tuple[bool, dict[str, str]] | tuple[bool, None]:
        """Assign a peer to a client via the peer-assignment service."""
        return self.DashboardClientsPeerAssignment.AssignClient(ClientID, ConfigurationName, PeerID)
|
||||
|
||||
def UnassignClient(self, AssignmentID):
|
||||
return self.DashboardClientsPeerAssignment.UnassignClients(AssignmentID)
|
||||
|
159
src/modules/DashboardClientsPeerAssignment.py
Normal file
159
src/modules/DashboardClientsPeerAssignment.py
Normal file
@@ -0,0 +1,159 @@
|
||||
import datetime
|
||||
import uuid
|
||||
|
||||
from .ConnectionString import ConnectionString
|
||||
from .DashboardLogger import DashboardLogger
|
||||
import sqlalchemy as db
|
||||
from .WireguardConfiguration import WireguardConfiguration
|
||||
|
||||
class Assignment:
    """
    One client-to-peer assignment, mirroring a row of the
    DashboardClientsPeerAssignment table. An assignment is active while
    UnassignedDate is None.
    """
    def __init__(self, **kwargs):
        self.AssignmentID: str = kwargs.get('AssignmentID')
        self.ClientID: str = kwargs.get('ClientID')
        self.ConfigurationName: str = kwargs.get('ConfigurationName')
        self.PeerID: str = kwargs.get('PeerID')
        self.AssignedDate: datetime.datetime = kwargs.get('AssignedDate')
        self.UnassignedDate: datetime.datetime = kwargs.get('UnassignedDate')
        # Minimal client stub; callers may overwrite with a full record.
        self.Client: dict = {
            "ClientID": self.ClientID
        }

    def toJson(self):
        """Serialise to a JSON-friendly dict; datetimes become strings."""
        return {
            "AssignmentID": self.AssignmentID,
            "Client": self.Client,
            "ConfigurationName": self.ConfigurationName,
            "PeerID": self.PeerID,
            # Guard BOTH dates against None. Rows read from the DB get
            # AssignedDate via server_default, but objects constructed
            # elsewhere may lack it; the original only guarded
            # UnassignedDate and raised AttributeError otherwise.
            "AssignedDate": self.AssignedDate.strftime("%Y-%m-%d %H:%M:%S") if self.AssignedDate is not None else None,
            "UnassignedDate": self.UnassignedDate.strftime("%Y-%m-%d %H:%M:%S") if self.UnassignedDate is not None else None
        }
|
||||
|
||||
class DashboardClientsPeerAssignment:
    """
    Manages which dashboard clients are assigned to which WireGuard peers.

    Assignments are stored in the DashboardClientsPeerAssignment table;
    an assignment is "active" while its UnassignedDate is NULL. A cached
    list of active assignments is kept in self.assignments and refreshed
    after every mutation.
    """
    def __init__(self, wireguardConfigurations: dict[str, WireguardConfiguration]):
        self.logger = DashboardLogger()
        self.engine = db.create_engine(ConnectionString("wgdashboard"))
        self.metadata = db.MetaData()
        self.wireguardConfigurations = wireguardConfigurations
        self.dashboardClientsPeerAssignmentTable = db.Table(
            'DashboardClientsPeerAssignment', self.metadata,
            db.Column('AssignmentID', db.String(255), nullable=False, primary_key=True),
            db.Column('ClientID', db.String(255), nullable=False, index=True),
            db.Column('ConfigurationName', db.String(255)),
            db.Column('PeerID', db.String(500)),
            # SQLite has no TIMESTAMP type; pick the column type by backend.
            db.Column('AssignedDate',
                      (db.DATETIME if 'sqlite:///' in ConnectionString("wgdashboard") else db.TIMESTAMP),
                      server_default=db.func.now()),
            db.Column('UnassignedDate',
                      (db.DATETIME if 'sqlite:///' in ConnectionString("wgdashboard") else db.TIMESTAMP)),
            extend_existing=True
        )
        self.metadata.create_all(self.engine)
        # Cache of currently-active assignments (UnassignedDate IS NULL).
        self.assignments: list[Assignment] = []
        self.__getAssignments()

    def __getAssignments(self):
        """Reload the cache of active assignments from the database."""
        with self.engine.connect() as conn:
            assignments = []
            get = conn.execute(
                self.dashboardClientsPeerAssignmentTable.select().where(
                    self.dashboardClientsPeerAssignmentTable.c.UnassignedDate.is_(None)
                )
            ).mappings().fetchall()
            for a in get:
                assignments.append(Assignment(**a))
            self.assignments = assignments


    def AssignClient(self, ClientID, ConfigurationName, PeerID):
        """
        Create an assignment between a client and a peer.

        No-ops (returns False, None) when an identical active assignment
        already exists, the configuration is unknown, or the peer is not
        found exactly once in that configuration.

        :return: (True, inserted row dict) on success, else (False, None)
        """
        existing = list(
            filter(lambda e:
                   e.ClientID == ClientID and
                   e.ConfigurationName == ConfigurationName and
                   e.PeerID == PeerID, self.assignments)
        )
        if len(existing) == 0:
            if ConfigurationName in self.wireguardConfigurations.keys():
                config = self.wireguardConfigurations.get(ConfigurationName)
                peer = list(filter(lambda x : x.id == PeerID, config.Peers))
                if len(peer) == 1:
                    with self.engine.begin() as conn:
                        data = {
                            "AssignmentID": str(uuid.uuid4()),
                            "ClientID": ClientID,
                            "ConfigurationName": ConfigurationName,
                            "PeerID": PeerID
                        }
                        conn.execute(
                            self.dashboardClientsPeerAssignmentTable.insert().values(data)
                        )
                    # Refresh the cache so the new assignment is visible.
                    self.__getAssignments()
                    return True, data
        return False, None

    def UnassignClients(self, AssignmentID):
        """
        Deactivate one assignment by stamping its UnassignedDate.

        :return: True if the assignment was active, False otherwise
        """
        existing = list(
            filter(lambda e:
                   e.AssignmentID == AssignmentID, self.assignments)
        )
        if not existing:
            return False
        with self.engine.begin() as conn:
            conn.execute(
                self.dashboardClientsPeerAssignmentTable.update().values({
                    "UnassignedDate": datetime.datetime.now()
                }).where(
                    self.dashboardClientsPeerAssignmentTable.c.AssignmentID == AssignmentID
                )
            )
        self.__getAssignments()
        return True

    def UnassignPeers(self, ClientID):
        """Deactivate every active assignment of a client. Always returns True."""
        with self.engine.begin() as conn:
            conn.execute(
                self.dashboardClientsPeerAssignmentTable.update().values({
                    "UnassignedDate": datetime.datetime.now()
                }).where(
                    db.and_(
                        self.dashboardClientsPeerAssignmentTable.c.ClientID == ClientID,
                        self.dashboardClientsPeerAssignmentTable.c.UnassignedDate.is_(db.null())
                    )
                )
            )
        self.__getAssignments()
        return True

    def GetAssignedClients(self, ConfigurationName, PeerID) -> list[Assignment]:
        """Return active assignments pointing at the given peer."""
        self.__getAssignments()
        return list(filter(
            lambda c : c.ConfigurationName == ConfigurationName and
                       c.PeerID == PeerID, self.assignments))

    def GetAssignedPeers(self, ClientID):
        """
        Return the peers assigned to a client as plain dicts, including
        live traffic counters and a downloadable peer configuration.
        """
        self.__getAssignments()

        peers = []
        assigned = filter(lambda e:
                          e.ClientID == ClientID, self.assignments)

        for a in assigned:
            peer = filter(lambda e : e.id == a.PeerID,
                          self.wireguardConfigurations[a.ConfigurationName].Peers)
            for p in peer:
                peers.append({
                    'assignment_id': a.AssignmentID,
                    'protocol': self.wireguardConfigurations[a.ConfigurationName].Protocol,
                    'id': p.id,
                    'private_key': p.private_key,
                    'name': p.name,
                    # Totals combine the live counters with the cumulative
                    # counters persisted across restarts.
                    'received_data': p.total_receive + p.cumu_receive,
                    'sent_data': p.total_sent + p.cumu_sent,
                    'data': p.total_data + p.cumu_data,
                    'status': p.status,
                    'latest_handshake': p.latest_handshake,
                    'allowed_ip': p.allowed_ip,
                    'jobs': p.jobs,
                    'configuration_name': a.ConfigurationName,
                    'peer_configuration_data': p.downloadPeer()
                })
        return peers
|
82
src/modules/DashboardClientsTOTP.py
Normal file
82
src/modules/DashboardClientsTOTP.py
Normal file
@@ -0,0 +1,82 @@
|
||||
import datetime
|
||||
import hashlib
|
||||
import uuid
|
||||
|
||||
import sqlalchemy as db
|
||||
from .ConnectionString import ConnectionString
|
||||
|
||||
|
||||
class DashboardClientsTOTP:
    """
    Issues and resolves short-lived TOTP session tokens for dashboard
    clients. Tokens live in the DashboardClientsTOTPTokens table and are
    valid for 10 minutes from creation.
    """
    def __init__(self):
        self.engine = db.create_engine(ConnectionString("wgdashboard"))
        self.metadata = db.MetaData()
        self.dashboardClientsTOTPTable = db.Table(
            'DashboardClientsTOTPTokens', self.metadata,
            db.Column("Token", db.String(500), primary_key=True, index=True),
            db.Column("ClientID", db.String(500), index=True),
            # SQLite has no TIMESTAMP type; pick the column type by backend.
            db.Column(
                "ExpireTime", (db.DATETIME if 'sqlite:///' in ConnectionString("wgdashboard") else db.TIMESTAMP)
            )
        )
        self.metadata.create_all(self.engine)
        # Reflect so the separately-created DashboardClients table can be
        # joined against in GetTotp().
        self.metadata.reflect(self.engine)
        self.dashboardClientsTable = self.metadata.tables['DashboardClients']

    def GenerateToken(self, ClientID) -> str:
        """
        Create a new TOTP session token for a client.

        Any still-valid tokens for the client are expired first; the new
        token is valid for 10 minutes.

        :return: the new token (SHA-512 hex digest)
        """
        token = hashlib.sha512(f"{ClientID}_{datetime.datetime.now()}_{uuid.uuid4()}".encode()).hexdigest()
        with self.engine.begin() as conn:
            conn.execute(
                self.dashboardClientsTOTPTable.update().values({
                    "ExpireTime": datetime.datetime.now()
                }).where(
                    db.and_(self.dashboardClientsTOTPTable.c.ClientID == ClientID, self.dashboardClientsTOTPTable.c.ExpireTime > datetime.datetime.now())
                )
            )
            conn.execute(
                self.dashboardClientsTOTPTable.insert().values({
                    "Token": token,
                    "ClientID": ClientID,
                    "ExpireTime": datetime.datetime.now() + datetime.timedelta(minutes=10)
                })
            )
        return token

    def RevokeToken(self, Token) -> bool:
        """
        Expire a token immediately.

        :return: True on success, False if the database update failed
        """
        try:
            with self.engine.begin() as conn:
                conn.execute(
                    self.dashboardClientsTOTPTable.update().values({
                        "ExpireTime": datetime.datetime.now()
                    }).where(
                        self.dashboardClientsTOTPTable.c.Token == Token
                    )
                )
        except Exception as e:
            return False
        return True

    def GetTotp(self, token: str) -> tuple[bool, dict] | tuple[bool, None]:
        """
        Resolve an unexpired token to its client's TOTP details.

        :return: (True, row dict with ClientID/Email/TotpKey/TotpKeyVerified)
                 when the token is valid, else (False, None)
        """
        with self.engine.connect() as conn:
            totp = conn.execute(
                db.select(
                    self.dashboardClientsTable.c.ClientID,
                    self.dashboardClientsTable.c.Email,
                    self.dashboardClientsTable.c.TotpKey,
                    self.dashboardClientsTable.c.TotpKeyVerified,
                ).select_from(
                    self.dashboardClientsTOTPTable
                ).where(
                    db.and_(
                        self.dashboardClientsTOTPTable.c.Token == token,
                        self.dashboardClientsTOTPTable.c.ExpireTime > datetime.datetime.now()
                    )
                ).join(
                    self.dashboardClientsTable,
                    self.dashboardClientsTOTPTable.c.ClientID == self.dashboardClientsTable.c.ClientID
                )
            ).mappings().fetchone()
            if totp:
                return True, dict(totp)
            return False, None
|
||||
|
||||
|
285
src/modules/DashboardConfig.py
Normal file
285
src/modules/DashboardConfig.py
Normal file
@@ -0,0 +1,285 @@
|
||||
"""
|
||||
Dashboard Configuration
|
||||
"""
|
||||
import configparser, secrets, os, pyotp, ipaddress, bcrypt
|
||||
from sqlalchemy_utils import database_exists, create_database
|
||||
import sqlalchemy as db
|
||||
from datetime import datetime
|
||||
from typing import Any
|
||||
from flask import current_app
|
||||
from .ConnectionString import ConnectionString
|
||||
from .Utilities import (
|
||||
GetRemoteEndpoint, ValidateDNSAddress
|
||||
)
|
||||
from .DashboardAPIKey import DashboardAPIKey
|
||||
|
||||
|
||||
|
||||
class DashboardConfig:
    """
    Dashboard configuration backed by an INI file (wg-dashboard.ini) plus
    an API-key table in the database.

    Values are stored as strings in the INI file; GetConfig() converts
    truthy/falsy strings back to booleans and handles a few special keys
    (email template escaping, autostart list).
    """
    DashboardVersion = 'v4.3.0.1'
    ConfigurationPath = os.getenv('CONFIGURATION_PATH', '.')
    ConfigurationFilePath = os.path.join(ConfigurationPath, 'wg-dashboard.ini')

    def __init__(self):
        # Create an empty config file on first run.
        if not os.path.exists(DashboardConfig.ConfigurationFilePath):
            open(DashboardConfig.ConfigurationFilePath, "x")
        self.__config = configparser.RawConfigParser(strict=False)
        self.__config.read_file(open(DashboardConfig.ConfigurationFilePath, "r+"))
        # Keys never exposed via toJson() and never settable via SetConfig()
        # unless init=True.
        self.hiddenAttribute = ["totp_key", "auth_req"]
        # Defaults applied for any section/key missing from the INI file.
        self.__default = {
            "Account": {
                "username": "admin",
                "password": "admin",
                "enable_totp": "false",
                "totp_verified": "false",
                "totp_key": pyotp.random_base32()
            },
            "Server": {
                "wg_conf_path": "/etc/wireguard",
                "awg_conf_path": "/etc/amnezia/amneziawg",
                "app_prefix": "",
                "app_ip": "0.0.0.0",
                "app_port": "10086",
                "auth_req": "true",
                "version": DashboardConfig.DashboardVersion,
                "dashboard_refresh_interval": "60000",
                "dashboard_peer_list_display": "grid",
                "dashboard_sort": "status",
                "dashboard_theme": "dark",
                "dashboard_api_key": "false",
                "dashboard_language": "en-US"
            },
            "Peers": {
                "peer_global_DNS": "1.1.1.1",
                "peer_endpoint_allowed_ip": "0.0.0.0/0",
                "peer_display_mode": "grid",
                "remote_endpoint": GetRemoteEndpoint(),
                "peer_MTU": "1420",
                "peer_keep_alive": "21"
            },
            "Other": {
                "welcome_session": "true"
            },
            "Database":{
                "type": "sqlite",
                "host": "",
                "port": "",
                "username": "",
                "password": ""
            },
            "Email":{
                "server": "",
                "port": "",
                "encryption": "",
                "username": "",
                "email_password": "",
                "authentication_required": "true",
                "send_from": "",
                "email_template": ""
            },
            "OIDC": {
                "admin_enable": "false",
                "client_enable": "false"
            },
            "Clients": {
                "enable": "true",
            },
            "WireGuardConfiguration": {
                "autostart": ""
            }
        }

        # Back-fill any missing defaults (init=True bypasses validation and
        # allows hidden attributes to be written).
        for section, keys in self.__default.items():
            for key, value in keys.items():
                exist, currentData = self.GetConfig(section, key)
                if not exist:
                    self.SetConfig(section, key, value, True)

        self.engine = db.create_engine(ConnectionString('wgdashboard'))
        self.dbMetadata = db.MetaData()
        self.__createAPIKeyTable()
        self.DashboardAPIKeys = self.__getAPIKeys()
        self.APIAccessed = False
        # Keep the stored version in sync with the running code.
        self.SetConfig("Server", "version", DashboardConfig.DashboardVersion)

    def getConnectionString(self, database) -> str or None:
        """
        Build a SQLAlchemy connection string for `database` from the
        [Database] section, creating the database (and the sqlite db/
        directory) if needed.

        NOTE(review): this appears to duplicate the imported
        ConnectionString helper — confirm which one is canonical.
        """
        sqlitePath = os.path.join(DashboardConfig.ConfigurationPath, "db")

        if not os.path.isdir(sqlitePath):
            os.mkdir(sqlitePath)

        if self.GetConfig("Database", "type")[1] == "postgresql":
            cn = f'postgresql+psycopg2://{self.GetConfig("Database", "username")[1]}:{self.GetConfig("Database", "password")[1]}@{self.GetConfig("Database", "host")[1]}/{database}'
        elif self.GetConfig("Database", "type")[1] == "mysql":
            cn = f'mysql+mysqldb://{self.GetConfig("Database", "username")[1]}:{self.GetConfig("Database", "password")[1]}@{self.GetConfig("Database", "host")[1]}/{database}'
        else:
            cn = f'sqlite:///{os.path.join(sqlitePath, f"{database}.db")}'
        if not database_exists(cn):
            create_database(cn)
        return cn

    def __createAPIKeyTable(self):
        """Define (and create if absent) the DashboardAPIKeys table."""
        self.apiKeyTable = db.Table('DashboardAPIKeys', self.dbMetadata,
                                    db.Column("Key", db.String(255), nullable=False, primary_key=True),
                                    db.Column("CreatedAt",
                                              (db.DATETIME if self.GetConfig('Database', 'type')[1] == 'sqlite' else db.TIMESTAMP),
                                              server_default=db.func.now()
                                              ),
                                    db.Column("ExpiredAt",
                                              (db.DATETIME if self.GetConfig('Database', 'type')[1] == 'sqlite' else db.TIMESTAMP)
                                              )
                                    )
        self.dbMetadata.create_all(self.engine)
    def __getAPIKeys(self) -> list[DashboardAPIKey]:
        """Return all non-expired API keys; [] on any database error."""
        try:
            with self.engine.connect() as conn:
                keys = conn.execute(self.apiKeyTable.select().where(
                    db.or_(self.apiKeyTable.columns.ExpiredAt.is_(None), self.apiKeyTable.columns.ExpiredAt > datetime.now())
                )).fetchall()
                fKeys = []
                for k in keys:
                    # Row tuple order: (Key, CreatedAt, ExpiredAt).
                    fKeys.append(DashboardAPIKey(k[0], k[1].strftime("%Y-%m-%d %H:%M:%S"), (k[2].strftime("%Y-%m-%d %H:%M:%S") if k[2] else None)))
                return fKeys
        except Exception as e:
            # NOTE(review): logger.error("msg", e) treats `e` as a %-format
            # argument; consider exc_info=e — confirm intended.
            current_app.logger.error("API Keys error", e)
            return []

    def createAPIKeys(self, ExpiredAt = None):
        """Create a new API key (optionally expiring at `ExpiredAt`) and refresh the cache."""
        newKey = secrets.token_urlsafe(32)
        with self.engine.begin() as conn:
            conn.execute(
                self.apiKeyTable.insert().values({
                    "Key": newKey,
                    "ExpiredAt": ExpiredAt
                })
            )

        self.DashboardAPIKeys = self.__getAPIKeys()

    def deleteAPIKey(self, key):
        """Expire an API key immediately and refresh the cache."""
        with self.engine.begin() as conn:
            conn.execute(
                self.apiKeyTable.update().values({
                    "ExpiredAt": datetime.now(),
                }).where(self.apiKeyTable.columns.Key == key)
            )

        self.DashboardAPIKeys = self.__getAPIKeys()

    def __configValidation(self, section : str, key: str, value: Any) -> tuple[bool, str]:
        """
        Validate a (section, key, value) triple before it is written.

        :return: (True, "") when valid, otherwise (False, reason)
        """
        # NOTE(review): this condition looks wrong — 'Peer' does not match
        # the 'Peers' section and the `and` makes the empty-field check
        # nearly unreachable; confirm the intended logic.
        if (type(value) is str and len(value) == 0
                and section not in ['Email', 'WireGuardConfiguration'] and
                (section == 'Peer' and key == 'peer_global_dns')):
            return False, "Field cannot be empty!"
        if section == "Peers" and key == "peer_global_dns" and len(value) > 0:
            return ValidateDNSAddress(value)
        if section == "Peers" and key == "peer_endpoint_allowed_ip":
            value = value.split(",")
            for i in value:
                i = i.strip()
                try:
                    # strict=False allows host bits set in the network.
                    ipaddress.ip_network(i, strict=False)
                except Exception as e:
                    return False, str(e)
        if section == "Server" and key == "wg_conf_path":
            if not os.path.exists(value):
                return False, f"{value} is not a valid path"
        if section == "Account" and key == "password":
            # `value` here is a dict with currentPassword/newPassword/
            # repeatNewPassword supplied by the password-change form.
            if self.GetConfig("Account", "password")[0]:
                if not self.__checkPassword(
                        value["currentPassword"], self.GetConfig("Account", "password")[1].encode("utf-8")):
                    return False, "Current password does not match."
                if value["newPassword"] != value["repeatNewPassword"]:
                    return False, "New passwords does not match"
        return True, ""

    def generatePassword(self, plainTextPassword: str):
        """Hash a plaintext password with bcrypt; returns bytes."""
        return bcrypt.hashpw(plainTextPassword.encode("utf-8"), bcrypt.gensalt())

    def __checkPassword(self, plainTextPassword: str, hashedPassword: bytes):
        """Verify a plaintext password against a bcrypt hash."""
        return bcrypt.checkpw(plainTextPassword.encode("utf-8"), hashedPassword)

    def SetConfig(self, section: str, key: str, value: str | bool | list[str] | dict[str, str], init: bool = False) -> tuple[bool, str] | tuple[bool, None]:
        """
        Validate, normalise and persist one configuration value.

        :param init: bypass validation/hidden-key protection and create
                     missing sections (used while applying defaults)
        :return: (saved?, error message or "")
        """
        if key in self.hiddenAttribute and not init:
            return False, None

        if not init:
            valid, msg = self.__configValidation(section, key, value)
            if not valid:
                return False, msg

        if section == "Account" and key == "password":
            # Store only the bcrypt hash; at init the raw default password
            # is hashed directly, otherwise the form dict's newPassword.
            if not init:
                value = self.generatePassword(value["newPassword"]).decode("utf-8")
            else:
                value = self.generatePassword(value).decode("utf-8")

        if section == "Email" and key == "email_template":
            # Escape newlines etc. so the template survives INI storage;
            # GetConfig() reverses this.
            value = value.encode('unicode_escape').decode('utf-8')

        if section == "Server" and key == "wg_conf_path":
            if not os.path.exists(value):
                return False, "Path does not exist"

        if section not in self.__config:
            if init:
                self.__config[section] = {}
            else:
                return False, "Section does not exist"

        if ((key not in self.__config[section].keys() and init) or
                (key in self.__config[section].keys())):
            if type(value) is bool:
                if value:
                    self.__config[section][key] = "true"
                else:
                    self.__config[section][key] = "false"
            elif type(value) in [int, float]:
                self.__config[section][key] = str(value)
            elif type(value) is list:
                # Lists are stored "a||b||c"; GetConfig() splits them back.
                self.__config[section][key] = "||".join(value).strip("||")
            else:
                self.__config[section][key] = fr"{value}"
            return self.SaveConfig(), ""
        else:
            return False, f"{key} does not exist under {section}"

    def SaveConfig(self) -> bool:
        """Write the in-memory config back to wg-dashboard.ini."""
        try:
            with open(DashboardConfig.ConfigurationFilePath, "w+", encoding='utf-8') as configFile:
                self.__config.write(configFile)
            return True
        except Exception as e:
            return False

    def GetConfig(self, section, key) ->tuple[bool, bool] | tuple[bool, str] | tuple[bool, list[str]] | tuple[bool, None]:
        """
        Read one configuration value.

        :return: (found?, value) — booleans are decoded from truthy/falsy
                 strings, the email template is unescaped, and autostart
                 is split into a list
        """
        if section not in self.__config:
            return False, None

        if key not in self.__config[section]:
            return False, None

        if section == "Email" and key == "email_template":
            return True, self.__config[section][key].encode('utf-8').decode('unicode_escape')

        if section == "WireGuardConfiguration" and key == "autostart":
            return True, list(filter(lambda x: len(x) > 0, self.__config[section][key].split("||")))

        if self.__config[section][key] in ["1", "yes", "true", "on"]:
            return True, True

        if self.__config[section][key] in ["0", "no", "false", "off"]:
            return True, False


        return True, self.__config[section][key]

    def toJson(self) -> dict[str, dict[Any, Any]]:
        """Export the whole configuration, omitting hidden attributes."""
        the_dict = {}

        for section in self.__config.sections():
            the_dict[section] = {}
            for key, val in self.__config.items(section):
                if key not in self.hiddenAttribute:
                    the_dict[section][key] = self.GetConfig(section, key)[1]
        return the_dict
|
@@ -1,35 +1,44 @@
|
||||
"""
|
||||
Dashboard Logger Class
|
||||
"""
|
||||
import sqlite3, os, uuid
|
||||
import uuid
|
||||
import sqlalchemy as db
|
||||
from flask import current_app
|
||||
from .ConnectionString import ConnectionString
|
||||
|
||||
|
||||
class DashboardLogger:
    """
    Writes dashboard activity/access log entries to the
    'wgdashboard_log' database via SQLAlchemy.
    """
    def __init__(self):
        self.engine = db.create_engine(ConnectionString("wgdashboard_log"))
        self.metadata = db.MetaData()
        self.dashboardLoggerTable = db.Table('DashboardLog', self.metadata,

                                             db.Column('LogID', db.String(255), nullable=False, primary_key=True),
                                             # SQLite has no TIMESTAMP type; pick the column type by backend.
                                             db.Column('LogDate',
                                                       (db.DATETIME if 'sqlite:///' in ConnectionString("wgdashboard") else db.TIMESTAMP),
                                                       server_default=db.func.now()),
                                             db.Column('URL', db.String(255)),
                                             db.Column('IP', db.String(255)),

                                             db.Column('Status', db.String(255), nullable=False),
                                             db.Column('Message', db.Text), extend_existing=True,
                                             )
        self.metadata.create_all(self.engine)
        self.log(Message="WGDashboard started")

    def log(self, URL: str = "", IP: str = "", Status: str = "true", Message: str = "") -> bool:
        """
        Insert one log row.

        :param URL: request URL associated with the event (optional)
        :param IP: client IP associated with the event (optional)
        :param Status: "true"/"false" outcome flag
        :param Message: human-readable description
        :return: True on success, False if the insert failed
        """
        try:
            with self.engine.begin() as conn:
                conn.execute(
                    self.dashboardLoggerTable.insert().values(
                        LogID=str(uuid.uuid4()),
                        URL=URL,
                        IP=IP,
                        Status=Status,
                        Message=Message
                    )
                )
            return True
        except Exception as e:
            # Pass the exception via exc_info so the traceback is recorded;
            # logger.error("msg", e) would treat `e` as a %-format argument
            # and drop it.
            current_app.logger.error("Access Log Error", exc_info=e)
            return False
142
src/modules/DashboardOIDC.py
Normal file
142
src/modules/DashboardOIDC.py
Normal file
@@ -0,0 +1,142 @@
|
||||
import os
|
||||
import json
|
||||
import requests
|
||||
from jose import jwt
|
||||
import certifi
|
||||
from flask import current_app
|
||||
|
||||
class DashboardOIDC:
    """
    OpenID Connect integration for WGDashboard.

    Reads provider definitions (client_id / client_secret / issuer) from
    a JSON file and verifies authorization-code logins against each
    registered provider.
    """
    ConfigurationPath = os.getenv('CONFIGURATION_PATH', '.')
    ConfigurationFilePath = os.path.join(ConfigurationPath, 'wg-dashboard-oidc-providers.json')
    def __init__(self, mode):
        # mode selects which section of the JSON file to load
        # ("Admin" or "Client").
        self.mode = mode
        self.providers: dict[str, dict] = {}
        # Client secrets kept separately so GetProviders() never exposes them.
        self.provider_secret: dict[str, str] = {}
        self.__default = {
            "Admin": {
                'Provider': {
                    'client_id': '',
                    'client_secret': '',
                    'issuer': '',
                },
            },
            "Client": {
                'Provider': {
                    'client_id': '',
                    'client_secret': '',
                    'issuer': '',
                },
            }
        }

        # Seed a template configuration file on first run.
        if not os.path.exists(DashboardOIDC.ConfigurationFilePath):
            with open(DashboardOIDC.ConfigurationFilePath, "w+") as f:
                encoder = json.JSONEncoder(indent=4)
                f.write(encoder.encode(self.__default))

        self.ReadFile()

    def GetProviders(self):
        """Return all registered providers (without client secrets)."""
        return self.providers

    def GetProviderNameByIssuer(self, issuer):
        """Map an issuer URL to its configured provider name; fall back to the issuer itself."""
        for (key, val) in self.providers.items():
            if val.get('openid_configuration').get('issuer') == issuer:
                return key
        return issuer

    def VerifyToken(self, provider, code, redirect_uri):
        """
        Exchange an authorization code for tokens and validate the ID token.

        :return: (True, ID-token claims) on success, else (False, error message)
        """
        try:
            if not all([provider, code, redirect_uri]):
                return False, "Please provide all parameters"

            if provider not in self.providers.keys():
                return False, "Provider does not exist"

            secret = self.provider_secret.get(provider)
            oidc_config_status, oidc_config = self.GetProviderConfiguration(provider)
            provider_info = self.providers.get(provider)

            data = {
                "grant_type": "authorization_code",
                "code": code,
                "redirect_uri": redirect_uri,
                "client_id": provider_info.get('client_id'),
                "client_secret": secret
            }

            try:
                tokens = requests.post(oidc_config.get('token_endpoint'), data=data).json()
                if not all([tokens.get('access_token'), tokens.get('id_token')]):
                    return False, tokens.get('error_description', None)
            except Exception as e:
                # exc_info logs the traceback; passing `e` positionally
                # would be treated as a %-format argument and lost.
                current_app.logger.error("Verify token failed", exc_info=e)
                return False, str(e)

            access_token = tokens.get('access_token')
            id_token = tokens.get('id_token')
            jwks_uri = oidc_config.get("jwks_uri")
            issuer = oidc_config.get("issuer")
            jwks = requests.get(jwks_uri, verify=certifi.where()).json()

            # Pick the signing key matching the ID token's key id.
            headers = jwt.get_unverified_header(id_token)
            kid = headers["kid"]

            key = next(k for k in jwks["keys"] if k["kid"] == kid)

            # Verifies signature, audience, issuer and the at_hash claim.
            payload = jwt.decode(
                id_token,
                key,
                algorithms=[key["alg"]],
                audience=provider_info.get('client_id'),
                issuer=issuer,
                access_token=access_token
            )
            # (Removed leftover debug print of the token payload.)
            return True, payload
        except Exception as e:
            # Corrected copy-pasted message; do not log the authorization
            # code — it is a credential.
            current_app.logger.error('Verify token failed. Reason: ' + str(e), exc_info=e)
            return False, str(e)

    def GetProviderConfiguration(self, provider_name):
        """
        Fetch a provider's OpenID discovery document.

        :return: (True, config dict) on success, else (False, None)
        """
        if not all([provider_name]):
            return False, None
        provider = self.providers.get(provider_name)
        try:
            oidc_config = requests.get(
                f"{provider.get('issuer').strip('/')}/.well-known/openid-configuration",
                verify=certifi.where()
            ).json()
        except Exception as e:
            current_app.logger.error("Failed to get OpenID Configuration of " + provider.get('issuer'), exc_info=e)
            return False, None
        return True, oidc_config

    def ReadFile(self):
        """
        Load providers for the current mode from the JSON file and fetch
        each fully-configured issuer's OpenID configuration.
        """
        decoder = json.JSONDecoder()
        try:
            # `with` guarantees the handle is closed (previously leaked).
            with open(DashboardOIDC.ConfigurationFilePath, 'r') as f:
                providers = decoder.decode(f.read())
            providers = providers[self.mode]
            for k in providers.keys():
                # Only register providers with all three fields filled in.
                if all([providers[k]['client_id'], providers[k]['client_secret'], providers[k]['issuer']]):
                    try:
                        oidc_config = requests.get(
                            f"{providers[k]['issuer'].strip('/')}/.well-known/openid-configuration",
                            timeout=3,
                            verify=certifi.where()
                        ).json()
                        self.providers[k] = {
                            'client_id': providers[k]['client_id'],
                            'issuer': providers[k]['issuer'].strip('/'),
                            'openid_configuration': oidc_config
                        }
                        self.provider_secret[k] = providers[k]['client_secret']
                        current_app.logger.info(f"Registered OIDC Provider: {k}")
                    except Exception as e:
                        current_app.logger.error(f"Failed to register OIDC config for {k}", exc_info=e)
        except Exception as e:
            current_app.logger.error('Read OIDC file failed. Reason: ' + str(e))
            return False
|
117
src/modules/DashboardPlugins.py
Normal file
117
src/modules/DashboardPlugins.py
Normal file
@@ -0,0 +1,117 @@
|
||||
import os
|
||||
import sys
|
||||
import importlib.util
|
||||
from pathlib import Path
|
||||
from typing import Dict, Callable, List, Optional
|
||||
import threading
|
||||
|
||||
|
||||
class DashboardPlugins:
|
||||
|
||||
def __init__(self, app, WireguardConfigurations, directory: str = 'plugins'):
|
||||
self.directory = Path('plugins')
|
||||
self.loadedPlugins: dict[str, Callable] = {}
|
||||
self.errorPlugins: List[str] = []
|
||||
self.logger = app.logger
|
||||
self.WireguardConfigurations = WireguardConfigurations
|
||||
|
||||
def startThreads(self):
|
||||
self.loadAllPlugins()
|
||||
self.executeAllPlugins()
|
||||
|
||||
def preparePlugins(self) -> list[Path]:
|
||||
|
||||
readyPlugins = []
|
||||
|
||||
if not self.directory.exists():
|
||||
os.mkdir(self.directory)
|
||||
return []
|
||||
|
||||
for plugin in self.directory.iterdir():
|
||||
if plugin.is_dir():
|
||||
codeFile = plugin / "main.py"
|
||||
if codeFile.exists():
|
||||
self.logger.info(f"Prepared plugin: {plugin.name}")
|
||||
readyPlugins.append(plugin)
|
||||
|
||||
return readyPlugins
|
||||
|
||||
def loadPlugin(self, path: Path) -> Optional[Callable]:
|
||||
pluginName = path.name
|
||||
codeFile = path / "main.py"
|
||||
|
||||
try:
|
||||
spec = importlib.util.spec_from_file_location(
|
||||
f"WGDashboardPlugin_{pluginName}",
|
||||
codeFile
|
||||
)
|
||||
|
||||
if spec is None or spec.loader is None:
|
||||
raise ImportError(f"Failed to create spec for {pluginName}")
|
||||
|
||||
module = importlib.util.module_from_spec(spec)
|
||||
|
||||
plugin_dir_str = str(path)
|
||||
if plugin_dir_str not in sys.path:
|
||||
sys.path.insert(0, plugin_dir_str)
|
||||
|
||||
try:
|
||||
spec.loader.exec_module(module)
|
||||
finally:
|
||||
if plugin_dir_str in sys.path:
|
||||
sys.path.remove(plugin_dir_str)
|
||||
|
||||
if hasattr(module, 'main'):
|
||||
main_func = getattr(module, 'main')
|
||||
if callable(main_func):
|
||||
self.logger.info(f"Successfully loaded plugin [{pluginName}]")
|
||||
return main_func
|
||||
else:
|
||||
raise AttributeError(f"'main' in {pluginName} is not callable")
|
||||
else:
|
||||
raise AttributeError(f"Plugin {pluginName} does not have a 'main' function")
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to load the plugin [{pluginName}]. Reason: {str(e)}")
|
||||
self.errorPlugins.append(pluginName)
|
||||
return None
|
||||
|
||||
def loadAllPlugins(self):
    """Reset plugin state, then load the ``main`` entry point of every discovered plugin."""
    self.loadedPlugins.clear()
    self.errorPlugins.clear()

    for pluginPath in self.preparePlugins():
        entryPoint = self.loadPlugin(pluginPath)
        if entryPoint:
            self.loadedPlugins[pluginPath.name] = entryPoint

    if self.errorPlugins:
        self.logger.warning(f"Failed to load {len(self.errorPlugins)} plugin(s): {self.errorPlugins}")
||||
def executePlugin(self, pluginName: str):
    """Run one loaded plugin's ``main`` on its own daemon thread.

    Returns:
        bool: False when the plugin is not loaded or the thread could not
        be started; True otherwise.
    """
    if pluginName not in self.loadedPlugins.keys():
        self.logger.error(f"Failed to execute plugin [{pluginName}]. Reason: Not loaded")
        return False

    entryPoint = self.loadedPlugins.get(pluginName)

    try:
        # Daemon thread: plugins must never block dashboard shutdown.
        worker = threading.Thread(target=entryPoint, args=(self.WireguardConfigurations,), daemon=True)
        worker.name = f'WGDashboardPlugin_{pluginName}'
        worker.start()

        if worker.is_alive():
            self.logger.info(f"Execute plugin [{pluginName}] success. PID: {worker.native_id}")

    except Exception as e:
        self.logger.error(f"Failed to execute plugin [{pluginName}]. Reason: {str(e)}")
        return False

    return True
||||
def executeAllPlugins(self):
    """Launch every successfully loaded plugin, in load order."""
    for pluginName in self.loadedPlugins.keys():
        self.executePlugin(pluginName)
|
287
src/modules/DashboardWebHooks.py
Normal file
287
src/modules/DashboardWebHooks.py
Normal file
@@ -0,0 +1,287 @@
|
||||
import json
|
||||
import threading
|
||||
import time
|
||||
import urllib.parse
|
||||
import uuid
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
import requests
|
||||
from pydantic import BaseModel, field_serializer
|
||||
import sqlalchemy as db
|
||||
from .ConnectionString import ConnectionString
|
||||
from flask import current_app
|
||||
|
||||
WebHookActions = ['peer_created', 'peer_deleted', 'peer_updated']
|
||||
class WebHook(BaseModel):
    """A configured outgoing webhook, persisted by DashboardWebHooks."""
    # Primary key; a UUID4 string assigned by DashboardWebHooks.CreateWebHook().
    WebHookID: str = ''
    # Destination URL the webhook POST is sent to.
    PayloadURL: str = ''
    # Request body encoding; UpdateWebHook only accepts 'application/json'
    # or 'application/x-www-form-urlencoded'.
    ContentType: str = 'application/json'
    # Extra request headers, keyed by an opaque id -> {'key': ..., 'value': ...}
    # (shape consumed by WebHookSession.Execute).
    Headers: dict[str, dict[str, str]] = {}
    # Forwarded to requests.post(verify=...) when the webhook fires.
    VerifySSL: bool = True
    # Subset of WebHookActions this hook fires on; defaults to all actions.
    SubscribedActions: list[str] = WebHookActions
    # Inactive hooks are skipped by DashboardWebHooks.RunWebHook.
    IsActive: bool = True
    # NOTE(review): default is an empty string for a datetime-typed field;
    # UpdateWebHook overwrites it with datetime.now() before inserting —
    # confirm this pydantic version tolerates the '' placeholder.
    CreationDate: datetime = ''
    Notes: str = ''
||||
class WebHookSessionLog(BaseModel):
    """One timestamped log entry of a webhook delivery session."""
    LogTime: datetime
    # Status codes used by WebHookSession: -1 in progress, 0 success, 1 failure.
    Status: int
    Message: str = ''

    @field_serializer('LogTime')
    def logTimeSerializer(self, LogTime: datetime):
        # Serialize as a plain timestamp string so the log JSON stored in the
        # sessions table stays database/driver agnostic.
        return LogTime.strftime("%Y-%m-%d %H:%M:%S")
||||
class WebHookSessionLogs(BaseModel):
    """Ordered collection of WebHookSessionLog entries for one session."""
    Logs: list[WebHookSessionLog] = []

    def addLog(self, status: int, message: str):
        # Append an entry stamped with the current wall-clock time.
        self.Logs.append(WebHookSessionLog(LogTime=datetime.now(), Status=status, Message=message))
||||
class DashboardWebHooks:
    """Persistence and dispatch layer for dashboard webhooks.

    Keeps webhook definitions and their delivery sessions in two SQLAlchemy
    tables, caches the definitions in memory, and fires each subscribed
    webhook on its own daemon thread when a peer event occurs.
    """
    def __init__(self, DashboardConfig):
        self.engine = db.create_engine(ConnectionString("wgdashboard"))
        self.metadata = db.MetaData()
        # Webhook definitions: one row per configured webhook.
        self.webHooksTable = db.Table(
            'DashboardWebHooks', self.metadata,
            db.Column('WebHookID', db.String(255), nullable=False, primary_key=True),
            db.Column('PayloadURL', db.Text, nullable=False),
            db.Column('ContentType', db.String(255), nullable=False),
            db.Column('Headers', db.JSON),
            db.Column('VerifySSL', db.Boolean, nullable=False),
            db.Column('SubscribedActions', db.JSON),
            db.Column('IsActive', db.Boolean, nullable=False),
            # SQLite and the other supported backends need different
            # datetime column types.
            db.Column('CreationDate',
                      (db.DATETIME if DashboardConfig.GetConfig("Database", "type")[1] == 'sqlite' else db.TIMESTAMP),
                      server_default=db.func.now(),
                      nullable=False),
            db.Column('Notes', db.Text),
            extend_existing=True
        )
        # Delivery sessions: one row per webhook firing (see WebHookSession).
        self.webHookSessionsTable = db.Table(
            'DashboardWebHookSessions', self.metadata,
            db.Column('WebHookSessionID', db.String(255), nullable=False, primary_key=True),
            db.Column('WebHookID', db.String(255), nullable=False),
            db.Column('StartDate',
                      (db.DATETIME if DashboardConfig.GetConfig("Database", "type")[1] == 'sqlite' else db.TIMESTAMP),
                      server_default=db.func.now(),
                      nullable=False
                      ),
            db.Column('EndDate',
                      (db.DATETIME if DashboardConfig.GetConfig("Database", "type")[1] == 'sqlite' else db.TIMESTAMP),
                      ),
            db.Column('Data', db.JSON),
            db.Column('Status', db.INTEGER),
            db.Column('Logs', db.JSON)
        )

        self.metadata.create_all(self.engine)
        # In-memory cache of webhook definitions; refreshed by __getWebHooks().
        self.WebHooks: list[WebHook] = []

        # Status -1 marks an in-flight session. Any left over from a previous
        # process (e.g. after a crash/restart) are closed here with status 2.
        with self.engine.begin() as conn:
            conn.execute(
                self.webHookSessionsTable.update().values({
                    "EndDate": datetime.now(),
                    "Status": 2
                }).where(
                    self.webHookSessionsTable.c.Status == -1
                )
            )

        self.__getWebHooks()

    def __getWebHooks(self):
        # Refresh the in-memory cache from the database, oldest first.
        with self.engine.connect() as conn:
            webhooks = conn.execute(
                self.webHooksTable.select().order_by(
                    self.webHooksTable.c.CreationDate
                )
            ).mappings().fetchall()
            self.WebHooks.clear()
            self.WebHooks = [WebHook(**webhook) for webhook in webhooks]

    def GetWebHooks(self):
        """Return every webhook as a plain dict (refreshes the cache first)."""
        self.__getWebHooks()
        return list(map(lambda x : x.model_dump(), self.WebHooks))

    def GetWebHookSessions(self, webHook: WebHook):
        """Return all delivery sessions of ``webHook``, newest first."""
        with self.engine.connect() as conn:
            sessions = conn.execute(
                self.webHookSessionsTable.select().where(
                    self.webHookSessionsTable.c.WebHookID == webHook.WebHookID
                ).order_by(
                    db.desc(self.webHookSessionsTable.c.StartDate)
                )
            ).mappings().fetchall()
            return sessions

    def CreateWebHook(self) -> WebHook:
        """Return a fresh, not-yet-persisted WebHook with a random UUID4 id."""
        return WebHook(WebHookID=str(uuid.uuid4()))

    def SearchWebHook(self, webHook: WebHook) -> WebHook | None:
        """Return the cached webhook sharing ``webHook``'s id, or None."""
        try:
            first = next(filter(lambda x : x.WebHookID == webHook.WebHookID, self.WebHooks))
        except StopIteration:
            return None
        return first

    def SearchWebHookByID(self, webHookID: str) -> WebHook | None:
        """Return the cached webhook with id ``webHookID``, or None."""
        try:
            first = next(filter(lambda x : x.WebHookID == webHookID, self.WebHooks))
        except StopIteration:
            return None
        return first

    def UpdateWebHook(self, webHook: dict[str, str]) -> tuple[bool, str] | tuple[bool, None]:
        """Validate and upsert one webhook definition.

        Inserts when the id is unknown, updates otherwise. Returns
        ``(True, None)`` on success or ``(False, reason)`` on validation
        or database failure.
        """
        try:
            webHook = WebHook(**webHook)

            if len(webHook.PayloadURL) == 0:
                return False, "Payload URL cannot be empty"

            if len(webHook.ContentType) == 0 or webHook.ContentType not in [
                'application/json', 'application/x-www-form-urlencoded'
            ]:
                return False, "Content Type is invalid"


            with self.engine.begin() as conn:
                if self.SearchWebHook(webHook):
                    conn.execute(
                        self.webHooksTable.update().values(
                            webHook.model_dump(exclude={'WebHookID'})
                        ).where(
                            self.webHooksTable.c.WebHookID == webHook.WebHookID
                        )
                    )
                else:
                    # New row: stamp the creation time explicitly rather than
                    # relying on the column's server default.
                    webHook.CreationDate = datetime.now()
                    conn.execute(
                        self.webHooksTable.insert().values(
                            webHook.model_dump()
                        )
                    )
            self.__getWebHooks()
        except Exception as e:
            return False, str(e)
        return True, None

    def DeleteWebHook(self, webHook) -> tuple[bool, str] | tuple[bool, None]:
        """Delete the webhook row matching ``webHook['WebHookID']``."""
        try:
            webHook = WebHook(**webHook)
            with self.engine.begin() as conn:
                conn.execute(
                    self.webHooksTable.delete().where(
                        self.webHooksTable.c.WebHookID == webHook.WebHookID
                    )
                )
            self.__getWebHooks()
        except Exception as e:
            return False, str(e)
        return True, None

    def RunWebHook(self, action: str, data):
        """Fire every active webhook subscribed to ``action``.

        Each firing runs on its own daemon thread via WebHookSession.

        NOTE(review): this mutates the caller's ``data`` dict (adds 'action'
        here, and WebHookSession adds time/session keys) — confirm callers
        do not reuse the dict afterwards.
        """
        try:
            if action not in WebHookActions:
                return False
            self.__getWebHooks()
            subscribedWebHooks = filter(lambda webhook: action in webhook.SubscribedActions and webhook.IsActive,
                                        self.WebHooks)
            data['action'] = action
            for i in subscribedWebHooks:
                try:
                    # Constructing the session also persists its initial row.
                    ws = WebHookSession(i, data)
                    t = threading.Thread(target=ws.Execute, daemon=True)
                    t.start()
                    current_app.logger.info(f"Requesting {i.PayloadURL}")
                except Exception as e:
                    # NOTE(review): logger.error(msg, e) treats e as a %-format
                    # argument; the message has no placeholder — confirm
                    # logger.error(..., exc_info=e) was intended.
                    current_app.logger.error(f"Requesting {i.PayloadURL} error", e)
        except Exception as e:
            current_app.logger.error("Error when running WebHook")
||||
class WebHookSession:
    """One delivery attempt of a webhook payload, persisted step by step.

    Created on the dispatching thread (which inserts the session row), then
    Execute() runs on a separate daemon thread and updates the row as the
    delivery progresses. Status codes: -1 in progress, 0 success, 1 failed.
    """
    def __init__(self, webHook: WebHook, data: dict[str, str]):
        # A private engine/metadata per session: Execute() runs on its own
        # thread, so the session must not share the dispatcher's connection.
        self.engine = db.create_engine(ConnectionString("wgdashboard"))
        self.metadata = db.MetaData()
        self.webHookSessionsTable = db.Table('DashboardWebHookSessions', self.metadata, autoload_with=self.engine)
        self.webHook = webHook
        self.sessionID = str(uuid.uuid4())
        self.webHookSessionLogs: WebHookSessionLogs = WebHookSessionLogs()
        self.time = datetime.now()
        # Enrich the payload with session metadata (mutates the caller's dict).
        data['time'] = self.time.strftime("%Y-%m-%d %H:%M:%S")
        data['webhook_id'] = webHook.WebHookID
        data['webhook_session'] = self.sessionID
        self.data = data
        self.Prepare()

    def Prepare(self):
        """Insert the session row with in-flight status (-1)."""
        with self.engine.begin() as conn:
            conn.execute(
                self.webHookSessionsTable.insert().values({
                    "WebHookSessionID": self.sessionID,
                    "WebHookID": self.webHook.WebHookID,
                    "Data": self.data,
                    "StartDate": self.time,
                    "Status": -1,
                    "Logs": self.webHookSessionLogs.model_dump()
                })
            )
        self.UpdateSessionLog(-1, "Preparing webhook session")

    def UpdateSessionLog(self, status, message):
        """Append a log entry and persist the whole log JSON to the row."""
        self.webHookSessionLogs.addLog(status, message)
        with self.engine.begin() as conn:
            conn.execute(
                self.webHookSessionsTable.update().values({
                    "Logs": self.webHookSessionLogs.model_dump()
                }).where(
                    self.webHookSessionsTable.c.WebHookSessionID == self.sessionID
                )
            )

    def UpdateStatus(self, status: int):
        """Finalize the session row with ``status`` and an end timestamp."""
        with self.engine.begin() as conn:
            conn.execute(
                self.webHookSessionsTable.update().values({
                    "Status": status,
                    "EndDate": datetime.now()
                }).where(
                    self.webHookSessionsTable.c.WebHookSessionID == self.sessionID
                )
            )

    def Execute(self):
        """POST the payload, retrying up to 5 times with 10s back-off.

        Encodes the payload per the webhook's ContentType and applies its
        custom headers (Content-Type itself cannot be overridden).
        NOTE(review): the 10s sleep also runs after the final failed
        attempt, before the session is marked failed — confirm intended.
        """
        success = False

        for i in range(5):
            headerDictionary = {
                'Content-Type': self.webHook.ContentType
            }
            for header in self.webHook.Headers.values():
                if header['key'] not in ['Content-Type']:
                    headerDictionary[header['key']] = header['value']

            if self.webHook.ContentType == "application/json":
                reqData = json.dumps(self.data)
            else:
                # Form encoding cannot nest structures: JSON-encode any
                # non-scalar values first.
                for (key, val) in self.data.items():
                    if type(self.data[key]) not in [str, int]:
                        self.data[key] = json.dumps(self.data[key])
                reqData = urllib.parse.urlencode(self.data)
            try:
                req = requests.post(
                    self.webHook.PayloadURL, headers=headerDictionary, timeout=10, data=reqData, verify=self.webHook.VerifySSL
                )
                req.raise_for_status()
                success = True
                self.UpdateSessionLog(0, "Webhook request finished")
                self.UpdateSessionLog(0, json.dumps({"returned_data": req.text}))
                self.UpdateStatus(0)
                break
            except requests.exceptions.RequestException as e:
                self.UpdateSessionLog(1, f"Attempt #{i + 1}/5. Request errored. Reason: " + str(e))
                time.sleep(10)

        if not success:
            self.UpdateSessionLog(1, "Webhook request failed & terminated.")
            self.UpdateStatus(1)
|
@@ -31,18 +31,25 @@ class EmailSender:
|
||||
|
||||
def SendFrom(self):
|
||||
return self.DashboardConfig.GetConfig("Email", "send_from")[1]
|
||||
|
||||
# Thank you, @gdeeble from GitHub
|
||||
def AuthenticationRequired(self):
|
||||
return self.DashboardConfig.GetConfig("Email", "authentication_required")[1]
|
||||
|
||||
def ready(self):
|
||||
return len(self.Server()) > 0 and len(self.Port()) > 0 and len(self.Encryption()) > 0 and len(self.Username()) > 0 and len(self.Password()) > 0 and len(self.SendFrom())
|
||||
if self.AuthenticationRequired():
|
||||
return all([self.Server(), self.Port(), self.Encryption(), self.Username(), self.Password(), self.SendFrom()])
|
||||
return all([self.Server(), self.Port(), self.Encryption(), self.SendFrom()])
|
||||
|
||||
def send(self, receiver, subject, body, includeAttachment = False, attachmentName = ""):
|
||||
def send(self, receiver, subject, body, includeAttachment = False, attachmentName = "") -> tuple[bool, str] | tuple[bool, None]:
|
||||
if self.ready():
|
||||
try:
|
||||
self.smtp = smtplib.SMTP(self.Server(), port=int(self.Port()))
|
||||
self.smtp.ehlo()
|
||||
if self.Encryption() == "STARTTLS":
|
||||
self.smtp.starttls()
|
||||
self.smtp.login(self.Username(), self.Password())
|
||||
if self.AuthenticationRequired():
|
||||
self.smtp.login(self.Username(), self.Password())
|
||||
message = MIMEMultipart()
|
||||
message['Subject'] = subject
|
||||
message['From'] = self.SendFrom()
|
||||
|
88
src/modules/NewConfigurationTemplates.py
Normal file
88
src/modules/NewConfigurationTemplates.py
Normal file
@@ -0,0 +1,88 @@
|
||||
import uuid
|
||||
|
||||
from pydantic import BaseModel, field_serializer
|
||||
import sqlalchemy as db
|
||||
from .ConnectionString import ConnectionString
|
||||
|
||||
|
||||
class NewConfigurationTemplate(BaseModel):
    """A reusable preset for creating new WireGuard configurations."""
    # Primary key; a UUID4 string assigned by NewConfigurationTemplates.CreateTemplate().
    TemplateID: str = ''
    # CIDR subnet suggested for the new configuration.
    Subnet: str = ''
    # Inclusive listen-port range to pick from; 0 means unset.
    ListenPortStart: int = 0
    ListenPortEnd: int = 0
    Notes: str = ""
||||
class NewConfigurationTemplates:
    """CRUD store for NewConfigurationTemplate rows, with an in-memory cache."""
    def __init__(self):
        self.engine = db.create_engine(ConnectionString("wgdashboard"))
        self.metadata = db.MetaData()
        self.templatesTable = db.Table(
            'NewConfigurationTemplates', self.metadata,
            db.Column('TemplateID', db.String(255), primary_key=True),
            db.Column('Subnet', db.String(255)),
            db.Column('ListenPortStart', db.Integer),
            db.Column('ListenPortEnd', db.Integer),
            db.Column('Notes', db.Text),
        )
        self.metadata.create_all(self.engine)
        # In-memory cache; refreshed by __getTemplates().
        self.Templates: list[NewConfigurationTemplate] = []
        self.__getTemplates()

    def GetTemplates(self):
        """Return every template as a plain dict (refreshes the cache first)."""
        self.__getTemplates()
        return list(map(lambda x : x.model_dump(), self.Templates))

    def __getTemplates(self):
        # Rebuild the cache from the database.
        with self.engine.connect() as conn:
            templates = conn.execute(
                self.templatesTable.select()
            ).mappings().fetchall()
            self.Templates.clear()
            self.Templates = [NewConfigurationTemplate(**template) for template in templates]

    def CreateTemplate(self) -> NewConfigurationTemplate:
        """Return a fresh, not-yet-persisted template with a random UUID4 id."""
        return NewConfigurationTemplate(TemplateID=str(uuid.uuid4()))

    def SearchTemplate(self, template: NewConfigurationTemplate):
        """Return the cached template sharing ``template``'s id, or None."""
        try:
            first = next(filter(lambda x : x.TemplateID == template.TemplateID, self.Templates))
        except StopIteration:
            return None
        return first

    def UpdateTemplate(self, template: dict[str, str]) -> tuple[bool, str] | tuple[bool, None]:
        """Upsert one template: update when the id exists, insert otherwise.

        Returns ``(True, None)`` on success or ``(False, reason)`` on failure.
        """
        try:
            template = NewConfigurationTemplate(**template)
            with self.engine.begin() as conn:
                if self.SearchTemplate(template):
                    conn.execute(
                        self.templatesTable.update().values(
                            template.model_dump(exclude={'TemplateID'})
                        ).where(
                            self.templatesTable.c.TemplateID == template.TemplateID
                        )
                    )
                else:
                    conn.execute(
                        self.templatesTable.insert().values(
                            template.model_dump()
                        )
                    )
            self.__getTemplates()
        except Exception as e:
            return False, str(e)
        return True, None

    def DeleteTemplate(self, template: dict[str, str]) -> tuple[bool, str] | tuple[bool, None]:
        """Delete the template row matching ``template['TemplateID']``."""
        try:
            template = NewConfigurationTemplate(**template)
            with self.engine.begin() as conn:
                conn.execute(
                    self.templatesTable.delete().where(
                        self.templatesTable.c.TemplateID == template.TemplateID
                    )
                )
            self.__getTemplates()
        except Exception as e:
            return False, str(e)
        return True, None
|
354
src/modules/Peer.py
Normal file
354
src/modules/Peer.py
Normal file
@@ -0,0 +1,354 @@
|
||||
"""
|
||||
Peer
|
||||
"""
|
||||
import base64
|
||||
import datetime
|
||||
import json
|
||||
import os, subprocess, uuid, random, re
|
||||
from datetime import timedelta
|
||||
|
||||
import jinja2
|
||||
import sqlalchemy as db
|
||||
from .PeerJob import PeerJob
|
||||
from .PeerShareLink import PeerShareLink
|
||||
from .Utilities import GenerateWireguardPublicKey, ValidateIPAddressesWithRange, ValidateDNSAddress
|
||||
|
||||
|
||||
class Peer:
    """One WireGuard peer of a configuration: state, persistence and export.

    Instances are hydrated from a row of the configuration's peers table and
    hold both static settings (keys, allowed IPs, MTU, ...) and live
    transfer/handshake statistics.
    """
    def __init__(self, tableData, configuration):
        self.configuration = configuration
        # `id` is the peer's WireGuard public key (also the DB primary key).
        self.id = tableData["id"]
        self.private_key = tableData["private_key"]
        self.DNS = tableData["DNS"]
        self.endpoint_allowed_ip = tableData["endpoint_allowed_ip"]
        self.name = tableData["name"]
        # Live counters (bytes since interface start) vs. cumu_* (carried
        # over across restarts); total_* + cumu_* views are kept separately.
        self.total_receive = tableData["total_receive"]
        self.total_sent = tableData["total_sent"]
        self.total_data = tableData["total_data"]
        self.endpoint = tableData["endpoint"]
        self.status = tableData["status"]
        self.latest_handshake = tableData["latest_handshake"]
        self.allowed_ip = tableData["allowed_ip"]
        self.cumu_receive = tableData["cumu_receive"]
        self.cumu_sent = tableData["cumu_sent"]
        self.cumu_data = tableData["cumu_data"]
        self.mtu = tableData["mtu"]
        self.keepalive = tableData["keepalive"]
        self.remote_endpoint = tableData["remote_endpoint"]
        self.preshared_key = tableData["preshared_key"]
        # Scheduled jobs and share links attached to this peer.
        self.jobs: list[PeerJob] = []
        self.ShareLink: list[PeerShareLink] = []
        self.getJobs()
        self.getShareLink()

    def toJson(self):
        """Return the peer's attributes as a dict for JSON responses."""
        # self.getJobs()
        # self.getShareLink()
        return self.__dict__

    def __repr__(self):
        return str(self.toJson())

    def updatePeer(self, name: str, private_key: str,
                   preshared_key: str,
                   dns_addresses: str, allowed_ip: str, endpoint_allowed_ip: str, mtu: int,
                   keepalive: int) -> tuple[bool, str] or tuple[bool, None]:
        """Validate and apply new settings to this peer.

        Applies allowed-ips/preshared-key live via `wg set`, saves the
        configuration with `wg-quick save`, then persists the remaining
        fields to the database. Returns (True, None) on success or
        (False, reason) on any validation or subprocess failure.
        """
        # The interface must be up for `wg set` to work.
        if not self.configuration.getStatus():
            self.configuration.toggleConfiguration()

        # Flatten every other peer's comma-separated allowed_ip lists into
        # one list so we can reject duplicates.
        existingAllowedIps = [item for row in list(
            map(lambda x: [q.strip() for q in x.split(',')],
                map(lambda y: y.allowed_ip,
                    list(filter(lambda k: k.id != self.id, self.configuration.getPeersList()))))) for item in row]

        if allowed_ip in existingAllowedIps:
            return False, "Allowed IP already taken by another peer"

        if not ValidateIPAddressesWithRange(endpoint_allowed_ip):
            return False, f"Endpoint Allowed IPs format is incorrect"

        if len(dns_addresses) > 0 and not ValidateDNSAddress(dns_addresses):
            return False, f"DNS format is incorrect"

        # Coerce non-numeric MTU/keepalive to 0 ("unset") before range checks.
        if type(mtu) is str or mtu is None:
            mtu = 0

        if mtu < 0 or mtu > 1460:
            return False, "MTU format is not correct"

        if type(keepalive) is str or keepalive is None:
            keepalive = 0

        if keepalive < 0:
            return False, "Persistent Keepalive format is not correct"
        if len(private_key) > 0:
            # The supplied private key must derive this peer's public key.
            pubKey = GenerateWireguardPublicKey(private_key)
            if not pubKey[0] or pubKey[1] != self.id:
                return False, "Private key does not match with the public key"
        try:
            # wg's preshared-key option only reads from a file, so the key is
            # written to a random temp filename and removed right after.
            rd = random.Random()
            uid = str(uuid.UUID(int=rd.getrandbits(128), version=4))
            pskExist = len(preshared_key) > 0

            if pskExist:
                with open(uid, "w+") as f:
                    f.write(preshared_key)
            newAllowedIPs = allowed_ip.replace(" ", "")
            updateAllowedIp = subprocess.check_output(
                f"{self.configuration.Protocol} set {self.configuration.Name} peer {self.id} allowed-ips {newAllowedIPs} {f'preshared-key {uid}' if pskExist else 'preshared-key /dev/null'}",
                shell=True, stderr=subprocess.STDOUT)

            if pskExist: os.remove(uid)
            # wg set prints nothing on success; any output means failure.
            if len(updateAllowedIp.decode().strip("\n")) != 0:
                return False, "Update peer failed when updating Allowed IPs"
            saveConfig = subprocess.check_output(f"{self.configuration.Protocol}-quick save {self.configuration.Name}",
                                                 shell=True, stderr=subprocess.STDOUT)
            # wg-quick save echoes "wg showconf <name>" on success.
            if f"wg showconf {self.configuration.Name}" not in saveConfig.decode().strip('\n'):
                return False, "Update peer failed when saving the configuration"
            with self.configuration.engine.begin() as conn:
                conn.execute(
                    self.configuration.peersTable.update().values({
                        "name": name,
                        "private_key": private_key,
                        "DNS": dns_addresses,
                        "endpoint_allowed_ip": endpoint_allowed_ip,
                        "mtu": mtu,
                        "keepalive": keepalive,
                        "preshared_key": preshared_key
                    }).where(
                        self.configuration.peersTable.c.id == self.id
                    )
                )
            return True, None
        except subprocess.CalledProcessError as exc:
            return False, exc.output.decode("UTF-8").strip()

    def downloadPeer(self) -> dict[str, str]:
        """Render this peer's client configuration file.

        Returns a dict with `fileName` (sanitized) and `file` (the rendered
        [Interface]/[Peer] config, passed through Jinja2 with the
        configuration in scope). For AmneziaWG ("awg") configurations an
        extra `amneziaVPN` JSON export is added.
        """
        final = {
            "fileName": "",
            "file": ""
        }
        filename = self.name
        if len(filename) == 0:
            filename = "UntitledPeer"
        filename = "".join(filename.split(' '))
        # FIXME(review): this line discards the name built above and looks
        # like corruption from a template/metadata substitution — confirm
        # the original expression before relying on fileName output.
        filename = f"(unknown)"
        # FIXME(review): '|' '\"' lacks a comma between items, so Python
        # concatenates them into the single entry '|\"' — confirm whether
        # '|' and '\"' were meant to be separate entries.
        illegal_filename = [".", ",", "/", "?", "<", ">", "\\", ":", "*", '|' '\"', "com1", "com2", "com3",
                            "com4", "com5", "com6", "com7", "com8", "com9", "lpt1", "lpt2", "lpt3", "lpt4",
                            "lpt5", "lpt6", "lpt7", "lpt8", "lpt9", "con", "nul", "prn"]
        for i in illegal_filename:
            filename = filename.replace(i, "")

        # Keep only filesystem-safe characters.
        for i in filename:
            if re.match("^[a-zA-Z0-9_=+.-]$", i):
                final["fileName"] += i

        # Per-configuration OverridePeerSettings win over the peer's own values.
        interfaceSection = {
            "PrivateKey": self.private_key,
            "Address": self.allowed_ip,
            "MTU": (
                self.configuration.configurationInfo.OverridePeerSettings.MTU
                if self.configuration.configurationInfo.OverridePeerSettings.MTU else self.mtu
            ),
            "DNS": (
                self.configuration.configurationInfo.OverridePeerSettings.DNS
                if self.configuration.configurationInfo.OverridePeerSettings.DNS else self.DNS
            )
        }

        # AmneziaWG obfuscation parameters are part of the client [Interface].
        if self.configuration.Protocol == "awg":
            interfaceSection.update({
                "Jc": self.configuration.Jc,
                "Jmin": self.configuration.Jmin,
                "Jmax": self.configuration.Jmax,
                "S1": self.configuration.S1,
                "S2": self.configuration.S2,
                "H1": self.configuration.H1,
                "H2": self.configuration.H2,
                "H3": self.configuration.H3,
                "H4": self.configuration.H4
            })

        peerSection = {
            "PublicKey": self.configuration.PublicKey,
            "AllowedIPs": (
                self.configuration.configurationInfo.OverridePeerSettings.EndpointAllowedIPs
                if self.configuration.configurationInfo.OverridePeerSettings.EndpointAllowedIPs else self.endpoint_allowed_ip
            ),
            "Endpoint": f'{(self.configuration.configurationInfo.OverridePeerSettings.PeerRemoteEndpoint if self.configuration.configurationInfo.OverridePeerSettings.PeerRemoteEndpoint else self.configuration.DashboardConfig.GetConfig("Peers", "remote_endpoint")[1])}:{(self.configuration.configurationInfo.OverridePeerSettings.ListenPort if self.configuration.configurationInfo.OverridePeerSettings.ListenPort else self.configuration.ListenPort)}',
            "PersistentKeepalive": (
                self.configuration.configurationInfo.OverridePeerSettings.PersistentKeepalive
                if self.configuration.configurationInfo.OverridePeerSettings.PersistentKeepalive
                else self.keepalive
            ),
            "PresharedKey": self.preshared_key
        }
        # Emit the two INI sections, skipping empty strings and non-positive ints.
        combine = [interfaceSection.items(), peerSection.items()]
        for s in range(len(combine)):
            if s == 0:
                final["file"] += "[Interface]\n"
            else:
                final["file"] += "\n[Peer]\n"
            for (key, val) in combine[s]:
                if val is not None and ((type(val) is str and len(val) > 0) or (type(val) is int and val > 0)):
                    final["file"] += f"{key} = {val}\n"

        # Allow Jinja2 placeholders in peer fields, rendered with the
        # configuration object in scope.
        final["file"] = jinja2.Template(final["file"]).render(configuration=self.configuration)


        if self.configuration.Protocol == "awg":
            final["amneziaVPN"] = json.dumps({
                "containers": [{
                    "awg": {
                        "isThirdPartyConfig": True,
                        "last_config": final['file'],
                        "port": self.configuration.ListenPort,
                        "transport_proto": "udp"
                    },
                    "container": "amnezia-awg"
                }],
                "defaultContainer": "amnezia-awg",
                "description": self.name,
                "hostName": (
                    self.configuration.configurationInfo.OverridePeerSettings.PeerRemoteEndpoint
                    if self.configuration.configurationInfo.OverridePeerSettings.PeerRemoteEndpoint
                    else self.configuration.DashboardConfig.GetConfig("Peers", "remote_endpoint")[1])
            })
        return final

    def getJobs(self):
        """Refresh this peer's scheduled jobs from the job store."""
        self.jobs = self.configuration.AllPeerJobs.searchJob(self.configuration.Name, self.id)

    def getShareLink(self):
        """Refresh this peer's share links from the share-link store."""
        self.ShareLink = self.configuration.AllPeerShareLinks.getLink(self.configuration.Name, self.id)

    def resetDataUsage(self, mode: str):
        """Zero this peer's usage counters in DB and memory.

        mode: 'total' clears everything, 'receive' / 'sent' clear one
        direction. Returns False on unknown mode or DB error, True otherwise.
        """
        try:
            with self.configuration.engine.begin() as conn:
                if mode == "total":
                    conn.execute(
                        self.configuration.peersTable.update().values({
                            "total_data": 0,
                            "cumu_data": 0,
                            "total_receive": 0,
                            "cumu_receive": 0,
                            "total_sent": 0,
                            "cumu_sent": 0
                        }).where(
                            self.configuration.peersTable.c.id == self.id
                        )
                    )
                    self.total_data = 0
                    self.total_receive = 0
                    self.total_sent = 0
                    self.cumu_data = 0
                    self.cumu_sent = 0
                    self.cumu_receive = 0
                elif mode == "receive":
                    conn.execute(
                        self.configuration.peersTable.update().values({
                            "total_receive": 0,
                            "cumu_receive": 0,
                        }).where(
                            self.configuration.peersTable.c.id == self.id
                        )
                    )
                    self.cumu_receive = 0
                    self.total_receive = 0
                elif mode == "sent":
                    conn.execute(
                        self.configuration.peersTable.update().values({
                            "total_sent": 0,
                            "cumu_sent": 0
                        }).where(
                            self.configuration.peersTable.c.id == self.id
                        )
                    )
                    self.cumu_sent = 0
                    self.total_sent = 0
                else:
                    return False
        except Exception as e:
            print(e)
            return False
        return True

    def getEndpoints(self):
        """Return the distinct endpoints this peer has connected from."""
        result = []
        with self.configuration.engine.connect() as conn:
            result = conn.execute(
                db.select(
                    self.configuration.peersHistoryEndpointTable.c.endpoint
                ).group_by(
                    self.configuration.peersHistoryEndpointTable.c.endpoint
                ).where(
                    self.configuration.peersHistoryEndpointTable.c.id == self.id
                )
            ).mappings().fetchall()
        return list(result)

    def getTraffics(self, interval: int = 30, startDate: datetime.datetime = None, endDate: datetime.datetime = None):
        """Return this peer's transfer samples, oldest first.

        With no dates: the last `interval` minutes. With dates: the given
        range expanded to whole days (start at 00:00:00, end at 23:59:59).
        """
        if startDate is None and endDate is None:
            endDate = datetime.datetime.now()
            startDate = endDate - timedelta(minutes=interval)
        else:
            endDate = endDate.replace(hour=23, minute=59, second=59, microsecond=999999)
            startDate = startDate.replace(hour=0, minute=0, second=0, microsecond=0)

        with self.configuration.engine.connect() as conn:
            result = conn.execute(
                db.select(
                    self.configuration.peersTransferTable.c.cumu_data,
                    self.configuration.peersTransferTable.c.total_data,
                    self.configuration.peersTransferTable.c.cumu_receive,
                    self.configuration.peersTransferTable.c.total_receive,
                    self.configuration.peersTransferTable.c.cumu_sent,
                    self.configuration.peersTransferTable.c.total_sent,
                    self.configuration.peersTransferTable.c.time
                ).where(
                    db.and_(
                        self.configuration.peersTransferTable.c.id == self.id,
                        self.configuration.peersTransferTable.c.time <= endDate,
                        self.configuration.peersTransferTable.c.time >= startDate,
                    )
                ).order_by(
                    self.configuration.peersTransferTable.c.time
                )
            ).mappings().fetchall()
            return list(result)


    def getSessions(self, startDate: datetime.datetime = None, endDate: datetime.datetime = None):
        """Return the timestamps of this peer's transfer samples in the range.

        Defaults to today; the range is expanded to whole days.
        """
        if endDate is None:
            endDate = datetime.datetime.now()

        if startDate is None:
            startDate = endDate

        endDate = endDate.replace(hour=23, minute=59, second=59, microsecond=999999)
        startDate = startDate.replace(hour=0, minute=0, second=0, microsecond=0)


        with self.configuration.engine.connect() as conn:
            result = conn.execute(
                db.select(
                    self.configuration.peersTransferTable.c.time
                ).where(
                    db.and_(
                        self.configuration.peersTransferTable.c.id == self.id,
                        self.configuration.peersTransferTable.c.time <= endDate,
                        self.configuration.peersTransferTable.c.time >= startDate,
                    )
                ).order_by(
                    self.configuration.peersTransferTable.c.time
                )
            ).fetchall()
            time = list(map(lambda x : x[0], result))
            return time

    def __duration(self, t1: datetime.datetime, t2: datetime.datetime):
        # Format the (t1 - t2) delta as HH:MM:SS.
        delta = t1 - t2

        hours, remainder = divmod(delta.total_seconds(), 3600)
        minutes, seconds = divmod(remainder, 60)
        return f"{int(hours):02}:{int(minutes):02}:{int(seconds):02}"
|
@@ -23,8 +23,8 @@ class PeerJob:
|
||||
"Field": self.Field,
|
||||
"Operator": self.Operator,
|
||||
"Value": self.Value,
|
||||
"CreationDate": self.CreationDate,
|
||||
"ExpireDate": self.ExpireDate,
|
||||
"CreationDate": self.CreationDate.strftime("%Y-%m-%d %H:%M:%S"),
|
||||
"ExpireDate": (self.ExpireDate.strftime("%Y-%m-%d %H:%M:%S") if self.ExpireDate is not None else None),
|
||||
"Action": self.Action
|
||||
}
|
||||
|
||||
|
@@ -1,53 +1,59 @@
|
||||
"""
|
||||
Peer Job Logger
|
||||
"""
|
||||
import sqlite3, os, uuid
|
||||
import uuid
|
||||
import sqlalchemy as db
|
||||
from flask import current_app
|
||||
from .ConnectionString import ConnectionString
|
||||
from .Log import Log
|
||||
|
||||
class PeerJobLogger:
|
||||
def __init__(self, CONFIGURATION_PATH, AllPeerJobs):
|
||||
self.loggerdb = sqlite3.connect(os.path.join(CONFIGURATION_PATH, 'db', 'wgdashboard_log.db'),
|
||||
check_same_thread=False)
|
||||
self.loggerdb.row_factory = sqlite3.Row
|
||||
def __init__(self, AllPeerJobs, DashboardConfig):
|
||||
self.engine = db.create_engine(ConnectionString("wgdashboard_log"))
|
||||
self.metadata = db.MetaData()
|
||||
self.jobLogTable = db.Table('JobLog', self.metadata,
|
||||
db.Column('LogID', db.String(255), nullable=False, primary_key=True),
|
||||
db.Column('JobID', db.String(255), nullable=False),
|
||||
db.Column('LogDate', (db.DATETIME if DashboardConfig.GetConfig("Database", "type")[1] == 'sqlite' else db.TIMESTAMP),
|
||||
server_default=db.func.now()),
|
||||
db.Column('Status', db.String(255), nullable=False),
|
||||
db.Column('Message', db.Text)
|
||||
)
|
||||
self.logs: list[Log] = []
|
||||
self.__createLogDatabase()
|
||||
self.metadata.create_all(self.engine)
|
||||
self.AllPeerJobs = AllPeerJobs
|
||||
def __createLogDatabase(self):
|
||||
with self.loggerdb:
|
||||
loggerdbCursor = self.loggerdb.cursor()
|
||||
|
||||
existingTable = loggerdbCursor.execute("SELECT name from sqlite_master where type='table'").fetchall()
|
||||
existingTable = [t['name'] for t in existingTable]
|
||||
|
||||
if "JobLog" not in existingTable:
|
||||
loggerdbCursor.execute("CREATE TABLE JobLog (LogID VARCHAR NOT NULL, JobID NOT NULL, LogDate DATETIME DEFAULT (strftime('%Y-%m-%d %H:%M:%S','now', 'localtime')), Status VARCHAR NOT NULL, Message VARCHAR, PRIMARY KEY (LogID))")
|
||||
if self.loggerdb.in_transaction:
|
||||
self.loggerdb.commit()
|
||||
def log(self, JobID: str, Status: bool = True, Message: str = "") -> bool:
|
||||
try:
|
||||
with self.loggerdb:
|
||||
loggerdbCursor = self.loggerdb.cursor()
|
||||
loggerdbCursor.execute(f"INSERT INTO JobLog (LogID, JobID, Status, Message) VALUES (?, ?, ?, ?)",
|
||||
(str(uuid.uuid4()), JobID, Status, Message,))
|
||||
if self.loggerdb.in_transaction:
|
||||
self.loggerdb.commit()
|
||||
with self.engine.begin() as conn:
|
||||
conn.execute(
|
||||
self.jobLogTable.insert().values(
|
||||
{
|
||||
"LogID": str(uuid.uuid4()),
|
||||
"JobID": JobID,
|
||||
"Status": Status,
|
||||
"Message": Message
|
||||
}
|
||||
)
|
||||
)
|
||||
except Exception as e:
|
||||
print(f"[WGDashboard] Peer Job Log Error: {str(e)}")
|
||||
current_app.logger.error(f"Peer Job Log Error", e)
|
||||
return False
|
||||
return True
|
||||
|
||||
def getLogs(self, all: bool = False, configName = None) -> list[Log]:
|
||||
def getLogs(self, configName = None) -> list[Log]:
|
||||
logs: list[Log] = []
|
||||
try:
|
||||
allJobs = self.AllPeerJobs.getAllJobs(configName)
|
||||
allJobsID = ", ".join([f"'{x.JobID}'" for x in allJobs])
|
||||
with self.loggerdb:
|
||||
loggerdbCursor = self.loggerdb.cursor()
|
||||
table = loggerdbCursor.execute(f"SELECT * FROM JobLog WHERE JobID IN ({allJobsID}) ORDER BY LogDate DESC").fetchall()
|
||||
self.logs.clear()
|
||||
allJobsID = [x.JobID for x in allJobs]
|
||||
stmt = self.jobLogTable.select().where(self.jobLogTable.columns.JobID.in_(
|
||||
allJobsID
|
||||
))
|
||||
with self.engine.connect() as conn:
|
||||
table = conn.execute(stmt).fetchall()
|
||||
for l in table:
|
||||
logs.append(
|
||||
Log(l["LogID"], l["JobID"], l["LogDate"], l["Status"], l["Message"]))
|
||||
Log(l.LogID, l.JobID, l.LogDate.strftime("%Y-%m-%d %H:%M:%S"), l.Status, l.Message))
|
||||
except Exception as e:
|
||||
current_app.logger.error(f"Getting Peer Job Log Error", e)
|
||||
return logs
|
||||
return logs
|
||||
return logs
|
202
src/modules/PeerJobs.py
Normal file
202
src/modules/PeerJobs.py
Normal file
@@ -0,0 +1,202 @@
|
||||
"""
|
||||
Peer Jobs
|
||||
"""
|
||||
from .ConnectionString import ConnectionString
|
||||
from .PeerJob import PeerJob
|
||||
from .PeerJobLogger import PeerJobLogger
|
||||
import sqlalchemy as db
|
||||
from datetime import datetime
|
||||
from flask import current_app
|
||||
|
||||
class PeerJobs:
|
||||
def __init__(self, DashboardConfig, WireguardConfigurations):
|
||||
self.Jobs: list[PeerJob] = []
|
||||
self.engine = db.create_engine(ConnectionString('wgdashboard_job'))
|
||||
self.metadata = db.MetaData()
|
||||
self.peerJobTable = db.Table('PeerJobs', self.metadata,
|
||||
db.Column('JobID', db.String(255), nullable=False, primary_key=True),
|
||||
db.Column('Configuration', db.String(255), nullable=False),
|
||||
db.Column('Peer', db.String(255), nullable=False),
|
||||
db.Column('Field', db.String(255), nullable=False),
|
||||
db.Column('Operator', db.String(255), nullable=False),
|
||||
db.Column('Value', db.String(255), nullable=False),
|
||||
db.Column('CreationDate', (db.DATETIME if DashboardConfig.GetConfig("Database", "type")[1] == 'sqlite' else db.TIMESTAMP), nullable=False),
|
||||
db.Column('ExpireDate', (db.DATETIME if DashboardConfig.GetConfig("Database", "type")[1] == 'sqlite' else db.TIMESTAMP)),
|
||||
db.Column('Action', db.String(255), nullable=False),
|
||||
)
|
||||
self.metadata.create_all(self.engine)
|
||||
self.__getJobs()
|
||||
self.JobLogger: PeerJobLogger = PeerJobLogger(self, DashboardConfig)
|
||||
self.WireguardConfigurations = WireguardConfigurations
|
||||
|
||||
def __getJobs(self):
|
||||
self.Jobs.clear()
|
||||
with self.engine.connect() as conn:
|
||||
jobs = conn.execute(self.peerJobTable.select().where(
|
||||
self.peerJobTable.columns.ExpireDate.is_(None)
|
||||
)).mappings().fetchall()
|
||||
for job in jobs:
|
||||
self.Jobs.append(PeerJob(
|
||||
job['JobID'], job['Configuration'], job['Peer'], job['Field'], job['Operator'], job['Value'],
|
||||
job['CreationDate'], job['ExpireDate'], job['Action']))
|
||||
|
||||
def getAllJobs(self, configuration: str = None):
|
||||
if configuration is not None:
|
||||
with self.engine.connect() as conn:
|
||||
jobs = conn.execute(self.peerJobTable.select().where(
|
||||
self.peerJobTable.columns.Configuration == configuration
|
||||
)).mappings().fetchall()
|
||||
j = []
|
||||
for job in jobs:
|
||||
j.append(PeerJob(
|
||||
job['JobID'], job['Configuration'], job['Peer'], job['Field'], job['Operator'], job['Value'],
|
||||
job['CreationDate'], job['ExpireDate'], job['Action']))
|
||||
return j
|
||||
return []
|
||||
|
||||
def toJson(self):
|
||||
return [x.toJson() for x in self.Jobs]
|
||||
|
||||
def searchJob(self, Configuration: str, Peer: str):
|
||||
return list(filter(lambda x: x.Configuration == Configuration and x.Peer == Peer, self.Jobs))
|
||||
|
||||
def searchJobById(self, JobID):
|
||||
return list(filter(lambda x: x.JobID == JobID, self.Jobs))
|
||||
|
||||
def saveJob(self, Job: PeerJob) -> tuple[bool, list] | tuple[bool, str]:
|
||||
import traceback
|
||||
try:
|
||||
with self.engine.begin() as conn:
|
||||
currentJob = self.searchJobById(Job.JobID)
|
||||
if len(currentJob) == 0:
|
||||
conn.execute(
|
||||
self.peerJobTable.insert().values(
|
||||
{
|
||||
"JobID": Job.JobID,
|
||||
"Configuration": Job.Configuration,
|
||||
"Peer": Job.Peer,
|
||||
"Field": Job.Field,
|
||||
"Operator": Job.Operator,
|
||||
"Value": Job.Value,
|
||||
"CreationDate": datetime.now(),
|
||||
"ExpireDate": None,
|
||||
"Action": Job.Action
|
||||
}
|
||||
)
|
||||
)
|
||||
self.JobLogger.log(Job.JobID, Message=f"Job is created if {Job.Field} {Job.Operator} {Job.Value} then {Job.Action}")
|
||||
else:
|
||||
conn.execute(
|
||||
self.peerJobTable.update().values({
|
||||
"Field": Job.Field,
|
||||
"Operator": Job.Operator,
|
||||
"Value": Job.Value,
|
||||
"Action": Job.Action
|
||||
}).where(self.peerJobTable.columns.JobID == Job.JobID)
|
||||
)
|
||||
self.JobLogger.log(Job.JobID, Message=f"Job is updated from if {currentJob[0].Field} {currentJob[0].Operator} {currentJob[0].Value} then {currentJob[0].Action}; to if {Job.Field} {Job.Operator} {Job.Value} then {Job.Action}")
|
||||
self.__getJobs()
|
||||
self.WireguardConfigurations.get(Job.Configuration).searchPeer(Job.Peer)[1].getJobs()
|
||||
return True, list(
|
||||
filter(lambda x: x.Configuration == Job.Configuration and x.Peer == Job.Peer and x.JobID == Job.JobID,
|
||||
self.Jobs))
|
||||
except Exception as e:
|
||||
traceback.print_exc()
|
||||
return False, str(e)
|
||||
|
||||
def deleteJob(self, Job: PeerJob) -> tuple[bool, None] | tuple[bool, str]:
|
||||
try:
|
||||
if len(self.searchJobById(Job.JobID)) == 0:
|
||||
return False, "Job does not exist"
|
||||
with self.engine.begin() as conn:
|
||||
conn.execute(
|
||||
self.peerJobTable.update().values(
|
||||
{
|
||||
"ExpireDate": datetime.now()
|
||||
}
|
||||
).where(self.peerJobTable.columns.JobID == Job.JobID)
|
||||
)
|
||||
self.JobLogger.log(Job.JobID, Message=f"Job is removed due to being deleted or finshed.")
|
||||
self.__getJobs()
|
||||
self.WireguardConfigurations.get(Job.Configuration).searchPeer(Job.Peer)[1].getJobs()
|
||||
return True, None
|
||||
except Exception as e:
|
||||
return False, str(e)
|
||||
|
||||
def updateJobConfigurationName(self, ConfigurationName: str, NewConfigurationName: str) -> tuple[bool, str] | tuple[bool, None]:
|
||||
try:
|
||||
with self.engine.begin() as conn:
|
||||
conn.execute(
|
||||
self.peerJobTable.update().values({
|
||||
"Configuration": NewConfigurationName
|
||||
}).where(self.peerJobTable.columns.Configuration == ConfigurationName)
|
||||
)
|
||||
self.__getJobs()
|
||||
return True, None
|
||||
except Exception as e:
|
||||
return False, str(e)
|
||||
|
||||
def getPeerJobLogs(self, configurationName):
|
||||
return self.JobLogger.getLogs(configurationName)
|
||||
|
||||
|
||||
def runJob(self):
|
||||
current_app.logger.info("Running scheduled jobs")
|
||||
needToDelete = []
|
||||
self.__getJobs()
|
||||
for job in self.Jobs:
|
||||
c = self.WireguardConfigurations.get(job.Configuration)
|
||||
if c is not None:
|
||||
f, fp = c.searchPeer(job.Peer)
|
||||
if f:
|
||||
if job.Field in ["total_receive", "total_sent", "total_data"]:
|
||||
s = job.Field.split("_")[1]
|
||||
x: float = getattr(fp, f"total_{s}") + getattr(fp, f"cumu_{s}")
|
||||
y: float = float(job.Value)
|
||||
else:
|
||||
x: datetime = datetime.now()
|
||||
y: datetime = datetime.strptime(job.Value, "%Y-%m-%d %H:%M:%S")
|
||||
runAction: bool = self.__runJob_Compare(x, y, job.Operator)
|
||||
if runAction:
|
||||
s = False
|
||||
if job.Action == "restrict":
|
||||
s, msg = c.restrictPeers([fp.id])
|
||||
elif job.Action == "delete":
|
||||
s, msg = c.deletePeers([fp.id])
|
||||
elif job.Action == "reset_total_data_usage":
|
||||
s = fp.resetDataUsage("total")
|
||||
c.restrictPeers([fp.id])
|
||||
c.allowAccessPeers([fp.id])
|
||||
if s is True:
|
||||
self.JobLogger.log(job.JobID, s,
|
||||
f"Peer {fp.id} from {c.Name} is successfully {job.Action}ed."
|
||||
)
|
||||
current_app.logger.info(f"Peer {fp.id} from {c.Name} is successfully {job.Action}ed.")
|
||||
needToDelete.append(job)
|
||||
else:
|
||||
current_app.logger.info(f"Peer {fp.id} from {c.Name} is failed {job.Action}ed.")
|
||||
self.JobLogger.log(job.JobID, s,
|
||||
f"Peer {fp.id} from {c.Name} failed {job.Action}ed."
|
||||
)
|
||||
else:
|
||||
current_app.logger.warning(f"Somehow can't find this peer {job.Peer} from {c.Name} failed {job.Action}ed.")
|
||||
self.JobLogger.log(job.JobID, False,
|
||||
f"Somehow can't find this peer {job.Peer} from {c.Name} failed {job.Action}ed."
|
||||
)
|
||||
else:
|
||||
current_app.logger.warning(f"Somehow can't find this peer {job.Peer} from {job.Configuration} failed {job.Action}ed.")
|
||||
self.JobLogger.log(job.JobID, False,
|
||||
f"Somehow can't find this peer {job.Peer} from {job.Configuration} failed {job.Action}ed."
|
||||
)
|
||||
for j in needToDelete:
|
||||
self.deleteJob(j)
|
||||
|
||||
def __runJob_Compare(self, x: float | datetime, y: float | datetime, operator: str):
|
||||
if operator == "eq":
|
||||
return x == y
|
||||
if operator == "neq":
|
||||
return x != y
|
||||
if operator == "lgt":
|
||||
return x > y
|
||||
if operator == "lst":
|
||||
return x < y
|
22
src/modules/PeerShareLink.py
Normal file
22
src/modules/PeerShareLink.py
Normal file
@@ -0,0 +1,22 @@
|
||||
from datetime import datetime
|
||||
"""
|
||||
Peer Share Link
|
||||
"""
|
||||
class PeerShareLink:
|
||||
def __init__(self, ShareID:str, Configuration: str, Peer: str, ExpireDate: datetime, SharedDate: datetime):
|
||||
self.ShareID = ShareID
|
||||
self.Peer = Peer
|
||||
self.Configuration = Configuration
|
||||
self.SharedDate = SharedDate
|
||||
self.ExpireDate = ExpireDate
|
||||
if not self.ExpireDate:
|
||||
self.ExpireDate = datetime.strptime("2199-12-31","%Y-%m-%d")
|
||||
|
||||
def toJson(self):
|
||||
return {
|
||||
"ShareID": self.ShareID,
|
||||
"Peer": self.Peer,
|
||||
"Configuration": self.Configuration,
|
||||
"ExpireDate": self.ExpireDate.strftime("%Y-%m-%d %H:%M:%S"),
|
||||
"SharedDate": self.SharedDate.strftime("%Y-%m-%d %H:%M:%S"),
|
||||
}
|
89
src/modules/PeerShareLinks.py
Normal file
89
src/modules/PeerShareLinks.py
Normal file
@@ -0,0 +1,89 @@
|
||||
from .ConnectionString import ConnectionString
|
||||
from .PeerShareLink import PeerShareLink
|
||||
import sqlalchemy as db
|
||||
from datetime import datetime
|
||||
import uuid
|
||||
|
||||
"""
|
||||
Peer Share Links
|
||||
"""
|
||||
class PeerShareLinks:
|
||||
def __init__(self, DashboardConfig, WireguardConfigurations):
|
||||
self.Links: list[PeerShareLink] = []
|
||||
self.engine = db.create_engine(ConnectionString("wgdashboard"))
|
||||
self.metadata = db.MetaData()
|
||||
self.peerShareLinksTable = db.Table(
|
||||
'PeerShareLinks', self.metadata,
|
||||
db.Column('ShareID', db.String(255), nullable=False, primary_key=True),
|
||||
db.Column('Configuration', db.String(255), nullable=False),
|
||||
db.Column('Peer', db.String(255), nullable=False),
|
||||
db.Column('ExpireDate', (db.DATETIME if DashboardConfig.GetConfig("Database", "type")[1] == 'sqlite' else db.TIMESTAMP)),
|
||||
db.Column('SharedDate', (db.DATETIME if DashboardConfig.GetConfig("Database", "type")[1] == 'sqlite' else db.TIMESTAMP),
|
||||
server_default=db.func.now()),
|
||||
)
|
||||
self.metadata.create_all(self.engine)
|
||||
self.__getSharedLinks()
|
||||
self.wireguardConfigurations = WireguardConfigurations
|
||||
def __getSharedLinks(self):
|
||||
self.Links.clear()
|
||||
with self.engine.connect() as conn:
|
||||
allLinks = conn.execute(
|
||||
self.peerShareLinksTable.select().where(
|
||||
db.or_(self.peerShareLinksTable.columns.ExpireDate.is_(None), self.peerShareLinksTable.columns.ExpireDate > datetime.now())
|
||||
)
|
||||
).mappings().fetchall()
|
||||
for link in allLinks:
|
||||
self.Links.append(PeerShareLink(**link))
|
||||
|
||||
|
||||
|
||||
def getLink(self, Configuration: str, Peer: str) -> list[PeerShareLink]:
|
||||
self.__getSharedLinks()
|
||||
return list(filter(lambda x : x.Configuration == Configuration and x.Peer == Peer, self.Links))
|
||||
|
||||
def getLinkByID(self, ShareID: str) -> list[PeerShareLink]:
|
||||
self.__getSharedLinks()
|
||||
return list(filter(lambda x : x.ShareID == ShareID, self.Links))
|
||||
|
||||
def addLink(self, Configuration: str, Peer: str, ExpireDate: datetime = None) -> tuple[bool, str]:
|
||||
try:
|
||||
newShareID = str(uuid.uuid4())
|
||||
with self.engine.begin() as conn:
|
||||
if len(self.getLink(Configuration, Peer)) > 0:
|
||||
conn.execute(
|
||||
self.peerShareLinksTable.update().values(
|
||||
{
|
||||
"ExpireDate": datetime.now()
|
||||
}
|
||||
).where(db.and_(self.peerShareLinksTable.columns.Configuration == Configuration, self.peerShareLinksTable.columns.Peer == Peer))
|
||||
)
|
||||
|
||||
conn.execute(
|
||||
self.peerShareLinksTable.insert().values(
|
||||
{
|
||||
"ShareID": newShareID,
|
||||
"Configuration": Configuration,
|
||||
"Peer": Peer,
|
||||
"ExpireDate": ExpireDate
|
||||
}
|
||||
)
|
||||
)
|
||||
self.__getSharedLinks()
|
||||
self.wireguardConfigurations.get(Configuration).searchPeer(Peer)[1].getShareLink()
|
||||
except Exception as e:
|
||||
return False, str(e)
|
||||
return True, newShareID
|
||||
|
||||
def updateLinkExpireDate(self, ShareID, ExpireDate: datetime = None) -> tuple[bool, str]:
|
||||
with self.engine.begin() as conn:
|
||||
updated = conn.execute(
|
||||
self.peerShareLinksTable.update().values(
|
||||
{
|
||||
"ExpireDate": ExpireDate
|
||||
}
|
||||
).returning(self.peerShareLinksTable.c.Configuration, self.peerShareLinksTable.c.Peer)
|
||||
.where(self.peerShareLinksTable.columns.ShareID == ShareID)
|
||||
).mappings().fetchone()
|
||||
self.__getSharedLinks()
|
||||
self.wireguardConfigurations.get(updated.Configuration).searchPeer(updated.Peer)[1].getShareLink()
|
||||
return True, ""
|
@@ -1,4 +1,6 @@
|
||||
import psutil
|
||||
import shutil, subprocess, time, threading, psutil
|
||||
from flask import current_app
|
||||
|
||||
class SystemStatus:
|
||||
def __init__(self):
|
||||
self.CPU = CPU()
|
||||
@@ -8,6 +10,17 @@ class SystemStatus:
|
||||
self.NetworkInterfaces = NetworkInterfaces()
|
||||
self.Processes = Processes()
|
||||
def toJson(self):
|
||||
process = [
|
||||
threading.Thread(target=self.CPU.getCPUPercent),
|
||||
threading.Thread(target=self.CPU.getPerCPUPercent),
|
||||
threading.Thread(target=self.NetworkInterfaces.getData)
|
||||
]
|
||||
for p in process:
|
||||
p.start()
|
||||
for p in process:
|
||||
p.join()
|
||||
|
||||
|
||||
return {
|
||||
"CPU": self.CPU,
|
||||
"Memory": {
|
||||
@@ -16,6 +29,7 @@ class SystemStatus:
|
||||
},
|
||||
"Disks": self.Disks,
|
||||
"NetworkInterfaces": self.NetworkInterfaces,
|
||||
"NetworkInterfacesPriority": self.NetworkInterfaces.getInterfacePriorities(),
|
||||
"Processes": self.Processes
|
||||
}
|
||||
|
||||
@@ -24,14 +38,20 @@ class CPU:
|
||||
def __init__(self):
|
||||
self.cpu_percent: float = 0
|
||||
self.cpu_percent_per_cpu: list[float] = []
|
||||
def getData(self):
|
||||
|
||||
def getCPUPercent(self):
|
||||
try:
|
||||
self.cpu_percent_per_cpu = psutil.cpu_percent(interval=0.5, percpu=True)
|
||||
self.cpu_percent = psutil.cpu_percent(interval=0.5)
|
||||
self.cpu_percent = psutil.cpu_percent(interval=1)
|
||||
except Exception as e:
|
||||
pass
|
||||
current_app.logger.error("Get CPU Percent error", e)
|
||||
|
||||
def getPerCPUPercent(self):
|
||||
try:
|
||||
self.cpu_percent_per_cpu = psutil.cpu_percent(interval=1, percpu=True)
|
||||
except Exception as e:
|
||||
current_app.logger.error("Get Per CPU Percent error", e)
|
||||
|
||||
def toJson(self):
|
||||
self.getData()
|
||||
return self.__dict__
|
||||
|
||||
class Memory:
|
||||
@@ -44,13 +64,15 @@ class Memory:
|
||||
try:
|
||||
if self.__memoryType__ == "virtual":
|
||||
memory = psutil.virtual_memory()
|
||||
self.available = memory.available
|
||||
else:
|
||||
memory = psutil.swap_memory()
|
||||
self.available = memory.free
|
||||
self.total = memory.total
|
||||
self.available = memory.available
|
||||
|
||||
self.percent = memory.percent
|
||||
except Exception as e:
|
||||
pass
|
||||
current_app.logger.error("Get Memory percent error", e)
|
||||
def toJson(self):
|
||||
self.getData()
|
||||
return self.__dict__
|
||||
@@ -62,7 +84,7 @@ class Disks:
|
||||
try:
|
||||
self.disks = list(map(lambda x : Disk(x.mountpoint), psutil.disk_partitions()))
|
||||
except Exception as e:
|
||||
pass
|
||||
current_app.logger.error("Get Disk percent error", e)
|
||||
def toJson(self):
|
||||
self.getData()
|
||||
return self.disks
|
||||
@@ -82,7 +104,7 @@ class Disk:
|
||||
self.used = disk.used
|
||||
self.percent = disk.percent
|
||||
except Exception as e:
|
||||
pass
|
||||
current_app.logger.error("Get Disk percent error", e)
|
||||
def toJson(self):
|
||||
self.getData()
|
||||
return self.__dict__
|
||||
@@ -90,15 +112,38 @@ class Disk:
|
||||
class NetworkInterfaces:
|
||||
def __init__(self):
|
||||
self.interfaces = {}
|
||||
|
||||
def getInterfacePriorities(self):
|
||||
if shutil.which("ip"):
|
||||
result = subprocess.check_output(["ip", "route", "show"]).decode()
|
||||
priorities = {}
|
||||
for line in result.splitlines():
|
||||
if "metric" in line and "dev" in line:
|
||||
parts = line.split()
|
||||
dev = parts[parts.index("dev")+1]
|
||||
metric = int(parts[parts.index("metric")+1])
|
||||
if dev not in priorities:
|
||||
priorities[dev] = metric
|
||||
return priorities
|
||||
return {}
|
||||
|
||||
def getData(self):
|
||||
self.interfaces.clear()
|
||||
try:
|
||||
network = psutil.net_io_counters(pernic=True, nowrap=True)
|
||||
for i in network.keys():
|
||||
self.interfaces[i] = network[i]._asdict()
|
||||
time.sleep(1)
|
||||
network = psutil.net_io_counters(pernic=True, nowrap=True)
|
||||
for i in network.keys():
|
||||
self.interfaces[i]['realtime'] = {
|
||||
'sent': round((network[i].bytes_sent - self.interfaces[i]['bytes_sent']) / 1024 / 1024, 4),
|
||||
'recv': round((network[i].bytes_recv - self.interfaces[i]['bytes_recv']) / 1024 / 1024, 4)
|
||||
}
|
||||
except Exception as e:
|
||||
pass
|
||||
current_app.logger.error("Get network error", e)
|
||||
|
||||
def toJson(self):
|
||||
self.getData()
|
||||
return self.interfaces
|
||||
|
||||
class Process:
|
||||
@@ -126,7 +171,8 @@ class Processes:
|
||||
key=lambda x : x.percent, reverse=True)[:20]
|
||||
break
|
||||
except Exception as e:
|
||||
break
|
||||
current_app.logger.error("Get processes error", e)
|
||||
|
||||
def toJson(self):
|
||||
self.getData()
|
||||
return {
|
||||
|
@@ -59,6 +59,15 @@ def ValidateDNSAddress(addresses) -> tuple[bool, str]:
|
||||
return False, f"{address} does not appear to be an valid DNS address"
|
||||
return True, ""
|
||||
|
||||
def ValidateEndpointAllowedIPs(IPs) -> tuple[bool, str] | tuple[bool, None]:
|
||||
ips = IPs.replace(" ", "").split(",")
|
||||
for ip in ips:
|
||||
try:
|
||||
ipaddress.ip_network(ip, strict=False)
|
||||
except ValueError as e:
|
||||
return False, str(e)
|
||||
return True, None
|
||||
|
||||
def GenerateWireguardPublicKey(privateKey: str) -> tuple[bool, str] | tuple[bool, None]:
|
||||
try:
|
||||
publicKey = subprocess.check_output(f"wg pubkey", input=privateKey.encode(), shell=True,
|
||||
@@ -73,4 +82,23 @@ def GenerateWireguardPrivateKey() -> tuple[bool, str] | tuple[bool, None]:
|
||||
stderr=subprocess.STDOUT)
|
||||
return True, publicKey.decode().strip('\n')
|
||||
except subprocess.CalledProcessError:
|
||||
return False, None
|
||||
return False, None
|
||||
|
||||
def ValidatePasswordStrength(password: str) -> tuple[bool, str] | tuple[bool, None]:
|
||||
# Rules:
|
||||
# - Must be over 8 characters & numbers
|
||||
# - Must contain at least 1 Uppercase & Lowercase letters
|
||||
# - Must contain at least 1 Numbers (0-9)
|
||||
# - Must contain at least 1 special characters from $&+,:;=?@#|'<>.-^*()%!~_-
|
||||
if len(password) < 8:
|
||||
return False, "Password must be 8 characters or more"
|
||||
if not re.search(r'[a-z]', password):
|
||||
return False, "Password must contain at least 1 lowercase character"
|
||||
if not re.search(r'[A-Z]', password):
|
||||
return False, "Password must contain at least 1 uppercase character"
|
||||
if not re.search(r'\d', password):
|
||||
return False, "Password must contain at least 1 number"
|
||||
if not re.search(r'[$&+,:;=?@#|\'<>.\-^*()%!~_-]', password):
|
||||
return False, "Password must contain at least 1 special character from $&+,:;=?@#|'<>.-^*()%!~_-"
|
||||
|
||||
return True, None
|
1229
src/modules/WireguardConfiguration.py
Normal file
1229
src/modules/WireguardConfiguration.py
Normal file
File diff suppressed because it is too large
Load Diff
21
src/modules/WireguardConfigurationInfo.py
Normal file
21
src/modules/WireguardConfigurationInfo.py
Normal file
@@ -0,0 +1,21 @@
|
||||
from pydantic import BaseModel
|
||||
|
||||
class OverridePeerSettingsClass(BaseModel):
|
||||
DNS: str = ''
|
||||
EndpointAllowedIPs: str = ''
|
||||
MTU: str | int = ''
|
||||
PersistentKeepalive: int | str = ''
|
||||
PeerRemoteEndpoint: str = ''
|
||||
ListenPort: int | str = ''
|
||||
|
||||
class PeerGroupsClass(BaseModel):
|
||||
GroupName: str = ''
|
||||
Description: str = ''
|
||||
BackgroundColor: str = ''
|
||||
Icon: str = ''
|
||||
Peers: list[str] = []
|
||||
|
||||
class WireguardConfigurationInfo(BaseModel):
|
||||
Description: str = ''
|
||||
OverridePeerSettings: OverridePeerSettingsClass = OverridePeerSettingsClass(**{})
|
||||
PeerGroups: dict[str, PeerGroupsClass] = {}
|
@@ -7,4 +7,11 @@ flask-cors
|
||||
icmplib
|
||||
gunicorn
|
||||
requests
|
||||
tcconfig
|
||||
tcconfig
|
||||
sqlalchemy
|
||||
sqlalchemy_utils
|
||||
psycopg
|
||||
PyMySQL
|
||||
tzlocal
|
||||
python-jose
|
||||
pydantic
|
Binary file not shown.
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -1 +0,0 @@
|
||||
import{_ as r,c as i,d as o,w as e,k as l,a as t,j as _,i as a,l as d,S as u}from"./index-DZliHkQD.js";const m={name:"configuration"},p={class:"mt-md-5 mt-3 text-body"};function f(k,x,h,w,$,v){const n=l("RouterView");return t(),i("div",p,[o(n,null,{default:e(({Component:s,route:c})=>[o(_,{name:"fade2",mode:"out-in"},{default:e(()=>[(t(),a(u,null,{default:e(()=>[(t(),a(d(s),{key:c.path}))]),_:2},1024))]),_:2},1024)]),_:1})])}const B=r(m,[["render",f]]);export{B as default};
|
@@ -1 +0,0 @@
|
||||
.fade-enter-active[data-v-dafd6275]{transition-delay:var(--7d032b58)!important}.progress-bar[data-v-c20f1a80]{width:0;transition:all 1s cubic-bezier(.42,0,.22,1)}.filter a[data-v-ea61b607]{text-decoration:none}
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -1 +0,0 @@
|
||||
.agentMessage[data-v-8635cf47]{white-space:break-spaces;max-width:80%;display:flex;flex-direction:column;word-wrap:break-word}.text-bg-secondary[data-v-8635cf47]{background-color:RGBA(var(--bs-secondary-rgb),.7)!important}.text-bg-primary[data-v-8635cf47]{background-color:RGBA(var(--bs-primary-rgb),.7)!important}.agentContainer[data-v-a76f42bd]{--agentHeight: 100vh;position:absolute;z-index:9999;top:0;left:100%;width:450px;box-shadow:0 10px 30px #0000004d;backdrop-filter:blur(8px);background:linear-gradient(var(--degree),#009dff52 var(--distance2),#F9464752 100%)}.agentContainer.enabled[data-v-a76f42bd]{height:calc(var(--agentHeight) - 1rem)}@media screen and (max-width: 768px){.agentContainer[data-v-a76f42bd]{--agentHeight: 100vh !important;top:0;left:0;max-height:calc(var(--agentHeight) - 58px - 1rem);width:calc(100% - 1rem)}}.agentChatroomBody[data-v-a76f42bd]{flex:1 1 auto;overflow-y:auto;max-height:calc(var(--agentHeight) - 70px - 244px)}@media screen and (max-width: 768px){.navbar-container[data-v-58e71749]{position:absolute!important;z-index:1000;animation-duration:.4s;animation-fill-mode:both;display:none;animation-timing-function:cubic-bezier(.82,.58,.17,.9)}.navbar-container.active[data-v-58e71749]{animation-direction:normal;display:block!important;animation-name:zoomInFade-58e71749}}.navbar-container[data-v-58e71749]{height:100vh;position:relative}@supports (height: 100dvh){@media screen and (max-width: 768px){.navbar-container[data-v-58e71749]{height:calc(100dvh - 58px)}}}@keyframes zoomInFade-58e71749{0%{opacity:0;transform:translateY(60px);filter:blur(3px)}to{opacity:1;transform:translateY(0);filter:blur(0px)}}.slideIn-enter-active[data-v-58e71749],.slideIn-leave-active[data-v-58e71749]{transition:all .3s cubic-bezier(.82,.58,.17,1)}.slideIn-enter-from[data-v-58e71749],.slideIn-leave-to[data-v-58e71749]{transform:translateY(30px);filter:blur(3px);opacity:0}main[data-v-0c6a5068]{height:100vh}@supports (height: 100dvh){@media screen and (max-width: 
768px){main[data-v-0c6a5068]{height:calc(100dvh - 58px)}}}
|
15
src/static/app/dist/assets/index-DFl-XeJT.css
vendored
15
src/static/app/dist/assets/index-DFl-XeJT.css
vendored
File diff suppressed because one or more lines are too long
44
src/static/app/dist/assets/index-DZliHkQD.js
vendored
44
src/static/app/dist/assets/index-DZliHkQD.js
vendored
File diff suppressed because one or more lines are too long
1
src/static/app/dist/assets/index-L60y6kc9.js
vendored
1
src/static/app/dist/assets/index-L60y6kc9.js
vendored
@@ -1 +0,0 @@
|
||||
function f(e){return e.includes(":")?6:e.includes(".")?4:0}function b(e){const i=f(e);if(!i)throw new Error(`Invalid IP address: ${e}`);let n=0n,o=0n;const r=Object.create(null);if(i===4)for(const s of e.split(".").map(BigInt).reverse())n+=s*2n**o,o+=8n;else{if(e.includes(".")&&(r.ipv4mapped=!0,e=e.split(":").map(t=>{if(t.includes(".")){const[c,l,d,a]=t.split(".").map($=>Number($).toString(16).padStart(2,"0"));return`${c}${l}:${d}${a}`}else return t}).join(":")),e.includes("%")){let t;[,e,t]=/(.+)%(.+)/.exec(e)||[],r.scopeid=t}const s=e.split(":"),u=s.indexOf("");if(u!==-1)for(;s.length<8;)s.splice(u,0,"");for(const t of s.map(c=>BigInt(parseInt(c||"0",16))).reverse())n+=t*2n**o,o+=16n}return r.number=n,r.version=i,r}const p={4:32,6:128},I=e=>e.includes("/")?f(e):0;function m(e){const i=I(e),n=Object.create(null);if(i)n.cidr=e,n.version=i;else{const a=f(e);if(a)n.cidr=`${e}/${p[a]}`,n.version=a;else throw new Error(`Network is not a CIDR or IP: ${e}`)}const[o,r]=n.cidr.split("/");if(!/^[0-9]+$/.test(r))throw new Error(`Network is not a CIDR or IP: ${e}`);n.prefix=r,n.single=r===String(p[n.version]);const{number:s,version:u}=b(o),t=p[u],c=s.toString(2).padStart(t,"0"),l=Number(t-r),d=c.substring(0,t-l);return n.start=BigInt(`0b${d}${"0".repeat(l)}`),n.end=BigInt(`0b${d}${"1".repeat(l)}`),n}export{m as p};
|
56
src/static/app/dist/assets/index-UG7VB4Xm.js
vendored
56
src/static/app/dist/assets/index-UG7VB4Xm.js
vendored
File diff suppressed because one or more lines are too long
18
src/static/app/dist/assets/index-myupzXki.js
vendored
18
src/static/app/dist/assets/index-myupzXki.js
vendored
File diff suppressed because one or more lines are too long
@@ -1 +0,0 @@
|
||||
import{_ as e,G as t,a as o,c as a,t as c}from"./index-DZliHkQD.js";const s={name:"localeText",props:{t:""},computed:{getLocaleText(){return t(this.t)}}};function n(r,p,l,_,i,x){return o(),a("span",null,c(this.getLocaleText),1)}const u=e(s,[["render",n]]);export{u as L};
|
@@ -1 +0,0 @@
|
||||
import{L as l}from"./localeText-DG9SnJT8.js";import{d as c}from"./dayjs.min-PaIL06iQ.js";import{_ as h,a as o,c as a,b as e,d as i,w as u,f as p,t as n,j as g,n as f,k as _}from"./index-DZliHkQD.js";const x={name:"message",methods:{dayjs:c,hide(){this.ct(),this.message.show=!1},show(){this.timeout=setTimeout(()=>{this.message.show=!1},5e3)},ct(){clearTimeout(this.timeout)}},components:{LocaleText:l},props:{message:Object},mounted(){this.show()},data(){return{dismiss:!1,timeout:null}}},v=["id"],b={key:0,class:"d-flex"},w={class:"fw-bold d-block",style:{"text-transform":"uppercase"}},y={class:"ms-auto"},k={key:1},T={class:"card-body d-flex align-items-center gap-3"};function M(j,s,C,L,t,m){const d=_("LocaleText");return o(),a("div",{onMouseenter:s[1]||(s[1]=r=>{t.dismiss=!0,this.ct()}),onMouseleave:s[2]||(s[2]=r=>{t.dismiss=!1,this.show()}),class:"card shadow rounded-3 position-relative message ms-auto",id:this.message.id},[e("div",{class:f([{"text-bg-danger":this.message.type==="danger","text-bg-success":this.message.type==="success","text-bg-warning":this.message.type==="warning"},"card-header pos"])},[i(g,{name:"zoom",mode:"out-in"},{default:u(()=>[t.dismiss?(o(),a("div",k,[e("small",{onClick:s[0]||(s[0]=r=>m.hide()),class:"d-block mx-auto w-100 text-center",style:{cursor:"pointer"}},[s[3]||(s[3]=e("i",{class:"bi bi-x-lg me-2"},null,-1)),i(d,{t:"Dismiss"})])])):(o(),a("div",b,[e("small",w,[i(d,{t:"FROM "}),p(" "+n(this.message.from),1)]),e("small",y,n(m.dayjs().format("hh:mm A")),1)]))]),_:1})],2),e("div",T,[e("div",null,n(this.message.content),1)])],40,v)}const z=h(x,[["render",M],["__scopeId","data-v-94c76b54"]]);export{z as M};
|
@@ -1 +0,0 @@
|
||||
.protocolBtnGroup a[data-v-b97242f3]{transition:all .2s ease-in-out}
|
File diff suppressed because one or more lines are too long
10
src/static/app/dist/assets/osmap-CmjRjQ0N.js
vendored
10
src/static/app/dist/assets/osmap-CmjRjQ0N.js
vendored
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -1 +0,0 @@
|
||||
import{_ as v,D as g,r as o,o as h,J as x,g as y,a as i,c as n,b as s,d as c,n as w,e as C,w as k,j as F}from"./index-DZliHkQD.js";import{L as T}from"./localeText-DG9SnJT8.js";import"./browser-CjSdxGTc.js";const M={class:"peerSettingContainer w-100 h-100 position-absolute top-0 start-0"},S={class:"container d-flex h-100 w-100"},D={class:"m-auto modal-dialog-centered dashboardModal justify-content-center"},P={class:"card rounded-3 shadow w-100"},j={class:"card-header bg-transparent d-flex align-items-center gap-2 border-0 p-4 pb-0"},B={class:"mb-0"},G={class:"card-body p-4 d-flex flex-column gap-3"},L={style:{height:"300px"},class:"d-flex"},N=["value"],V={key:0,class:"spinner-border m-auto",role:"status"},I={class:"d-flex"},W=["disabled"],$={key:0,class:"d-block"},q={key:1,class:"d-block",id:"check"},z={__name:"peerConfigurationFile",props:{selectedPeer:Object},emits:["close"],setup(u,{emit:p}){const m=p,f=u,r=g(),t=o(!1),l=o(""),a=o(!0);o({error:!1,message:void 0}),h(()=>{const d=x();y("/api/downloadPeer/"+d.params.id,{id:f.selectedPeer.id},e=>{e.status?(l.value=e.data.file,a.value=!1):this.dashboardStore.newMessage("Server",e.message,"danger")})});const b=async()=>{navigator.clipboard&&navigator.clipboard.writeText?navigator.clipboard.writeText(l.value).then(()=>{t.value=!0,setTimeout(()=>{t.value=!1},3e3)}).catch(()=>{r.newMessage("WGDashboard","Failed to copy","danger")}):(document.querySelector("#peerConfigurationFile").select(),document.execCommand("copy")?(t.value=!0,setTimeout(()=>{t.value=!1},3e3)):r.newMessage("WGDashboard","Failed to copy","danger"))};return(d,e)=>(i(),n("div",M,[s("div",S,[s("div",D,[s("div",P,[s("div",j,[s("h4",B,[c(T,{t:"Peer Configuration File"})]),s("button",{type:"button",class:"btn-close ms-auto",onClick:e[0]||(e[0]=_=>m("close"))})]),s("div",G,[s("div",L,[s("textarea",{style:{height:"300px"},class:w(["form-control w-100 rounded-3 animate__fadeIn animate__faster 
animate__animated",{"d-none":a.value}]),id:"peerConfigurationFile",value:l.value},null,10,N),a.value?(i(),n("div",V,e[2]||(e[2]=[s("span",{class:"visually-hidden"},"Loading...",-1)]))):C("",!0)]),s("div",I,[s("button",{onClick:e[1]||(e[1]=_=>b()),disabled:t.value||a.value,class:"ms-auto btn bg-primary-subtle border-primary-subtle text-primary-emphasis rounded-3 position-relative"},[c(F,{name:"slide-up",mode:"out-in"},{default:k(()=>[t.value?(i(),n("span",q,e[4]||(e[4]=[s("i",{class:"bi bi-check-circle-fill"},null,-1)]))):(i(),n("span",$,e[3]||(e[3]=[s("i",{class:"bi bi-clipboard-fill"},null,-1)])))]),_:1})],8,W)])])])])])]))}},R=v(z,[["__scopeId","data-v-b0ea2d46"]]);export{R as default};
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -1 +0,0 @@
|
||||
@media screen and (max-width: 768px){#qrcode[data-v-7c287bf3]{width:100%!important;height:auto!important;aspect-ratio:1/1}}
|
@@ -1 +0,0 @@
|
||||
import{b as i}from"./browser-CjSdxGTc.js";import{L as c}from"./localeText-DG9SnJT8.js";import{_ as l,D as p,g as _,k as m,a as n,c as r,b as e,d as u,n as h,e as f}from"./index-DZliHkQD.js";const g={name:"peerQRCode",components:{LocaleText:c},props:{selectedPeer:Object},setup(){return{dashboardStore:p()}},data(){return{loading:!0}},mounted(){_("/api/downloadPeer/"+this.$route.params.id,{id:this.selectedPeer.id},o=>{if(this.loading=!1,o.status){let t="";if(this.selectedPeer.configuration.Protocol==="awg"){let a={containers:[{awg:{isThirdPartyConfig:!0,last_config:o.data.file,port:this.selectedPeer.configuration.ListenPort,transport_proto:"udp"},container:"amnezia-awg"}],defaultContainer:"amnezia-awg",description:this.selectedPeer.name,hostName:this.dashboardStore.Configuration.Peers.remote_endpoint};t=btoa(JSON.stringify(a))}else t=o.data.file;i.toCanvas(document.querySelector("#qrcode"),t,a=>{a&&console.error(a)})}else this.dashboardStore.newMessage("Server",o.message,"danger")})}},b={class:"peerSettingContainer w-100 h-100 position-absolute top-0 start-0"},v={class:"container d-flex h-100 w-100"},C={class:"m-auto modal-dialog-centered dashboardModal justify-content-center"},w={class:"card rounded-3 shadow"},P={class:"card-header bg-transparent d-flex align-items-center gap-2 border-0 p-4 pb-0"},x={class:"mb-0"},S={class:"card-body p-4"},y={class:"d-flex"},L={key:0,class:"spinner-border m-auto",role:"status"};function k(o,t,a,N,s,$){const d=m("LocaleText");return n(),r("div",b,[e("div",v,[e("div",C,[e("div",w,[e("div",P,[e("h4",x,[u(d,{t:"QR Code"})]),e("button",{type:"button",class:"btn-close ms-auto",onClick:t[0]||(t[0]=Q=>this.$emit("close"))})]),e("div",S,[e("div",y,[e("canvas",{id:"qrcode",class:h(["rounded-3 shadow animate__animated animate__fadeIn animate__faster",{"d-none":s.loading}])},null,2),s.loading?(n(),r("div",L,t[1]||(t[1]=[e("span",{class:"visually-hidden"},"Loading...",-1)]))):f("",!0)])])])])])])}const 
q=l(g,[["render",k],["__scopeId","data-v-7c287bf3"]]);export{q as default};
|
@@ -1 +0,0 @@
|
||||
.searchPeersContainer[data-v-b741afe7]{width:100%}
|
@@ -1 +0,0 @@
|
||||
import{_ as u,q as m,G as p,r as b,W as f,a2 as h,o as g,a as v,i as y,w as _,b as e,m as x,y as w,d as S,j as B}from"./index-DZliHkQD.js";import{L as T}from"./localeText-DG9SnJT8.js";const C={class:"fixed-bottom w-100 bottom-0 z-2",style:{"z-index":"1"}},P={class:"container-fluid"},k={class:"row g-0"},L={class:"col-md-9 col-lg-10 d-flex justify-content-center py-2"},V={class:"rounded-3 p-2 border shadow searchPeersContainer bg-body-tertiary"},j={class:"d-flex gap-1 align-items-center px-2"},z=["placeholder"],D={__name:"peerSearchBar",emits:["close"],setup(G,{emit:n}){const l=m(()=>p("Search Peers..."));let t;const o=b(""),r=f(),i=()=>{t?(clearTimeout(t),t=setTimeout(()=>{r.searchString=o.value},300)):t=setTimeout(()=>{r.searchString=o.value},300)},d=n,c=h("searchBar");return g(()=>{c.value.focus()}),(M,s)=>(v(),y(B,{name:"slideUp",appear:"",type:"animation",style:{"animation-delay":"1s"}},{default:_(()=>[e("div",C,[e("div",P,[e("div",k,[s[5]||(s[5]=e("div",{class:"col-md-3 col-lg-2"},null,-1)),e("div",L,[e("div",V,[e("div",j,[s[4]||(s[4]=e("h6",{class:"mb-0 me-2"},[e("label",{for:"searchPeers"},[e("i",{class:"bi bi-search"})])],-1)),x(e("input",{ref:"searchBar",class:"flex-grow-1 form-control rounded-3 bg-secondary-subtle border-1 border-secondary-subtle",placeholder:l.value,id:"searchPeers",onKeyup:s[0]||(s[0]=a=>i()),"onUpdate:modelValue":s[1]||(s[1]=a=>o.value=a)},null,40,z),[[w,o.value]]),e("button",{onClick:s[2]||(s[2]=a=>d("close")),style:{"white-space":"nowrap"},class:"btn bg-secondary-subtle text-secondary-emphasis border-secondary-subtle rounded-3 d-flex align-items-center"},[e("span",null,[s[3]||(s[3]=e("i",{class:"bi bi-x-circle-fill me-2"},null,-1)),S(T,{t:"Done"})])])])])])])])])]),_:1}))}},W=u(D,[["__scopeId","data-v-b741afe7"]]);export{W as default};
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -1 +0,0 @@
|
||||
.card[data-v-8cfb4d4d]{border-color:var(--bs-border-color)!important}textarea[data-v-6e705c87]:focus,input[data-v-6e705c87]:focus{box-shadow:none;border-color:var(--bs-border-color)!important}textarea[data-v-6e705c87]{padding:var(--bs-card-spacer-y) var(--bs-card-spacer-x)}
|
1
src/static/app/dist/assets/ping-CrzXKpFV.js
vendored
1
src/static/app/dist/assets/ping-CrzXKpFV.js
vendored
File diff suppressed because one or more lines are too long
@@ -1 +0,0 @@
|
||||
import{L as r}from"./localeText-DG9SnJT8.js";import{a as t,c as n,f as i,i as s,e as a}from"./index-DZliHkQD.js";const d={key:0,class:"badge wireguardBg rounded-3 shadow"},c={key:1,class:"badge amneziawgBg rounded-3 shadow"},u={__name:"protocolBadge",props:{protocol:String,mini:!1},setup(e){return(m,o)=>e.protocol==="wg"?(t(),n("span",d,[o[0]||(o[0]=i(" WireGuard ")),e.mini?a("",!0):(t(),s(r,{key:0,t:"Configuration"}))])):e.protocol==="awg"?(t(),n("span",c,[o[1]||(o[1]=i(" AmneziaWG ")),e.mini?a("",!0):(t(),s(r,{key:0,t:"Configuration"}))])):a("",!0)}};export{u as _};
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -1 +0,0 @@
|
||||
@media screen and (max-width: 992px){.apiKey-card-body{&[data-v-a76253c8]{flex-direction:column!important;align-items:start!important}div.ms-auto[data-v-a76253c8]{margin-left:0!important}div[data-v-a76253c8]{width:100%;align-items:start!important}small[data-v-a76253c8]{margin-right:auto}}}.apiKey-move[data-v-100ee9f9],.apiKey-enter-active[data-v-100ee9f9],.apiKey-leave-active[data-v-100ee9f9]{transition:all .5s ease}.apiKey-enter-from[data-v-100ee9f9],.apiKey-leave-to[data-v-100ee9f9]{opacity:0;transform:translateY(30px) scale(.9)}.apiKey-leave-active[data-v-100ee9f9]{position:absolute;width:100%}.dropdown-menu[data-v-4e34593e]{width:100%}.list-group{&[data-v-4aa2aed9]:first-child{border-top-left-radius:var(--bs-border-radius-lg);border-top-right-radius:var(--bs-border-radius-lg)}&[data-v-4aa2aed9]:last-child{border-bottom-left-radius:var(--bs-border-radius-lg);border-bottom-right-radius:var(--bs-border-radius-lg)}}
|
1
src/static/app/dist/assets/setup-BmUSkunY.js
vendored
1
src/static/app/dist/assets/setup-BmUSkunY.js
vendored
@@ -1 +0,0 @@
|
||||
import{_ as u,D as m,A as p,c as r,b as e,d as o,f as c,t as h,e as f,m as l,y as d,a as i,k as w}from"./index-DZliHkQD.js";import{L as g}from"./localeText-DG9SnJT8.js";const b={name:"setup",components:{LocaleText:g},setup(){return{store:m()}},data(){return{setup:{username:"",newPassword:"",repeatNewPassword:"",enable_totp:!0},loading:!1,errorMessage:"",done:!1}},computed:{goodToSubmit(){return this.setup.username&&this.setup.newPassword.length>=8&&this.setup.repeatNewPassword.length>=8&&this.setup.newPassword===this.setup.repeatNewPassword}},methods:{submit(){this.loading=!0,p("/api/Welcome_Finish",this.setup,n=>{n.status?(this.done=!0,this.$router.push("/2FASetup")):(document.querySelectorAll("#createAccount input").forEach(s=>s.classList.add("is-invalid")),this.errorMessage=n.message,document.querySelector(".login-container-fluid").scrollTo({top:0,left:0,behavior:"smooth"})),this.loading=!1})}}},_=["data-bs-theme"],x={class:"m-auto text-body",style:{width:"500px"}},v={class:"dashboardLogo display-4"},y={class:"mb-5"},P={key:0,class:"alert alert-danger"},N={class:"d-flex flex-column gap-3"},k={id:"createAccount",class:"d-flex flex-column gap-2"},S={class:"form-group text-body"},T={for:"username",class:"mb-1 text-muted"},C={class:"form-group text-body"},L={for:"password",class:"mb-1 text-muted"},V={class:"form-group text-body"},A={for:"confirmPassword",class:"mb-1 text-muted"},$=["disabled"],q={key:0,class:"d-flex align-items-center w-100"},M={key:1,class:"d-flex align-items-center w-100"};function B(n,s,D,E,U,F){const t=w("LocaleText");return i(),r("div",{class:"container-fluid login-container-fluid d-flex main pt-5 overflow-scroll","data-bs-theme":this.store.Configuration.Server.dashboard_theme},[e("div",x,[e("span",v,[o(t,{t:"Nice to meet you!"})]),e("p",y,[o(t,{t:"Please fill in the following fields to finish setup"}),s[4]||(s[4]=c(" 😊"))]),e("div",null,[e("h3",null,[o(t,{t:"Create an 
account"})]),this.errorMessage?(i(),r("div",P,h(this.errorMessage),1)):f("",!0),e("div",N,[e("form",k,[e("div",S,[e("label",T,[e("small",null,[o(t,{t:"Enter an username you like"})])]),l(e("input",{type:"text",autocomplete:"username","onUpdate:modelValue":s[0]||(s[0]=a=>this.setup.username=a),class:"form-control",id:"username",name:"username",required:""},null,512),[[d,this.setup.username]])]),e("div",C,[e("label",L,[e("small",null,[o(t,{t:"Enter a password"}),e("code",null,[o(t,{t:"(At least 8 characters and make sure is strong enough!)"})])])]),l(e("input",{type:"password",autocomplete:"new-password","onUpdate:modelValue":s[1]||(s[1]=a=>this.setup.newPassword=a),class:"form-control",id:"password",name:"password",required:""},null,512),[[d,this.setup.newPassword]])]),e("div",V,[e("label",A,[e("small",null,[o(t,{t:"Confirm password"})])]),l(e("input",{type:"password",autocomplete:"confirm-new-password","onUpdate:modelValue":s[2]||(s[2]=a=>this.setup.repeatNewPassword=a),class:"form-control",id:"confirmPassword",name:"confirmPassword",required:""},null,512),[[d,this.setup.repeatNewPassword]])])]),e("button",{class:"btn btn-dark btn-lg mb-5 d-flex btn-brand shadow align-items-center",ref:"signInBtn",disabled:!this.goodToSubmit||this.loading||this.done,onClick:s[3]||(s[3]=a=>this.submit())},[!this.loading&&!this.done?(i(),r("span",q,[o(t,{t:"Next"}),s[5]||(s[5]=e("i",{class:"bi bi-chevron-right ms-auto"},null,-1))])):(i(),r("span",M,[o(t,{t:"Saving..."}),s[6]||(s[6]=e("span",{class:"spinner-border ms-auto spinner-border-sm",role:"status"},[e("span",{class:"visually-hidden"},"Loading...")],-1))]))],8,$)])])])],8,_)}const j=u(b,[["render",B]]);export{j as default};
|
1
src/static/app/dist/assets/share-CsPN7pXl.js
vendored
1
src/static/app/dist/assets/share-CsPN7pXl.js
vendored
@@ -1 +0,0 @@
|
||||
import{_,r,D as p,g as u,c as m,b as t,d as c,J as h,a as f,k as b}from"./index-DZliHkQD.js";import{b as v}from"./browser-CjSdxGTc.js";import{L as y}from"./localeText-DG9SnJT8.js";const g={name:"share",components:{LocaleText:y},async setup(){const o=h(),e=r(!1),i=p(),n=r(""),s=r(void 0),l=r(new Blob);await u("/api/getDashboardTheme",{},d=>{n.value=d.data});const a=o.query.ShareID;return a===void 0||a.length===0?(s.value=void 0,e.value=!0):await u("/api/sharePeer/get",{ShareID:a},d=>{d.status?(s.value=d.data,l.value=new Blob([s.value.file],{type:"text/plain"})):s.value=void 0,e.value=!0}),{store:i,theme:n,peerConfiguration:s,blob:l}},mounted(){this.peerConfiguration&&v.toCanvas(document.querySelector("#qrcode"),this.peerConfiguration.file,o=>{o&&console.error(o)})},methods:{download(){const o=new Blob([this.peerConfiguration.file],{type:"text/plain"}),e=URL.createObjectURL(o),i=`${this.peerConfiguration.fileName}.conf`,n=document.createElement("a");n.href=e,n.download=i,n.click()}},computed:{getBlob(){return URL.createObjectURL(this.blob)}}},w=["data-bs-theme"],x={class:"m-auto text-body",style:{width:"500px"}},C={key:0,class:"text-center position-relative",style:{}},U={class:"position-absolute w-100 h-100 top-0 start-0 d-flex animate__animated animate__fadeInUp",style:{"animation-delay":"0.1s"}},I={class:"m-auto"},L={key:1,class:"d-flex align-items-center flex-column gap-3"},k={class:"h1 dashboardLogo text-center animate__animated animate__fadeInUp"},B={id:"qrcode",class:"rounded-3 shadow animate__animated animate__fadeInUp mb-3",ref:"qrcode"},D={class:"text-muted animate__animated animate__fadeInUp mb-1",style:{"animation-delay":"0.2s"}},R=["download","href"];function q(o,e,i,n,s,l){const a=b("LocaleText");return f(),m("div",{class:"container-fluid login-container-fluid d-flex main pt-5 overflow-scroll","data-bs-theme":this.theme},[t("div",x,[this.peerConfiguration?(f(),m("div",L,[t("div",k,[e[1]||(e[1]=t("h6",null,"WGDashboard",-1)),c(a,{t:"Scan QR Code with the 
WireGuard App to add peer"})]),t("canvas",B,null,512),t("p",D,[c(a,{t:"or click the button below to download the "}),e[2]||(e[2]=t("samp",null,".conf",-1)),c(a,{t:" file"})]),t("a",{download:this.peerConfiguration.fileName+".conf",href:l.getBlob,class:"btn btn-lg bg-primary-subtle text-primary-emphasis border-1 border-primary-subtle animate__animated animate__fadeInUp shadow-sm",style:{"animation-delay":"0.25s"}},e[3]||(e[3]=[t("i",{class:"bi bi-download"},null,-1)]),8,R)])):(f(),m("div",C,[e[0]||(e[0]=t("div",{class:"animate__animated animate__fadeInUp"},[t("h1",{style:{"font-size":"20rem",filter:"blur(1rem)","animation-duration":"7s"},class:"animate__animated animate__flash animate__infinite"},[t("i",{class:"bi bi-file-binary"})])],-1)),t("div",U,[t("h3",I,[c(a,{t:"Oh no... This link is either expired or invalid."})])])]))])],8,w)}const N=_(g,[["render",q],["__scopeId","data-v-1b44aacd"]]);export{N as default};
|
File diff suppressed because one or more lines are too long
@@ -1 +0,0 @@
|
||||
.dot.inactive[data-v-ed7817c7]{background-color:#dc3545;box-shadow:0 0 0 .2rem #dc354545}.spin[data-v-ed7817c7]{animation:spin-ed7817c7 1s infinite cubic-bezier(.82,.58,.17,.9)}@keyframes spin-ed7817c7{0%{transform:rotate(0)}to{transform:rotate(360deg)}}@media screen and (max-width: 768px){.remoteServerContainer[data-v-ed7817c7]{flex-direction:column}.remoteServerContainer .button-group button[data-v-ed7817c7]{width:100%}}@media screen and (max-width: 768px){.login-box[data-v-80e20da4]{width:100%!important}.login-box div[data-v-80e20da4]{width:auto!important}}
|
@@ -1 +0,0 @@
|
||||
import{_ as b,p as m,r as p,q as v,a as t,c as r,d as g,w as x,s as n,n as f,b as l,t as d,e as C,j as w}from"./index-DZliHkQD.js";const y={class:"text-muted me-2"},_={class:"fw-bold"},k={__name:"cpuCore",props:{core_number:Number,percentage:Number,align:Boolean,square:Boolean},setup(e){m(c=>({"2ec4d3bc":o.value}));const u=e,s=p(!1),o=v(()=>u.square?"40px":"25px");return(c,a)=>(t(),r("div",{class:"flex-grow-1 square rounded-3 border position-relative p-2",onMouseenter:a[0]||(a[0]=i=>s.value=!0),onMouseleave:a[1]||(a[1]=i=>s.value=!1),style:n({"background-color":`rgb(13 110 253 / ${e.percentage*10}%)`})},[g(w,{name:"zoomReversed"},{default:x(()=>[s.value?(t(),r("div",{key:0,style:n([{"white-space":"nowrap"},{top:o.value}]),class:f(["floatingLabel z-3 border position-absolute d-block p-1 px-2 bg-body text-body rounded-3 border shadow d-flex",[e.align?"end-0":"start-0"]])},[l("small",y," Core #"+d(e.core_number+1),1),l("small",_,d(e.percentage)+"% ",1)],6)):C("",!0)]),_:1})],36))}},B=b(k,[["__scopeId","data-v-2ad535bb"]]);export{B as C};
|
@@ -1 +0,0 @@
|
||||
.title[data-v-ffe5ad8f]{height:18px;text-overflow:ellipsis;overflow:hidden;white-space:nowrap}.process-move[data-v-977dc46d],.process-enter-active[data-v-977dc46d],.process-leave-active[data-v-977dc46d]{transition:all .5s cubic-bezier(.42,0,.22,1)}.process-enter-from[data-v-977dc46d],.process-leave-to[data-v-977dc46d]{opacity:0;transform:scale(.9)}.process-leave-active[data-v-977dc46d]{position:absolute;width:100%}.progress-bar[data-v-977dc46d]{width:0;transition:all 1s cubic-bezier(.42,0,.22,1)}.fadeIn[data-v-977dc46d]{opacity:0;animation:fadeIn-977dc46d .5s forwards cubic-bezier(.42,0,.22,1)}@keyframes fadeIn-977dc46d{0%{opacity:0;transform:translateY(30px)}to{opacity:1;transform:translateY(0)}}
|
File diff suppressed because one or more lines are too long
1
src/static/app/dist/assets/totp-G9lM0GPx.js
vendored
1
src/static/app/dist/assets/totp-G9lM0GPx.js
vendored
@@ -1 +0,0 @@
|
||||
import{_ as h,D as m,g as p,A as f,c as b,b as t,d as i,t as _,m as v,y as g,i as d,w as r,k as c,a as n}from"./index-DZliHkQD.js";import{b as x}from"./browser-CjSdxGTc.js";import{L as y}from"./localeText-DG9SnJT8.js";const T={name:"totp",components:{LocaleText:y},async setup(){const s=m();let e="";return await p("/api/Welcome_GetTotpLink",{},a=>{a.status&&(e=a.data)}),{l:e,store:s}},mounted(){this.l&&x.toCanvas(document.getElementById("qrcode"),this.l,function(s){})},data(){return{totp:"",totpInvalidMessage:"",verified:!1}},methods:{validateTotp(){}},watch:{totp(s){const e=document.querySelector("#totp");e.classList.remove("is-invalid","is-valid"),s.length===6&&(console.log(s),/[0-9]{6}/.test(s)?f("/api/Welcome_VerifyTotpLink",{totp:s},a=>{a.status?(this.verified=!0,e.classList.add("is-valid"),this.$emit("verified")):(e.classList.add("is-invalid"),this.totpInvalidMessage="TOTP does not match.")}):(e.classList.add("is-invalid"),this.totpInvalidMessage="TOTP can only contain numbers"))}}},k=["data-bs-theme"],w={class:"m-auto text-body",style:{width:"500px"}},L={class:"d-flex flex-column"},M={class:"dashboardLogo display-4"},C={class:"mb-2"},P={class:"text-muted"},I={class:"p-3 bg-body-secondary rounded-3 border mb-3"},O={class:"text-muted mb-0"},B=["href"],$={style:{"line-break":"anywhere"}},A={for:"totp",class:"mb-2"},D={class:"text-muted"},S={class:"form-group mb-2"},q=["disabled"],E={class:"invalid-feedback"},F={class:"valid-feedback"},R={class:"d-flex gap-3 mt-5 flex-column"};function G(s,e,a,N,W,Q){const o=c("LocaleText"),l=c("RouterLink");return n(),b("div",{class:"container-fluid login-container-fluid d-flex main pt-5 overflow-scroll","data-bs-theme":this.store.Configuration.Server.dashboard_theme},[t("div",w,[t("div",L,[t("div",null,[t("h1",M,[i(o,{t:"Multi-Factor Authentication (MFA)"})]),t("p",C,[t("small",P,[i(o,{t:"1. 
Please scan the following QR Code to generate TOTP with your choice of authenticator"})])]),e[1]||(e[1]=t("canvas",{id:"qrcode",class:"rounded-3 mb-2"},null,-1)),t("div",I,[t("p",O,[t("small",null,[i(o,{t:"Or you can click the link below:"})])]),t("a",{href:this.l},[t("code",$,_(this.l),1)],8,B)]),t("label",A,[t("small",D,[i(o,{t:"2. Enter the TOTP generated by your authenticator to verify"})])]),t("div",S,[v(t("input",{class:"form-control text-center totp",id:"totp",maxlength:"6",type:"text",inputmode:"numeric",autocomplete:"one-time-code","onUpdate:modelValue":e[0]||(e[0]=u=>this.totp=u),disabled:this.verified},null,8,q),[[g,this.totp]]),t("div",E,[i(o,{t:this.totpInvalidMessage},null,8,["t"])]),t("div",F,[i(o,{t:"TOTP verified!"})])])]),e[4]||(e[4]=t("hr",null,null,-1)),t("div",R,[this.verified?(n(),d(l,{key:1,to:"/",class:"btn btn-dark btn-lg d-flex btn-brand shadow align-items-center flex-grow-1 rounded-3"},{default:r(()=>[i(o,{t:"Complete"}),e[3]||(e[3]=t("i",{class:"bi bi-chevron-right ms-auto"},null,-1))]),_:1})):(n(),d(l,{key:0,to:"/",class:"btn bg-secondary-subtle text-secondary-emphasis rounded-3 flex-grow-1 btn-lg border-1 border-secondary-subtle shadow d-flex"},{default:r(()=>[i(o,{t:"I don't need MFA"}),e[2]||(e[2]=t("i",{class:"bi bi-chevron-right ms-auto"},null,-1))]),_:1}))])])])],8,k)}const z=h(T,[["render",G]]);export{z as default};
|
@@ -1 +0,0 @@
|
||||
import{_ as h,W as g,g as b,c as o,b as t,d as n,m as y,y as f,C as x,w as r,j as c,a as l,f as v,F as u,h as m,n as k,s as T,t as i,k as _}from"./index-DZliHkQD.js";import{O as A}from"./osmap-CmjRjQ0N.js";import{L as w}from"./localeText-DG9SnJT8.js";const R={name:"traceroute",components:{LocaleText:w,OSMap:A},data(){return{tracing:!1,ipAddress:void 0,tracerouteResult:void 0}},setup(){return{store:g()}},methods:{execute(){this.ipAddress&&(this.tracing=!0,this.tracerouteResult=void 0,b("/api/traceroute/execute",{ipAddress:this.ipAddress},d=>{d.status?this.tracerouteResult=d.data:this.store.newMessage("Server",d.message,"danger"),this.tracing=!1}))}}},M={class:"mt-md-5 mt-3 text-body"},S={class:"container-md"},$={class:"mb-3 text-body"},C={class:"d-flex gap-2 mb-3 flex-column"},L={class:"flex-grow-1"},P={class:"mb-1 text-muted",for:"ipAddress"},O=["disabled"],V=["disabled"],B={key:0,class:"d-block"},I={key:1,class:"d-block"},N={class:"position-relative"},z={key:"pingPlaceholder"},D={key:1},E={key:"table",class:"w-100 mt-2"},F={class:"table table-sm rounded-3 w-100"},G={scope:"col"},H={scope:"col"},K={scope:"col"},W={scope:"col"},j={scope:"col"},U={scope:"col"},q={key:0},J={key:1};function Q(d,s,X,Y,Z,tt){const a=_("LocaleText"),p=_("OSMap");return l(),o("div",M,[t("div",S,[t("h3",$,[n(a,{t:"Traceroute"})]),t("div",C,[t("div",L,[t("label",P,[t("small",null,[n(a,{t:"Enter IP Address / Hostname"})])]),y(t("input",{disabled:this.tracing,id:"ipAddress",class:"form-control rounded-3","onUpdate:modelValue":s[0]||(s[0]=e=>this.ipAddress=e),onKeyup:s[1]||(s[1]=x(e=>this.execute(),["enter"])),type:"text"},null,40,O),[[f,this.ipAddress]])]),t("button",{class:"btn btn-primary rounded-3 position-relative flex-grow-1",disabled:this.tracing||!this.ipAddress,onClick:s[2]||(s[2]=e=>this.execute())},[n(c,{name:"slide"},{default:r(()=>[this.tracing?(l(),o("span",I,s[4]||(s[4]=[t("span",{class:"spinner-border 
spinner-border-sm","aria-hidden":"true"},null,-1),t("span",{class:"visually-hidden",role:"status"},"Loading...",-1)]))):(l(),o("span",B,s[3]||(s[3]=[t("i",{class:"bi bi-person-walking me-2"},null,-1),v("Trace! ")])))]),_:1})],8,V)]),t("div",N,[n(c,{name:"ping"},{default:r(()=>[this.tracerouteResult?(l(),o("div",D,[n(p,{d:this.tracerouteResult,type:"traceroute"},null,8,["d"]),t("div",E,[t("table",F,[t("thead",null,[t("tr",null,[t("th",G,[n(a,{t:"Hop"})]),t("th",H,[n(a,{t:"IP Address"})]),t("th",K,[n(a,{t:"Average RTT (ms)"})]),t("th",W,[n(a,{t:"Min RTT (ms)"})]),t("th",j,[n(a,{t:"Max RTT (ms)"})]),t("th",U,[n(a,{t:"Geolocation"})])])]),t("tbody",null,[(l(!0),o(u,null,m(this.tracerouteResult,(e,et)=>(l(),o("tr",null,[t("td",null,[t("small",null,i(e.hop),1)]),t("td",null,[t("small",null,[t("samp",null,i(e.ip),1)])]),t("td",null,[t("small",null,[t("samp",null,i(e.avg_rtt),1)])]),t("td",null,[t("small",null,[t("samp",null,i(e.min_rtt),1)])]),t("td",null,[t("small",null,[t("samp",null,i(e.max_rtt),1)])]),t("td",null,[e.geo.city&&e.geo.country?(l(),o("span",q,[t("small",null,i(e.geo.city)+", "+i(e.geo.country),1)])):(l(),o("span",J," - "))])]))),256))])])])])):(l(),o("div",z,[s[5]||(s[5]=t("div",{class:"pingPlaceholder bg-body-secondary rounded-3 mb-3",style:{height:"300px !important"}},null,-1)),(l(),o(u,null,m(5,e=>t("div",{class:k(["pingPlaceholder bg-body-secondary rounded-3 mb-3",{"animate__animated animate__flash animate__slower animate__infinite":this.tracing}]),style:T({"animation-delay":`${e*.05}s`})},null,6)),64))]))]),_:1})])])])}const lt=h(R,[["render",Q],["__scopeId","data-v-3e75b4d4"]]);export{lt as default};
|
@@ -1 +0,0 @@
|
||||
.pingPlaceholder[data-v-3e75b4d4]{width:100%;height:40px}.ping-move[data-v-3e75b4d4],.ping-enter-active[data-v-3e75b4d4],.ping-leave-active[data-v-3e75b4d4]{transition:all .4s cubic-bezier(.82,.58,.17,.9)}.ping-leave-active[data-v-3e75b4d4]{position:absolute;width:100%}.ping-enter-from[data-v-3e75b4d4],.ping-leave-to[data-v-3e75b4d4]{opacity:0;filter:blur(3px)}.ping-leave-active[data-v-3e75b4d4]{position:absolute}table th[data-v-3e75b4d4],table td[data-v-3e75b4d4]{padding:.5rem}.table[data-v-3e75b4d4]>:not(caption)>*>*{background-color:transparent!important}
|
File diff suppressed because one or more lines are too long
@@ -13,6 +13,7 @@
|
||||
</head>
|
||||
<body>
|
||||
<div id="app"></div>
|
||||
|
||||
<script type="module" src="./src/main.js"></script>
|
||||
</body>
|
||||
</html>
|
||||
|
5642
src/static/app/package-lock.json
generated
5642
src/static/app/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -1,42 +1,46 @@
|
||||
{
|
||||
"name": "app",
|
||||
"version": "4.2.3",
|
||||
"version": "4.3.0.1",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"module": "es2022",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
"build": "vite build",
|
||||
"build": "vite build --emptyOutDir",
|
||||
"buildcommitpush": "./build.sh",
|
||||
"build electron": "vite build && vite build --mode electron && cd ../../../../WGDashboard-Desktop && /opt/homebrew/bin/npm run \"electron dist\"",
|
||||
"build electron": "vite build --emptyOutDir && vite build --mode electron && cd ../../../../WGDashboard-Desktop && /opt/homebrew/bin/npm run \"electron dist\"",
|
||||
"preview": "vite preview"
|
||||
},
|
||||
"dependencies": {
|
||||
"@vue/language-server": "^2.1.10",
|
||||
"@vuepic/vue-datepicker": "^9.0.1",
|
||||
"@vueuse/core": "^10.9.0",
|
||||
"@vueuse/shared": "^10.9.0",
|
||||
"@volar/language-server": "2.4.23",
|
||||
"@vue/language-server": "3.0.8",
|
||||
"@vuepic/vue-datepicker": "^11.0.2",
|
||||
"@vueuse/core": "^13.5.0",
|
||||
"@vueuse/shared": "^13.5.0",
|
||||
"animate.css": "^4.1.1",
|
||||
"bootstrap": "^5.3.2",
|
||||
"bootstrap-icons": "^1.11.3",
|
||||
"cidr-tools": "^7.0.4",
|
||||
"cidr-tools": "^11.0.3",
|
||||
"css-color-converter": "^2.0.0",
|
||||
"dayjs": "^1.11.12",
|
||||
"electron-builder": "^24.13.3",
|
||||
"electron-builder": "^26.0.12",
|
||||
"fuse.js": "^7.0.0",
|
||||
"i": "^0.3.7",
|
||||
"is-cidr": "^5.0.3",
|
||||
"npm": "^10.5.0",
|
||||
"npm": "^11.6.0",
|
||||
"ol": "^10.2.1",
|
||||
"pinia": "^2.1.7",
|
||||
"pinia": "^3.0.3",
|
||||
"pinia-plugin-persistedstate": "^4.5.0",
|
||||
"qrcode": "^1.5.3",
|
||||
"qrcodejs": "^1.0.0",
|
||||
"simple-code-editor": "^2.0.9",
|
||||
"uuid": "^9.0.1",
|
||||
"vue": "^3.4.29",
|
||||
"uuid": "^11.1.0",
|
||||
"vue": "^3.5.22",
|
||||
"vue-chartjs": "^5.3.0",
|
||||
"vue-router": "^4.2.5"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@vitejs/plugin-vue": "^5.0.0",
|
||||
"vite": "^5.0.10"
|
||||
"@vitejs/plugin-vue": "^6.0.0",
|
||||
"vite": "^7.1.7"
|
||||
}
|
||||
}
|
||||
|
@@ -8,13 +8,13 @@
|
||||
"short_name": "WGDashboard",
|
||||
"screenshots": [
|
||||
{
|
||||
"src": "https://donaldzou.github.io/WGDashboard-Documentation/images/sign-in.png",
|
||||
"src": "https://wgdashboard-resources.tor1.cdn.digitaloceanspaces.com/Documentation%20Images/sign-in.png",
|
||||
"sizes": "2880x1826",
|
||||
"type": "image/png",
|
||||
"form_factor": "wide"
|
||||
},
|
||||
{
|
||||
"src": "https://donaldzou.github.io/WGDashboard-Documentation/images/sign-in.png",
|
||||
"src": "https://wgdashboard-resources.tor1.cdn.digitaloceanspaces.com/Documentation%20Images/index.png",
|
||||
"sizes": "2880x1826",
|
||||
"type": "image/png"
|
||||
}
|
||||
|
@@ -4,10 +4,20 @@ import {DashboardConfigurationStore} from "@/stores/DashboardConfigurationStore.
|
||||
import {computed, watch} from "vue";
|
||||
const store = DashboardConfigurationStore();
|
||||
import "@/utilities/wireguard.js"
|
||||
import {fetchGet} from "@/utilities/fetch.js";
|
||||
store.initCrossServerConfiguration();
|
||||
if (window.IS_WGDASHBOARD_DESKTOP){
|
||||
store.IsElectronApp = true;
|
||||
store.CrossServerConfiguration.Enable = true;
|
||||
if (store.ActiveServerConfiguration){
|
||||
fetchGet("/api/locale", {}, (res) => {
|
||||
store.Locale = res.data
|
||||
})
|
||||
}
|
||||
}else{
|
||||
fetchGet("/api/locale", {}, (res) => {
|
||||
store.Locale = res.data
|
||||
})
|
||||
}
|
||||
watch(store.CrossServerConfiguration, () => {
|
||||
store.syncCrossServerConfiguration()
|
||||
@@ -19,29 +29,31 @@ const route = useRoute()
|
||||
</script>
|
||||
|
||||
<template>
|
||||
<div style="z-index: 9999; height: 5px" class="position-absolute loadingBar top-0 start-0"></div>
|
||||
<nav class="navbar bg-dark sticky-top" data-bs-theme="dark" v-if="!route.meta.hideTopNav">
|
||||
<div class="container-fluid d-flex text-body align-items-center">
|
||||
<RouterLink to="/" class="navbar-brand mb-0 h1">
|
||||
<img src="/img/Logo-2-Rounded-512x512.png" alt="WGDashboard Logo" style="width: 32px">
|
||||
</RouterLink>
|
||||
<a role="button" class="navbarBtn text-body"
|
||||
@click="store.ShowNavBar = !store.ShowNavBar"
|
||||
style="line-height: 0; font-size: 2rem">
|
||||
<Transition name="fade2" mode="out-in">
|
||||
<i class="bi bi-list" v-if="!store.ShowNavBar"></i>
|
||||
<i class="bi bi-x-lg" v-else></i>
|
||||
<div class="h-100 bg-body" :data-bs-theme="store.Configuration?.Server.dashboard_theme">
|
||||
<div style="z-index: 9999; height: 5px" class="position-absolute loadingBar top-0 start-0"></div>
|
||||
<nav class="navbar bg-dark sticky-top" data-bs-theme="dark" v-if="!route.meta.hideTopNav">
|
||||
<div class="container-fluid d-flex text-body align-items-center">
|
||||
<RouterLink to="/" class="navbar-brand mb-0 h1">
|
||||
<img src="/img/Logo-2-Rounded-512x512.png" alt="WGDashboard Logo" style="width: 32px">
|
||||
</RouterLink>
|
||||
<a role="button" class="navbarBtn text-body"
|
||||
@click="store.ShowNavBar = !store.ShowNavBar"
|
||||
style="line-height: 0; font-size: 2rem">
|
||||
<Transition name="fade2" mode="out-in">
|
||||
<i class="bi bi-list" v-if="!store.ShowNavBar"></i>
|
||||
<i class="bi bi-x-lg" v-else></i>
|
||||
</Transition>
|
||||
</a>
|
||||
</div>
|
||||
</nav>
|
||||
<Suspense>
|
||||
<RouterView v-slot="{ Component }">
|
||||
<Transition name="app" mode="out-in" type="transition" appear>
|
||||
<Component :is="Component"></Component>
|
||||
</Transition>
|
||||
</a>
|
||||
</div>
|
||||
</nav>
|
||||
<Suspense>
|
||||
<RouterView v-slot="{ Component }">
|
||||
<Transition name="app" mode="out-in" type="transition" appear>
|
||||
<Component :is="Component"></Component>
|
||||
</Transition>
|
||||
</RouterView>
|
||||
</Suspense>
|
||||
</RouterView>
|
||||
</Suspense>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<style scoped>
|
||||
@@ -52,7 +64,8 @@ const route = useRoute()
|
||||
.app-enter-from,
|
||||
.app-leave-to{
|
||||
opacity: 0;
|
||||
transform: scale(1.1);
|
||||
transform: scale(1.05);
|
||||
filter: blur(8px);
|
||||
}
|
||||
@media screen and (min-width: 768px) {
|
||||
.navbar{
|
||||
|
@@ -0,0 +1,96 @@
|
||||
<script setup lang="ts">
|
||||
import {computed, ref} from "vue";
|
||||
import {DashboardClientAssignmentStore} from "@/stores/DashboardClientAssignmentStore.js";
|
||||
import LocaleText from "@/components/text/localeText.vue";
|
||||
|
||||
const props = defineProps(['configuration', 'peers', 'clientAssignedPeers', 'availablePeerSearchString'])
|
||||
const emits = defineEmits(['assign', 'unassign'])
|
||||
const assignmentStore = DashboardClientAssignmentStore()
|
||||
const available = computed(() => {
|
||||
if (props.clientAssignedPeers){
|
||||
if (Object.keys(props.clientAssignedPeers).includes(props.configuration)){
|
||||
return props.peers.filter(
|
||||
x => {
|
||||
return !props.clientAssignedPeers[props.configuration].map(
|
||||
x => x.id
|
||||
).includes(x.id) &&
|
||||
(!props.availablePeerSearchString ||
|
||||
(props.availablePeerSearchString &&
|
||||
(x.id.includes(props.availablePeerSearchString) || x.name.includes(props.availablePeerSearchString))))
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
return props.availablePeerSearchString ? props.peers.filter(
|
||||
x => x.id.includes(props.availablePeerSearchString) || x.name.includes(props.availablePeerSearchString)
|
||||
) : props.peers
|
||||
})
|
||||
const confirmDelete = ref(false)
|
||||
const collapse = ref(false)
|
||||
</script>
|
||||
|
||||
<template>
|
||||
<div class="card rounded-0 border-0">
|
||||
<div
|
||||
@click="collapse = !collapse"
|
||||
role="button"
|
||||
class="card-header rounded-0 sticky-top bg-body-secondary border-0 border-bottom text-white d-flex">
|
||||
<small><samp>{{ configuration }}</samp></small>
|
||||
<a role="button" class="ms-auto text-white" >
|
||||
<i class="bi bi-chevron-compact-down" v-if="collapse"></i>
|
||||
<i class="bi bi-chevron-compact-up" v-else></i>
|
||||
</a>
|
||||
</div>
|
||||
<div class="card-body p-0" v-if="!collapse">
|
||||
<div class="list-group list-group-flush" >
|
||||
<div
|
||||
class="list-group-item d-flex border-bottom list-group-item-action d-flex align-items-center gap-3"
|
||||
:key="peer.id"
|
||||
v-for="peer in available" >
|
||||
<div v-if="!confirmDelete">
|
||||
<small class="text-body">
|
||||
<RouterLink
|
||||
class="text-decoration-none"
|
||||
target="_blank"
|
||||
:to="'/configuration/' + configuration +'/peers?id=' + encodeURIComponent(peer.id)">
|
||||
<samp>{{ peer.id }}</samp>
|
||||
</RouterLink>
|
||||
</small><br>
|
||||
<small class="text-muted">
|
||||
{{ peer.name ? peer.name : 'Untitled Peer'}}
|
||||
</small>
|
||||
</div>
|
||||
<div v-else>
|
||||
<small class="text-body">
|
||||
<LocaleText t="Are you sure to remove this peer?"></LocaleText>
|
||||
</small><br>
|
||||
<small class="text-muted">
|
||||
<samp>{{ peer.id }}</samp>
|
||||
</small>
|
||||
</div>
|
||||
<template v-if="clientAssignedPeers">
|
||||
<button
|
||||
@click="emits('assign', peer.id)"
|
||||
:class="{disabled: assignmentStore.assigning}"
|
||||
class="btn bg-success-subtle text-success-emphasis ms-auto">
|
||||
<i class="bi bi-plus-circle-fill" ></i>
|
||||
</button>
|
||||
</template>
|
||||
<button
|
||||
v-else
|
||||
@click="emits('unassign', peer.assignment_id)"
|
||||
:class="{disabled: assignmentStore.unassigning}"
|
||||
aria-label="Delete Assignment"
|
||||
class="btn bg-danger-subtle text-danger-emphasis ms-auto">
|
||||
<i class="bi bi-trash-fill"></i>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<style scoped>
|
||||
|
||||
</style>
|
@@ -0,0 +1,109 @@
|
||||
<script setup lang="ts" async>
|
||||
import {onMounted, ref, watch, watchEffect} from "vue";
|
||||
import { fetchGet } from "@/utilities/fetch.js"
|
||||
import {DashboardClientAssignmentStore} from "@/stores/DashboardClientAssignmentStore.js";
|
||||
import AvailablePeersGroup from "@/components/clientComponents/availablePeersGroup.vue";
|
||||
import LocaleText from "@/components/text/localeText.vue";
|
||||
const props = defineProps(['client', 'clientAssignedPeers'])
|
||||
const loading = ref(false)
|
||||
const assignmentStore = DashboardClientAssignmentStore()
|
||||
const manage = ref(false)
|
||||
const emits = defineEmits(['refresh'])
|
||||
|
||||
const assign = async (ConfigurationName, Peer, ClientID) => {
|
||||
await assignmentStore.assignClient(ConfigurationName, Peer, ClientID, false)
|
||||
emits('refresh')
|
||||
}
|
||||
|
||||
const unassign = async (AssignmentID) => {
|
||||
await assignmentStore.unassignClient(undefined, undefined, AssignmentID)
|
||||
emits('refresh')
|
||||
}
|
||||
|
||||
const availablePeerSearchString = ref("")
|
||||
|
||||
</script>
|
||||
|
||||
<template>
|
||||
<div>
|
||||
<div class="d-flex rounded-0 border-0 flex-column d-flex flex-column border-bottom pb-1" v-if="!loading">
|
||||
<div class="d-flex flex-column p-3 gap-3">
|
||||
<div class="d-flex align-items-center">
|
||||
<h6 class="mb-0">
|
||||
<LocaleText t="Assigned Peers"></LocaleText>
|
||||
<span class="text-bg-primary badge ms-2">
|
||||
{{ Object.keys(clientAssignedPeers).length }} <LocaleText :t="Object.keys(clientAssignedPeers).length > 1 ? 'Configurations' : 'Configuration'"></LocaleText>
|
||||
</span>
|
||||
<span class="text-bg-info badge ms-2">
|
||||
{{ Object.values(clientAssignedPeers).flat().length }} <LocaleText :t="Object.values(clientAssignedPeers).flat().length > 1 ? 'Peers' : 'Peer'"></LocaleText>
|
||||
</span>
|
||||
</h6>
|
||||
<button class="btn btn-sm bg-primary-subtle text-primary-emphasis rounded-3 ms-auto"
|
||||
@click="manage = !manage">
|
||||
<template v-if="!manage">
|
||||
<i class="bi bi-list-check me-2"></i>
|
||||
<LocaleText t="Manage"></LocaleText>
|
||||
</template>
|
||||
<template v-else>
|
||||
<i class="bi bi-check me-2"></i>
|
||||
<LocaleText t="Done"></LocaleText>
|
||||
</template>
|
||||
</button>
|
||||
</div>
|
||||
<div class="rounded-3 availablePeers border h-100 overflow-scroll flex-grow-1 d-flex flex-column">
|
||||
<AvailablePeersGroup
|
||||
:configuration="configuration"
|
||||
:peers="peers"
|
||||
@unassign="async (id) => await unassign(id)"
|
||||
v-for="(peers, configuration) in clientAssignedPeers">
|
||||
</AvailablePeersGroup>
|
||||
<h6 class="text-muted m-auto p-3" v-if="Object.keys(clientAssignedPeers).length === 0">
|
||||
<LocaleText t="No peer assigned to this client"></LocaleText>
|
||||
</h6>
|
||||
</div>
|
||||
</div>
|
||||
<div style="height: 500px" class="d-flex flex-column p-3" v-if="manage">
|
||||
<div class="availablePeers border h-100 card rounded-3">
|
||||
<div class="card-header sticky-top p-3">
|
||||
<h6 class="mb-0 d-flex align-items-center">
|
||||
<LocaleText t="Available Peers"></LocaleText>
|
||||
</h6>
|
||||
</div>
|
||||
<div class="card-body p-0 overflow-scroll">
|
||||
<AvailablePeersGroup
|
||||
:availablePeerSearchString="availablePeerSearchString"
|
||||
:configuration="configuration"
|
||||
:clientAssignedPeers="clientAssignedPeers"
|
||||
:peers="peers"
|
||||
:key="configuration"
|
||||
@assign="async (id) => await assign(configuration, id, props.client.ClientID)"
|
||||
v-for="(peers, configuration) in assignmentStore.allConfigurationsPeers">
|
||||
</AvailablePeersGroup>
|
||||
<h6 class="text-muted m-auto" v-if="Object.keys(assignmentStore.allConfigurationsPeers).length === 0">
|
||||
<LocaleText t="No peer is available to assign"></LocaleText>
|
||||
</h6>
|
||||
</div>
|
||||
<div class="card-footer d-flex gap-2 p-3 align-items-center justify-content-end">
|
||||
<label for="availablePeerSearchString">
|
||||
<i class="bi bi-search me-2"></i>
|
||||
</label>
|
||||
<input
|
||||
id="availablePeerSearchString"
|
||||
v-model="availablePeerSearchString"
|
||||
class="form-control form-control-sm rounded-3 w-auto" type="text">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div v-else>
|
||||
<div class="p-3 placeholder-glow border-bottom">
|
||||
<h6 class="placeholder w-100 rounded-3"></h6>
|
||||
<div class="placeholder w-100 rounded-3" style="height: 400px"></div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<style scoped>
|
||||
|
||||
</style>
|
@@ -0,0 +1,62 @@
|
||||
<script setup lang="ts">
|
||||
import LocaleText from "@/components/text/localeText.vue";
|
||||
import { fetchPost } from "@/utilities/fetch"
|
||||
import {ref} from "vue";
|
||||
import { DashboardConfigurationStore } from "@/stores/DashboardConfigurationStore.js"
|
||||
|
||||
const props = defineProps(['client'])
|
||||
const deleting = ref(false)
|
||||
const confirmDelete = ref(false)
|
||||
const emits = defineEmits(['refresh'])
|
||||
const dashboardConfigurationStore = DashboardConfigurationStore()
|
||||
const deleteClient = async () => {
|
||||
deleting.value = true
|
||||
await fetchPost("/api/clients/deleteClient", {
|
||||
ClientID: props.client.ClientID
|
||||
}, (res) => {
|
||||
deleting.value = false
|
||||
if (res.status){
|
||||
emits("deleteSuccess")
|
||||
dashboardConfigurationStore.newMessage("Server", "Delete client successfully", "success")
|
||||
}else {
|
||||
dashboardConfigurationStore.newMessage("Server", "Failed to delete client", "danger")
|
||||
}
|
||||
})
|
||||
}
|
||||
</script>
|
||||
|
||||
<template>
|
||||
<div class="p-3 d-flex gap-3 flex-column border-bottom">
|
||||
<div class="d-flex align-items-center gap-2">
|
||||
<h6 class="mb-0">
|
||||
<LocaleText t="Delete Client" v-if="!confirmDelete"></LocaleText>
|
||||
<LocaleText t="Are you sure to delete this client?" v-else></LocaleText>
|
||||
</h6>
|
||||
<button class="btn btn-sm bg-danger-subtle text-danger-emphasis rounded-3 ms-auto"
|
||||
v-if="!confirmDelete"
|
||||
@click="confirmDelete = true"
|
||||
>
|
||||
<i class="bi bi-trash-fill me-2"></i>
|
||||
<LocaleText t="Delete"></LocaleText>
|
||||
</button>
|
||||
|
||||
<template v-if="confirmDelete">
|
||||
<button
|
||||
@click="deleteClient"
|
||||
class="btn btn-sm bg-danger-subtle text-danger-emphasis rounded-3 ms-auto">
|
||||
<i class="bi bi-trash-fill me-2"></i>
|
||||
<LocaleText t="Yes"></LocaleText>
|
||||
</button>
|
||||
<button class="btn btn-sm bg-secondary-subtle text-secondary-emphasis rounded-3"
|
||||
v-if="confirmDelete" @click="confirmDelete = false">
|
||||
<i class="bi bi-x-lg me-2"></i>
|
||||
<LocaleText t="No"></LocaleText>
|
||||
</button>
|
||||
</template>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<style scoped>
|
||||
|
||||
</style>
|
@@ -0,0 +1,57 @@
|
||||
<script setup lang="ts">
|
||||
import {computed, onMounted} from "vue";
|
||||
import LocaleText from "@/components/text/localeText.vue";
|
||||
import {useRoute} from "vue-router";
|
||||
|
||||
const props = defineProps(['groupName', 'clients', 'searchString'])
|
||||
|
||||
const getClients = computed(() => {
|
||||
const s = props.searchString.toLowerCase()
|
||||
if (!props.searchString){
|
||||
return props.clients
|
||||
}
|
||||
return props.clients.filter(
|
||||
x =>
|
||||
(x.ClientID && x.ClientID.toLowerCase().includes(s)) ||
|
||||
(x.Email && x.Email.toLowerCase().includes(s) ||
|
||||
(x.Name && x.Name.toLowerCase().includes(s)))
|
||||
)
|
||||
})
|
||||
const route = useRoute()
|
||||
onMounted(() => {
|
||||
document.querySelector(".clientList .active")?.scrollIntoView()
|
||||
})
|
||||
</script>
|
||||
|
||||
<template>
|
||||
<div class="card rounded-0 border-0">
|
||||
<div class="card-header d-flex align-items-center rounded-0">
|
||||
<h6 class="my-2">{{ groupName }}</h6>
|
||||
<span class="badge text-bg-primary ms-auto">
|
||||
<LocaleText :t="getClients.length + ' Client' + (getClients.length > 1 ? 's': '')"></LocaleText>
|
||||
</span>
|
||||
</div>
|
||||
<div class="card-body p-0">
|
||||
<div class="list-group list-group-flush clientList">
|
||||
<RouterLink
|
||||
:key="client.ClientID"
|
||||
:id="'client_' + client.ClientID"
|
||||
active-class="active"
|
||||
:to="{ name: 'Client Viewer', params: { id: client.ClientID } }"
|
||||
class="list-group-item d-flex flex-column border-bottom list-group-item-action client"
|
||||
v-for="client in getClients" >
|
||||
<small class="text-body">
|
||||
{{ client.Email }}
|
||||
</small>
|
||||
<small class="text-muted">
|
||||
{{ client.Name ? client.Name : 'No Name'}}
|
||||
</small>
|
||||
</RouterLink>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<style scoped>
|
||||
|
||||
</style>
|
@@ -0,0 +1,11 @@
|
||||
<script setup lang="ts">
|
||||
|
||||
</script>
|
||||
|
||||
<template>
|
||||
|
||||
</template>
|
||||
|
||||
<style scoped>
|
||||
|
||||
</style>
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user