Merge remote-tracking branch 'origin/k8s-unofficial' into k8s-unofficial

Branch: k8s-unofficial
Louis Lam, 3 years ago
Commit 96c4c84bff
1. .dockerignore (46)
2. .editorconfig (21)
3. .eslintrc.js (113)
4. .github/FUNDING.yml (12)
5. .github/ISSUE_TEMPLATE/ask-for-help.md (21)
6. .github/ISSUE_TEMPLATE/bug_report.md (42)
7. .github/ISSUE_TEMPLATE/feature_request.md (22)
8. .github/workflows/auto-test.yml (35)
9. .gitignore (15)
10. .stylelintrc (9)
11. CNAME (1)
12. CODE_OF_CONDUCT.md (128)
13. CONTRIBUTING.md (180)
14. LICENSE (21)
15. README.md (138)
16. SECURITY.md (31)
17. babel.config.js (11)
18. config/jest-backend.config.js (5)
19. config/jest-frontend.config.js (5)
20. config/jest-puppeteer.config.js (6)
21. config/jest.config.js (11)
22. config/vite.config.js (24)
23. data/.gitkeep (0)
24. db/kuma.db (BIN)
25. db/patch-2fa.sql (10)
26. db/patch-add-retry-interval-monitor.sql (7)
27. db/patch-group-table.sql (30)
28. db/patch-http-monitor-method-body-and-headers.sql (13)
29. db/patch-improve-performance.sql (10)
30. db/patch-incident-table.sql (18)
31. db/patch-monitor-push_token.sql (7)
32. db/patch-setting-value-type.sql (22)
33. db/patch1.sql (37)
34. db/patch10.sql (19)
35. db/patch2.sql (9)
36. db/patch3.sql (37)
37. db/patch4.sql (40)
38. db/patch5.sql (70)
39. db/patch6.sql (74)
40. db/patch7.sql (10)
41. db/patch8.sql (7)
42. db/patch9.sql (7)
43. docker/alpine-base.dockerfile (8)
44. docker/debian-base.dockerfile (12)
45. docker/docker-compose.yml (13)
46. docker/dockerfile (51)
47. docker/dockerfile-alpine (26)
48. ecosystem.config.js (6)
49. extra/compile-install-script.ps1 (2)
50. extra/download-dist.js (57)
51. extra/entrypoint.sh (21)
52. extra/healthcheck.js (34)
53. extra/install.batsh (245)
54. extra/mark-as-nightly.js (24)
55. extra/reset-password.js (70)
56. extra/simple-dns-server.js (144)
57. extra/update-language-files/.gitignore (3)
58. extra/update-language-files/index.js (86)
59. extra/update-language-files/package.json (12)
60. extra/update-version.js (100)
61. extra/upload-github-release-asset.sh (64)
62. index.html (17)
63. install.sh (203)
64. package-lock.json (22037)
65. package.json (128)
66. public/apple-touch-icon-precomposed.png (BIN)
67. public/apple-touch-icon.png (BIN)
68. public/favicon.ico (BIN)
69. public/icon-192x192.png (BIN)
70. public/icon-512x512.png (BIN)
71. public/icon.png (BIN)
72. public/icon.svg (3)
73. public/manifest.json (19)
74. server/auth.js (51)
75. server/check-version.js (42)
76. server/client.js (100)
77. server/config.js (7)
78. server/database.js (378)
79. server/image-data-uri.js (57)
80. server/jobs.js (31)
81. server/jobs/clear-old-data.js (40)
82. server/jobs/util-worker.js (39)
83. server/model/group.js (34)
84. server/model/heartbeat.js (39)
85. server/model/incident.js (18)
86. server/model/monitor.js (609)
87. server/model/tag.js (13)
88. server/model/user.js (21)
89. server/modules/apicache/apicache.js (749)
90. server/modules/apicache/index.js (14)
91. server/modules/apicache/memory-cache.js (59)
92. server/notification-providers/aliyun-sms.js (108)
93. server/notification-providers/apprise.js (26)
94. server/notification-providers/dingding.js (79)
95. server/notification-providers/discord.js (115)
96. server/notification-providers/feishu.js (83)
97. server/notification-providers/gotify.js (28)
98. server/notification-providers/line.js (60)
99. server/notification-providers/lunasea.js (48)
100. server/notification-providers/matrix.js (45)

46
.dockerignore

@@ -1,46 +0,0 @@
/.idea
/dist
/node_modules
/data
/out
/test
/kubernetes
/.do
**/.dockerignore
/private
**/.git
**/.gitignore
**/docker-compose*
**/[Dd]ockerfile*
LICENSE
README.md
.editorconfig
.vscode
.eslint*
.stylelint*
/.github
yarn.lock
app.json
CODE_OF_CONDUCT.md
CONTRIBUTING.md
CNAME
install.sh
SECURITY.md
tsconfig.json
.env
/tmp
### .gitignore content (commented rules are duplicated)
#node_modules
.DS_Store
#dist
dist-ssr
*.local
#.idea
#/data
#!/data/.gitkeep
#.vscode
### End of .gitignore content

21
.editorconfig

@@ -1,21 +0,0 @@
root = true
[*]
indent_style = space
indent_size = 4
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
[*.md]
trim_trailing_whitespace = false
[*.yaml]
indent_size = 2
[*.yml]
indent_size = 2
[*.vue]
trim_trailing_whitespace = false

113
.eslintrc.js

@@ -1,113 +0,0 @@
module.exports = {
root: true,
env: {
browser: true,
commonjs: true,
es2020: true,
node: true,
},
extends: [
"eslint:recommended",
"plugin:vue/vue3-recommended",
],
parser: "vue-eslint-parser",
parserOptions: {
parser: "@babel/eslint-parser",
sourceType: "module",
requireConfigFile: false,
},
rules: {
"linebreak-style": ["error", "unix"],
"camelcase": ["warn", {
"properties": "never",
"ignoreImports": true
}],
// override/add rules settings here, such as:
// 'vue/no-unused-vars': 'error'
"no-unused-vars": "warn",
indent: [
"error",
4,
{
ignoredNodes: ["TemplateLiteral"],
SwitchCase: 1,
},
],
quotes: ["warn", "double"],
semi: "warn",
"vue/html-indent": ["warn", 4], // default: 2
"vue/max-attributes-per-line": "off",
"vue/singleline-html-element-content-newline": "off",
"vue/html-self-closing": "off",
"vue/attribute-hyphenation": "off", // This change noNL to "no-n-l" unexpectedly
"no-multi-spaces": ["error", {
ignoreEOLComments: true,
}],
"space-before-function-paren": ["error", {
"anonymous": "always",
"named": "never",
"asyncArrow": "always"
}],
"curly": "error",
"object-curly-spacing": ["error", "always"],
"object-curly-newline": "off",
"object-property-newline": "error",
"comma-spacing": "error",
"brace-style": "error",
"no-var": "error",
"key-spacing": "warn",
"keyword-spacing": "warn",
"space-infix-ops": "warn",
"arrow-spacing": "warn",
"no-trailing-spaces": "warn",
"no-constant-condition": ["error", {
"checkLoops": false,
}],
"space-before-blocks": "warn",
//'no-console': 'warn',
"no-extra-boolean-cast": "off",
"no-multiple-empty-lines": ["warn", {
"max": 1,
"maxBOF": 0,
}],
"lines-between-class-members": ["warn", "always", {
exceptAfterSingleLine: true,
}],
"no-unneeded-ternary": "error",
"array-bracket-newline": ["error", "consistent"],
"eol-last": ["error", "always"],
//'prefer-template': 'error',
"comma-dangle": ["warn", "only-multiline"],
"no-empty": ["error", {
"allowEmptyCatch": true
}],
"no-control-regex": "off",
"one-var": ["error", "never"],
"max-statements-per-line": ["error", { "max": 1 }]
},
"overrides": [
{
"files": [ "src/languages/*.js", "src/icon.js" ],
"rules": {
"comma-dangle": ["error", "always-multiline"],
}
},
// Override for jest puppeteer
{
"files": [
"**/*.spec.js",
"**/*.spec.jsx"
],
env: {
jest: true,
},
globals: {
page: true,
browser: true,
context: true,
jestPuppeteer: true,
},
}
]
};

12
.github/FUNDING.yml

@@ -1,12 +0,0 @@
# These are supported funding model platforms
github: louislam # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
#patreon: # Replace with a single Patreon username
open_collective: uptime-kuma # Replace with a single Open Collective username
#ko_fi: # Replace with a single Ko-fi username
#tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
#community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
#liberapay: # Replace with a single Liberapay username
#issuehunt: # Replace with a single IssueHunt username
#otechie: # Replace with a single Otechie username
#custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']

21
.github/ISSUE_TEMPLATE/ask-for-help.md

@@ -1,21 +0,0 @@
---
name: Ask for help
about: You can ask any question related to Uptime Kuma.
title: ''
labels: help
assignees: ''
---
**Is it a duplicate question?**
Please search in Issues without filters: https://github.com/louislam/uptime-kuma/issues?q=
**Describe your problem**
Please describe what you are asking for
**Info**
Uptime Kuma Version:
Using Docker?: Yes/No
Docker Version:
Node.js Version (Without Docker only):
OS:
Browser:

42
.github/ISSUE_TEMPLATE/bug_report.md

@@ -1,42 +0,0 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug
assignees: ''
---
**Is it a duplicate question?**
Please search in Issues without filters: https://github.com/louislam/uptime-kuma/issues?q=
**Describe the bug**
A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
**Expected behavior**
A clear and concise description of what you expected to happen.
**Info**
Uptime Kuma Version:
Using Docker?: Yes/No
Docker Version:
Node.js Version (Without Docker only):
OS:
Browser:
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Error Log**
It is easier for us to find out the problem.
Docker: `docker logs <container id>`
PM2: `~/.pm2/logs/` (e.g. `/home/ubuntu/.pm2/logs`)

22
.github/ISSUE_TEMPLATE/feature_request.md

@@ -1,22 +0,0 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: enhancement
assignees: ''
---
**Is it a duplicate question?**
Please search in Issues without filters: https://github.com/louislam/uptime-kuma/issues?q=
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.

35
.github/workflows/auto-test.yml

@@ -1,35 +0,0 @@
# This workflow will do a clean install of node dependencies, cache/restore them, build the source code and run tests across different versions of node
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions
name: Auto Test
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
jobs:
auto-test:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [macos-latest, ubuntu-latest, windows-latest]
node-version: [14.x, 16.x]
# See supported Node.js release schedule at https://nodejs.org/en/about/releases/
steps:
- uses: actions/checkout@v2
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v2
with:
node-version: ${{ matrix.node-version }}
cache: 'npm'
- run: npm run install-legacy
- run: npm run build
- run: npm test
env:
HEADLESS_TEST: 1
JUST_FOR_TEST: ${{ secrets.JUST_FOR_TEST }}

15
.gitignore

@@ -1,15 +0,0 @@
node_modules
.DS_Store
dist
dist-ssr
*.local
.idea
/data
!/data/.gitkeep
.vscode
/private
/out
/tmp
.env

9
.stylelintrc

@@ -1,9 +0,0 @@
{
"extends": "stylelint-config-standard",
"rules": {
"indentation": 4,
"no-descending-specificity": null,
"selector-list-comma-newline-after": null,
"declaration-empty-line-before": null
}
}

1
CNAME

@@ -1 +0,0 @@
git.kuma.pet

128
CODE_OF_CONDUCT.md

@@ -1,128 +0,0 @@
# Contributor Covenant Code of Conduct
## Our Pledge
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.
## Our Standards
Examples of behavior that contributes to a positive environment for our
community include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
overall community
Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or
advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.
## Scope
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
louis@uptimekuma.louislam.net.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
## Enforcement Guidelines
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.
### 2. Warning
**Community Impact**: A violation through a single incident or series
of actions.
**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.
### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within
the community.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.

180
CONTRIBUTING.md

@@ -1,180 +0,0 @@
# Project Info
First of all, thank you to everyone who has made pull requests for Uptime Kuma; I never thought the GitHub community could be this nice! Because of that, I also never expected that other people would actually read and edit my code. It is not structured or commented all that well, lol. Sorry about that.
The project was created with vite.js (Vue 3). I then created a sub-directory called "server" for the server part. Both frontend and backend share the same package.json.
The frontend code is built into the "dist" directory, and the server (express.js) exposes "dist" as the root of the endpoint. This is how production works.
## Key Technical Skills
- Node.js (You should know what promises, async/await, arrow functions, etc. are)
- Socket.io
- SCSS
- Vue.js
- Bootstrap
- SQLite
## Directories
- data (App data)
- dist (Frontend build)
- extra (Extra useful scripts)
- public (Frontend resources for dev only)
- server (Server source code)
- src (Frontend source code)
- test (unit test)
## Can I create a pull request for Uptime Kuma?
Generally, if the pull request works fine and does not affect any existing logic, workflow or performance, I will merge it into the master branch once it is tested.
If you are not sure, feel free to create an empty pull request draft first.
### Pull Request Examples
#### ✅ High - Medium Priority
- Add a new notification
- Add a chart
- Fix a bug
- Translations
#### *️⃣ Requires one more reviewer
I do not have the knowledge to test these.
- Add k8s supports
#### *️⃣ Low Priority
These change my current workflow and require further study.
- Change my release approach
#### ❌ Won't Merge
- Duplicated pull request
- Buggy
- Existing logic is completely modified or deleted
- A function that is completely out of scope
## Project Styles
I personally do not like tools that require a lot of learning and a lot of configuration before you can finally start the app.
- Easy to install for non-Docker users: no native build dependencies are needed (at least for x86_64), no extra config, no extra effort to get it running
- Single container for Docker users: no overly complex docker-compose file, just map the volume and expose the port, then you are good to go
- Settings should be configurable in the frontend; environment variables are discouraged.
- Easy to use
## Coding Styles
- 4 spaces indentation
- Follow `.editorconfig`
- Follow ESLint
## Name convention
- Javascript/Typescript: camelCaseType
- SQLite: underscore_type
- CSS/SCSS: dash-type
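To illustrate, here is one field rendered in each convention (`retry_interval` is a real column from the patches in this commit; the other two spellings are just for illustration):
```js
// Javascript/Typescript: retryInterval    (camelCase)
// SQLite column:         retry_interval   (underscore)
// CSS/SCSS class:        .retry-interval  (dash)
```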
## Tools
- Node.js >= 14
- Git
- IDE that supports ESLint and EditorConfig (I am using IntelliJ IDEA)
- A SQLite tool (SQLite Expert Personal is suggested)
## Install dependencies
```bash
npm ci
```
## How to start the Backend Dev Server
(2021-09-23 Update)
```bash
npm run start-server-dev
```
It binds to `0.0.0.0:3001` by default.
### Backend Details
It is mainly a socket.io app + express.js.
express.js is just used for serving the built frontend files (index.html, .js and .css etc.)
- model/ (Object models, auto-mapped to database table names; see the sketch after this list)
- modules/ (Modified 3rd-party modules)
- notification-providers/ (Individual notification logic)
- routers/ (Express routers)
- socket-handler/ (Socket.io handlers)
- server.js (Server main logic)
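For illustration, a model might look like the following sketch. It assumes the redbean-node `BeanModel` base class; the actual model files are part of this commit but are not shown above, so treat the import path and fields as assumptions:
```js
// server/model/example.js (hypothetical sketch, not an actual file in this commit)
const { BeanModel } = require("redbean-node/dist/bean-model");

// The class name "Example" is auto-mapped to the "example" table.
class Example extends BeanModel {
    // Shape the bean for the frontend (fields here are placeholders).
    toJSON() {
        return {
            id: this.id,
            name: this.name,
        };
    }
}

module.exports = Example;
```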
## How to start the Frontend Dev Server
1. Set the env var `NODE_ENV` to "development".
2. Start the frontend dev server with the following command.
```bash
npm run dev
```
It binds to `0.0.0.0:3000` by default.
You can use the Vue.js devtools Chrome extension for debugging.
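On a Unix-like shell, both steps can be combined into one line (this one-liner is just an illustration; on Windows, set the variable separately or use a tool such as cross-env):
```bash
NODE_ENV=development npm run dev
```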
### Build the frontend
```bash
npm run build
```
### Frontend Details
The Uptime Kuma frontend is a single-page application (SPA); most paths are handled by Vue Router.
The router is defined in `src/router.js`.
As you can see, most frontend data is stored at the root level, even when you navigate to other pages.
The data and socket logic are in `src/mixins/socket.js`.
## Database Migration
1. Create `patch-{name}.sql` in `./db/`
2. Add your patch filename to the `patchList` in `./server/database.js`, as sketched below
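A minimal sketch of such a patch, following the conventions of the existing files in `./db/` (the filename and column here are made up for illustration):
```sql
-- db/patch-add-example-column.sql (hypothetical)
BEGIN TRANSACTION;

ALTER TABLE monitor
    ADD example_column VARCHAR(255) DEFAULT NULL;

COMMIT;
```
The filename then has to be registered in the `patchList` in `./server/database.js`; the exact shape of that list is not shown in this diff.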
## Unit Test
It is end-to-end testing, using Jest and Puppeteer.
```bash
npm run build
npm test
```
By default, a Chromium window will be shown during the test. Specify `HEADLESS_TEST=1` for terminal environments.
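For example (`HEADLESS_TEST` is the same variable set by `.github/workflows/auto-test.yml` and read by `config/jest-puppeteer.config.js` in this commit):
```bash
HEADLESS_TEST=1 npm test
```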
## Update Dependencies
Install `ncu`
https://github.com/raineorshine/npm-check-updates
```bash
ncu -u -t patch
npm install
```
Since updating vite from 2.5.10 to 2.6.0 previously broke the application completely, from now on only patch releases should be updated.
Patch release = the third digit ([Semantic Versioning](https://semver.org/))
## Translations
Please read: https://github.com/louislam/uptime-kuma/tree/master/src/languages

21
LICENSE

@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2021 Louis Lam
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

138
README.md

@@ -1,138 +0,0 @@
# Uptime Kuma
<a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/stars/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/pulls/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/v/louislam/uptime-kuma/latest?label=docker%20image%20ver." /></a> <a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/last-commit/louislam/uptime-kuma" /></a> <a target="_blank" href="https://opencollective.com/uptime-kuma"><img src="https://opencollective.com/uptime-kuma/total/badge.svg?label=Backers&color=brightgreen" /></a>
<div align="center" width="100%">
<img src="./public/icon.svg" width="128" alt="" />
</div>
It is a self-hosted monitoring tool like "Uptime Robot".
<img src="https://uptime.kuma.pet/img/dark.jpg" width="700" alt="" />
## 🥔 Live Demo
Try it!
https://demo.uptime.kuma.pet
It is a temporary live demo; all data will be deleted after 10 minutes. The server is located in Tokyo, so if you live far away from there, it may affect your experience. I suggest that you install it and try it out for the best demo experience.
VPS is sponsored by Uptime Kuma sponsors on [Open Collective](https://opencollective.com/uptime-kuma)! Thank you so much!
## ⭐ Features
* Monitoring uptime for HTTP(s) / TCP / Ping / DNS Record / Push.
* Fancy, Reactive, Fast UI/UX.
* Notifications via Telegram, Discord, Gotify, Slack, Pushover, Email (SMTP), and [70+ notification services, click here for the full list](https://github.com/louislam/uptime-kuma/tree/master/src/components/notifications).
* 20 second intervals.
* [Multi Languages](https://github.com/louislam/uptime-kuma/tree/master/src/languages)
* Simple Status Page
* Ping Chart
* Certificate Info
## 🔧 How to Install
### 🐳 Docker
```bash
docker volume create uptime-kuma
docker run -d --restart=always -p 3001:3001 -v uptime-kuma:/app/data --name uptime-kuma louislam/uptime-kuma:1
```
Browse to http://localhost:3001 after starting.
### 💪🏻 Without Docker
Required Tools: Node.js >= 14, git and pm2.
```bash
# Update your npm to the latest version
npm install npm -g
git clone https://github.com/louislam/uptime-kuma.git
cd uptime-kuma
npm run setup
# Option 1. Try it
node server/server.js
# (Recommended) Option 2. Run in background using PM2
# Install PM2 if you don't have it: npm install pm2 -g
pm2 start server/server.js --name uptime-kuma
```
Browse to http://localhost:3001 after starting.
### Advanced Installation
If you need more options or need to browse via a reverse proxy, please read:
https://github.com/louislam/uptime-kuma/wiki/%F0%9F%94%A7-How-to-Install
## 🆙 How to Update
Please read:
https://github.com/louislam/uptime-kuma/wiki/%F0%9F%86%99-How-to-Update
## 🆕 What's Next?
I will assign requests/issues to the next milestone.
https://github.com/louislam/uptime-kuma/milestones
Project Plan:
https://github.com/louislam/uptime-kuma/projects/1
## 🖼 More Screenshots
Light Mode:
<img src="https://uptime.kuma.pet/img/light.jpg" width="512" alt="" />
Status Page:
<img src="https://user-images.githubusercontent.com/1336778/134628766-a3fe0981-0926-4285-ab46-891a21c3e4cb.png" width="512" alt="" />
Settings Page:
<img src="https://louislam.net/uptimekuma/2.jpg" width="400" alt="" />
Telegram Notification Sample:
<img src="https://louislam.net/uptimekuma/3.jpg" width="400" alt="" />
## Motivation
* I was looking for a self-hosted monitoring tool like "Uptime Robot", but it was hard to find a suitable one. One of the closest is Statping; unfortunately, it is not stable and is unmaintained.
* Want to build a fancy UI.
* Learn Vue 3 and vite.js.
* Show the power of Bootstrap 5.
* Try to use WebSocket with SPA instead of REST API.
* Deploy my first Docker image to Docker Hub.
If you love this project, please consider giving me a ⭐.
## 🗣️ Discussion
### Issues Page
You can discuss or ask for help in [Issues](https://github.com/louislam/uptime-kuma/issues).
### Subreddit
My Reddit account: louislamlam
You can mention me if you ask a question on Reddit.
https://www.reddit.com/r/UptimeKuma/
## Contribute
If you want to report a bug or request a new feature, feel free to open a [new issue](https://github.com/louislam/uptime-kuma/issues).
If you want to translate Uptime Kuma into your language, please read: https://github.com/louislam/uptime-kuma/tree/master/src/languages
If you want to modify Uptime Kuma, this guideline may be useful for you: https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md
English proofreading is needed too because my grammar is not that great sadly. Feel free to correct my grammar in this readme, source code, or wiki.

31
SECURITY.md

@@ -1,31 +0,0 @@
# Security Policy
## Supported Versions
Use this section to tell people about which versions of your project are
currently being supported with security updates.
### Uptime Kuma Versions
| Version | Supported |
| ------- | ------------------ |
| 1.8.X | :white_check_mark: |
| <= 1.7.X | ❌ |
### Upgradable Docker Tags
| Tag | Supported |
| ------- | ------------------ |
| 1 | :white_check_mark: |
| 1-debian | :white_check_mark: |
| 1-alpine | :white_check_mark: |
| latest | :white_check_mark: |
| debian | :white_check_mark: |
| alpine | :white_check_mark: |
| All other tags | ❌ |
## Reporting a Vulnerability
Please report security issues to uptime@kuma.pet.
Do not use the issue tracker or discuss it in public, as that will cause more damage.

11
babel.config.js

@@ -1,11 +0,0 @@
const config = {};
if (process.env.TEST_FRONTEND) {
config.presets = ["@babel/preset-env"];
}
if (process.env.TEST_BACKEND) {
config.plugins = ["babel-plugin-rewire"];
}
module.exports = config;
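A sketch of how these two flags might be exercised; the config paths exist in this commit under config/, but invoking Jest directly like this is an illustration rather than a documented script:
```bash
# Frontend tests: Babel applies @babel/preset-env
TEST_FRONTEND=1 npx jest --config=./config/jest-frontend.config.js

# Backend tests: Babel loads babel-plugin-rewire
TEST_BACKEND=1 npx jest --config=./config/jest-backend.config.js
```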

5
config/jest-backend.config.js

@@ -1,5 +0,0 @@
module.exports = {
"rootDir": "..",
"testRegex": "./test/backend.spec.js",
};

5
config/jest-frontend.config.js

@@ -1,5 +0,0 @@
module.exports = {
"rootDir": "..",
"testRegex": "./test/frontend.spec.js",
};

6
config/jest-puppeteer.config.js

@@ -1,6 +0,0 @@
module.exports = {
"launch": {
"headless": process.env.HEADLESS_TEST || false,
"userDataDir": "./data/test-chrome-profile",
}
};

11
config/jest.config.js

@@ -1,11 +0,0 @@
module.exports = {
"verbose": true,
"preset": "jest-puppeteer",
"globals": {
"__DEV__": true
},
"testRegex": "./test/e2e.spec.js",
"rootDir": "..",
"testTimeout": 30000,
};

24
config/vite.config.js

@@ -1,24 +0,0 @@
import legacy from "@vitejs/plugin-legacy";
import vue from "@vitejs/plugin-vue";
import { defineConfig } from "vite";
const postCssScss = require("postcss-scss");
const postcssRTLCSS = require("postcss-rtlcss");
// https://vitejs.dev/config/
export default defineConfig({
plugins: [
vue(),
legacy({
targets: ["ie > 11"],
additionalLegacyPolyfills: ["regenerator-runtime/runtime"]
})
],
css: {
postcss: {
"parser": postCssScss,
"map": false,
"plugins": [postcssRTLCSS]
}
},
});

0
data/.gitkeep

BIN
db/kuma.db

Binary file not shown.

10
db/patch-2fa.sql

@@ -1,10 +0,0 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE user
ADD twofa_secret VARCHAR(64);
ALTER TABLE user
ADD twofa_status BOOLEAN default 0 NOT NULL;
COMMIT;

7
db/patch-add-retry-interval-monitor.sql

@@ -1,7 +0,0 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD retry_interval INTEGER default 0 not null;
COMMIT;

30
db/patch-group-table.sql

@@ -1,30 +0,0 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
create table `group`
(
id INTEGER not null
constraint group_pk
primary key autoincrement,
name VARCHAR(255) not null,
created_date DATETIME default (DATETIME('now')) not null,
public BOOLEAN default 0 not null,
active BOOLEAN default 1 not null,
weight BOOLEAN NOT NULL DEFAULT 1000
);
CREATE TABLE [monitor_group]
(
[id] INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
[monitor_id] INTEGER NOT NULL REFERENCES [monitor] ([id]) ON DELETE CASCADE ON UPDATE CASCADE,
[group_id] INTEGER NOT NULL REFERENCES [group] ([id]) ON DELETE CASCADE ON UPDATE CASCADE,
weight BOOLEAN NOT NULL DEFAULT 1000
);
CREATE INDEX [fk]
ON [monitor_group] (
[monitor_id],
[group_id]);
COMMIT;

13
db/patch-http-monitor-method-body-and-headers.sql

@@ -1,13 +0,0 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD method TEXT default 'GET' not null;
ALTER TABLE monitor
ADD body TEXT default null;
ALTER TABLE monitor
ADD headers TEXT default null;
COMMIT;

10
db/patch-improve-performance.sql

@@ -1,10 +0,0 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
-- For sendHeartbeatList
CREATE INDEX monitor_time_index ON heartbeat (monitor_id, time);
-- For sendImportantHeartbeatList
CREATE INDEX monitor_important_time_index ON heartbeat (monitor_id, important,time);
COMMIT;

18
db/patch-incident-table.sql

@@ -1,18 +0,0 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
create table incident
(
id INTEGER not null
constraint incident_pk
primary key autoincrement,
title VARCHAR(255) not null,
content TEXT not null,
style VARCHAR(30) default 'warning' not null,
created_date DATETIME default (DATETIME('now')) not null,
last_updated_date DATETIME,
pin BOOLEAN default 1 not null,
active BOOLEAN default 1 not null
);
COMMIT;

7
db/patch-monitor-push_token.sql

@@ -1,7 +0,0 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD push_token VARCHAR(20) DEFAULT NULL;
COMMIT;

22
db/patch-setting-value-type.sql

@@ -1,22 +0,0 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
-- Generated by IntelliJ IDEA
create table setting_dg_tmp
(
id INTEGER
primary key autoincrement,
key VARCHAR(200) not null
unique,
value TEXT,
type VARCHAR(20)
);
insert into setting_dg_tmp(id, key, value, type) select id, key, value, type from setting;
drop table setting;
alter table setting_dg_tmp rename to setting;
COMMIT;

37
db/patch1.sql

@@ -1,37 +0,0 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
-- Change Monitor.created_date from "TIMESTAMP" to "DATETIME"
-- SQL generated by IntelliJ IDEA
PRAGMA foreign_keys=off;
BEGIN TRANSACTION;
create table monitor_dg_tmp
(
id INTEGER not null
primary key autoincrement,
name VARCHAR(150),
active BOOLEAN default 1 not null,
user_id INTEGER
references user
on update cascade on delete set null,
interval INTEGER default 20 not null,
url TEXT,
type VARCHAR(20),
weight INTEGER default 2000,
hostname VARCHAR(255),
port INTEGER,
created_date DATETIME,
keyword VARCHAR(255)
);
insert into monitor_dg_tmp(id, name, active, user_id, interval, url, type, weight, hostname, port, created_date, keyword) select id, name, active, user_id, interval, url, type, weight, hostname, port, created_date, keyword from monitor;
drop table monitor;
alter table monitor_dg_tmp rename to monitor;
create index user_id on monitor (user_id);
COMMIT;
PRAGMA foreign_keys=on;

19
db/patch10.sql

@@ -1,19 +0,0 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
CREATE TABLE tag (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
name VARCHAR(255) NOT NULL,
color VARCHAR(255) NOT NULL,
created_date DATETIME DEFAULT (DATETIME('now')) NOT NULL
);
CREATE TABLE monitor_tag (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
monitor_id INTEGER NOT NULL,
tag_id INTEGER NOT NULL,
value TEXT,
CONSTRAINT FK_tag FOREIGN KEY (tag_id) REFERENCES tag(id) ON DELETE CASCADE ON UPDATE CASCADE,
CONSTRAINT FK_monitor FOREIGN KEY (monitor_id) REFERENCES monitor(id) ON DELETE CASCADE ON UPDATE CASCADE
);
CREATE INDEX monitor_tag_monitor_id_index ON monitor_tag (monitor_id);
CREATE INDEX monitor_tag_tag_id_index ON monitor_tag (tag_id);

9
db/patch2.sql

@@ -1,9 +0,0 @@
BEGIN TRANSACTION;
CREATE TABLE monitor_tls_info (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
monitor_id INTEGER NOT NULL,
info_json TEXT
);
COMMIT;

37
db/patch3.sql

@@ -1,37 +0,0 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
-- Add maxretries column to monitor
PRAGMA foreign_keys=off;
BEGIN TRANSACTION;
create table monitor_dg_tmp
(
id INTEGER not null
primary key autoincrement,
name VARCHAR(150),
active BOOLEAN default 1 not null,
user_id INTEGER
references user
on update cascade on delete set null,
interval INTEGER default 20 not null,
url TEXT,
type VARCHAR(20),
weight INTEGER default 2000,
hostname VARCHAR(255),
port INTEGER,
created_date DATETIME,
keyword VARCHAR(255),
maxretries INTEGER NOT NULL DEFAULT 0
);
insert into monitor_dg_tmp(id, name, active, user_id, interval, url, type, weight, hostname, port, created_date, keyword) select id, name, active, user_id, interval, url, type, weight, hostname, port, created_date, keyword from monitor;
drop table monitor;
alter table monitor_dg_tmp rename to monitor;
create index user_id on monitor (user_id);
COMMIT;
PRAGMA foreign_keys=on;

40
db/patch4.sql

@@ -1,40 +0,0 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
-- OK... something was seriously wrong: the maxretries column was missing.
-- Developers should patch it manually if the maxretries column is missing.
PRAGMA foreign_keys=off;
BEGIN TRANSACTION;
create table monitor_dg_tmp
(
id INTEGER not null
primary key autoincrement,
name VARCHAR(150),
active BOOLEAN default 1 not null,
user_id INTEGER
references user
on update cascade on delete set null,
interval INTEGER default 20 not null,
url TEXT,
type VARCHAR(20),
weight INTEGER default 2000,
hostname VARCHAR(255),
port INTEGER,
created_date DATETIME,
keyword VARCHAR(255),
maxretries INTEGER NOT NULL DEFAULT 0,
ignore_tls BOOLEAN default 0 not null,
upside_down BOOLEAN default 0 not null
);
insert into monitor_dg_tmp(id, name, active, user_id, interval, url, type, weight, hostname, port, created_date, keyword, maxretries) select id, name, active, user_id, interval, url, type, weight, hostname, port, created_date, keyword, maxretries from monitor;
drop table monitor;
alter table monitor_dg_tmp rename to monitor;
create index user_id on monitor (user_id);
COMMIT;
PRAGMA foreign_keys=on;

70
db/patch5.sql

@@ -1,70 +0,0 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
PRAGMA foreign_keys = off;
BEGIN TRANSACTION;
create table monitor_dg_tmp (
id INTEGER not null primary key autoincrement,
name VARCHAR(150),
active BOOLEAN default 1 not null,
user_id INTEGER references user on update cascade on delete
set
null,
interval INTEGER default 20 not null,
url TEXT,
type VARCHAR(20),
weight INTEGER default 2000,
hostname VARCHAR(255),
port INTEGER,
created_date DATETIME default (DATETIME('now')) not null,
keyword VARCHAR(255),
maxretries INTEGER NOT NULL DEFAULT 0,
ignore_tls BOOLEAN default 0 not null,
upside_down BOOLEAN default 0 not null
);
insert into
monitor_dg_tmp(
id,
name,
active,
user_id,
interval,
url,
type,
weight,
hostname,
port,
keyword,
maxretries,
ignore_tls,
upside_down
)
select
id,
name,
active,
user_id,
interval,
url,
type,
weight,
hostname,
port,
keyword,
maxretries,
ignore_tls,
upside_down
from
monitor;
drop table monitor;
alter table
monitor_dg_tmp rename to monitor;
create index user_id on monitor (user_id);
COMMIT;
PRAGMA foreign_keys = on;

74
db/patch6.sql

@@ -1,74 +0,0 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
PRAGMA foreign_keys = off;
BEGIN TRANSACTION;
create table monitor_dg_tmp (
id INTEGER not null primary key autoincrement,
name VARCHAR(150),
active BOOLEAN default 1 not null,
user_id INTEGER references user on update cascade on delete
set
null,
interval INTEGER default 20 not null,
url TEXT,
type VARCHAR(20),
weight INTEGER default 2000,
hostname VARCHAR(255),
port INTEGER,
created_date DATETIME default (DATETIME('now')) not null,
keyword VARCHAR(255),
maxretries INTEGER NOT NULL DEFAULT 0,
ignore_tls BOOLEAN default 0 not null,
upside_down BOOLEAN default 0 not null,
maxredirects INTEGER default 10 not null,
accepted_statuscodes_json TEXT default '["200-299"]' not null
);
insert into
monitor_dg_tmp(
id,
name,
active,
user_id,
interval,
url,
type,
weight,
hostname,
port,
created_date,
keyword,
maxretries,
ignore_tls,
upside_down
)
select
id,
name,
active,
user_id,
interval,
url,
type,
weight,
hostname,
port,
created_date,
keyword,
maxretries,
ignore_tls,
upside_down
from
monitor;
drop table monitor;
alter table
monitor_dg_tmp rename to monitor;
create index user_id on monitor (user_id);
COMMIT;
PRAGMA foreign_keys = on;

10
db/patch7.sql

@@ -1,10 +0,0 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD dns_resolve_type VARCHAR(5);
ALTER TABLE monitor
ADD dns_resolve_server VARCHAR(255);
COMMIT;

7
db/patch8.sql

@@ -1,7 +0,0 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD dns_last_result VARCHAR(255);
COMMIT;

7
db/patch9.sql

@@ -1,7 +0,0 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE notification
ADD is_default BOOLEAN default 0 NOT NULL;
COMMIT;

8
docker/alpine-base.dockerfile

@@ -1,8 +0,0 @@
# DON'T UPDATE TO alpine3.13 or 3.14, see #41.
FROM node:14-alpine3.12
WORKDIR /app
# Install apprise, iputils for non-root ping, setpriv
RUN apk add --no-cache iputils setpriv dumb-init python3 py3-cryptography py3-pip py3-six py3-yaml py3-click py3-markdown py3-requests py3-requests-oauthlib && \
pip3 --no-cache-dir install apprise && \
rm -rf /root/.cache

12
docker/debian-base.dockerfile

@@ -1,12 +0,0 @@
# DON'T UPDATE TO node:14-bullseye-slim, see #372.
# If the image changed, the second stage image should be changed too
FROM node:14-buster-slim
WORKDIR /app
# Install Apprise, add sqlite3 cli for debugging in the future, iputils-ping for ping, util-linux for setpriv
# python3 and python3-pip pull in a lot of unnecessary packages on Debian; specify --no-install-recommends to skip them and make this base even smaller than Alpine!
RUN apt update && \
apt --yes --no-install-recommends install python3 python3-pip python3-cryptography python3-six python3-yaml python3-click python3-markdown python3-requests python3-requests-oauthlib \
sqlite3 iputils-ping util-linux dumb-init && \
pip3 --no-cache-dir install apprise && \
rm -rf /var/lib/apt/lists/*

13
docker/docker-compose.yml

@@ -1,13 +0,0 @@
# Simple docker-compose.yml
# You can change your port or volume location
version: '3.3'
services:
uptime-kuma:
image: louislam/uptime-kuma
container_name: uptime-kuma
volumes:
- ./uptime-kuma:/app/data
ports:
- 3001:3001
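Standard docker-compose usage applies, e.g. from the directory containing this file:
```bash
docker-compose up -d
```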

51
docker/dockerfile

@@ -1,51 +0,0 @@
FROM louislam/uptime-kuma:base-debian AS build
WORKDIR /app
ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1
COPY . .
RUN npm ci && \
npm run build && \
npm ci --production && \
chmod +x /app/extra/entrypoint.sh
FROM louislam/uptime-kuma:base-debian AS release
WORKDIR /app
# Copy app files from build layer
COPY --from=build /app /app
EXPOSE 3001
VOLUME ["/app/data"]
HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD node extra/healthcheck.js
ENTRYPOINT ["/usr/bin/dumb-init", "--", "extra/entrypoint.sh"]
CMD ["node", "server/server.js"]
FROM release AS nightly
RUN npm run mark-as-nightly
# Upload the artifact to Github
FROM louislam/uptime-kuma:base-debian AS upload-artifact
WORKDIR /
RUN apt update && \
apt --yes install curl file
ARG GITHUB_TOKEN
ARG TARGETARCH
ARG PLATFORM=debian
ARG VERSION=1.9.0
ARG FILE=$PLATFORM-$TARGETARCH-$VERSION.tar.gz
ARG DIST=dist.tar.gz
COPY --from=build /app /app
RUN chmod +x /app/extra/upload-github-release-asset.sh
# Full Build
# RUN tar -zcvf $FILE app
# RUN /app/extra/upload-github-release-asset.sh github_api_token=$GITHUB_TOKEN owner=louislam repo=uptime-kuma tag=$VERSION filename=$FILE
# Dist only
RUN cd /app && tar -zcvf $DIST dist
RUN /app/extra/upload-github-release-asset.sh github_api_token=$GITHUB_TOKEN owner=louislam repo=uptime-kuma tag=$VERSION filename=/app/$DIST

26
docker/dockerfile-alpine

@@ -1,26 +0,0 @@
FROM louislam/uptime-kuma:base-alpine AS build
WORKDIR /app
ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1
COPY . .
RUN npm ci && \
npm run build && \
npm ci --production && \
chmod +x /app/extra/entrypoint.sh
FROM louislam/uptime-kuma:base-alpine AS release
WORKDIR /app
# Copy app files from build layer
COPY --from=build /app /app
EXPOSE 3001
VOLUME ["/app/data"]
HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD node extra/healthcheck.js
ENTRYPOINT ["/usr/bin/dumb-init", "--", "extra/entrypoint.sh"]
CMD ["node", "server/server.js"]
FROM release AS nightly
RUN npm run mark-as-nightly

6
ecosystem.config.js

@@ -1,6 +0,0 @@
module.exports = {
apps: [{
name: "uptime-kuma",
script: "./server/server.js",
}]
}
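With this file, PM2 can start the server by config instead of the explicit command shown in the README (standard PM2 usage):
```bash
pm2 start ecosystem.config.js
```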

2
extra/compile-install-script.ps1

@@ -1,2 +0,0 @@
# Must enable File Sharing in Docker Desktop
docker run -it --rm -v ${pwd}:/app louislam/batsh /usr/bin/batsh bash --output ./install.sh ./extra/install.batsh

57
extra/download-dist.js

@@ -1,57 +0,0 @@
console.log("Downloading dist");
const https = require("https");
const tar = require("tar");
const packageJSON = require("../package.json");
const fs = require("fs");
const version = packageJSON.version;
const filename = "dist.tar.gz";
const url = `https://github.com/louislam/uptime-kuma/releases/download/${version}/${filename}`;
download(url);
function download(url) {
console.log(url);
https.get(url, (response) => {
if (response.statusCode === 200) {
console.log("Extracting dist...");
if (fs.existsSync("./dist")) {
if (fs.existsSync("./dist-backup")) {
fs.rmdirSync("./dist-backup", {
recursive: true
});
}
fs.renameSync("./dist", "./dist-backup");
}
const tarStream = tar.x({
cwd: "./",
});
tarStream.on("close", () => {
fs.rmdirSync("./dist-backup", {
recursive: true
});
console.log("Done");
});
tarStream.on("error", () => {
if (fs.existsSync("./dist-backup")) {
fs.renameSync("./dist-backup", "./dist");
}
console.log("Done");
});
response.pipe(tarStream);
} else if (response.statusCode === 302) {
download(response.headers.location);
} else {
console.log("dist not found");
}
});
}

21
extra/entrypoint.sh

@@ -1,21 +0,0 @@
#!/usr/bin/env sh
# set -e Exit the script if an error happens
set -e
PUID=${PUID=0}
PGID=${PGID=0}
files_ownership () {
# -h Changes the ownership of an encountered symbolic link and not that of the file or directory pointed to by the symbolic link.
# -R Recursively descends the specified directories
# -c Like verbose but report only when a change is made
chown -hRc "$PUID":"$PGID" /app/data
}
echo "==> Performing startup jobs and maintenance tasks"
files_ownership
echo "==> Starting application with user $PUID group $PGID"
# --clear-groups Clear supplementary groups.
exec setpriv --reuid "$PUID" --regid "$PGID" --clear-groups "$@"
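In practice this means the container can drop privileges to a host user via environment variables, e.g. (the 1000:1000 IDs are an arbitrary example; the rest of the command mirrors the README):
```bash
docker run -d --restart=always \
    -p 3001:3001 \
    -e PUID=1000 -e PGID=1000 \
    -v uptime-kuma:/app/data \
    --name uptime-kuma louislam/uptime-kuma:1
```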

34
extra/healthcheck.js

@@ -1,34 +0,0 @@
/*
* This script should be run after a period of time (180s), because the server may need some time to prepare.
*/
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
let client;
if (process.env.SSL_KEY && process.env.SSL_CERT) {
client = require("https");
} else {
client = require("http");
}
let options = {
host: process.env.HOST || "127.0.0.1",
port: parseInt(process.env.PORT) || 3001,
timeout: 28 * 1000,
};
let request = client.request(options, (res) => {
console.log(`Health Check OK [Res Code: ${res.statusCode}]`);
if (res.statusCode === 200) {
process.exit(0);
} else {
process.exit(1);
}
});
request.on("error", function (err) {
console.error("Health Check ERROR");
process.exit(1);
});
request.end();
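The script can also be run by hand against a local instance; HOST and PORT are the variables read above, and the exit code mirrors the health state:
```bash
HOST=127.0.0.1 PORT=3001 node extra/healthcheck.js
echo $?    # 0 = healthy, 1 = unhealthy
```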

245
extra/install.batsh

@@ -1,245 +0,0 @@
// install.sh is generated by ./extra/install.batsh, do not modify it directly.
// "npm run compile-install-script" to compile install.sh
// That command works on Windows PowerShell and Docker for Windows only.
// curl -o kuma_install.sh https://raw.githubusercontent.com/louislam/uptime-kuma/master/install.sh && sudo bash kuma_install.sh
println("=====================");
println("Uptime Kuma Installer");
println("=====================");
println("Supported OS: CentOS 7/8, Ubuntu >= 16.04 and Debian");
println("---------------------------------------");
println("This script is designed for Linux and basic usage.");
println("For advanced usage, please go to https://github.com/louislam/uptime-kuma/wiki/Installation");
println("---------------------------------------");
println("");
println("Local - Install Uptime Kuma in your current machine with git, Node.js 14 and pm2");
println("Docker - Install Uptime Kuma Docker container");
println("");
if ("$1" != "") {
type = "$1";
} else {
call("read", "-p", "Which installation method do you prefer? [DOCKER/local]: ", "type");
}
defaultPort = "3001";
function checkNode() {
bash("nodeVersion=$(node -e 'console.log(process.versions.node.split(`.`)[0])')");
println("Node Version: " ++ nodeVersion);
if (nodeVersion < "12") {
println("Error: Required Node.js 14");
call("exit", "1");
}
if (nodeVersion == "12") {
println("Warning: NodeJS " ++ nodeVersion ++ " is not tested.");
}
}
function deb() {
bash("nodeCheck=$(node -v)");
bash("apt --yes update");
if (nodeCheck != "") {
checkNode();
} else {
// Old nodejs binary name is "nodejs"
bash("check=$(nodejs --version)");
if (check != "") {
println("Error: 'node' command is not found, but 'nodejs' command is found. Your NodeJS should be too old.");
bash("exit 1");
}
bash("curlCheck=$(curl --version)");
if (curlCheck == "") {
println("Installing Curl");
bash("apt --yes install curl");
}
println("Installing Node.js 14");
bash("curl -sL https://deb.nodesource.com/setup_14.x | bash - > log.txt");
bash("apt --yes install nodejs");
bash("node -v");
bash("nodeCheckAgain=$(node -v)");
if (nodeCheckAgain == "") {
println("Error during Node.js installation");
bash("exit 1");
}
}
bash("check=$(git --version)");
if (check == "") {
println("Installing Git");
bash("apt --yes install git");
}
}
if (type == "local") {
defaultInstallPath = "/opt/uptime-kuma";
if (exists("/etc/redhat-release")) {
os = call("cat", "/etc/redhat-release");
distribution = "rhel";
} else if (exists("/etc/issue")) {
bash("os=$(head -n1 /etc/issue | cut -f 1 -d ' ')");
if (os == "Ubuntu") {
distribution = "ubuntu";
}
if (os == "Debian") {
distribution = "debian";
}
}
bash("arch=$(uname -i)");
println("Your OS: " ++ os);
println("Distribution: " ++ distribution);
println("Arch: " ++ arch);
if ("$3" != "") {
port = "$3";
} else {
call("read", "-p", "Listening Port [$defaultPort]: ", "port");
if (port == "") {
port = defaultPort;
}
}
if ("$2" != "") {
installPath = "$2";
} else {
call("read", "-p", "Installation Path [$defaultInstallPath]: ", "installPath");
if (installPath == "") {
installPath = defaultInstallPath;
}
}
// CentOS
if (distribution == "rhel") {
bash("nodeCheck=$(node -v)");
if (nodeCheck != "") {
checkNode();
} else {
bash("curlCheck=$(curl --version)");
if (curlCheck == "") {
println("Installing Curl");
bash("yum -y -q install curl");
}
println("Installing Node.js 14");
bash("curl -sL https://rpm.nodesource.com/setup_14.x | bash - > log.txt");
bash("yum install -y -q nodejs");
bash("node -v");
bash("nodeCheckAgain=$(node -v)");
if (nodeCheckAgain == "") {
println("Error during Node.js installation");
bash("exit 1");
}
}
bash("check=$(git --version)");
if (check == "") {
println("Installing Git");
bash("yum -y -q install git");
}
// Ubuntu
} else if (distribution == "ubuntu") {
deb();
// Debian
} else if (distribution == "debian") {
deb();
} else {
// Unknown distribution
error = 0;
bash("check=$(git --version)");
if (check == "") {
error = 1;
println("Error: git is missing");
}
bash("check=$(node -v)");
if (check == "") {
error = 1;
println("Error: node is missing");
}
if (error > 0) {
println("Please install above missing software");
bash("exit 1");
}
}
bash("check=$(pm2 --version)");
if (check == "") {
println("Installing PM2");
bash("npm install pm2 -g");
bash("pm2 startup");
}
bash("mkdir -p $installPath");
bash("cd $installPath");
bash("git clone https://github.com/louislam/uptime-kuma.git .");
bash("npm run setup");
bash("pm2 start server/server.js --name uptime-kuma -- --port=$port");
} else {
defaultVolume = "uptime-kuma";
bash("check=$(docker -v)");
if (check == "") {
println("Error: docker is not found!");
bash("exit 1");
}
bash("check=$(docker info)");
bash("if [[ \"$check\" == *\"Is the docker daemon running\"* ]]; then
\"echo\" \"Error: docker is not running\"
\"exit\" \"1\"
fi");
if ("$3" != "") {
port = "$3";
} else {
call("read", "-p", "Expose Port [$defaultPort]: ", "port");
if (port == "") {
port = defaultPort;
}
}
if ("$2" != "") {
volume = "$2";
} else {
call("read", "-p", "Volume Name [$defaultVolume]: ", "volume");
if (volume == "") {
volume = defaultVolume;
}
}
println("Port: $port");
println("Volume: $volume");
bash("docker volume create $volume");
bash("docker run -d --restart=always -p $port:3001 -v $volume:/app/data --name uptime-kuma louislam/uptime-kuma:1");
}
println("http://localhost:$port");

24
extra/mark-as-nightly.js

@@ -1,24 +0,0 @@
const pkg = require("../package.json");
const fs = require("fs");
const util = require("../src/util");
util.polyfill();
const oldVersion = pkg.version;
const newVersion = oldVersion + "-nightly";
console.log("Old Version: " + oldVersion);
console.log("New Version: " + newVersion);
if (newVersion) {
// Process package.json
pkg.version = newVersion;
pkg.scripts.setup = pkg.scripts.setup.replaceAll(oldVersion, newVersion);
pkg.scripts["build-docker"] = pkg.scripts["build-docker"].replaceAll(oldVersion, newVersion);
fs.writeFileSync("package.json", JSON.stringify(pkg, null, 4) + "\n");
// Process README.md
if (fs.existsSync("README.md")) {
fs.writeFileSync("README.md", fs.readFileSync("README.md", "utf8").replaceAll(oldVersion, newVersion))
}
}

70
extra/reset-password.js

@@ -1,70 +0,0 @@
console.log("== Uptime Kuma Reset Password Tool ==");
console.log("Loading the database");
const Database = require("../server/database");
const { R } = require("redbean-node");
const readline = require("readline");
const { initJWTSecret } = require("../server/util-server");
const args = require("args-parser")(process.argv);
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout
});
const main = async () => {
Database.init(args);
await Database.connect();
try {
// No need to actually reset the password when testing; just make sure there is no connection problem. It is OK for now.
if (!process.env.TEST_BACKEND) {
const user = await R.findOne("user");
if (! user) {
throw new Error("user not found, have you installed?");
}
console.log("Found user: " + user.username);
while (true) {
let password = await question("New Password: ");
let confirmPassword = await question("Confirm New Password: ");
if (password === confirmPassword) {
await user.resetPassword(password);
// Reset all sessions by reset jwt secret
await initJWTSecret();
break;
} else {
console.log("Passwords do not match, please try again.");
}
}
console.log("Password reset successfully.");
}
} catch (e) {
console.error("Error: " + e.message);
}
await Database.close();
rl.close();
console.log("Finished.");
};
function question(question) {
return new Promise((resolve) => {
rl.question(question, (answer) => {
resolve(answer);
});
});
}
if (!process.env.TEST_BACKEND) {
main();
}
module.exports = {
main,
};

144
extra/simple-dns-server.js

@@ -1,144 +0,0 @@
/*
* Simple DNS Server
* For testing DNS monitoring type, dev only
*/
const dns2 = require("dns2");
const { Packet } = dns2;
const server = dns2.createServer({
udp: true
});
server.on("request", (request, send, rinfo) => {
for (let question of request.questions) {
console.log(question.name, type(question.type), question.class);
const response = Packet.createResponseFromRequest(request);
if (question.name === "existing.com") {
if (question.type === Packet.TYPE.A) {
response.answers.push({
name: question.name,
type: question.type,
class: question.class,
ttl: 300,
address: "1.2.3.4"
});
} else if (question.type === Packet.TYPE.AAAA) {
response.answers.push({
name: question.name,
type: question.type,
class: question.class,
ttl: 300,
address: "fe80::1234:5678:abcd:ef00",
});
} else if (question.type === Packet.TYPE.CNAME) {
response.answers.push({
name: question.name,
type: question.type,
class: question.class,
ttl: 300,
domain: "cname1.existing.com",
});
} else if (question.type === Packet.TYPE.MX) {
response.answers.push({
name: question.name,
type: question.type,
class: question.class,
ttl: 300,
exchange: "mx1.existing.com",
priority: 5
});
} else if (question.type === Packet.TYPE.NS) {
response.answers.push({
name: question.name,
type: question.type,
class: question.class,
ttl: 300,
ns: "ns1.existing.com",
});
} else if (question.type === Packet.TYPE.SOA) {
response.answers.push({
name: question.name,
type: question.type,
class: question.class,
ttl: 300,
primary: "existing.com",
admin: "admin@existing.com",
serial: 2021082701,
refresh: 300,
retry: 3,
expiration: 10,
minimum: 10,
});
} else if (question.type === Packet.TYPE.SRV) {
response.answers.push({
name: question.name,
type: question.type,
class: question.class,
ttl: 300,
priority: 5,
weight: 5,
port: 8080,
target: "srv1.existing.com",
});
} else if (question.type === Packet.TYPE.TXT) {
response.answers.push({
name: question.name,
type: question.type,
class: question.class,
ttl: 300,
data: "#v=spf1 include:_spf.existing.com ~all",
});
} else if (question.type === Packet.TYPE.CAA) {
response.answers.push({
name: question.name,
type: question.type,
class: question.class,
ttl: 300,
flags: 0,
tag: "issue",
value: "ca.existing.com",
});
}
}
if (question.name === "4.3.2.1.in-addr.arpa") {
if (question.type === Packet.TYPE.PTR) {
response.answers.push({
name: question.name,
type: question.type,
class: question.class,
ttl: 300,
domain: "ptr1.existing.com",
});
}
}
send(response);
}
});
server.on("listening", () => {
console.log("Listening");
console.log(server.addresses());
});
server.on("close", () => {
console.log("server closed");
});
server.listen({
udp: 5300
});
function type(code) {
for (let name in Packet.TYPE) {
if (Packet.TYPE[name] === code) {
return name;
}
}
}
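
To exercise this dev-only server, Node's built-in promise-based resolver can be pointed at it; a minimal sketch, assuming the server above is already listening on UDP 5300 (this snippet is not part of the repo):

const { Resolver } = require("dns").promises;

const resolver = new Resolver();
// Send lookups to the local test server instead of the system resolver
resolver.setServers(["127.0.0.1:5300"]);

resolver.resolve4("existing.com")
    .then((addresses) => console.log(addresses)) // expected: [ "1.2.3.4" ]
    .catch(console.error);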

3
extra/update-language-files/.gitignore

@ -1,3 +0,0 @@
package-lock.json
test.js
languages/

86
extra/update-language-files/index.js

@ -1,86 +0,0 @@
// Need to use ES6 to read language files
import fs from "fs";
import path from "path";
import util from "util";
// https://stackoverflow.com/questions/13786160/copy-folder-recursively-in-node-js
/**
* Look ma, it's cp -R.
* @param {string} src The path to the thing to copy.
* @param {string} dest The path to the new copy.
*/
const copyRecursiveSync = function (src, dest) {
let exists = fs.existsSync(src);
let stats = exists && fs.statSync(src);
let isDirectory = exists && stats.isDirectory();
if (isDirectory) {
fs.mkdirSync(dest);
fs.readdirSync(src).forEach(function (childItemName) {
copyRecursiveSync(path.join(src, childItemName),
path.join(dest, childItemName));
});
} else {
fs.copyFileSync(src, dest);
}
};
console.log("Arguments:", process.argv);
const baseLangCode = process.argv[2] || "en";
console.log("Base Lang: " + baseLangCode);
if (fs.existsSync("./languages")) {
fs.rmdirSync("./languages", { recursive: true });
}
copyRecursiveSync("../../src/languages", "./languages");
const en = (await import("./languages/en.js")).default;
const baseLang = (await import(`./languages/${baseLangCode}.js`)).default;
const files = fs.readdirSync("./languages");
console.log("Files:", files);
for (const file of files) {
if (!file.endsWith(".js")) {
console.log("Skipping " + file);
continue;
}
console.log("Processing " + file);
const lang = await import("./languages/" + file);
let obj;
if (lang.default) {
obj = lang.default;
} else {
console.log("Empty file");
obj = {
languageName: "<Your Language name in your language (not in English)>"
};
}
// En first
for (const key in en) {
if (! obj[key]) {
obj[key] = en[key];
}
}
if (baseLang !== en) {
// Base second
for (const key in baseLang) {
if (! obj[key]) {
obj[key] = key;
}
}
}
const code = "export default " + util.inspect(obj, {
depth: null,
});
fs.writeFileSync(`../../src/languages/${file}`, code);
}
fs.rmdirSync("./languages", { recursive: true });
console.log("Done. Fixing formatting by ESLint...");

12
extra/update-language-files/package.json

@ -1,12 +0,0 @@
{
"name": "update-language-files",
"type": "module",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"author": "",
"license": "ISC"
}

100
extra/update-version.js

@ -1,100 +0,0 @@
const pkg = require("../package.json");
const fs = require("fs");
const child_process = require("child_process");
const util = require("../src/util");
util.polyfill();
const oldVersion = pkg.version;
const newVersion = process.argv[2];
console.log("Old Version: " + oldVersion);
console.log("New Version: " + newVersion);
if (! newVersion) {
console.error("invalid version");
process.exit(1);
}
const exists = tagExists(newVersion);
if (! exists) {
// Process package.json
pkg.version = newVersion;
pkg.scripts.setup = pkg.scripts.setup.replaceAll(oldVersion, newVersion);
pkg.scripts["build-docker"] = pkg.scripts["build-docker"].replaceAll(oldVersion, newVersion);
pkg.scripts["build-docker-alpine"] = pkg.scripts["build-docker-alpine"].replaceAll(oldVersion, newVersion);
pkg.scripts["build-docker-debian"] = pkg.scripts["build-docker-debian"].replaceAll(oldVersion, newVersion);
fs.writeFileSync("package.json", JSON.stringify(pkg, null, 4) + "\n");
commit(newVersion);
tag(newVersion);
updateWiki(oldVersion, newVersion);
} else {
console.log("version exists");
}
function commit(version) {
let msg = "update to " + version;
let res = child_process.spawnSync("git", ["commit", "-m", msg, "-a"]);
let stdout = res.stdout.toString().trim();
console.log(stdout);
if (stdout.includes("no changes added to commit")) {
throw new Error("commit error");
}
}
function tag(version) {
let res = child_process.spawnSync("git", ["tag", version]);
console.log(res.stdout.toString().trim());
}
function tagExists(version) {
if (! version) {
throw new Error("invalid version");
}
let res = child_process.spawnSync("git", ["tag", "-l", version]);
return res.stdout.toString().trim() === version;
}
function updateWiki(oldVersion, newVersion) {
const wikiDir = "./tmp/wiki";
const howToUpdateFilename = "./tmp/wiki/🆙-How-to-Update.md";
safeDelete(wikiDir);
child_process.spawnSync("git", ["clone", "https://github.com/louislam/uptime-kuma.wiki.git", wikiDir]);
let content = fs.readFileSync(howToUpdateFilename).toString();
content = content.replaceAll(`git checkout ${oldVersion}`, `git checkout ${newVersion}`);
fs.writeFileSync(howToUpdateFilename, content);
child_process.spawnSync("git", ["add", "-A"], {
cwd: wikiDir,
});
child_process.spawnSync("git", ["commit", "-m", `Update to ${newVersion} from ${oldVersion}`], {
cwd: wikiDir,
});
console.log("Pushing to Github");
child_process.spawnSync("git", ["push"], {
cwd: wikiDir,
});
safeDelete(wikiDir);
}
function safeDelete(dir) {
if (fs.existsSync(dir)) {
fs.rmdirSync(dir, {
recursive: true,
});
}
}

64
extra/upload-github-release-asset.sh

@ -1,64 +0,0 @@
#!/usr/bin/env bash
#
# Author: Stefan Buck
# License: MIT
# https://gist.github.com/stefanbuck/ce788fee19ab6eb0b4447a85fc99f447
#
#
# This script accepts the following parameters:
#
# * owner
# * repo
# * tag
# * filename
# * github_api_token
#
# Script to upload a release asset using the GitHub API v3.
#
# Example:
#
# upload-github-release-asset.sh github_api_token=TOKEN owner=stefanbuck repo=playground tag=v0.1.0 filename=./build.zip
#
# Check dependencies.
set -e
xargs=$(which gxargs || which xargs)
# Validate settings.
[ "$TRACE" ] && set -x
CONFIG=$@
for line in $CONFIG; do
eval "$line"
done
# Define variables.
GH_API="https://api.github.com"
GH_REPO="$GH_API/repos/$owner/$repo"
GH_TAGS="$GH_REPO/releases/tags/$tag"
AUTH="Authorization: token $github_api_token"
WGET_ARGS="--content-disposition --auth-no-challenge --no-cookie"
CURL_ARGS="-LJO#"
if [[ "$tag" == 'LATEST' ]]; then
GH_TAGS="$GH_REPO/releases/latest"
fi
# Validate token.
curl -o /dev/null -sH "$AUTH" $GH_REPO || { echo "Error: Invalid repo, token or network issue!"; exit 1; }
# Read asset tags.
response=$(curl -sH "$AUTH" $GH_TAGS)
# Get ID of the asset based on given filename.
eval $(echo "$response" | grep -m 1 "id.:" | grep -w id | tr : = | tr -cd '[[:alnum:]]=')
[ "$id" ] || { echo "Error: Failed to get release id for tag: $tag"; echo "$response" | awk 'length($0)<100' >&2; exit 1; }
# Upload asset
echo "Uploading asset... "
# Construct url
GH_ASSET="https://uploads.github.com/repos/$owner/$repo/releases/$id/assets?name=$(basename $filename)"
curl "$GITHUB_OAUTH_BASIC" --data-binary @"$filename" -H "Authorization: token $github_api_token" -H "Content-Type: application/octet-stream" $GH_ASSET

17
index.html

@ -1,17 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<link rel="apple-touch-icon" sizes="180x180" href="/apple-touch-icon.png">
<link rel="icon" type="image/svg+xml" href="/icon.svg" />
<link rel="manifest" href="manifest.json" />
<meta name="theme-color" id="theme-color" content="" />
<meta name="description" content="Uptime Kuma monitoring tool" />
<title>Uptime Kuma</title>
</head>
<body>
<div id="app"></div>
<script type="module" src="/src/main.js"></script>
</body>
</html>

203
install.sh

@ -1,203 +0,0 @@
# install.sh is generated by ./extra/install.batsh, do not modify it directly.
# "npm run compile-install-script" to compile install.sh
# The command is working on Windows PowerShell and Docker for Windows only.
# curl -o kuma_install.sh https://raw.githubusercontent.com/louislam/uptime-kuma/master/install.sh && sudo bash kuma_install.sh
"echo" "-e" "====================="
"echo" "-e" "Uptime Kuma Installer"
"echo" "-e" "====================="
"echo" "-e" "Supported OS: CentOS 7/8, Ubuntu >= 16.04 and Debian"
"echo" "-e" "---------------------------------------"
"echo" "-e" "This script is designed for Linux and basic usage."
"echo" "-e" "For advanced usage, please go to https://github.com/louislam/uptime-kuma/wiki/Installation"
"echo" "-e" "---------------------------------------"
"echo" "-e" ""
"echo" "-e" "Local - Install Uptime Kuma in your current machine with git, Node.js 14 and pm2"
"echo" "-e" "Docker - Install Uptime Kuma Docker container"
"echo" "-e" ""
if [ "$1" != "" ]; then
type="$1"
else
"read" "-p" "Which installation method do you prefer? [DOCKER/local]: " "type"
fi
defaultPort="3001"
function checkNode {
local _0
nodeVersion=$(node -e 'console.log(process.versions.node.split(`.`)[0])')
"echo" "-e" "Node Version: ""$nodeVersion"
_0="12"
if [ $(($nodeVersion < $_0)) == 1 ]; then
"echo" "-e" "Error: Required Node.js 14"
"exit" "1"
fi
if [ "$nodeVersion" == "12" ]; then
"echo" "-e" "Warning: NodeJS ""$nodeVersion"" is not tested."
fi
}
function deb {
nodeCheck=$(node -v)
apt --yes update
if [ "$nodeCheck" != "" ]; then
"checkNode"
else
# Old nodejs binary name is "nodejs"
check=$(nodejs --version)
if [ "$check" != "" ]; then
"echo" "-e" "Error: 'node' command is not found, but 'nodejs' command is found. Your NodeJS should be too old."
exit 1
fi
curlCheck=$(curl --version)
if [ "$curlCheck" == "" ]; then
"echo" "-e" "Installing Curl"
apt --yes install curl
fi
"echo" "-e" "Installing Node.js 14"
curl -sL https://deb.nodesource.com/setup_14.x | bash - > log.txt
apt --yes install nodejs
node -v
nodeCheckAgain=$(node -v)
if [ "$nodeCheckAgain" == "" ]; then
"echo" "-e" "Error during Node.js installation"
exit 1
fi
fi
check=$(git --version)
if [ "$check" == "" ]; then
"echo" "-e" "Installing Git"
apt --yes install git
fi
}
if [ "$type" == "local" ]; then
defaultInstallPath="/opt/uptime-kuma"
if [ -e "/etc/redhat-release" ]; then
os=$("cat" "/etc/redhat-release")
distribution="rhel"
else
if [ -e "/etc/issue" ]; then
os=$(head -n1 /etc/issue | cut -f 1 -d ' ')
if [ "$os" == "Ubuntu" ]; then
distribution="ubuntu"
fi
if [ "$os" == "Debian" ]; then
distribution="debian"
fi
fi
fi
arch=$(uname -i)
"echo" "-e" "Your OS: ""$os"
"echo" "-e" "Distribution: ""$distribution"
"echo" "-e" "Arch: ""$arch"
if [ "$3" != "" ]; then
port="$3"
else
"read" "-p" "Listening Port [$defaultPort]: " "port"
if [ "$port" == "" ]; then
port="$defaultPort"
fi
fi
if [ "$2" != "" ]; then
installPath="$2"
else
"read" "-p" "Installation Path [$defaultInstallPath]: " "installPath"
if [ "$installPath" == "" ]; then
installPath="$defaultInstallPath"
fi
fi
# CentOS
if [ "$distribution" == "rhel" ]; then
nodeCheck=$(node -v)
if [ "$nodeCheck" != "" ]; then
"checkNode"
else
curlCheck=$(curl --version)
if [ "$curlCheck" == "" ]; then
"echo" "-e" "Installing Curl"
yum -y -q install curl
fi
"echo" "-e" "Installing Node.js 14"
curl -sL https://rpm.nodesource.com/setup_14.x | bash - > log.txt
yum install -y -q nodejs
node -v
nodeCheckAgain=$(node -v)
if [ "$nodeCheckAgain" == "" ]; then
"echo" "-e" "Error during Node.js installation"
exit 1
fi
fi
check=$(git --version)
if [ "$check" == "" ]; then
"echo" "-e" "Installing Git"
yum -y -q install git
fi
# Ubuntu
else
if [ "$distribution" == "ubuntu" ]; then
"deb"
# Debian
else
if [ "$distribution" == "debian" ]; then
"deb"
else
# Unknown distribution
error=$((0))
check=$(git --version)
if [ "$check" == "" ]; then
error=$((1))
"echo" "-e" "Error: git is missing"
fi
check=$(node -v)
if [ "$check" == "" ]; then
error=$((1))
"echo" "-e" "Error: node is missing"
fi
if [ $(($error > 0)) == 1 ]; then
"echo" "-e" "Please install above missing software"
exit 1
fi
fi
fi
fi
check=$(pm2 --version)
if [ "$check" == "" ]; then
"echo" "-e" "Installing PM2"
npm install pm2 -g
pm2 startup
fi
mkdir -p $installPath
cd $installPath
git clone https://github.com/louislam/uptime-kuma.git .
npm run setup
pm2 start server/server.js --name uptime-kuma -- --port=$port
else
defaultVolume="uptime-kuma"
check=$(docker -v)
if [ "$check" == "" ]; then
"echo" "-e" "Error: docker is not found!"
exit 1
fi
check=$(docker info)
if [[ "$check" == *"Is the docker daemon running"* ]]; then
"echo" "Error: docker is not running"
"exit" "1"
fi
if [ "$3" != "" ]; then
port="$3"
else
"read" "-p" "Expose Port [$defaultPort]: " "port"
if [ "$port" == "" ]; then
port="$defaultPort"
fi
fi
if [ "$2" != "" ]; then
volume="$2"
else
"read" "-p" "Volume Name [$defaultVolume]: " "volume"
if [ "$volume" == "" ]; then
volume="$defaultVolume"
fi
fi
"echo" "-e" "Port: $port"
"echo" "-e" "Volume: $volume"
docker volume create $volume
docker run -d --restart=always -p $port:3001 -v $volume:/app/data --name uptime-kuma louislam/uptime-kuma:1
fi
"echo" "-e" "http://localhost:$port"

22037
package-lock.json

File diff suppressed because it is too large

128
package.json

@ -1,128 +0,0 @@
{
"name": "uptime-kuma",
"version": "1.9.0",
"license": "MIT",
"repository": {
"type": "git",
"url": "https://github.com/louislam/uptime-kuma.git"
},
"engines": {
"node": "14.*"
},
"scripts": {
"install-legacy": "npm install --legacy-peer-deps",
"update-legacy": "npm update --legacy-peer-deps",
"lint:js": "eslint --ext \".js,.vue\" --ignore-path .gitignore .",
"lint:style": "stylelint \"**/*.{vue,css,scss}\" --ignore-path .gitignore",
"lint": "npm run lint:js && npm run lint:style",
"dev": "vite --host --config ./config/vite.config.js",
"start": "npm run start-server",
"start-server": "node server/server.js",
"start-server-dev": "cross-env NODE_ENV=development node server/server.js",
"build": "vite build --config ./config/vite.config.js",
"test": "node test/prepare-test-server.js && node server/server.js --port=3002 --data-dir=./data/test/ --test",
"test-with-build": "npm run build && npm test",
"jest": "node test/prepare-jest.js && npm run jest-frontend && npm run jest-backend && jest --config=./config/jest.config.js",
"jest-frontend": "cross-env TEST_FRONTEND=1 jest --config=./config/jest-frontend.config.js",
"jest-backend": "cross-env TEST_BACKEND=1 jest --config=./config/jest-backend.config.js",
"tsc": "tsc",
"vite-preview-dist": "vite preview --host --config ./config/vite.config.js",
"build-docker": "npm run build-docker-debian && npm run build-docker-alpine",
"build-docker-alpine-base": "docker buildx build -f docker/alpine-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base-alpine . --push",
"build-docker-debian-base": "docker buildx build -f docker/debian-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base-debian . --push",
"build-docker-alpine": "docker buildx build -f docker/dockerfile-alpine --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:alpine -t louislam/uptime-kuma:1-alpine -t louislam/uptime-kuma:1.9.0-alpine --target release . --push",
"build-docker-debian": "docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma -t louislam/uptime-kuma:1 -t louislam/uptime-kuma:1.9.0 -t louislam/uptime-kuma:debian -t louislam/uptime-kuma:1-debian -t louislam/uptime-kuma:1.9.0-debian --target release . --push",
"build-docker-nightly": "docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly --target nightly . --push",
"build-docker-nightly-alpine": "docker buildx build -f docker/dockerfile-alpine --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly-alpine --target nightly . --push",
"build-docker-nightly-amd64": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:nightly-amd64 --target nightly . --push --progress plain",
"upload-artifacts": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:upload-artifact --build-arg GITHUB_TOKEN --target upload-artifact . --progress plain",
"setup": "git checkout 1.9.0 && npm ci --production && npm run download-dist",
"download-dist": "node extra/download-dist.js",
"update-version": "node extra/update-version.js",
"mark-as-nightly": "node extra/mark-as-nightly.js",
"reset-password": "node extra/reset-password.js",
"compile-install-script": "@powershell -NoProfile -ExecutionPolicy Unrestricted -Command ./extra/compile-install-script.ps1",
"test-install-script-centos7": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/centos7.dockerfile .",
"test-install-script-alpine3": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/alpine3.dockerfile .",
"test-install-script-ubuntu": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu.dockerfile .",
"test-install-script-ubuntu1604": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu1604.dockerfile .",
"test-nodejs16": "docker build --progress plain -f test/ubuntu-nodejs16.dockerfile .",
"simple-dns-server": "node extra/simple-dns-server.js",
"update-language-files-with-base-lang": "cd extra/update-language-files && node index.js %npm_config_base_lang% && eslint ../../src/languages/**.js --fix",
"update-language-files": "cd extra/update-language-files && node index.js && eslint ../../src/languages/**.js --fix"
},
"dependencies": {
"@fortawesome/fontawesome-svg-core": "~1.2.36",
"@fortawesome/free-regular-svg-icons": "~5.15.4",
"@fortawesome/free-solid-svg-icons": "~5.15.4",
"@fortawesome/vue-fontawesome": "~3.0.0-4",
"@louislam/sqlite3": "~6.0.0",
"@popperjs/core": "~2.10.2",
"args-parser": "~1.3.0",
"axios": "~0.21.4",
"bcryptjs": "~2.4.3",
"bootstrap": "~5.1.1",
"chardet": "^1.3.0",
"bree": "~6.3.1",
"chart.js": "~3.5.1",
"chartjs-adapter-dayjs": "~1.0.0",
"command-exists": "~1.2.9",
"compare-versions": "~3.6.0",
"dayjs": "~1.10.7",
"express": "~4.17.1",
"express-basic-auth": "~1.2.0",
"form-data": "~4.0.0",
"http-graceful-shutdown": "~3.1.4",
"iconv-lite": "^0.6.3",
"jsonwebtoken": "~8.5.1",
"nodemailer": "~6.6.5",
"notp": "~2.0.3",
"password-hash": "~1.2.2",
"postcss-rtlcss": "~3.4.1",
"postcss-scss": "~4.0.1",
"prom-client": "~13.2.0",
"prometheus-api-metrics": "~3.2.0",
"qrcode": "~1.4.4",
"redbean-node": "0.1.2",
"socket.io": "~4.2.0",
"socket.io-client": "~4.2.0",
"tar": "^6.1.11",
"tcp-ping": "~0.1.1",
"thirty-two": "~1.0.2",
"timezones-list": "~3.0.1",
"v-pagination-3": "~0.1.6",
"vue": "next",
"vue-chart-3": "~0.5.8",
"vue-confirm-dialog": "~1.0.2",
"vue-contenteditable": "~3.0.4",
"vue-i18n": "~9.1.9",
"vue-image-crop-upload": "~3.0.3",
"vue-multiselect": "~3.0.0-alpha.2",
"vue-qrcode": "~1.0.0",
"vue-router": "~4.0.11",
"vue-toastification": "~2.0.0-rc.1",
"vuedraggable": "~4.1.0"
},
"devDependencies": {
"@babel/eslint-parser": "~7.15.7",
"@babel/preset-env": "^7.15.8",
"@types/bootstrap": "~5.1.6",
"@vitejs/plugin-legacy": "~1.6.1",
"@vitejs/plugin-vue": "~1.9.2",
"@vue/compiler-sfc": "~3.2.19",
"babel-plugin-rewire": "~1.2.0",
"core-js": "~3.18.1",
"cross-env": "~7.0.3",
"dns2": "~2.0.1",
"eslint": "~7.32.0",
"eslint-plugin-vue": "~7.18.0",
"jest": "~27.2.4",
"jest-puppeteer": "~6.0.0",
"puppeteer": "~10.4.0",
"sass": "~1.42.1",
"stylelint": "~13.13.1",
"stylelint-config-standard": "~22.0.0",
"typescript": "~4.4.3",
"vite": "~2.6.4"
}
}

BIN
public/apple-touch-icon-precomposed.png

Binary file not shown.

BIN
public/apple-touch-icon.png

Binary file not shown.

BIN
public/favicon.ico

Binary file not shown.

BIN
public/icon-192x192.png

Binary file not shown.

BIN
public/icon-512x512.png

Binary file not shown.

BIN
public/icon.png

Binary file not shown.

3
public/icon.svg

File diff suppressed because one or more lines are too long

19
public/manifest.json

@ -1,19 +0,0 @@
{
"name": "Uptime Kuma",
"short_name": "Uptime Kuma",
"start_url": "/",
"background_color": "#fff",
"display": "standalone",
"icons": [
{
"src": "icon-192x192.png",
"sizes": "192x192",
"type": "image/png"
},
{
"src": "icon-512x512.png",
"sizes": "512x512",
"type": "image/png"
}
]
}

51
server/auth.js

@ -1,51 +0,0 @@
const basicAuth = require("express-basic-auth");
const passwordHash = require("./password-hash");
const { R } = require("redbean-node");
const { setting } = require("./util-server");
const { debug } = require("../src/util");
/**
*
* @param username : string
* @param password : string
* @returns {Promise<Bean|null>}
*/
exports.login = async function (username, password) {
let user = await R.findOne("user", " username = ? AND active = 1 ", [
username,
]);
if (user && passwordHash.verify(password, user.password)) {
// Upgrade the hash to bcrypt
if (passwordHash.needRehash(user.password)) {
await R.exec("UPDATE `user` SET password = ? WHERE id = ? ", [
passwordHash.generate(password),
user.id,
]);
}
return user;
}
return null;
};
function myAuthorizer(username, password, callback) {
setting("disableAuth").then((result) => {
if (result) {
callback(null, true);
} else {
exports.login(username, password).then((user) => {
callback(null, user != null);
});
}
});
}
exports.basicAuth = basicAuth({
authorizer: myAuthorizer,
authorizeAsync: true,
challenge: true,
});
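
Since exports.basicAuth is standard express middleware, it can be mounted per route; a sketch, assuming an express app (the /metrics route is illustrative):

const express = require("express");
const { basicAuth } = require("./auth");

const app = express();

// Requests are challenged for credentials unless the "disableAuth" setting is on
app.get("/metrics", basicAuth, (req, res) => {
    res.send("ok");
});

app.listen(3001);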

42
server/check-version.js

@ -1,42 +0,0 @@
const { setSetting } = require("./util-server");
const axios = require("axios");
exports.version = require("../package.json").version;
exports.latestVersion = null;
let interval;
exports.startInterval = () => {
let check = async () => {
try {
const res = await axios.get("https://raw.githubusercontent.com/louislam/uptime-kuma/master/package.json");
if (typeof res.data === "string") {
res.data = JSON.parse(res.data);
}
// For debug
if (process.env.TEST_CHECK_VERSION === "1") {
res.data.version = "1000.0.0";
}
exports.latestVersion = res.data.version;
} catch (_) { }
};
check();
interval = setInterval(check, 3600 * 1000 * 48);
};
exports.enableCheckUpdate = async (value) => {
await setSetting("checkUpdate", value);
clearInterval(interval);
if (value) {
exports.startInterval();
}
};
exports.socket = null;

100
server/client.js

@ -1,100 +0,0 @@
/*
* For Client Socket
*/
const { TimeLogger } = require("../src/util");
const { R } = require("redbean-node");
const { io } = require("./server");
const { setting } = require("./util-server");
const checkVersion = require("./check-version");
async function sendNotificationList(socket) {
const timeLogger = new TimeLogger();
let result = [];
let list = await R.find("notification", " user_id = ? ", [
socket.userID,
]);
for (let bean of list) {
result.push(bean.export());
}
io.to(socket.userID).emit("notificationList", result);
timeLogger.print("Send Notification List");
return list;
}
/**
* Send Heartbeat History list to socket
* @param toUser True = send to all browsers with the same user id, False = send to the current browser only
* @param overwrite Overwrite client-side's heartbeat list
*/
async function sendHeartbeatList(socket, monitorID, toUser = false, overwrite = false) {
const timeLogger = new TimeLogger();
let list = await R.getAll(`
SELECT * FROM heartbeat
WHERE monitor_id = ?
ORDER BY time DESC
LIMIT 100
`, [
monitorID,
]);
let result = list.reverse();
if (toUser) {
io.to(socket.userID).emit("heartbeatList", monitorID, result, overwrite);
} else {
socket.emit("heartbeatList", monitorID, result, overwrite);
}
timeLogger.print(`[Monitor: ${monitorID}] sendHeartbeatList`);
}
/**
* Important heartbeat list (aka event list)
* @param socket
* @param monitorID
* @param toUser True = send to all browsers with the same user id, False = send to the current browser only
* @param overwrite Overwrite client-side's heartbeat list
*/
async function sendImportantHeartbeatList(socket, monitorID, toUser = false, overwrite = false) {
const timeLogger = new TimeLogger();
let list = await R.find("heartbeat", `
monitor_id = ?
AND important = 1
ORDER BY time DESC
LIMIT 500
`, [
monitorID,
]);
timeLogger.print(`[Monitor: ${monitorID}] sendImportantHeartbeatList`);
if (toUser) {
io.to(socket.userID).emit("importantHeartbeatList", monitorID, list, overwrite);
} else {
socket.emit("importantHeartbeatList", monitorID, list, overwrite);
}
}
async function sendInfo(socket) {
socket.emit("info", {
version: checkVersion.version,
latestVersion: checkVersion.latestVersion,
primaryBaseURL: await setting("primaryBaseURL")
});
}
module.exports = {
sendNotificationList,
sendImportantHeartbeatList,
sendHeartbeatList,
sendInfo
};
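
These helpers expect to be called from socket.io handlers once socket.userID has been set during login; a sketch, assuming the io instance from ./server (the "refresh" event name is illustrative):

const { io } = require("./server");
const { sendInfo, sendNotificationList } = require("./client");

io.on("connection", (socket) => {
    // socket.userID is assumed to have been set by the login flow elsewhere
    socket.on("refresh", async () => {
        await sendInfo(socket);
        await sendNotificationList(socket);
    });
});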

7
server/config.js

@ -1,7 +0,0 @@
const args = require("args-parser")(process.argv);
const demoMode = args["demo"] || false;
module.exports = {
args,
demoMode
};

378
server/database.js

@ -1,378 +0,0 @@
const fs = require("fs");
const { R } = require("redbean-node");
const { setSetting, setting } = require("./util-server");
const { debug, sleep } = require("../src/util");
const dayjs = require("dayjs");
const knex = require("knex");
/**
* Database & App Data Folder
*/
class Database {
static templatePath = "./db/kuma.db";
/**
* Data Dir (Default: ./data)
*/
static dataDir;
/**
* User Upload Dir (Default: ./data/upload)
*/
static uploadDir;
static path;
/**
* @type {boolean}
*/
static patched = false;
/**
* For Backup only
*/
static backupPath = null;
/**
* Add a patch filename as the key
* Values:
* true: Apply the patch regardless of order
* false: Do nothing
* { parents: [] }: Apply the listed parent patches first
*/
static patchList = {
"patch-setting-value-type.sql": true,
"patch-improve-performance.sql": true,
"patch-2fa.sql": true,
"patch-add-retry-interval-monitor.sql": true,
"patch-incident-table.sql": true,
"patch-group-table.sql": true,
"patch-monitor-push_token.sql": true,
"patch-http-monitor-method-body-and-headers.sql": true,
}
/**
* The final version should be 10 after merged tag feature
* @deprecated Use patchList for any new feature
*/
static latestVersion = 10;
static noReject = true;
static init(args) {
// Data Directory (must end with "/")
Database.dataDir = process.env.DATA_DIR || args["data-dir"] || "./data/";
Database.path = Database.dataDir + "kuma.db";
if (! fs.existsSync(Database.dataDir)) {
fs.mkdirSync(Database.dataDir, { recursive: true });
}
Database.uploadDir = Database.dataDir + "upload/";
if (! fs.existsSync(Database.uploadDir)) {
fs.mkdirSync(Database.uploadDir, { recursive: true });
}
console.log(`Data Dir: ${Database.dataDir}`);
}
static async connect() {
const acquireConnectionTimeout = 120 * 1000;
const Dialect = require("knex/lib/dialects/sqlite3/index.js");
Dialect.prototype._driver = () => require("@louislam/sqlite3");
const knexInstance = knex({
client: Dialect,
connection: {
filename: Database.path,
acquireConnectionTimeout: acquireConnectionTimeout,
},
useNullAsDefault: true,
pool: {
min: 1,
max: 1,
idleTimeoutMillis: 120 * 1000,
propagateCreateError: false,
acquireTimeoutMillis: acquireConnectionTimeout,
}
});
R.setup(knexInstance);
if (process.env.SQL_LOG === "1") {
R.debug(true);
}
// Auto map the model to a bean object
R.freeze(true);
await R.autoloadModels("./server/model");
await R.exec("PRAGMA foreign_keys = ON");
// Change to WAL
await R.exec("PRAGMA journal_mode = WAL");
await R.exec("PRAGMA cache_size = -12000");
console.log("SQLite config:");
console.log(await R.getAll("PRAGMA journal_mode"));
console.log(await R.getAll("PRAGMA cache_size"));
console.log("SQLite Version: " + await R.getCell("SELECT sqlite_version()"));
}
static async patch() {
let version = parseInt(await setting("database_version"));
if (! version) {
version = 0;
}
console.info("Your database version: " + version);
console.info("Latest database version: " + this.latestVersion);
if (version === this.latestVersion) {
console.info("Database no need to patch");
} else if (version > this.latestVersion) {
console.info("Warning: Database version is newer than expected");
} else {
console.info("Database patch is needed");
this.backup(version);
// Try catch anything here, if gone wrong, restore the backup
try {
for (let i = version + 1; i <= this.latestVersion; i++) {
const sqlFile = `./db/patch${i}.sql`;
console.info(`Patching ${sqlFile}`);
await Database.importSQLFile(sqlFile);
console.info(`Patched ${sqlFile}`);
await setSetting("database_version", i);
}
} catch (ex) {
await Database.close();
console.error(ex);
console.error("Start Uptime-Kuma failed due to patch db failed");
console.error("Please submit the bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues");
this.restore();
process.exit(1);
}
}
await this.patch2();
}
/**
* Call it from patch() only
* @returns {Promise<void>}
*/
static async patch2() {
console.log("Database Patch 2.0 Process");
let databasePatchedFiles = await setting("databasePatchedFiles");
if (! databasePatchedFiles) {
databasePatchedFiles = {};
}
debug("Patched files:");
debug(databasePatchedFiles);
try {
for (let sqlFilename in this.patchList) {
await this.patch2Recursion(sqlFilename, databasePatchedFiles);
}
if (this.patched) {
console.log("Database Patched Successfully");
}
} catch (ex) {
await Database.close();
console.error(ex);
console.error("Start Uptime-Kuma failed due to patch db failed");
console.error("Please submit the bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues");
this.restore();
process.exit(1);
}
await setSetting("databasePatchedFiles", databasePatchedFiles);
}
/**
* Used by patch2() only
* @param sqlFilename
* @param databasePatchedFiles
*/
static async patch2Recursion(sqlFilename, databasePatchedFiles) {
let value = this.patchList[sqlFilename];
if (! value) {
console.log(sqlFilename + " skip");
return;
}
// Check if patched
if (! databasePatchedFiles[sqlFilename]) {
console.log(sqlFilename + " is not patched");
if (value.parents) {
console.log(sqlFilename + " need parents");
for (let parentSQLFilename of value.parents) {
await this.patch2Recursion(parentSQLFilename, databasePatchedFiles);
}
}
this.backup(dayjs().format("YYYYMMDDHHmmss"));
console.log(sqlFilename + " is patching");
this.patched = true;
await this.importSQLFile("./db/" + sqlFilename);
databasePatchedFiles[sqlFilename] = true;
console.log(sqlFilename + " is patched successfully");
} else {
debug(sqlFilename + " is already patched, skip");
}
}
/**
* Sadly, multiple SQL statements are not supported by many sqlite libraries, so I have to implement it myself
* @param filename
* @returns {Promise<void>}
*/
static async importSQLFile(filename) {
await R.getCell("SELECT 1");
let text = fs.readFileSync(filename).toString();
// Remove all comments (--)
let lines = text.split("\n");
lines = lines.filter((line) => {
return ! line.startsWith("--");
});
// Split statements by semicolon
// Filter out empty line
text = lines.join("\n");
let statements = text.split(";")
.map((statement) => {
return statement.trim();
})
.filter((statement) => {
return statement !== "";
});
for (let statement of statements) {
await R.exec(statement);
}
}
static getBetterSQLite3Database() {
return R.knex.client.acquireConnection();
}
/**
* Special handle, because tarn.js throw a promise reject that cannot be caught
* @returns {Promise<void>}
*/
static async close() {
const listener = (reason, p) => {
Database.noReject = false;
};
process.addListener("unhandledRejection", listener);
console.log("Closing DB");
while (true) {
Database.noReject = true;
await R.close();
await sleep(2000);
if (Database.noReject) {
break;
} else {
console.log("Waiting to close the db");
}
}
console.log("SQLite closed");
process.removeListener("unhandledRejection", listener);
}
/**
* Only one backup is made per process.
* Reset this.backupPath if you want to back up again
* @param version
*/
static backup(version) {
if (! this.backupPath) {
console.info("Backup the db");
this.backupPath = this.dataDir + "kuma.db.bak" + version;
fs.copyFileSync(Database.path, this.backupPath);
const shmPath = Database.path + "-shm";
if (fs.existsSync(shmPath)) {
this.backupShmPath = shmPath + ".bak" + version;
fs.copyFileSync(shmPath, this.backupShmPath);
}
const walPath = Database.path + "-wal";
if (fs.existsSync(walPath)) {
this.backupWalPath = walPath + ".bak" + version;
fs.copyFileSync(walPath, this.backupWalPath);
}
}
}
/**
* Restore the database from the backup if patching failed
*/
static restore() {
if (this.backupPath) {
console.error("Patch db failed!!! Restoring the backup");
const shmPath = Database.path + "-shm";
const walPath = Database.path + "-wal";
// Delete patch failed db
try {
if (fs.existsSync(Database.path)) {
fs.unlinkSync(Database.path);
}
if (fs.existsSync(shmPath)) {
fs.unlinkSync(shmPath);
}
if (fs.existsSync(walPath)) {
fs.unlinkSync(walPath);
}
} catch (e) {
console.log("Restore failed, you may need to restore the backup manually");
process.exit(1);
}
// Restore backup
fs.copyFileSync(this.backupPath, Database.path);
if (this.backupShmPath) {
fs.copyFileSync(this.backupShmPath, shmPath);
}
if (this.backupWalPath) {
fs.copyFileSync(this.backupWalPath, walPath);
}
} else {
console.log("Nothing to restore");
}
}
}
module.exports = Database;
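
As used by extra/reset-password.js above, the expected lifecycle of this class is init → connect → patch (on server start) → close; a condensed sketch:

const Database = require("./database");

(async () => {
    Database.init({ "data-dir": "./data/" }); // resolve paths, create data/upload folders
    await Database.connect();                 // wire knex + redbean-node to the sqlite file
    await Database.patch();                   // apply any pending ./db/patch*.sql files
    // ... application work ...
    await Database.close();                   // retry until tarn.js stops rejecting
})();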

57
server/image-data-uri.js

@ -1,57 +0,0 @@
/*
From https://github.com/DiegoZoracKy/image-data-uri/blob/master/lib/image-data-uri.js
Modified with 0 dependencies
*/
let fs = require("fs");
let ImageDataURI = (() => {
function decode(dataURI) {
if (!/data:image\//.test(dataURI)) {
console.log("ImageDataURI :: Error :: It seems that it is not an Image Data URI. Couldn't match \"data:image/\"");
return null;
}
let regExMatches = dataURI.match("data:(image/.*);base64,(.*)");
return {
imageType: regExMatches[1],
dataBase64: regExMatches[2],
dataBuffer: Buffer.from(regExMatches[2], "base64")
};
}
function encode(data, mediaType) {
if (!data || !mediaType) {
console.log("ImageDataURI :: Error :: Missing some of the required params: data, mediaType ");
return null;
}
mediaType = (/\//.test(mediaType)) ? mediaType : "image/" + mediaType;
let dataBase64 = (Buffer.isBuffer(data)) ? data.toString("base64") : Buffer.from(data).toString("base64");
let dataImgBase64 = "data:" + mediaType + ";base64," + dataBase64;
return dataImgBase64;
}
function outputFile(dataURI, filePath) {
filePath = filePath || "./";
return new Promise((resolve, reject) => {
let imageDecoded = decode(dataURI);
fs.writeFile(filePath, imageDecoded.dataBuffer, err => {
if (err) {
return reject("ImageDataURI :: Error :: " + JSON.stringify(err, null, 4));
}
resolve(filePath);
});
});
}
return {
decode: decode,
encode: encode,
outputFile: outputFile,
};
})();
module.exports = ImageDataURI;
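
Round-trip usage of this helper, as a sketch (the file names are illustrative):

const ImageDataURI = require("./image-data-uri");
const fs = require("fs");

// Encode a PNG buffer into a data URI ...
const dataURI = ImageDataURI.encode(fs.readFileSync("icon.png"), "png");

// ... decode it back ...
const { imageType, dataBuffer } = ImageDataURI.decode(dataURI);
console.log(imageType); // "image/png"

// ... or write the decoded image straight to disk
ImageDataURI.outputFile(dataURI, "./copy.png").then(console.log);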

31
server/jobs.js

@ -1,31 +0,0 @@
const path = require("path");
const Bree = require("bree");
const { SHARE_ENV } = require("worker_threads");
const jobs = [
{
name: "clear-old-data",
interval: "at 03:14",
}
];
const initBackgroundJobs = function (args) {
const bree = new Bree({
root: path.resolve("server", "jobs"),
jobs,
worker: {
env: SHARE_ENV,
workerData: args,
},
workerMessageHandler: (message) => {
console.log("[Background Job]:", message);
}
});
bree.start();
return bree;
};
module.exports = {
initBackgroundJobs
};
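
Each entry in the jobs array maps to a file of the same name under server/jobs/, and interval takes Bree's human-readable schedules; a hypothetical second entry might look like this (some-other-job is illustrative, not in the repo):

const jobs = [
    {
        name: "clear-old-data",      // runs server/jobs/clear-old-data.js
        interval: "at 03:14",        // every day at 03:14
    },
    {
        name: "some-other-job",      // hypothetical: server/jobs/some-other-job.js
        interval: "every 30 minutes",
    },
];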

40
server/jobs/clear-old-data.js

@ -1,40 +0,0 @@
const { log, exit, connectDb } = require("./util-worker");
const { R } = require("redbean-node");
const { setSetting, setting } = require("../util-server");
const DEFAULT_KEEP_PERIOD = 180;
(async () => {
await connectDb();
let period = await setting("keepDataPeriodDays");
// Set Default Period
if (period == null) {
await setSetting("keepDataPeriodDays", DEFAULT_KEEP_PERIOD, "general");
period = DEFAULT_KEEP_PERIOD;
}
// Try to parse the setting
// parseInt() returns NaN instead of throwing, so validate the result explicitly
let parsedPeriod = parseInt(period);
if (isNaN(parsedPeriod)) {
log("Failed to parse setting, resetting to default..");
await setSetting("keepDataPeriodDays", DEFAULT_KEEP_PERIOD, "general");
parsedPeriod = DEFAULT_KEEP_PERIOD;
}
log(`Clearing Data older than ${parsedPeriod} days...`);
try {
await R.exec(
"DELETE FROM heartbeat WHERE time < DATETIME('now', '-' || ? || ' days') ",
[parsedPeriod]
);
} catch (e) {
log(`Failed to clear old data: ${e.message}`);
}
exit();
})();

39
server/jobs/util-worker.js

@ -1,39 +0,0 @@
const { parentPort, workerData } = require("worker_threads");
const Database = require("../database");
const path = require("path");
const log = function (any) {
if (parentPort) {
parentPort.postMessage(any);
}
};
const exit = function (error) {
if (error && error != 0) {
process.exit(error);
} else {
if (parentPort) {
parentPort.postMessage("done");
} else {
process.exit(0);
}
}
};
const connectDb = async function () {
const dbPath = path.join(
process.env.DATA_DIR || workerData["data-dir"] || "./data/"
);
Database.init({
"data-dir": dbPath,
});
await Database.connect();
};
module.exports = {
log,
exit,
connectDb,
};

34
server/model/group.js

@ -1,34 +0,0 @@
const { BeanModel } = require("redbean-node/dist/bean-model");
const { R } = require("redbean-node");
class Group extends BeanModel {
async toPublicJSON() {
let monitorBeanList = await this.getMonitorList();
let monitorList = [];
for (let bean of monitorBeanList) {
monitorList.push(await bean.toPublicJSON());
}
return {
id: this.id,
name: this.name,
weight: this.weight,
monitorList,
};
}
async getMonitorList() {
return R.convertToBeans("monitor", await R.getAll(`
SELECT monitor.* FROM monitor, monitor_group
WHERE monitor.id = monitor_group.monitor_id
AND group_id = ?
ORDER BY monitor_group.weight
`, [
this.id,
]));
}
}
module.exports = Group;

39
server/model/heartbeat.js

@ -1,39 +0,0 @@
const dayjs = require("dayjs");
const utc = require("dayjs/plugin/utc");
let timezone = require("dayjs/plugin/timezone");
dayjs.extend(utc);
dayjs.extend(timezone);
const { BeanModel } = require("redbean-node/dist/bean-model");
/**
* status:
* 0 = DOWN
* 1 = UP
* 2 = PENDING
*/
class Heartbeat extends BeanModel {
toPublicJSON() {
return {
status: this.status,
time: this.time,
msg: "", // Hide for public
ping: this.ping,
};
}
toJSON() {
return {
monitorID: this.monitor_id,
status: this.status,
time: this.time,
msg: this.msg,
ping: this.ping,
important: this.important,
duration: this.duration,
};
}
}
module.exports = Heartbeat;

18
server/model/incident.js

@ -1,18 +0,0 @@
const { BeanModel } = require("redbean-node/dist/bean-model");
class Incident extends BeanModel {
toPublicJSON() {
return {
id: this.id,
style: this.style,
title: this.title,
content: this.content,
pin: this.pin,
createdDate: this.createdDate,
lastUpdatedDate: this.lastUpdatedDate,
};
}
}
module.exports = Incident;

609
server/model/monitor.js

@ -1,609 +0,0 @@
const https = require("https");
const dayjs = require("dayjs");
const utc = require("dayjs/plugin/utc");
let timezone = require("dayjs/plugin/timezone");
dayjs.extend(utc);
dayjs.extend(timezone);
const axios = require("axios");
const { Prometheus } = require("../prometheus");
const { debug, UP, DOWN, PENDING, flipStatus, TimeLogger } = require("../../src/util");
const { tcping, ping, dnsResolve, checkCertificate, checkStatusCode, getTotalClientInRoom, setting } = require("../util-server");
const { R } = require("redbean-node");
const { BeanModel } = require("redbean-node/dist/bean-model");
const { Notification } = require("../notification");
const { demoMode } = require("../config");
const version = require("../../package.json").version;
const apicache = require("../modules/apicache");
/**
* status:
* 0 = DOWN
* 1 = UP
* 2 = PENDING
*/
class Monitor extends BeanModel {
/**
* Return an object that is ready to be parsed into JSON for the public
* Only show necessary data to the public
*/
async toPublicJSON() {
return {
id: this.id,
name: this.name,
};
}
/**
* Return an object that is ready to be parsed into JSON
*/
async toJSON() {
let notificationIDList = {};
let list = await R.find("monitor_notification", " monitor_id = ? ", [
this.id,
]);
for (let bean of list) {
notificationIDList[bean.notification_id] = true;
}
const tags = await R.getAll("SELECT mt.*, tag.name, tag.color FROM monitor_tag mt JOIN tag ON mt.tag_id = tag.id WHERE mt.monitor_id = ?", [this.id]);
return {
id: this.id,
name: this.name,
url: this.url,
method: this.method,
body: this.body,
headers: this.headers,
hostname: this.hostname,
port: this.port,
maxretries: this.maxretries,
weight: this.weight,
active: this.active,
type: this.type,
interval: this.interval,
retryInterval: this.retryInterval,
keyword: this.keyword,
ignoreTls: this.getIgnoreTls(),
upsideDown: this.isUpsideDown(),
maxredirects: this.maxredirects,
accepted_statuscodes: this.getAcceptedStatuscodes(),
dns_resolve_type: this.dns_resolve_type,
dns_resolve_server: this.dns_resolve_server,
dns_last_result: this.dns_last_result,
pushToken: this.pushToken,
notificationIDList,
tags: tags,
};
}
/**
* Parse to boolean
* @returns {boolean}
*/
getIgnoreTls() {
return Boolean(this.ignoreTls);
}
/**
* Parse to boolean
* @returns {boolean}
*/
isUpsideDown() {
return Boolean(this.upsideDown);
}
getAcceptedStatuscodes() {
return JSON.parse(this.accepted_statuscodes_json);
}
start(io) {
let previousBeat = null;
let retries = 0;
let prometheus = new Prometheus(this);
const beat = async () => {
// Expose here for prometheus update
// undefined if not https
let tlsInfo = undefined;
if (! previousBeat) {
previousBeat = await R.findOne("heartbeat", " monitor_id = ? ORDER BY time DESC", [
this.id,
]);
}
const isFirstBeat = !previousBeat;
let bean = R.dispense("heartbeat");
bean.monitor_id = this.id;
bean.time = R.isoDateTime(dayjs.utc());
bean.status = DOWN;
if (this.isUpsideDown()) {
bean.status = flipStatus(bean.status);
}
// Duration
if (! isFirstBeat) {
bean.duration = dayjs(bean.time).diff(dayjs(previousBeat.time), "second");
} else {
bean.duration = 0;
}
try {
if (this.type === "http" || this.type === "keyword") {
// Do not do any queries/high loading things before the "bean.ping"
let startTime = dayjs().valueOf();
const options = {
url: this.url,
method: (this.method || "get").toLowerCase(),
...(this.body ? { data: JSON.parse(this.body) } : {}),
timeout: this.interval * 1000 * 0.8,
headers: {
"Accept": "*/*",
"User-Agent": "Uptime-Kuma/" + version,
...(this.headers ? JSON.parse(this.headers) : {}),
},
httpsAgent: new https.Agent({
maxCachedSessions: 0, // Use Custom agent to disable session reuse (https://github.com/nodejs/node/issues/3940)
rejectUnauthorized: ! this.getIgnoreTls(),
}),
maxRedirects: this.maxredirects,
validateStatus: (status) => {
return checkStatusCode(status, this.getAcceptedStatuscodes());
},
};
let res = await axios.request(options);
bean.msg = `${res.status} - ${res.statusText}`;
bean.ping = dayjs().valueOf() - startTime;
// Check certificate if https is used
let certInfoStartTime = dayjs().valueOf();
if (this.getUrl()?.protocol === "https:") {
try {
tlsInfo = await this.updateTlsInfo(checkCertificate(res));
} catch (e) {
if (e.message !== "No TLS certificate in response") {
console.error(e.message);
}
}
}
if (process.env.TIMELOGGER === "1") {
debug("Cert Info Query Time: " + (dayjs().valueOf() - certInfoStartTime) + "ms");
}
if (process.env.UPTIME_KUMA_LOG_RESPONSE_BODY_MONITOR_ID == this.id) {
console.log(res.data);
}
if (this.type === "http") {
bean.status = UP;
} else {
let data = res.data;
// Convert to string for object/array
if (typeof data !== "string") {
data = JSON.stringify(data);
}
if (data.includes(this.keyword)) {
bean.msg += ", keyword is found";
bean.status = UP;
} else {
throw new Error(bean.msg + ", but keyword is not found");
}
}
} else if (this.type === "port") {
bean.ping = await tcping(this.hostname, this.port);
bean.msg = "";
bean.status = UP;
} else if (this.type === "ping") {
bean.ping = await ping(this.hostname);
bean.msg = "";
bean.status = UP;
} else if (this.type === "dns") {
let startTime = dayjs().valueOf();
let dnsMessage = "";
let dnsRes = await dnsResolve(this.hostname, this.dns_resolve_server, this.dns_resolve_type);
bean.ping = dayjs().valueOf() - startTime;
if (this.dns_resolve_type == "A" || this.dns_resolve_type == "AAAA" || this.dns_resolve_type == "TXT") {
dnsMessage += "Records: ";
dnsMessage += dnsRes.join(" | ");
} else if (this.dns_resolve_type == "CNAME" || this.dns_resolve_type == "PTR") {
dnsMessage = dnsRes[0];
} else if (this.dns_resolve_type == "CAA") {
dnsMessage = dnsRes[0].issue;
} else if (this.dns_resolve_type == "MX") {
dnsRes.forEach(record => {
dnsMessage += `Hostname: ${record.exchange} - Priority: ${record.priority} | `;
});
dnsMessage = dnsMessage.slice(0, -2);
} else if (this.dns_resolve_type == "NS") {
dnsMessage += "Servers: ";
dnsMessage += dnsRes.join(" | ");
} else if (this.dns_resolve_type == "SOA") {
dnsMessage += `NS-Name: ${dnsRes.nsname} | Hostmaster: ${dnsRes.hostmaster} | Serial: ${dnsRes.serial} | Refresh: ${dnsRes.refresh} | Retry: ${dnsRes.retry} | Expire: ${dnsRes.expire} | MinTTL: ${dnsRes.minttl}`;
} else if (this.dns_resolve_type == "SRV") {
dnsRes.forEach(record => {
dnsMessage += `Name: ${record.name} | Port: ${record.port} | Priority: ${record.priority} | Weight: ${record.weight} | `;
});
dnsMessage = dnsMessage.slice(0, -2);
}
if (this.dnsLastResult !== dnsMessage) {
R.exec("UPDATE `monitor` SET dns_last_result = ? WHERE id = ? ", [
dnsMessage,
this.id
]);
}
bean.msg = dnsMessage;
bean.status = UP;
} else if (this.type === "push") { // Type: Push
const time = R.isoDateTime(dayjs.utc().subtract(this.interval, "second"));
let heartbeatCount = await R.count("heartbeat", " monitor_id = ? AND time > ? ", [
this.id,
time
]);
debug("heartbeatCount" + heartbeatCount + " " + time);
if (heartbeatCount <= 0) {
throw new Error("No heartbeat in the time window");
} else {
// No need to insert successful heartbeat for push type, so end here
retries = 0;
this.heartbeatInterval = setTimeout(beat, this.interval * 1000);
return;
}
} else if (this.type === "steam") {
const steamApiUrl = "https://api.steampowered.com/IGameServersService/GetServerList/v1/";
const steamAPIKey = await setting("steamAPIKey");
const filter = `addr\\${this.hostname}:${this.port}`;
if (!steamAPIKey) {
throw new Error("Steam API Key not found");
}
let res = await axios.get(steamApiUrl, {
timeout: this.interval * 1000 * 0.8,
headers: {
"Accept": "*/*",
"User-Agent": "Uptime-Kuma/" + version,
},
httpsAgent: new https.Agent({
maxCachedSessions: 0, // Use Custom agent to disable session reuse (https://github.com/nodejs/node/issues/3940)
rejectUnauthorized: ! this.getIgnoreTls(),
}),
maxRedirects: this.maxredirects,
validateStatus: (status) => {
return checkStatusCode(status, this.getAcceptedStatuscodes());
},
params: {
filter: filter,
key: steamAPIKey,
}
});
if (res.data.response && res.data.response.servers && res.data.response.servers.length > 0) {
bean.status = UP;
bean.msg = res.data.response.servers[0].name;
try {
bean.ping = await ping(this.hostname);
} catch (_) { }
} else {
throw new Error("Server not found on Steam");
}
} else {
bean.msg = "Unknown Monitor Type";
bean.status = PENDING;
}
if (this.isUpsideDown()) {
bean.status = flipStatus(bean.status);
if (bean.status === DOWN) {
throw new Error("Flip UP to DOWN");
}
}
retries = 0;
} catch (error) {
bean.msg = error.message;
// If UP comes in here, it must be upside-down mode
// Just reset the retries
if (this.isUpsideDown() && bean.status === UP) {
retries = 0;
} else if ((this.maxretries > 0) && (retries < this.maxretries)) {
retries++;
bean.status = PENDING;
}
}
let beatInterval = this.interval;
let isImportant = Monitor.isImportantBeat(isFirstBeat, previousBeat?.status, bean.status);
// Mark as important if status changed, ignore pending pings,
// Don't notify if disrupted changes to up
if (isImportant) {
bean.important = true;
await Monitor.sendNotification(isFirstBeat, this, bean);
} else {
bean.important = false;
}
if (bean.status === UP) {
console.info(`Monitor #${this.id} '${this.name}': Successful Response: ${bean.ping} ms | Interval: ${beatInterval} seconds | Type: ${this.type}`);
} else if (bean.status === PENDING) {
if (this.retryInterval > 0) {
beatInterval = this.retryInterval;
}
console.warn(`Monitor #${this.id} '${this.name}': Pending: ${bean.msg} | Max retries: ${this.maxretries} | Retry: ${retries} | Retry Interval: ${beatInterval} seconds | Type: ${this.type}`);
} else {
console.warn(`Monitor #${this.id} '${this.name}': Failing: ${bean.msg} | Interval: ${beatInterval} seconds | Type: ${this.type}`);
}
io.to(this.user_id).emit("heartbeat", bean.toJSON());
Monitor.sendStats(io, this.id, this.user_id);
await R.store(bean);
prometheus.update(bean, tlsInfo);
previousBeat = bean;
if (! this.isStop) {
if (demoMode) {
if (beatInterval < 20) {
console.log("beat interval too low, reset to 20s");
beatInterval = 20;
}
}
this.heartbeatInterval = setTimeout(beat, beatInterval * 1000);
}
};
// Delay Push Type
if (this.type === "push") {
setTimeout(() => {
beat();
}, this.interval * 1000);
} else {
beat();
}
}
stop() {
clearTimeout(this.heartbeatInterval);
this.isStop = true;
}
/**
* Helper method:
* Returns a URL object for further usage,
* or null if the url is invalid
* @returns {null|URL}
*/
getUrl() {
try {
return new URL(this.url);
} catch (_) {
return null;
}
}
/**
* Store TLS info to database
* @param checkCertificateResult
* @returns {Promise<object>}
*/
async updateTlsInfo(checkCertificateResult) {
let tls_info_bean = await R.findOne("monitor_tls_info", "monitor_id = ?", [
this.id,
]);
if (tls_info_bean == null) {
tls_info_bean = R.dispense("monitor_tls_info");
tls_info_bean.monitor_id = this.id;
}
tls_info_bean.info_json = JSON.stringify(checkCertificateResult);
await R.store(tls_info_bean);
return checkCertificateResult;
}
static async sendStats(io, monitorID, userID) {
const hasClients = getTotalClientInRoom(io, userID) > 0;
if (hasClients) {
await Monitor.sendAvgPing(24, io, monitorID, userID);
await Monitor.sendUptime(24, io, monitorID, userID);
await Monitor.sendUptime(24 * 30, io, monitorID, userID);
await Monitor.sendCertInfo(io, monitorID, userID);
} else {
debug("No clients in the room, no need to send stats");
}
}
/**
*
* @param duration : int Hours
*/
static async sendAvgPing(duration, io, monitorID, userID) {
const timeLogger = new TimeLogger();
let avgPing = parseInt(await R.getCell(`
SELECT AVG(ping)
FROM heartbeat
WHERE time > DATETIME('now', ? || ' hours')
AND ping IS NOT NULL
AND monitor_id = ? `, [
-duration,
monitorID,
]));
timeLogger.print(`[Monitor: ${monitorID}] avgPing`);
io.to(userID).emit("avgPing", monitorID, avgPing);
}
static async sendCertInfo(io, monitorID, userID) {
let tls_info = await R.findOne("monitor_tls_info", "monitor_id = ?", [
monitorID,
]);
if (tls_info != null) {
io.to(userID).emit("certInfo", monitorID, tls_info.info_json);
}
}
/**
* Uptime with calculation
* Calculation based on:
* https://www.uptrends.com/support/kb/reporting/calculation-of-uptime-and-downtime
* @param duration : int Hours
*/
static async calcUptime(duration, monitorID) {
const timeLogger = new TimeLogger();
const startTime = R.isoDateTime(dayjs.utc().subtract(duration, "hour"));
// Handle if heartbeat duration longer than the target duration
// e.g. If the last beat's duration extends past the 24-hour window, only the portion inside the window is counted (THEN case in SQL)
let result = await R.getRow(`
SELECT
-- SUM all duration, also trim off the beat out of time window
SUM(
CASE
WHEN (JULIANDAY(\`time\`) - JULIANDAY(?)) * 86400 < duration
THEN (JULIANDAY(\`time\`) - JULIANDAY(?)) * 86400
ELSE duration
END
) AS total_duration,
-- SUM all uptime duration, also trim off the beat out of time window
SUM(
CASE
WHEN (status = 1)
THEN
CASE
WHEN (JULIANDAY(\`time\`) - JULIANDAY(?)) * 86400 < duration
THEN (JULIANDAY(\`time\`) - JULIANDAY(?)) * 86400
ELSE duration
END
END
) AS uptime_duration
FROM heartbeat
WHERE time > ?
AND monitor_id = ?
`, [
startTime, startTime, startTime, startTime, startTime,
monitorID,
]);
timeLogger.print(`[Monitor: ${monitorID}][${duration}] sendUptime`);
let totalDuration = result.total_duration;
let uptimeDuration = result.uptime_duration;
let uptime = 0;
if (totalDuration > 0) {
uptime = uptimeDuration / totalDuration;
if (uptime < 0) {
uptime = 0;
}
} else {
// Handle new monitor with only one beat, because the beat's duration = 0
let status = parseInt(await R.getCell("SELECT `status` FROM heartbeat WHERE monitor_id = ?", [ monitorID ]));
if (status === UP) {
uptime = 1;
}
}
return uptime;
}
/**
* Send Uptime
* @param duration : int Hours
*/
static async sendUptime(duration, io, monitorID, userID) {
const uptime = await this.calcUptime(duration, monitorID);
io.to(userID).emit("uptime", monitorID, duration, uptime);
}
static isImportantBeat(isFirstBeat, previousBeatStatus, currentBeatStatus) {
// * ? -> ANY STATUS = important [isFirstBeat]
// UP -> PENDING = not important
// * UP -> DOWN = important
// UP -> UP = not important
// PENDING -> PENDING = not important
// * PENDING -> DOWN = important
// PENDING -> UP = not important
// DOWN -> PENDING = this case does not exist
// DOWN -> DOWN = not important
// * DOWN -> UP = important
let isImportant = isFirstBeat ||
(previousBeatStatus === UP && currentBeatStatus === DOWN) ||
(previousBeatStatus === DOWN && currentBeatStatus === UP) ||
(previousBeatStatus === PENDING && currentBeatStatus === DOWN);
return isImportant;
}
static async sendNotification(isFirstBeat, monitor, bean) {
if (!isFirstBeat || bean.status === DOWN) {
let notificationList = await R.getAll("SELECT notification.* FROM notification, monitor_notification WHERE monitor_id = ? AND monitor_notification.notification_id = notification.id ", [
monitor.id,
]);
let text;
if (bean.status === UP) {
text = "✅ Up";
} else {
text = "🔴 Down";
}
let msg = `[${monitor.name}] [${text}] ${bean.msg}`;
for (let notification of notificationList) {
try {
await Notification.send(JSON.parse(notification.config), msg, await monitor.toJSON(), bean.toJSON());
} catch (e) {
console.error("Cannot send notification to " + notification.name);
console.log(e);
}
}
// Clear Status Page Cache
apicache.clear();
}
}
}
module.exports = Monitor;
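
To make calcUptime() concrete: it divides the UP seconds by the total covered seconds inside the window, clipping any beat that straddles the window's start. A worked example under assumed numbers:

// Assume a 24-hour window fully covered by heartbeats,
// with a single 30-minute outage:
const totalDuration = 86400;          // total_duration from the SQL above, in seconds
const uptimeDuration = 86400 - 1800;  // uptime_duration: seconds spent with status = 1 (UP)
const uptime = uptimeDuration / totalDuration;
console.log((uptime * 100).toFixed(3) + "%"); // "97.917%"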

13
server/model/tag.js

@ -1,13 +0,0 @@
const { BeanModel } = require("redbean-node/dist/bean-model");
class Tag extends BeanModel {
toJSON() {
return {
id: this._id,
name: this._name,
color: this._color,
};
}
}
module.exports = Tag;

21
server/model/user.js

@ -1,21 +0,0 @@
const { BeanModel } = require("redbean-node/dist/bean-model");
const passwordHash = require("../password-hash");
const { R } = require("redbean-node");
class User extends BeanModel {
/**
* Executes directly; no need to call R.store()
* @param newPassword
* @returns {Promise<void>}
*/
async resetPassword(newPassword) {
await R.exec("UPDATE `user` SET password = ? WHERE id = ? ", [
passwordHash.generate(newPassword),
this.id
]);
this.password = newPassword;
}
}
module.exports = User;

749
server/modules/apicache/apicache.js

@ -1,749 +0,0 @@
let url = require("url");
let MemoryCache = require("./memory-cache");
let t = {
ms: 1,
second: 1000,
minute: 60000,
hour: 3600000,
day: 3600000 * 24,
week: 3600000 * 24 * 7,
month: 3600000 * 24 * 30,
};
let instances = [];
let matches = function (a) {
return function (b) {
return a === b;
};
};
let doesntMatch = function (a) {
return function (b) {
return !matches(a)(b);
};
};
let logDuration = function (d, prefix) {
let str = d > 1000 ? (d / 1000).toFixed(2) + "sec" : d + "ms";
return "\x1b[33m- " + (prefix ? prefix + " " : "") + str + "\x1b[0m";
};
function getSafeHeaders(res) {
return res.getHeaders ? res.getHeaders() : res._headers;
}
function ApiCache() {
let memCache = new MemoryCache();
let globalOptions = {
debug: false,
defaultDuration: 3600000,
enabled: true,
appendKey: [],
jsonp: false,
redisClient: false,
headerBlacklist: [],
statusCodes: {
include: [],
exclude: [],
},
events: {
expire: undefined,
},
headers: {
// 'cache-control': 'no-cache' // example of header overwrite
},
trackPerformance: false,
respectCacheControl: false,
};
let middlewareOptions = [];
let instance = this;
let index = null;
let timers = {};
let performanceArray = []; // for tracking cache hit rate
instances.push(this);
this.id = instances.length;
function debug(a, b, c, d) {
let arr = ["\x1b[36m[apicache]\x1b[0m", a, b, c, d].filter(function (arg) {
return arg !== undefined;
});
let debugEnv = process.env.DEBUG && process.env.DEBUG.split(",").indexOf("apicache") !== -1;
return (globalOptions.debug || debugEnv) && console.log.apply(null, arr);
}
function shouldCacheResponse(request, response, toggle) {
let opt = globalOptions;
let codes = opt.statusCodes;
if (!response) {
return false;
}
if (toggle && !toggle(request, response)) {
return false;
}
if (codes.exclude && codes.exclude.length && codes.exclude.indexOf(response.statusCode) !== -1) {
return false;
}
if (codes.include && codes.include.length && codes.include.indexOf(response.statusCode) === -1) {
return false;
}
return true;
}
function addIndexEntries(key, req) {
let groupName = req.apicacheGroup;
if (groupName) {
debug("group detected \"" + groupName + "\"");
let group = (index.groups[groupName] = index.groups[groupName] || []);
group.unshift(key);
}
index.all.unshift(key);
}
function filterBlacklistedHeaders(headers) {
return Object.keys(headers)
.filter(function (key) {
return globalOptions.headerBlacklist.indexOf(key) === -1;
})
.reduce(function (acc, header) {
acc[header] = headers[header];
return acc;
}, {});
}
function createCacheObject(status, headers, data, encoding) {
return {
status: status,
headers: filterBlacklistedHeaders(headers),
data: data,
encoding: encoding,
timestamp: new Date().getTime() / 1000, // seconds since epoch. This is used to properly decrement max-age headers in cached responses.
};
}
function cacheResponse(key, value, duration) {
let redis = globalOptions.redisClient;
let expireCallback = globalOptions.events.expire;
if (redis && redis.connected) {
try {
redis.hset(key, "response", JSON.stringify(value));
redis.hset(key, "duration", duration);
redis.expire(key, duration / 1000, expireCallback || function () {});
} catch (err) {
debug("[apicache] error in redis.hset()");
}
} else {
memCache.add(key, value, duration, expireCallback);
}
// schedule automatic cache clearing after the duration; the setTimeout delay is capped at the 32-bit signed maximum
timers[key] = setTimeout(function () {
instance.clear(key, true);
}, Math.min(duration, 2147483647));
}
function accumulateContent(res, content) {
if (content) {
if (typeof content == "string") {
res._apicache.content = (res._apicache.content || "") + content;
} else if (Buffer.isBuffer(content)) {
let oldContent = res._apicache.content;
if (typeof oldContent === "string") {
oldContent = !Buffer.from ? new Buffer(oldContent) : Buffer.from(oldContent);
}
if (!oldContent) {
oldContent = !Buffer.alloc ? new Buffer(0) : Buffer.alloc(0);
}
res._apicache.content = Buffer.concat(
[oldContent, content],
oldContent.length + content.length
);
} else {
res._apicache.content = content;
}
}
}
function makeResponseCacheable(req, res, next, key, duration, strDuration, toggle) {
// monkeypatch res.end to create cache object
res._apicache = {
write: res.write,
writeHead: res.writeHead,
end: res.end,
cacheable: true,
content: undefined,
};
// append header overwrites if applicable
Object.keys(globalOptions.headers).forEach(function (name) {
res.setHeader(name, globalOptions.headers[name]);
});
res.writeHead = function () {
// add cache control headers
if (!globalOptions.headers["cache-control"]) {
if (shouldCacheResponse(req, res, toggle)) {
res.setHeader("cache-control", "max-age=" + (duration / 1000).toFixed(0));
} else {
res.setHeader("cache-control", "no-cache, no-store, must-revalidate");
}
}
res._apicache.headers = Object.assign({}, getSafeHeaders(res));
return res._apicache.writeHead.apply(this, arguments);
};
// patch res.write
res.write = function (content) {
accumulateContent(res, content);
return res._apicache.write.apply(this, arguments);
};
// patch res.end
res.end = function (content, encoding) {
if (shouldCacheResponse(req, res, toggle)) {
accumulateContent(res, content);
if (res._apicache.cacheable && res._apicache.content) {
addIndexEntries(key, req);
let headers = res._apicache.headers || getSafeHeaders(res);
let cacheObject = createCacheObject(
res.statusCode,
headers,
res._apicache.content,
encoding
);
cacheResponse(key, cacheObject, duration);
// display log entry
let elapsed = new Date() - req.apicacheTimer;
debug("adding cache entry for \"" + key + "\" @ " + strDuration, logDuration(elapsed));
debug("_apicache.headers: ", res._apicache.headers);
debug("res.getHeaders(): ", getSafeHeaders(res));
debug("cacheObject: ", cacheObject);
}
}
return res._apicache.end.apply(this, arguments);
};
next();
}
function sendCachedResponse(request, response, cacheObject, toggle, next, duration) {
if (toggle && !toggle(request, response)) {
return next();
}
let headers = getSafeHeaders(response);
// Modified by @louislam, removed Cache-control, since I don't need client side cache!
// Original Source: https://github.com/kwhitley/apicache/blob/0d5686cc21fad353c6dddee646288c2fca3e4f50/src/apicache.js#L254
Object.assign(headers, filterBlacklistedHeaders(cacheObject.headers || {}));
// only embed apicache headers when not in production environment
if (process.env.NODE_ENV !== "production") {
Object.assign(headers, {
"apicache-store": globalOptions.redisClient ? "redis" : "memory",
"apicache-version": "1.6.2-modified",
});
}
// unstringify buffers
let data = cacheObject.data;
if (data && data.type === "Buffer") {
data =
typeof data.data === "number" ? Buffer.alloc(data.data) : Buffer.from(data.data);
}
// test Etag against If-None-Match for 304
let cachedEtag = cacheObject.headers.etag;
let requestEtag = request.headers["if-none-match"];
if (requestEtag && cachedEtag === requestEtag) {
response.writeHead(304, headers);
return response.end();
}
response.writeHead(cacheObject.status || 200, headers);
return response.end(data, cacheObject.encoding);
}
function syncOptions() {
for (let i in middlewareOptions) {
Object.assign(middlewareOptions[i].options, globalOptions, middlewareOptions[i].localOptions);
}
}
this.clear = function (target, isAutomatic) {
let group = index.groups[target];
let redis = globalOptions.redisClient;
if (group) {
debug("clearing group \"" + target + "\"");
group.forEach(function (key) {
debug("clearing cached entry for \"" + key + "\"");
clearTimeout(timers[key]);
delete timers[key];
if (!globalOptions.redisClient) {
memCache.delete(key);
} else {
try {
redis.del(key);
} catch (err) {
console.log("[apicache] error in redis.del(\"" + key + "\")");
}
}
index.all = index.all.filter(doesntMatch(key));
});
delete index.groups[target];
} else if (target) {
debug("clearing " + (isAutomatic ? "expired" : "cached") + " entry for \"" + target + "\"");
clearTimeout(timers[target]);
delete timers[target];
// clear actual cached entry
if (!redis) {
memCache.delete(target);
} else {
try {
redis.del(target);
} catch (err) {
console.log("[apicache] error in redis.del(\"" + target + "\")");
}
}
// remove from global index
index.all = index.all.filter(doesntMatch(target));
// remove target from each group that it may exist in
Object.keys(index.groups).forEach(function (groupName) {
index.groups[groupName] = index.groups[groupName].filter(doesntMatch(target));
// delete group if now empty
if (!index.groups[groupName].length) {
delete index.groups[groupName];
}
});
} else {
debug("clearing entire index");
if (!redis) {
memCache.clear();
} else {
// clear redis keys one by one from internal index to prevent clearing non-apicache entries
index.all.forEach(function (key) {
clearTimeout(timers[key]);
delete timers[key];
try {
redis.del(key);
} catch (err) {
console.log("[apicache] error in redis.del(\"" + key + "\")");
}
});
}
this.resetIndex();
}
return this.getIndex();
};
function parseDuration(duration, defaultDuration) {
if (typeof duration === "number") {
return duration;
}
if (typeof duration === "string") {
let split = duration.match(/^([\d\.,]+)\s?(\w+)$/);
if (split && split.length === 3) {
let len = parseFloat(split[1]);
let unit = split[2].replace(/s$/i, "").toLowerCase();
if (unit === "m") {
unit = "ms";
}
return (len || 1) * (t[unit] || 0);
}
}
return defaultDuration;
}
this.getDuration = function (duration) {
return parseDuration(duration, globalOptions.defaultDuration);
};
/**
* Return cache performance statistics (hit rate). Suitable for putting into a route:
* <code>
* app.get('/api/cache/performance', (req, res) => {
* res.json(apicache.getPerformance())
* })
* </code>
*/
this.getPerformance = function () {
return performanceArray.map(function (p) {
return p.report();
});
};
this.getIndex = function (group) {
if (group) {
return index.groups[group];
} else {
return index;
}
};
this.middleware = function cache(strDuration, middlewareToggle, localOptions) {
let duration = instance.getDuration(strDuration);
let opt = {};
middlewareOptions.push({
options: opt,
});
let options = function (localOptions) {
if (localOptions) {
middlewareOptions.find(function (middleware) {
return middleware.options === opt;
}).localOptions = localOptions;
}
syncOptions();
return opt;
};
options(localOptions);
/**
* A no-op stand-in used when performance tracking is disabled.
*/
function NOOPCachePerformance() {
this.report = this.hit = this.miss = function () {}; // noop;
}
/**
* A function for tracking and reporting hit rate. These statistics are returned by the getPerformance() call above.
*/
function CachePerformance() {
/**
* Tracks the hit rate for the last 100 requests.
* If there have been fewer than 100 requests, the hit rate just considers the requests that have happened.
*/
this.hitsLast100 = new Uint8Array(100 / 4); // each hit is 2 bits
/**
* Tracks the hit rate for the last 1000 requests.
* If there have been fewer than 1000 requests, the hit rate just considers the requests that have happened.
*/
this.hitsLast1000 = new Uint8Array(1000 / 4); // each hit is 2 bits
/**
* Tracks the hit rate for the last 10000 requests.
* If there have been fewer than 10000 requests, the hit rate just considers the requests that have happened.
*/
this.hitsLast10000 = new Uint8Array(10000 / 4); // each hit is 2 bits
/**
* Tracks the hit rate for the last 100000 requests.
* If there have been fewer than 100000 requests, the hit rate just considers the requests that have happened.
*/
this.hitsLast100000 = new Uint8Array(100000 / 4); // each hit is 2 bits
/**
* The number of calls that have passed through the middleware since the server started.
*/
this.callCount = 0;
/**
* The total number of hits since the server started
*/
this.hitCount = 0;
/**
* The key from the last cache hit. This is useful in identifying which route these statistics apply to.
*/
this.lastCacheHit = null;
/**
* The key from the last cache miss. This is useful in identifying which route these statistics apply to.
*/
this.lastCacheMiss = null;
/**
* Return performance statistics
*/
this.report = function () {
return {
lastCacheHit: this.lastCacheHit,
lastCacheMiss: this.lastCacheMiss,
callCount: this.callCount,
hitCount: this.hitCount,
missCount: this.callCount - this.hitCount,
hitRate: this.callCount == 0 ? null : this.hitCount / this.callCount,
hitRateLast100: this.hitRate(this.hitsLast100),
hitRateLast1000: this.hitRate(this.hitsLast1000),
hitRateLast10000: this.hitRate(this.hitsLast10000),
hitRateLast100000: this.hitRate(this.hitsLast100000),
};
};
/**
* Computes a cache hit rate from an array of hits and misses.
* @param {Uint8Array} array An array representing hits and misses.
* @returns a number between 0 and 1, or null if the array has no hits or misses
*/
this.hitRate = function (array) {
let hits = 0;
let misses = 0;
for (let i = 0; i < array.length; i++) {
let n8 = array[i];
for (let j = 0; j < 4; j++) {
switch (n8 & 3) {
case 1:
hits++;
break;
case 2:
misses++;
break;
}
n8 >>= 2;
}
}
let total = hits + misses;
if (total == 0) {
return null;
}
return hits / total;
};
/**
* Record a hit or miss in the given array. It will be recorded at a position determined
* by the current value of the callCount variable.
* @param {Uint8Array} array An array representing hits and misses.
* @param {boolean} hit true for a hit, false for a miss
* Each element in the array is 8 bits, and encodes 4 hit/miss records.
* Each hit or miss is encoded as two bits as follows:
* 00 means no hit or miss has been recorded in these bits
* 01 encodes a hit
* 10 encodes a miss
*/
this.recordHitInArray = function (array, hit) {
let arrayIndex = ~~(this.callCount / 4) % array.length;
let bitOffset = (this.callCount % 4) * 2; // 2 bits per record, 4 records per uint8 array element
let clearMask = ~(3 << bitOffset);
let record = (hit ? 1 : 2) << bitOffset;
array[arrayIndex] = (array[arrayIndex] & clearMask) | record;
};
/**
* Records the hit or miss in the tracking arrays and increments the call count.
* @param {boolean} hit true records a hit, false records a miss
*/
this.recordHit = function (hit) {
this.recordHitInArray(this.hitsLast100, hit);
this.recordHitInArray(this.hitsLast1000, hit);
this.recordHitInArray(this.hitsLast10000, hit);
this.recordHitInArray(this.hitsLast100000, hit);
if (hit) {
this.hitCount++;
}
this.callCount++;
};
/**
* Records a hit event, setting lastCacheHit to the given key
* @param {string} key The key that had the cache hit
*/
this.hit = function (key) {
this.recordHit(true);
this.lastCacheHit = key;
};
/**
* Records a miss event, setting lastCacheMiss to the given key
* @param {string} key The key that had the cache miss
*/
this.miss = function (key) {
this.recordHit(false);
this.lastCacheMiss = key;
};
}
let perf = globalOptions.trackPerformance ? new CachePerformance() : new NOOPCachePerformance();
performanceArray.push(perf);
let cache = function (req, res, next) {
function bypass() {
debug("bypass detected, skipping cache.");
return next();
}
// initial bypass chances
if (!opt.enabled) {
return bypass();
}
if (
req.headers["x-apicache-bypass"] ||
req.headers["x-apicache-force-fetch"] ||
(opt.respectCacheControl && req.headers["cache-control"] == "no-cache")
) {
return bypass();
}
// REMOVED IN 0.11.1 TO CORRECT MIDDLEWARE TOGGLE EXECUTE ORDER
// if (typeof middlewareToggle === 'function') {
// if (!middlewareToggle(req, res)) return bypass()
// } else if (middlewareToggle !== undefined && !middlewareToggle) {
// return bypass()
// }
// embed timer
req.apicacheTimer = new Date();
// In Express 4.x the url is ambiguous based on where a router is mounted; originalUrl will give the full URL
let key = req.originalUrl || req.url;
// Remove querystring from key if jsonp option is enabled
if (opt.jsonp) {
key = url.parse(key).pathname;
}
// add appendKey (either custom function or response path)
if (typeof opt.appendKey === "function") {
key += "$$appendKey=" + opt.appendKey(req, res);
} else if (opt.appendKey.length > 0) {
let appendKey = req;
for (let i = 0; i < opt.appendKey.length; i++) {
appendKey = appendKey[opt.appendKey[i]];
}
key += "$$appendKey=" + appendKey;
}
// attempt cache hit
let redis = opt.redisClient;
let cached = !redis ? memCache.getValue(key) : null;
// send if cache hit from memory-cache
if (cached) {
let elapsed = new Date() - req.apicacheTimer;
debug("sending cached (memory-cache) version of", key, logDuration(elapsed));
perf.hit(key);
return sendCachedResponse(req, res, cached, middlewareToggle, next, duration);
}
// send if cache hit from redis
if (redis && redis.connected) {
try {
redis.hgetall(key, function (err, obj) {
if (!err && obj && obj.response) {
let elapsed = new Date() - req.apicacheTimer;
debug("sending cached (redis) version of", key, logDuration(elapsed));
perf.hit(key);
return sendCachedResponse(
req,
res,
JSON.parse(obj.response),
middlewareToggle,
next,
duration
);
} else {
perf.miss(key);
return makeResponseCacheable(
req,
res,
next,
key,
duration,
strDuration,
middlewareToggle
);
}
});
} catch (err) {
// bypass redis on error
perf.miss(key);
return makeResponseCacheable(req, res, next, key, duration, strDuration, middlewareToggle);
}
} else {
perf.miss(key);
return makeResponseCacheable(req, res, next, key, duration, strDuration, middlewareToggle);
}
};
cache.options = options;
return cache;
};
this.options = function (options) {
if (options) {
Object.assign(globalOptions, options);
syncOptions();
if ("defaultDuration" in options) {
// Convert the default duration to a number in milliseconds (if needed)
globalOptions.defaultDuration = parseDuration(globalOptions.defaultDuration, 3600000);
}
if (globalOptions.trackPerformance) {
debug("WARNING: using trackPerformance flag can cause high memory usage!");
}
return this;
} else {
return globalOptions;
}
};
this.resetIndex = function () {
index = {
all: [],
groups: {},
};
};
this.newInstance = function (config) {
let instance = new ApiCache();
if (config) {
instance.options(config);
}
return instance;
};
this.clone = function () {
return this.newInstance(this.options());
};
// initialize index
this.resetIndex();
}
module.exports = new ApiCache();
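
A hedged usage sketch of this modified apicache build; the Express app, route, and group name below are illustrative. Note how a string duration like "5 minutes" is converted to milliseconds by parseDuration(), and how a group set on the request lets routes be invalidated together.

const express = require("express");
const apicache = require("./apicache");

const app = express();
const cache = apicache.middleware;

app.get("/api/status-page", cache("5 minutes"), (req, res) => {
    // Group key, read by addIndexEntries() when the response is cached.
    req.apicacheGroup = "status-page";
    res.json({ ok: true, time: Date.now() });
});

console.log(apicache.getDuration("5 minutes")); // 300000 (ms)

apicache.clear("status-page"); // invalidate one group
apicache.clear();              // wipe the whole index, as Monitor.sendNotification() does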

14
server/modules/apicache/index.js

@@ -1,14 +0,0 @@
const apicache = require("./apicache");
apicache.options({
headerBlacklist: [
"cache-control"
],
headers: {
// Disable client side cache, only server side cache.
// BUG! Not working for the second request
"cache-control": "no-cache",
},
});
module.exports = apicache;
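
A sketch of what this configuration means at runtime (the app and route are illustrative): the live response goes out with "cache-control: no-cache", while the headerBlacklist keeps that header out of the stored cache object, so only the server-side cache is in effect.

const express = require("express");
const apicache = require("./index"); // this module, with the options above applied

const app = express();

app.get("/status", apicache.middleware("1 minute"), (req, res) => {
    res.json({ ok: true });
});
// First request: served live; the client sees "cache-control: no-cache".
// Later requests within 1 minute: served from the server-side cache.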

59
server/modules/apicache/memory-cache.js

@@ -1,59 +0,0 @@
function MemoryCache() {
this.cache = {};
this.size = 0;
}
MemoryCache.prototype.add = function (key, value, time, timeoutCallback) {
let old = this.cache[key];
let instance = this;
let entry = {
value: value,
expire: time + Date.now(),
timeout: setTimeout(function () {
instance.delete(key);
return timeoutCallback && typeof timeoutCallback === "function" && timeoutCallback(value, key);
}, time)
};
this.cache[key] = entry;
this.size = Object.keys(this.cache).length;
return entry;
};
MemoryCache.prototype.delete = function (key) {
let entry = this.cache[key];
if (entry) {
clearTimeout(entry.timeout);
}
delete this.cache[key];
this.size = Object.keys(this.cache).length;
return null;
};
MemoryCache.prototype.get = function (key) {
let entry = this.cache[key];
return entry;
};
MemoryCache.prototype.getValue = function (key) {
let entry = this.get(key);
return entry && entry.value;
};
MemoryCache.prototype.clear = function () {
Object.keys(this.cache).forEach(function (key) {
this.delete(key);
}, this);
return true;
};
module.exports = MemoryCache;
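
A minimal usage sketch of MemoryCache; the key, value, and TTL are illustrative. Each entry carries its own setTimeout, so deleting an entry also cancels its expiry callback.

const MemoryCache = require("./memory-cache");

let cache = new MemoryCache();
cache.add("greeting", "hello", 1000, (value, key) => {
    console.log(`expired: ${key}`); // invoked by the internal setTimeout after ~1s
});

console.log(cache.getValue("greeting")); // "hello"
console.log(cache.size);                 // 1
// After ~1s the entry deletes itself and the callback above fires.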

108
server/notification-providers/aliyun-sms.js

@@ -1,108 +0,0 @@
const NotificationProvider = require("./notification-provider");
const { DOWN, UP } = require("../../src/util");
const { default: axios } = require("axios");
const Crypto = require("crypto");
const qs = require("qs");
class AliyunSMS extends NotificationProvider {
name = "AliyunSMS";
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully.";
try {
if (heartbeatJSON != null) {
let msgBody = JSON.stringify({
name: monitorJSON["name"],
time: heartbeatJSON["time"],
status: this.statusToString(heartbeatJSON["status"]),
msg: heartbeatJSON["msg"],
});
if (await this.sendSms(notification, msgBody)) {
return okMsg;
}
} else {
let msgBody = JSON.stringify({
name: "",
time: "",
status: "",
msg: msg,
});
if (await this.sendSms(notification, msgBody)) {
return okMsg;
}
}
} catch (error) {
this.throwGeneralAxiosError(error);
}
}
async sendSms(notification, msgbody) {
let params = {
PhoneNumbers: notification.phonenumber,
TemplateCode: notification.templateCode,
SignName: notification.signName,
TemplateParam: msgbody,
AccessKeyId: notification.accessKeyId,
Format: "JSON",
SignatureMethod: "HMAC-SHA1",
SignatureVersion: "1.0",
SignatureNonce: Math.random().toString(),
Timestamp: new Date().toISOString(),
Action: "SendSms",
Version: "2017-05-25",
};
params.Signature = this.sign(params, notification.secretAccessKey);
let config = {
method: "POST",
url: "http://dysmsapi.aliyuncs.com/",
headers: {
"Content-Type": "application/x-www-form-urlencoded",
},
data: qs.stringify(params),
};
let result = await axios(config);
if (result.data.Message == "OK") {
return true;
}
return false;
}
/** Compute the Aliyun request signature: HMAC-SHA1 over the sorted, URL-encoded parameters */
sign(param, AccessKeySecret) {
let param2 = {};
let data = [];
let oa = Object.keys(param).sort();
for (let i = 0; i < oa.length; i++) {
let key = oa[i];
param2[key] = param[key];
}
for (let key in param2) {
data.push(`${encodeURIComponent(key)}=${encodeURIComponent(param2[key])}`);
}
let StringToSign = `POST&${encodeURIComponent("/")}&${encodeURIComponent(data.join("&"))}`;
return Crypto
.createHmac("sha1", `${AccessKeySecret}&`)
.update(Buffer.from(StringToSign))
.digest("base64");
}
statusToString(status) {
switch (status) {
case DOWN:
return "DOWN";
case UP:
return "UP";
default:
return status;
}
}
}
module.exports = AliyunSMS;
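
A standalone sketch of the signing scheme sign() implements above: sort the query parameters, form-encode them, wrap them in the RPC-style StringToSign, then HMAC-SHA1 with "<secret>&" as the key. The parameters and AccessKeySecret below are made-up placeholders.

const Crypto = require("crypto");

function aliyunSign(params, accessKeySecret) {
    let canonical = Object.keys(params)
        .sort()
        .map((key) => `${encodeURIComponent(key)}=${encodeURIComponent(params[key])}`)
        .join("&");
    let stringToSign = `POST&${encodeURIComponent("/")}&${encodeURIComponent(canonical)}`;
    return Crypto
        .createHmac("sha1", `${accessKeySecret}&`)
        .update(Buffer.from(stringToSign))
        .digest("base64");
}

console.log(aliyunSign({ Action: "SendSms", Version: "2017-05-25" }, "testSecret"));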

26
server/notification-providers/apprise.js

@@ -1,26 +0,0 @@
const NotificationProvider = require("./notification-provider");
const child_process = require("child_process");
class Apprise extends NotificationProvider {
name = "apprise";
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let s = child_process.spawnSync("apprise", [ "-vv", "-b", msg, notification.appriseURL ]);
let output = s.stdout ? s.stdout.toString() : "ERROR: maybe apprise not found";
if (output) {
if (!output.includes("ERROR")) {
return "Sent Successfully";
}
throw new Error(output);
} else {
return "No output from apprise";
}
}
}
module.exports = Apprise;

79
server/notification-providers/dingding.js

@@ -1,79 +0,0 @@
const NotificationProvider = require("./notification-provider");
const { DOWN, UP } = require("../../src/util");
const { default: axios } = require("axios");
const Crypto = require("crypto");
class DingDing extends NotificationProvider {
name = "DingDing";
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully.";
try {
if (heartbeatJSON != null) {
let params = {
msgtype: "markdown",
markdown: {
title: monitorJSON["name"],
text: `## [${this.statusToString(heartbeatJSON["status"])}] \n > ${heartbeatJSON["msg"]} \n > Time(UTC):${heartbeatJSON["time"]}`,
}
};
if (await this.sendToDingDing(notification, params)) {
return okMsg;
}
} else {
let params = {
msgtype: "text",
text: {
content: msg
}
};
if (await this.sendToDingDing(notification, params)) {
return okMsg;
}
}
} catch (error) {
this.throwGeneralAxiosError(error);
}
}
async sendToDingDing(notification, params) {
let timestamp = Date.now();
let config = {
method: "POST",
headers: {
"Content-Type": "application/json",
},
url: `${notification.webHookUrl}&timestamp=${timestamp}&sign=${encodeURIComponent(this.sign(timestamp, notification.secretKey))}`,
data: JSON.stringify(params),
};
let result = await axios(config);
if (result.data.errmsg == "ok") {
return true;
}
return false;
}
/** Compute the DingDing webhook signature: HMAC-SHA256 over "timestamp\nsecretKey" */
sign(timestamp, secretKey) {
return Crypto
.createHmac("sha256", Buffer.from(secretKey, "utf8"))
.update(Buffer.from(`${timestamp}\n${secretKey}`, "utf8"))
.digest("base64");
}
statusToString(status) {
switch (status) {
case DOWN:
return "DOWN";
case UP:
return "UP";
default:
return status;
}
}
}
module.exports = DingDing;
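
A sketch of how the signed webhook URL is assembled from sign() above; the access token and secret are placeholders, not real credentials.

const Crypto = require("crypto");

function dingdingSign(timestamp, secretKey) {
    return Crypto
        .createHmac("sha256", Buffer.from(secretKey, "utf8"))
        .update(Buffer.from(`${timestamp}\n${secretKey}`, "utf8"))
        .digest("base64");
}

let timestamp = Date.now();
let secretKey = "SEC-example-placeholder";
let url = "https://oapi.dingtalk.com/robot/send?access_token=TOKEN" +
    `&timestamp=${timestamp}&sign=${encodeURIComponent(dingdingSign(timestamp, secretKey))}`;
console.log(url); // POST the msgtype payload here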

115
server/notification-providers/discord.js

@@ -1,115 +0,0 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
const { DOWN, UP } = require("../../src/util");
class Discord extends NotificationProvider {
name = "discord";
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully.";
try {
const discordDisplayName = notification.discordUsername || "Uptime Kuma";
// If heartbeatJSON is null, assume we're testing.
if (heartbeatJSON == null) {
let discordtestdata = {
username: discordDisplayName,
content: msg,
}
await axios.post(notification.discordWebhookUrl, discordtestdata)
return okMsg;
}
let url;
if (monitorJSON["type"] === "port") {
url = monitorJSON["hostname"];
if (monitorJSON["port"]) {
url += ":" + monitorJSON["port"];
}
} else {
url = monitorJSON["url"];
}
// If heartbeatJSON is not null, we go into the normal alerting loop.
if (heartbeatJSON["status"] == DOWN) {
let discorddowndata = {
username: discordDisplayName,
embeds: [{
title: "❌ Your service " + monitorJSON["name"] + " went down. ❌",
color: 16711680,
timestamp: heartbeatJSON["time"],
fields: [
{
name: "Service Name",
value: monitorJSON["name"],
},
{
name: "Service URL",
value: url,
},
{
name: "Time (UTC)",
value: heartbeatJSON["time"],
},
{
name: "Error",
value: heartbeatJSON["msg"],
},
],
}],
}
if (notification.discordPrefixMessage) {
discorddowndata.content = notification.discordPrefixMessage;
}
await axios.post(notification.discordWebhookUrl, discorddowndata)
return okMsg;
} else if (heartbeatJSON["status"] == UP) {
let discordupdata = {
username: discordDisplayName,
embeds: [{
title: "✅ Your service " + monitorJSON["name"] + " is up! ✅",
color: 65280,
timestamp: heartbeatJSON["time"],
fields: [
{
name: "Service Name",
value: monitorJSON["name"],
},
{
name: "Service URL",
value: url.startsWith("http") ? "[Visit Service](" + url + ")" : url,
},
{
name: "Time (UTC)",
value: heartbeatJSON["time"],
},
{
name: "Ping",
value: heartbeatJSON["ping"] + "ms",
},
],
}],
}
if (notification.discordPrefixMessage) {
discordupdata.content = notification.discordPrefixMessage;
}
await axios.post(notification.discordWebhookUrl, discordupdata)
return okMsg;
}
} catch (error) {
this.throwGeneralAxiosError(error)
}
}
}
module.exports = Discord;
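
A small sketch of the URL-building rule used in the embeds above: port monitors are displayed as hostname[:port], everything else uses the monitor's url field. The sample monitors are illustrative.

function monitorUrl(monitorJSON) {
    if (monitorJSON.type === "port") {
        let url = monitorJSON.hostname;
        if (monitorJSON.port) {
            url += ":" + monitorJSON.port;
        }
        return url;
    }
    return monitorJSON.url;
}

console.log(monitorUrl({ type: "port", hostname: "db.example.org", port: 5432 })); // "db.example.org:5432"
console.log(monitorUrl({ type: "http", url: "https://example.org" }));             // "https://example.org"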

83
server/notification-providers/feishu.js

@@ -1,83 +0,0 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
const { DOWN, UP } = require("../../src/util");
class Feishu extends NotificationProvider {
name = "Feishu";
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully.";
let feishuWebHookUrl = notification.feishuWebHookUrl;
try {
if (heartbeatJSON == null) {
let testdata = {
msg_type: "text",
content: {
text: msg,
},
};
await axios.post(feishuWebHookUrl, testdata);
return okMsg;
}
if (heartbeatJSON["status"] == DOWN) {
let downdata = {
msg_type: "post",
content: {
post: {
zh_cn: {
title: "UptimeKuma Alert: " + monitorJSON["name"],
content: [
[
{
tag: "text",
text:
"[Down] " +
heartbeatJSON["msg"] +
"\nTime (UTC): " +
heartbeatJSON["time"],
},
],
],
},
},
},
};
await axios.post(feishuWebHookUrl, downdata);
return okMsg;
}
if (heartbeatJSON["status"] == UP) {
let updata = {
msg_type: "post",
content: {
post: {
zh_cn: {
title: "UptimeKuma Alert: " + monitorJSON["name"],
content: [
[
{
tag: "text",
text:
"[Up] " +
heartbeatJSON["msg"] +
"\nTime (UTC): " +
heartbeatJSON["time"],
},
],
],
},
},
},
};
await axios.post(feishuWebHookUrl, updata);
return okMsg;
}
} catch (error) {
this.throwGeneralAxiosError(error);
}
}
}
module.exports = Feishu;

28
server/notification-providers/gotify.js

@@ -1,28 +0,0 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
class Gotify extends NotificationProvider {
name = "gotify";
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully.";
try {
if (notification.gotifyserverurl && notification.gotifyserverurl.endsWith("/")) {
notification.gotifyserverurl = notification.gotifyserverurl.slice(0, -1);
}
await axios.post(`${notification.gotifyserverurl}/message?token=${notification.gotifyapplicationToken}`, {
"message": msg,
"priority": notification.gotifyPriority || 8,
"title": "Uptime-Kuma",
})
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error);
}
}
}
module.exports = Gotify;

60
server/notification-providers/line.js

@@ -1,60 +0,0 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
const { DOWN, UP } = require("../../src/util");
class Line extends NotificationProvider {
name = "line";
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully.";
try {
let lineAPIUrl = "https://api.line.me/v2/bot/message/push";
let config = {
headers: {
"Content-Type": "application/json",
"Authorization": "Bearer " + notification.lineChannelAccessToken
}
};
if (heartbeatJSON == null) {
let testMessage = {
"to": notification.lineUserID,
"messages": [
{
"type": "text",
"text": "Test Successful!"
}
]
}
await axios.post(lineAPIUrl, testMessage, config)
} else if (heartbeatJSON["status"] == DOWN) {
let downMessage = {
"to": notification.lineUserID,
"messages": [
{
"type": "text",
"text": "UptimeKuma Alert: [🔴 Down]\n" + "Name: " + monitorJSON["name"] + " \n" + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"]
}
]
}
await axios.post(lineAPIUrl, downMessage, config)
} else if (heartbeatJSON["status"] == UP) {
let upMessage = {
"to": notification.lineUserID,
"messages": [
{
"type": "text",
"text": "UptimeKuma Alert: [✅ Up]\n" + "Name: " + monitorJSON["name"] + " \n" + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"]
}
]
}
await axios.post(lineAPIUrl, upMessage, config)
}
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error)
}
}
}
module.exports = Line;

48
server/notification-providers/lunasea.js

@@ -1,48 +0,0 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
const { DOWN, UP } = require("../../src/util");
class LunaSea extends NotificationProvider {
name = "lunasea";
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully.";
let lunaseadevice = "https://notify.lunasea.app/v1/custom/device/" + notification.lunaseaDevice
try {
if (heartbeatJSON == null) {
let testdata = {
"title": "Uptime Kuma Alert",
"body": "Testing Successful.",
}
await axios.post(lunaseadevice, testdata)
return okMsg;
}
if (heartbeatJSON["status"] == DOWN) {
let downdata = {
"title": "UptimeKuma Alert: " + monitorJSON["name"],
"body": "[🔴 Down] " + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"],
}
await axios.post(lunaseadevice, downdata)
return okMsg;
}
if (heartbeatJSON["status"] == UP) {
let updata = {
"title": "UptimeKuma Alert: " + monitorJSON["name"],
"body": "[✅ Up] " + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"],
}
await axios.post(lunaseadevice, updata)
return okMsg;
}
} catch (error) {
this.throwGeneralAxiosError(error)
}
}
}
module.exports = LunaSea;

45
server/notification-providers/matrix.js

@@ -1,45 +0,0 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
const Crypto = require("crypto");
const { debug } = require("../../src/util");
class Matrix extends NotificationProvider {
name = "matrix";
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully.";
const size = 20;
const randomString = encodeURIComponent(
Crypto
.randomBytes(size)
.toString("base64")
.slice(0, size)
);
debug("Random String: " + randomString);
const roomId = encodeURIComponent(notification.internalRoomId);
debug("Matrix Room ID: " + roomId);
try {
let config = {
headers: {
"Authorization": `Bearer ${notification.accessToken}`,
}
};
let data = {
"msgtype": "m.text",
"body": msg
};
await axios.put(`${notification.homeserverUrl}/_matrix/client/r0/rooms/${roomId}/send/m.room.message/${randomString}`, data, config);
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error);
}
}
}
module.exports = Matrix;
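
A sketch of the request this provider issues: a PUT to the Matrix client-server API with a random transaction ID in the path, which makes retried sends idempotent. The homeserver and room ID below are placeholders.

const Crypto = require("crypto");

const txnId = encodeURIComponent(
    Crypto.randomBytes(20).toString("base64").slice(0, 20)
);
const roomId = encodeURIComponent("!room:example.org");
const endpoint =
    `https://matrix.example.org/_matrix/client/r0/rooms/${roomId}` +
    `/send/m.room.message/${txnId}`;
console.log(endpoint); // PUT { msgtype: "m.text", body: "..." } to this URL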

Some files were not shown because too many files changed in this diff
