Mbround18/discord webhooks (#380)

* #207 Discord Webhook refactor

* cleaned up yarn cache

* change which file is hashed

* deprecated action for yarn

* fixed warning

* test should not panic if env var is set

* trying new pipeline config
Michael
2021-05-30 21:46:27 -07:00
committed by GitHub
parent eefdeb275a
commit 9b4aea893e
37 changed files with 4329 additions and 2072 deletions

View File

@@ -5,4 +5,8 @@ target/
tmp/
docs/
.run
release/
docker-compose*
node_modules/
package.json
yarn.lock
.yarn

View File

@@ -24,8 +24,6 @@ jobs:
images: |
mbround18/${{ matrix.image }}
ghcr.io/mbround18/${{ matrix.image }}
flavor: |
latest=false
# generate Docker tags based on the following events/attributes
tags: |
@@ -41,14 +39,13 @@ jobs:
uses: docker/setup-buildx-action@v1
- name: Login to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@v1
with:
username: mbround18
password: ${{ secrets.DOCKER_TOKEN }}
- name: Login to GitHub Container Registry
if: github.event_name != 'pull_request'
# if: github.event_name != 'pull_request'
uses: docker/login-action@v1
with:
registry: ghcr.io
@@ -60,23 +57,29 @@ jobs:
with:
context: .
file: ./Dockerfile.${{ matrix.image }}
push: ${{ github.event_name != 'pull_request' }}
tags: mbround18/${{ matrix.image }}:latest,ghcr.io/mbround18/${{ matrix.image }}:latest,${{ steps.meta.outputs.tags }}
# push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
build-args: |
"GITHUB_SHA=${GITHUB_SHA}"
"GITHUB_REF=${GITHUB_REF}"
"GITHUB_REPOSITORY=${GITHUB_REPOSITORY}"
# - name: Send Release Notification
# if: github.event_name != 'pull_request' && "${{ matrix.image }}" == "valheim"
# run: |
# if [ "${{ matrix.image }}" != "valheim" ]; then
# exit 0
# fi
#
# # Send release notification
# docker run --rm \
# -e NAME="New Version: mbround18/${{ matrix.image }}:latest" \
# mbround18/odin:latest notify "New release of mbround18/${{ matrix.image }}:latest !! Please use pull the latest mbround18/${{ matrix.image }}:latest. Then restart your server via your preferred method." \
# --webhook "${{ secrets.RELEASE_WEBHOOK }}"
- name: Send Release Notification
if: github.event_name != 'pull_request' && "${{ matrix.image }}" == "valheim"
env:
NAME: "GitHub Release Notification"
run: |
if [ "${{ matrix.image }}" != "valheim" ]; then
exit 0
fi
# Send release notification
docker run --rm \
-e NAME="New Version: mbround18/${{ matrix.image }}:latest" \
mbround18/odin:latest notify \
--webhook "${{ secrets.RELEASE_WEBHOOK }}" \
--title "New Release! <3" \
"New release of "
"New release of mbround18/${{ matrix.image }}:latest!! Tagged with: ${{ steps.meta.outputs.tags }}!!" \

View File

@@ -21,11 +21,13 @@ jobs:
- uses: actions/cache@v2
with:
path: '**/node_modules'
key: ${{ runner.os }}-${{ hashFiles('**/yarn.lock') }}
path: |
**/node_modules
.yarn/cache
key: ${{ runner.os }}-${{ hashFiles('**/package.json') }}
- name: Yarn install
run: yarn install --frozen-lockfile
run: yarn install
- name: Install SSH key
uses: shimataro/ssh-key-action@v2

View File

@@ -7,6 +7,7 @@ on:
branches: [ main ]
env:
NAME: "Rust Test"
CARGO_TERM_COLOR: always
jobs:

231
.gitignore vendored
View File

@@ -4,4 +4,233 @@ tmp/
*.env*
docker-compose.*.yml
release
node_modules/
node_modules/
.yarn/*
!.yarn/patches
!.yarn/releases
!.yarn/plugins
!.yarn/sdks
!.yarn/versions
.pnp.*
# Created by https://www.toptal.com/developers/gitignore/api/intellij+all,rust,node,yarn
# Edit at https://www.toptal.com/developers/gitignore?templates=intellij+all,rust,node,yarn
### Intellij+all ###
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
# User-specific stuff
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/**/usage.statistics.xml
.idea/**/dictionaries
.idea/**/shelf
# Generated files
.idea/**/contentModel.xml
# Sensitive or high-churn files
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml
# Gradle
.idea/**/gradle.xml
.idea/**/libraries
# Gradle and Maven with auto-import
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
# auto-import.
# .idea/artifacts
# .idea/compiler.xml
# .idea/jarRepositories.xml
# .idea/modules.xml
# .idea/*.iml
# .idea/modules
# *.iml
# *.ipr
# CMake
cmake-build-*/
# Mongo Explorer plugin
.idea/**/mongoSettings.xml
# File-based project format
*.iws
# IntelliJ
out/
# mpeltonen/sbt-idea plugin
.idea_modules/
# JIRA plugin
atlassian-ide-plugin.xml
# Cursive Clojure plugin
.idea/replstate.xml
# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties
# Editor-based Rest Client
.idea/httpRequests
# Android studio 3.1+ serialized cache file
.idea/caches/build_file_checksums.ser
### Intellij+all Patch ###
# Ignores the whole .idea folder and all .iml files
# See https://github.com/joeblau/gitignore.io/issues/186 and https://github.com/joeblau/gitignore.io/issues/360
# Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-249601023
*.iml
modules.xml
.idea/misc.xml
*.ipr
# Sonarlint plugin
.idea/sonarlint
### Node ###
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
jspm_packages/
# TypeScript v1 declaration files
typings/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variables file
.env
.env.test
.env*.local
# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache
# Next.js build output
.next
# Nuxt.js build / generate output
.nuxt
dist
# Storybook build outputs
.out
.storybook-out
storybook-static
# rollup.js default build output
dist/
# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# Temporary folders
temp/
### Rust ###
# Generated by Cargo
# will have compiled files and executables
/target/
# End of https://www.toptal.com/developers/gitignore/api/intellij+all,rust,node,yarn

55
.yarn/releases/yarn-2.4.1.cjs vendored Normal file

File diff suppressed because one or more lines are too long

3
.yarn/sdks/integrations.yml vendored Normal file
View File

@@ -0,0 +1,3 @@
# This file is automatically generated by PnPify.
# Manual changes will be lost!

20
.yarn/sdks/typescript/bin/tsc vendored Normal file
View File

@@ -0,0 +1,20 @@
#!/usr/bin/env node
const {existsSync} = require(`fs`);
const {createRequire, createRequireFromPath} = require(`module`);
const {resolve} = require(`path`);
const relPnpApiPath = "../../../../.pnp.js";
const absPnpApiPath = resolve(__dirname, relPnpApiPath);
const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath);
if (existsSync(absPnpApiPath)) {
if (!process.versions.pnp) {
// Setup the environment to be able to require typescript/bin/tsc
require(absPnpApiPath).setup();
}
}
// Defer to the real typescript/bin/tsc your application uses
module.exports = absRequire(`typescript/bin/tsc`);

20
.yarn/sdks/typescript/bin/tsserver vendored Normal file
View File

@@ -0,0 +1,20 @@
#!/usr/bin/env node
const {existsSync} = require(`fs`);
const {createRequire, createRequireFromPath} = require(`module`);
const {resolve} = require(`path`);
const relPnpApiPath = "../../../../.pnp.js";
const absPnpApiPath = resolve(__dirname, relPnpApiPath);
const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath);
if (existsSync(absPnpApiPath)) {
if (!process.versions.pnp) {
// Setup the environment to be able to require typescript/bin/tsserver
require(absPnpApiPath).setup();
}
}
// Defer to the real typescript/bin/tsserver your application uses
module.exports = absRequire(`typescript/bin/tsserver`);

20
.yarn/sdks/typescript/lib/tsc.js vendored Normal file
View File

@@ -0,0 +1,20 @@
#!/usr/bin/env node
const {existsSync} = require(`fs`);
const {createRequire, createRequireFromPath} = require(`module`);
const {resolve} = require(`path`);
const relPnpApiPath = "../../../../.pnp.js";
const absPnpApiPath = resolve(__dirname, relPnpApiPath);
const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath);
if (existsSync(absPnpApiPath)) {
if (!process.versions.pnp) {
// Setup the environment to be able to require typescript/lib/tsc.js
require(absPnpApiPath).setup();
}
}
// Defer to the real typescript/lib/tsc.js your application uses
module.exports = absRequire(`typescript/lib/tsc.js`);

125
.yarn/sdks/typescript/lib/tsserver.js vendored Normal file
View File

@@ -0,0 +1,125 @@
#!/usr/bin/env node
const {existsSync} = require(`fs`);
const {createRequire, createRequireFromPath} = require(`module`);
const {resolve} = require(`path`);
const relPnpApiPath = "../../../../.pnp.js";
const absPnpApiPath = resolve(__dirname, relPnpApiPath);
const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath);
const moduleWrapper = tsserver => {
const {isAbsolute} = require(`path`);
const pnpApi = require(`pnpapi`);
const dependencyTreeRoots = new Set(pnpApi.getDependencyTreeRoots().map(locator => {
return `${locator.name}@${locator.reference}`;
}));
// VSCode sends the zip paths to TS using the "zip://" prefix, that TS
// doesn't understand. This layer makes sure to remove the protocol
// before forwarding it to TS, and to add it back on all returned paths.
function toEditorPath(str) {
// We add the `zip:` prefix to both `.zip/` paths and virtual paths
if (isAbsolute(str) && !str.match(/^\^zip:/) && (str.match(/\.zip\//) || str.match(/\/(\$\$virtual|__virtual__)\//))) {
// We also take the opportunity to turn virtual paths into physical ones;
// this makes it much easier to work with workspaces that list peer
// dependencies, since otherwise Ctrl+Click would bring us to the virtual
// file instances instead of the real ones.
//
// We only do this to modules owned by the dependency tree roots.
// This avoids breaking the resolution when jumping inside a vendor
// with peer dep (otherwise jumping into react-dom would show resolution
// errors on react).
//
const resolved = pnpApi.resolveVirtual(str);
if (resolved) {
const locator = pnpApi.findPackageLocator(resolved);
if (locator && dependencyTreeRoots.has(`${locator.name}@${locator.reference}`)) {
str = resolved;
}
}
str = str.replace(/\\/g, `/`)
str = str.replace(/^\/?/, `/`);
// Absolute VSCode `Uri.fsPath`s need to start with a slash.
// VSCode only adds it automatically for supported schemes,
// so we have to do it manually for the `zip` scheme.
// The path needs to start with a caret otherwise VSCode doesn't handle the protocol
//
// Ref: https://github.com/microsoft/vscode/issues/105014#issuecomment-686760910
//
if (str.match(/\.zip\//)) {
str = `${isVSCode ? `^` : ``}zip:${str}`;
}
}
return str;
}
function fromEditorPath(str) {
return process.platform === `win32`
? str.replace(/^\^?zip:\//, ``)
: str.replace(/^\^?zip:/, ``);
}
// Force enable 'allowLocalPluginLoads'
// TypeScript tries to resolve plugins using a path relative to itself
// which doesn't work when using the global cache
// https://github.com/microsoft/TypeScript/blob/1b57a0395e0bff191581c9606aab92832001de62/src/server/project.ts#L2238
// VSCode doesn't want to enable 'allowLocalPluginLoads' due to security concerns but
// TypeScript already does local loads and if this code is running the user trusts the workspace
// https://github.com/microsoft/vscode/issues/45856
const ConfiguredProject = tsserver.server.ConfiguredProject;
const {enablePluginsWithOptions: originalEnablePluginsWithOptions} = ConfiguredProject.prototype;
ConfiguredProject.prototype.enablePluginsWithOptions = function() {
this.projectService.allowLocalPluginLoads = true;
return originalEnablePluginsWithOptions.apply(this, arguments);
};
// And here is the point where we hijack the VSCode <-> TS communications
// by adding ourselves in the middle. We locate everything that looks
// like an absolute path of ours and normalize it.
const Session = tsserver.server.Session;
const {onMessage: originalOnMessage, send: originalSend} = Session.prototype;
let isVSCode = false;
return Object.assign(Session.prototype, {
onMessage(/** @type {string} */ message) {
const parsedMessage = JSON.parse(message)
if (
parsedMessage != null &&
typeof parsedMessage === `object` &&
parsedMessage.arguments &&
parsedMessage.arguments.hostInfo === `vscode`
) {
isVSCode = true;
}
return originalOnMessage.call(this, JSON.stringify(parsedMessage, (key, value) => {
return typeof value === `string` ? fromEditorPath(value) : value;
}));
},
send(/** @type {any} */ msg) {
return originalSend.call(this, JSON.parse(JSON.stringify(msg, (key, value) => {
return typeof value === `string` ? toEditorPath(value) : value;
})));
}
});
};
if (existsSync(absPnpApiPath)) {
if (!process.versions.pnp) {
// Setup the environment to be able to require typescript/lib/tsserver.js
require(absPnpApiPath).setup();
}
}
// Defer to the real typescript/lib/tsserver.js your application uses
module.exports = moduleWrapper(absRequire(`typescript/lib/tsserver.js`));

20
.yarn/sdks/typescript/lib/typescript.js vendored Normal file
View File

@@ -0,0 +1,20 @@
#!/usr/bin/env node
const {existsSync} = require(`fs`);
const {createRequire, createRequireFromPath} = require(`module`);
const {resolve} = require(`path`);
const relPnpApiPath = "../../../../.pnp.js";
const absPnpApiPath = resolve(__dirname, relPnpApiPath);
const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath);
if (existsSync(absPnpApiPath)) {
if (!process.versions.pnp) {
// Setup the environment to be able to require typescript/lib/typescript.js
require(absPnpApiPath).setup();
}
}
// Defer to the real typescript/lib/typescript.js your application uses
module.exports = absRequire(`typescript/lib/typescript.js`);

6
.yarn/sdks/typescript/package.json vendored Normal file
View File

@@ -0,0 +1,6 @@
{
"name": "typescript",
"version": "3.9.9-pnpify",
"main": "./lib/typescript.js",
"type": "commonjs"
}

1
.yarnrc.yml Normal file
View File

@@ -0,0 +1 @@
yarnPath: .yarn/releases/yarn-2.4.1.cjs

208
Cargo.lock generated
View File

@@ -47,13 +47,34 @@ version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693"
[[package]]
name = "block-buffer"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0940dc441f31689269e10ac70eb1002a3a1d3ad1390e030043662eb7fe4688b"
dependencies = [
"block-padding",
"byte-tools",
"byteorder",
"generic-array 0.12.4",
]
[[package]]
name = "block-buffer"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4"
dependencies = [
"generic-array",
"generic-array 0.14.4",
]
[[package]]
name = "block-padding"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fa79dedbb091f449f1f39e53edf88d5dbe95f895dae6135a8d7b881fb5af73f5"
dependencies = [
"byte-tools",
]
[[package]]
@@ -80,9 +101,15 @@ checksum = "b4ae4235e6dac0694637c763029ecea1a2ec9e4e06ec2729bd21ba4d9c863eb7"
[[package]]
name = "bumpalo"
version = "3.6.1"
version = "3.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "63396b8a4b9de3f4fdfb320ab6080762242f66a8ef174c49d8e19b674db4cdbe"
checksum = "9c59e7af012c713f529e7a3ee57ce9b31ddd858d4b512923602f74608b009631"
[[package]]
name = "byte-tools"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7"
[[package]]
name = "byteorder"
@@ -125,9 +152,9 @@ checksum = "7b02b629252fe8ef6460461409564e2c21d0c8e77e0944f3d189ff06c4e932ad"
[[package]]
name = "cc"
version = "1.0.67"
version = "1.0.68"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3c69b077ad434294d3ce9f1f6143a2a4b89a8a2d54ef813d85003a4fd1137fd"
checksum = "4a72c244c1ff497a746a7e1fb3d14bd08420ecda70c8f25c7112f2781652d787"
[[package]]
name = "cfg-if"
@@ -224,13 +251,22 @@ dependencies = [
"libc",
]
[[package]]
name = "digest"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f3d0c8c8752312f9713efd397ff63acb9f85585afbf179282e720e7704954dd5"
dependencies = [
"generic-array 0.12.4",
]
[[package]]
name = "digest"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066"
dependencies = [
"generic-array",
"generic-array 0.14.4",
]
[[package]]
@@ -239,6 +275,12 @@ version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10"
[[package]]
name = "dotenv"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77c90badedccf4105eca100756a0b1289e191f6fcbdadd3cee1d2f614f97da8f"
[[package]]
name = "either"
version = "1.6.1"
@@ -254,6 +296,12 @@ dependencies = [
"cfg-if",
]
[[package]]
name = "fake-simd"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed"
[[package]]
name = "filetime"
version = "0.2.14"
@@ -381,6 +429,15 @@ dependencies = [
"slab",
]
[[package]]
name = "generic-array"
version = "0.12.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ffdf9f34f1447443d37393cc6c2b8313aebddcd96906caf34e54c68d8e57d7bd"
dependencies = [
"typenum",
]
[[package]]
name = "generic-array"
version = "0.14.4"
@@ -438,6 +495,20 @@ dependencies = [
"tracing",
]
[[package]]
name = "handlebars"
version = "3.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4498fc115fa7d34de968184e473529abb40eeb6be8bc5f7faba3d08c316cb3e3"
dependencies = [
"log",
"pest",
"pest_derive",
"quick-error 2.0.1",
"serde",
"serde_json",
]
[[package]]
name = "hashbrown"
version = "0.9.1"
@@ -456,7 +527,7 @@ dependencies = [
"headers-core",
"http",
"mime",
"sha-1",
"sha-1 0.9.6",
"time",
]
@@ -517,9 +588,9 @@ checksum = "f3a87b616e37e93c22fb19bcd386f02f3af5ea98a25670ad0fce773de23c5e68"
[[package]]
name = "httpdate"
version = "1.0.0"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05842d0d43232b23ccb7060ecb0f0626922c21f30012e97b767b30afd4a5d4b9"
checksum = "6456b8a6c8f33fee7d958fcd1b60d55b11940a79e63ae87013e6d22e26034440"
[[package]]
name = "huginn"
@@ -534,9 +605,9 @@ dependencies = [
[[package]]
name = "hyper"
version = "0.14.7"
version = "0.14.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e5f105c494081baa3bf9e200b279e27ec1623895cd504c7dbef8d0b080fcf54"
checksum = "d3f71a7eea53a3f8257a7b4795373ff886397178cd634430ea94e12d7fe4fe34"
dependencies = [
"bytes",
"futures-channel",
@@ -645,9 +716,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
version = "0.2.94"
version = "0.2.95"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "18794a8ad5b29321f790b55d93dfba91e125cb1a9edbd4f8e3150acc771c1a5e"
checksum = "789da6d93f1b866ffe175afc5322a4d76c038605a1c3319bb57b06967ca98a36"
[[package]]
name = "linked-hash-map"
@@ -673,6 +744,12 @@ dependencies = [
"cfg-if",
]
[[package]]
name = "maplit"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d"
[[package]]
name = "matches"
version = "0.1.8"
@@ -750,7 +827,7 @@ dependencies = [
"log",
"mime",
"mime_guess",
"quick-error",
"quick-error 1.2.3",
"rand 0.7.3",
"safemem",
"tempfile",
@@ -804,9 +881,11 @@ dependencies = [
"chrono",
"clap",
"daemonize",
"dotenv",
"flate2",
"fs_extra",
"glob",
"handlebars",
"inflections",
"log",
"md5",
@@ -829,6 +908,12 @@ version = "1.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af8b08b04175473088b46763e51ee54da5f9a164bc162f615b91bc179dbf15a3"
[[package]]
name = "opaque-debug"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2839e79665f131bdb5782e51f2c6c9599c133c6098982a54c794358bf432529c"
[[package]]
name = "opaque-debug"
version = "0.3.0"
@@ -872,6 +957,49 @@ version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e"
[[package]]
name = "pest"
version = "2.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "10f4872ae94d7b90ae48754df22fd42ad52ce740b8f370b03da4835417403e53"
dependencies = [
"ucd-trie",
]
[[package]]
name = "pest_derive"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "833d1ae558dc601e9a60366421196a8d94bc0ac980476d0b67e1d0988d72b2d0"
dependencies = [
"pest",
"pest_generator",
]
[[package]]
name = "pest_generator"
version = "2.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "99b8db626e31e5b81787b9783425769681b347011cc59471e33ea46d2ea0cf55"
dependencies = [
"pest",
"pest_meta",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "pest_meta"
version = "2.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "54be6e404f5317079812fc8f9f5279de376d8856929e21c184ecf6bbd692a11d"
dependencies = [
"maplit",
"pest",
"sha-1 0.8.2",
]
[[package]]
name = "pin-project"
version = "1.0.7"
@@ -967,6 +1095,12 @@ version = "1.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
[[package]]
name = "quick-error"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3"
[[package]]
name = "quote"
version = "1.0.9"
@@ -1238,17 +1372,29 @@ dependencies = [
"syn",
]
[[package]]
name = "sha-1"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7d94d0bede923b3cea61f3f1ff57ff8cdfd77b400fb8f9998949e0cf04163df"
dependencies = [
"block-buffer 0.7.3",
"digest 0.8.1",
"fake-simd",
"opaque-debug 0.2.3",
]
[[package]]
name = "sha-1"
version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8c4cfa741c5832d0ef7fab46cabed29c2aae926db0b11bb2069edd8db5e64e16"
dependencies = [
"block-buffer",
"block-buffer 0.9.0",
"cfg-if",
"cpufeatures",
"digest",
"opaque-debug",
"digest 0.9.0",
"opaque-debug 0.3.0",
]
[[package]]
@@ -1322,9 +1468,9 @@ dependencies = [
[[package]]
name = "tar"
version = "0.4.33"
version = "0.4.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0bcfbd6a598361fda270d82469fff3d65089dc33e175c9a131f7b4cd395f228"
checksum = "7d779dc6aeff029314570f666ec83f19df7280bb36ef338442cfa8c604021b80"
dependencies = [
"filetime",
"libc",
@@ -1365,18 +1511,18 @@ dependencies = [
[[package]]
name = "thiserror"
version = "1.0.24"
version = "1.0.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e0f4a65597094d4483ddaed134f409b2cb7c1beccf25201a9f73c719254fa98e"
checksum = "fa6f76457f59514c7eeb4e59d891395fab0b2fd1d40723ae737d64153392e9c6"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.24"
version = "1.0.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7765189610d8241a44529806d6fd1f2e0a08734313a35d5b3a556f92b381f3c0"
checksum = "8a36768c0fbf1bb15eca10defa29526bda730a2376c2ab4393ccfa16fb1a318d"
dependencies = [
"proc-macro2",
"quote",
@@ -1410,9 +1556,9 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c"
[[package]]
name = "tokio"
version = "1.6.0"
version = "1.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd3076b5c8cc18138b8f8814895c11eb4de37114a5d127bafdc5e55798ceef37"
checksum = "0a38d31d7831c6ed7aad00aa4c12d9375fd225a6dd77da1d25b707346319a975"
dependencies = [
"autocfg",
"bytes",
@@ -1535,7 +1681,7 @@ dependencies = [
"input_buffer",
"log",
"rand 0.8.3",
"sha-1",
"sha-1 0.9.6",
"url",
"utf-8",
]
@@ -1555,6 +1701,12 @@ version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "879f6906492a7cd215bfa4cf595b600146ccfac0c79bcbd1f3000162af5e8b06"
[[package]]
name = "ucd-trie"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56dee185309b50d1f11bfedef0fe6d036842e3fb77413abef29f8f8d1c5d4c1c"
[[package]]
name = "unicase"
version = "2.6.0"
@@ -1575,9 +1727,9 @@ dependencies = [
[[package]]
name = "unicode-normalization"
version = "0.1.17"
version = "0.1.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "07fbfce1c8a97d547e8b5334978438d9d6ec8c20e38f56d4a4374d181493eaef"
checksum = "33717dca7ac877f497014e10d73f3acf948c342bee31b5ca7892faf94ccc6b49"
dependencies = [
"tinyvec",
]

View File

@@ -1,5 +1,4 @@
[workspace]
members = [
"src/odin",
"src/huginn"

View File

@@ -26,33 +26,37 @@ ARG GITHUB_REF="not-set"
ARG GITHUB_REPOSITORY="not-set"
# User config
ENV PUID=1000 \
PGID=1000 \
ENV PUID=1000 \
PGID=1000 \
# Set up timezone information
TZ=America/Los_Angeles \
TZ=America/Los_Angeles \
# Server Specific env variables.
PORT="2456" \
NAME="Valheim Docker" \
WORLD="Dedicated" \
PUBLIC="1" \
PASSWORD="" \
PORT="2456" \
NAME="Valheim Docker" \
WORLD="Dedicated" \
PUBLIC="1" \
PASSWORD="" \
TYPE="Vanilla" \
UPDATE_ON_STARTUP="1" \
# Auto Update Configs
AUTO_UPDATE="0" \
AUTO_UPDATE_SCHEDULE="0 1 * * *" \
AUTO_UPDATE="0" \
AUTO_UPDATE_SCHEDULE="0 1 * * *" \
# Auto Backup Configs
AUTO_BACKUP="0" \
AUTO_BACKUP_SCHEDULE="*/15 * * * *" \
AUTO_BACKUP_REMOVE_OLD="1" \
AUTO_BACKUP_DAYS_TO_LIVE="3" \
AUTO_BACKUP_ON_UPDATE="0" \
AUTO_BACKUP_ON_SHUTDOWN="0" \
AUTO_BACKUP_PAUSE_WITH_NO_PLAYERS="0" \
UPDATE_ON_STARTUP="1" \
SAVE_LOCATION="/home/steam/.config/unity3d/IronGate/Valheim" \
MODS_LOCATION="/home/steam/staging/mods" \
GAME_LOCATION="/home/steam/valheim" \
BACKUP_LOCATION="/home/steam/backups" \
TYPE="Vanilla"
AUTO_BACKUP="0" \
AUTO_BACKUP_SCHEDULE="*/15 * * * *" \
AUTO_BACKUP_REMOVE_OLD="1" \
AUTO_BACKUP_DAYS_TO_LIVE="3" \
AUTO_BACKUP_ON_UPDATE="0" \
AUTO_BACKUP_ON_SHUTDOWN="0" \
AUTO_BACKUP_PAUSE_WITH_NO_PLAYERS="0" \
# Folders and file system related
SAVE_LOCATION="/home/steam/.config/unity3d/IronGate/Valheim" \
MODS_LOCATION="/home/steam/staging/mods" \
GAME_LOCATION="/home/steam/valheim" \
BACKUP_LOCATION="/home/steam/backups" \
# Webhook Information
WEBHOOK_STATUS_SUCCESSFUL="1" \
WEBHOOK_STATUS_FAILED="1"
COPY ./src/scripts/*.sh /home/steam/scripts/
COPY ./src/scripts/entrypoint.sh /entrypoint.sh
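
For reference, the webhook variables added to this ENV block can be overridden at container start like any of the existing ones. A minimal sketch, assuming the image built from this Dockerfile is published as mbround18/valheim:latest and using a placeholder webhook URL (ports, volumes, and the other required settings are omitted here):

    docker run --rm \
      -e WEBHOOK_URL="https://discord.com/api/webhooks/<id>/<token>" \
      -e WEBHOOK_STATUS_SUCCESSFUL="1" \
      -e WEBHOOK_STATUS_FAILED="1" \
      mbround18/valheim:latest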

View File

@@ -18,22 +18,29 @@ run_task = { name = "clippy", fork = true }
# Build Docker
[tasks."docker:build"]
workspace = false
command = "docker-compose"
args = ["-f", "./docker-compose.dev.yml", "build"]
command = "docker"
args = ["compose", "-f", "./docker-compose.dev.yml", "build"]
dependencies = ["setup"]
# Launch Docker
[tasks."docker:up"]
workspace = false
command = "docker-compose"
args = ["-f", "./docker-compose.dev.yml", "up"]
command = "docker"
args = ["compose", "-f", "./docker-compose.dev.yml", "up"]
dependencies = ["setup"]
# Launch Docker
[tasks."docker:down"]
workspace = false
command = "docker"
args = ["compose", "-f", "./docker-compose.dev.yml", "down"]
dependencies = ["setup"]
# Launch Docker
[tasks."docker:push"]
workspace = false
command = "docker-compose"
args = ["-f", "./docker-compose.dev.yml", "push"]
command = "docker"
args = ["compose", "-f", "./docker-compose.dev.yml", "push"]
dependencies = ["setup"]
# Start Development Workflow
@@ -44,7 +51,7 @@ dependencies = ["member_format", "member_clippy", "docker:up"]
# Start Development Workflow
[tasks."start:dev"]
workspace = false
dependencies = ["member_format", "member_clippy", "docker:build", "docker:up"]
dependencies = ["member_format", "member_clippy", "docker:down", "docker:build", "docker:up"]
# Start Development Workflow
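
For context, these `docker:*` entries are cargo-make task definitions, so after this change they shell out to `docker compose` instead of the standalone `docker-compose` binary. A minimal sketch of how they would typically be invoked, assuming cargo-make is installed:

    cargo install cargo-make   # once, if the runner is not already available
    cargo make docker:build    # docker compose -f ./docker-compose.dev.yml build
    cargo make docker:up       # docker compose -f ./docker-compose.dev.yml up
    cargo make docker:down     # new task: docker compose -f ./docker-compose.dev.yml down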

View File

@@ -6,15 +6,9 @@
| --------------------------------- | ---------------------------------- | -------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| WEBHOOK_URL | `<nothing>` | FALSE | Supply this to get information regarding your server's status in a webhook or Discord notification! [Click here to learn how to get a webhook url for Discord](https://help.dashe.io/en/articles/2521940-how-to-create-a-discord-webhook-url) |
| WEBHOOK_BROADCAST_MESSAGE | CHANGE_ME | TRUE | You set this. See `odin notify --help` |
| WEBHOOK_UPDATING_MESSAGE | `Server Status: Updating` | FALSE | Set the Updating message of your server |
| WEBHOOK_UPDATE_SUCCESSFUL_MESSAGE | `Server Status: Update Successful` | FALSE | Set the Update Successful message of your server |
| WEBHOOK_UPDATE_FAILED_MESSAGE | `Server Status: Update Failed` | FALSE | Set the Update Failed message of your server |
| WEBHOOK_STARTING_MESSAGE | `Server Status: Starting` | FALSE | Set the Starting message of your server |
| WEBHOOK_START_SUCCESSFUL_MESSAGE | `Server Status: Start Successful` | FALSE | Set the Start Successful message of your server |
| WEBHOOK_START_FAILED_MESSAGE | `Server Status: Start Failed` | FALSE | Set the Start Failed message of your server |
| WEBHOOK_STOPPING_MESSAGE | `Server Status: Stopping` | FALSE | Set the Stopping message of your server |
| WEBHOOK_STOP_SUCCESSFUL_MESSAGE | `Server Status: Stop Successful` | FALSE | Set the Stop Successful message of your server |
| WEBHOOK_STOP_FAILED_MESSAGE | `Server Status: Stop Failed` | FALSE | Set the Stop Failed message of your server |
| WEBHOOK_STATUS_RUNNING | "0" | FALSE | Posts a running status to discord when a command is initialized. |
| WEBHOOK_STATUS_FAILED | "1" | FALSE | Posts a failed status to discord in the event of a failure. |
| WEBHOOK_STATUS_SUCCESSFUL | "1" | FALSE | Posts a running status to discord when the command succeeds. |
## POST Body Example
@@ -43,6 +37,26 @@
- 201 was included in case you want to stream into an endpoint for creating a resource.
- Example 1, logging actions on the server.
- Example 2, using json-server to debug webhooks.
## Discord Configs
Odin generates a file in the server directory called `discord.json`. A set of variables, built from the notification event as shown below, is exposed to the templating engine. Note that if you use `{{some_var}}` and it is not listed in the table below, it renders as blank, and if any value renders blank, Discord may reject the post.
title: String::from(&notification.event_type.name),
description: String::from(&notification.event_message),
status: String::from(&notification.event_type.status),
timestamp: String::from(&notification.timestamp),
server_name: get_server_name(),
| Variable | Value | Example |
|----------|-------|-------------|
| `{{title}}` | Event title | `Start` |
| `{{description}}` | Event message | `Server Status: Start Successful` |
| `{{status}}` | Event status | `Successful` |
| `{{timestamp}}` | Timestamp of the event | `2021-05-30T08:16:39.294366700-07:00` |
| `{{server_name}}` | Name pulled from env or config | `Created with Valheim Docker` |
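
To make the template concrete, a `discord.json` freshly generated from the built-in defaults would look roughly like the sketch below; the shape follows `DiscordConfig` and `body_template()` in this commit's src/odin/files/discord.rs and the notifications discord module, and the `start`, `stop`, and `update` entries (omitted here) have the same shape as `broadcast`:

    {
      "events": {
        "broadcast": {
          "content": "Notification: {{server_name}}",
          "embeds": [
            {
              "title": "{{title}}",
              "description": "{{description}}",
              "color": 16388413
            }
          ]
        }
      }
    }

Each string field is rendered through Handlebars with the values from the table above before the payload is posted to Discord.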
## Developing/Debugging Webhooks

View File

@@ -13,12 +13,14 @@
"@auto-it/all-contributors": "10.29.2",
"@auto-it/git-tag": "10.29.2",
"@types/node": "15.3.0",
"auto": "10.29.2"
"auto": "10.29.2",
"typescript": "^3.9.9"
},
"dependencies": {
"@auto-it/core": "^10.27.0",
"@octokit/core": "^3.4.0",
"@types/semver": "^7.3.5",
"semver": "^7.3.5",
"typescript": "^4.2.4"
"@yarnpkg/pnpify": "^3.0.0-rc.3",
"semver": "^7.3.5"
}
}

View File

@@ -27,6 +27,8 @@ path = "lib.rs"
[dependencies]
handlebars = "3"
dotenv = "0.15.0"
log = "0.4.14"
clap = { version = "3.0.0-beta.2", features = ["yaml"] }
which = "4.1.0"

View File

@@ -135,14 +135,21 @@ subcommands:
- MESSAGE:
about: Message to send to the webhook.
required: true
index: 1
- TITLE:
long: title
required: false
takes_value: true
about: >
Title of the message block (required by discord & generic webhook, automatically supplied, default: "Broadcast")
- webhook_url:
long: webhook
value_name: WEBHOOK_URL
takes_value: true
about: >
Sets the webhook to send a notification to, (Can be set with ENV variable WEBHOOK_URL)
takes_value: true
- mod:install:
about: >
Installs a mod from a given source by downloading the zip file and then extracting it.
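
Putting the new `notify` options above together, a manual invocation would look roughly like the following (the webhook URL and message text are placeholders; `MESSAGE` is the positional argument and `--title` falls back to "Broadcast" when omitted):

    odin notify \
      --webhook "https://discord.com/api/webhooks/<id>/<token>" \
      --title "Server Maintenance" \
      "Server going down for maintenance in 10 minutes!"

This mirrors the release-notification step in the Docker workflow earlier in this commit, which runs the same subcommand through `mbround18/odin:latest`.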

View File

@@ -1,4 +1,5 @@
use crate::files::config::{config_file, write_config};
use crate::files::discord::{discord_file, write_discord};
use clap::ArgMatches;
use log::debug;
@@ -7,4 +8,8 @@ pub fn invoke(args: &ArgMatches) {
let config = config_file();
debug!("Writing config file...");
write_config(config, args);
debug!("Pulling Discord config file...");
let discord = discord_file();
debug!("Writing Discord config file...");
write_discord(discord);
}

View File

@@ -1,14 +1,34 @@
use crate::notifications::enums::notification_event::NotificationEvent;
use crate::utils::parse_arg_variable;
use crate::notifications::enums::notification_event::{EventType, NotificationEvent};
use crate::notifications::NotificationMessage;
use crate::utils::{get_server_name, parse_arg_variable};
use chrono::Local;
use clap::ArgMatches;
use log::{error, info};
pub fn invoke(args: &ArgMatches) {
let message = parse_arg_variable(&args, "MESSAGE", "Test Notification");
let webhook_url = parse_arg_variable(&args, "webhook_url", "");
let name = String::from(&args.value_of("TITLE").unwrap_or("Broadcast").to_string());
let event_message = String::from(
&args
.value_of("MESSAGE")
.unwrap_or("Test Notification")
.to_string(),
);
let webhook_url = parse_arg_variable(&args, "WEBHOOK_URL", "");
let notification = NotificationMessage {
author: get_server_name(),
event_type: EventType {
name,
status: "Triggered".to_string(),
},
event_message,
timestamp: Local::now().to_rfc3339(),
};
if !webhook_url.is_empty() {
info!("Sending Broadcast: {}", message);
NotificationEvent::Broadcast.send_custom_notification(webhook_url.as_str(), message.as_str())
info!(
"Sending Broadcast: {}",
serde_json::to_string_pretty(&notification).unwrap()
);
NotificationEvent::Broadcast.send_custom_notification(webhook_url.as_str(), &notification)
} else {
error!("Failed to send notification! Webhook url not provided!")
}

View File

@@ -1,28 +1,31 @@
use crate::files::config::load_config;
use crate::server;
use crate::{
files::config::load_config,
notifications::enums::{event_status::EventStatus, notification_event::NotificationEvent},
server,
};
use clap::ArgMatches;
use log::{debug, error, info};
use std::process::exit;
pub fn invoke(args: &ArgMatches) {
info!("Setting up start scripts...");
debug!("Loading config file...");
info!(target: "commands_start", "Setting up start scripts...");
NotificationEvent::Start(EventStatus::Running).send_notification();
debug!(target: "commands_start", "Loading config file...");
let config = load_config();
let dry_run: bool = args.is_present("dry_run");
debug!("Dry run condition: {}", dry_run);
info!("Looking for burial mounds...");
debug!(target: "commands_start", "Dry run condition: {}", dry_run);
info!(target: "commands_start", "Looking for burial mounds...");
if !dry_run {
match server::start_daemonized(config) {
Ok(_) => info!("Success, daemonized"),
Ok(_) => info!(target: "commands_start", "Success, daemonized"),
Err(e) => {
error!("Error: {}", e);
error!(target: "commands_start", "Error: {}", e);
exit(1);
}
}
} else {
info!(
target: "commands_start",
"This command would have launched\n{} -nographics -batchmode -port {} -name {} -world {} -password {} -public {}",
&config.command,
&config.port,

View File

@@ -3,9 +3,12 @@ use log::{error, info};
use std::process::exit;
use crate::notifications::enums::event_status::EventStatus;
use crate::notifications::enums::notification_event::NotificationEvent;
use crate::{constants, server, utils::get_working_dir};
pub fn invoke(args: &ArgMatches) {
NotificationEvent::Stop(EventStatus::Running).send_notification();
info!("Stopping server {}", get_working_dir());
if args.is_present("dry_run") {
info!("This command would have run: ");
@@ -17,4 +20,5 @@ pub fn invoke(args: &ArgMatches) {
}
server::blocking_shutdown();
}
NotificationEvent::Stop(EventStatus::Successful).send_notification();
}

View File

@@ -1,16 +1,24 @@
use crate::constants;
use crate::files::ValheimArguments;
use crate::files::{FileManager, ManagedFile};
use crate::utils::environment::fetch_var;
use crate::utils::{get_working_dir, parse_arg_variable};
use clap::ArgMatches;
use log::{debug, error};
use std::fs;
use std::path::PathBuf;
use std::process::exit;
use serde::{Deserialize, Serialize};
use std::{fs, path::PathBuf, process::exit};
const ODIN_CONFIG_FILE_VAR: &str = "ODIN_CONFIG_FILE";
#[derive(Deserialize, Serialize)]
pub struct ValheimArguments {
pub(crate) port: String,
pub(crate) name: String,
pub(crate) world: String,
pub(crate) public: String,
pub(crate) password: String,
pub(crate) command: String,
}
pub fn load_config() -> ValheimArguments {
let file = config_file();
let config = read_config(file);
@@ -63,7 +71,7 @@ pub fn write_config(config: ManagedFile, args: &ArgMatches) -> bool {
password: parse_arg_variable(args, "password", ""),
command,
};
let content_to_write = serde_json::to_string(content).unwrap();
let content_to_write = serde_json::to_string_pretty(content).unwrap();
debug!(
"Writing config content: \n{}",
serde_json::to_string_pretty(content).unwrap()

72
src/odin/files/discord.rs Normal file
View File

@@ -0,0 +1,72 @@
use crate::{
files::{FileManager, ManagedFile},
notifications::discord::{body_template, DiscordWebHookBody},
utils::{environment::fetch_var, path_exists},
};
use log::debug;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
const ODIN_DISCORD_FILE_VAR: &str = "ODIN_DISCORD_FILE";
#[derive(Deserialize, Serialize)]
pub struct DiscordConfigEvents {
broadcast: DiscordWebHookBody,
start: DiscordWebHookBody,
stop: DiscordWebHookBody,
update: DiscordWebHookBody,
}
#[derive(Deserialize, Serialize)]
pub struct DiscordConfig {
pub(crate) events: HashMap<String, DiscordWebHookBody>,
}
fn basic_template() -> DiscordConfig {
let events: HashMap<String, DiscordWebHookBody> = HashMap::new();
let mut config = DiscordConfig { events };
config
.events
.insert(String::from("broadcast"), body_template());
config.events.insert(String::from("start"), body_template());
config.events.insert(String::from("stop"), body_template());
config
.events
.insert(String::from("update"), body_template());
config
}
pub fn load_discord() -> DiscordConfig {
let file = discord_file();
read_discord(file)
}
pub fn discord_file() -> ManagedFile {
let name = fetch_var(ODIN_DISCORD_FILE_VAR, "discord.json");
debug!("Config file set to: {}", name);
ManagedFile { name }
}
pub fn read_discord(discord: ManagedFile) -> DiscordConfig {
let content = discord.read();
if content.is_empty() {
basic_template()
} else {
serde_json::from_str(content.as_str()).unwrap()
}
}
pub fn write_discord(discord: ManagedFile) -> bool {
let notification = basic_template();
let content_to_write = serde_json::to_string_pretty(&notification).unwrap();
debug!(
"Writing discord config: \n{}",
serde_json::to_string_pretty(&notification).unwrap()
);
if path_exists(&discord.path()) {
false
} else {
discord.write(content_to_write)
}
}

View File

@@ -1,25 +1,15 @@
pub mod config;
pub mod discord;
use crate::executable::create_execution;
use crate::utils::get_working_dir;
use log::{error, info};
use serde::{Deserialize, Serialize};
use log::{debug, error, info};
use std::fs;
use std::fs::{remove_file, File};
use std::fs::{create_dir_all, remove_file, File};
use std::io::Write;
use std::path::Path;
use std::process::exit;
#[derive(Deserialize, Serialize)]
pub struct ValheimArguments {
pub(crate) port: String,
pub(crate) name: String,
pub(crate) world: String,
pub(crate) public: String,
pub(crate) password: String,
pub(crate) command: String,
}
pub fn create_file(path: &str) -> File {
let output_path = Path::new(path);
match File::create(output_path) {
@@ -56,6 +46,8 @@ pub trait FileManager {
}
}
fn write(&self, content: String) -> bool {
debug!("Writing file path: {}", self.path().as_str());
create_dir_all(Path::new(self.path().as_str()).parent().unwrap()).unwrap();
let mut file = create_file(self.path().as_str());
match file.write_all(content.as_bytes()) {
Ok(_) => {
@@ -89,7 +81,8 @@ pub struct ManagedFile {
impl FileManager for ManagedFile {
fn path(&self) -> String {
let supplied_path = Path::new(self.name.as_str());
if supplied_path.exists() {
debug!("Managed File: Path - {}", self.name.as_str());
if supplied_path.parent().unwrap().exists() {
supplied_path.to_str().unwrap().to_string()
} else {
format!("{}/{}", get_working_dir(), self.name)

View File

@@ -1,4 +1,5 @@
use clap::{load_yaml, App, AppSettings};
use dotenv::dotenv;
use log::debug;
use crate::executable::handle_exit_status;
@@ -16,10 +17,8 @@ pub mod server;
mod steamcmd;
pub mod utils;
use crate::notifications::enums::event_status::EventStatus;
use crate::notifications::enums::notification_event::NotificationEvent;
fn main() {
dotenv().ok();
// The YAML file is found relative to the current file, similar to how modules are found
let yaml = load_yaml!("cli.yaml");
let app = App::from(yaml)
@@ -39,20 +38,12 @@ fn main() {
};
match matches.subcommand().expect("Subcommand is required") {
("configure", sub_m) => commands::configure::invoke(sub_m),
("install", _) => {
let result = commands::install::invoke(constants::GAME_ID);
handle_exit_status(result, "Successfully installed Valheim!".to_string())
}
("start", sub_m) => {
NotificationEvent::Start(EventStatus::Running).send_notification();
commands::start::invoke(sub_m);
NotificationEvent::Start(EventStatus::Successful).send_notification();
}
("stop", sub_m) => {
NotificationEvent::Stop(EventStatus::Running).send_notification();
commands::stop::invoke(sub_m);
NotificationEvent::Stop(EventStatus::Successful).send_notification();
}
("install", _) => handle_exit_status(
commands::install::invoke(constants::GAME_ID),
"Successfully installed Valheim!".to_string(),
),
("start", sub_m) => commands::start::invoke(sub_m),
("stop", sub_m) => commands::stop::invoke(sub_m),
("backup", sub_m) => commands::backup::invoke(sub_m),
("notify", sub_m) => commands::notify::invoke(sub_m),
("update", sub_m) => commands::update::invoke(sub_m),

View File

@@ -1,13 +1,10 @@
use crate::files::{
config::{config_file, read_config},
FileManager,
};
use crate::notifications::EventStatus;
use crate::{files::discord::load_discord, notifications::EventStatus};
use crate::notifications::NotificationMessage;
use inflections::case::to_title_case;
use crate::utils::get_server_name;
use handlebars::Handlebars;
use log::debug;
use serde::{Deserialize, Serialize};
use std::{env, str::FromStr};
#[derive(Debug)]
enum Color {
@@ -36,54 +33,76 @@ pub fn is_discord_webhook(webhook_url: &str) -> bool {
#[derive(Deserialize, Serialize)]
pub struct DiscordWebHookEmbed {
title: String,
description: String,
color: i32,
pub(crate) title: String,
pub(crate) description: String,
pub(crate) color: i32,
}
#[derive(Deserialize, Serialize)]
pub struct DiscordWebHookBody {
content: String,
embeds: Vec<DiscordWebHookEmbed>,
pub(crate) content: String,
pub(crate) embeds: Vec<DiscordWebHookEmbed>,
}
impl DiscordWebHookBody {
pub fn new(event: &NotificationMessage) -> Self {
// Some contexts currently don't get passed in $NAME so fall back to reading from the config
// if it's missing or invalid UTF-8
let server_name = match env::var("NAME") {
Ok(name) if !name.is_empty() => name,
_ => {
let config_file = config_file();
debug!(
"Empty or missing $NAME. Falling back to reading from {}",
config_file.path()
);
let config = read_config(config_file);
config.name
}
};
let status = &event.event_type.status;
let event_status = EventStatus::from_str(status).unwrap_or(EventStatus::Failed);
let color: i32 = Color::from(event_status) as i32;
let payload = DiscordWebHookBody {
content: to_title_case(format!("Notification From: {}", server_name).as_str()),
embeds: vec![DiscordWebHookEmbed {
title: String::from(&event.event_type.name),
description: String::from(&event.event_message),
color,
}],
};
debug!(
"Discord Payload: {}",
serde_json::to_string(&payload).unwrap()
);
payload
pub fn body_template() -> DiscordWebHookBody {
DiscordWebHookBody {
content: "Notification: {{server_name}}".to_string(),
embeds: vec![DiscordWebHookEmbed {
title: "{{title}}".to_string(),
description: "{{description}}".to_string(),
color: 16388413,
}],
}
}
#[derive(Deserialize, Serialize)]
pub struct IncomingNotification {
title: String,
description: String,
status: String,
timestamp: String,
server_name: String,
}
impl From<&NotificationMessage> for IncomingNotification {
fn from(notification: &NotificationMessage) -> IncomingNotification {
IncomingNotification {
title: String::from(&notification.event_type.name),
description: String::from(&notification.event_message),
status: String::from(&notification.event_type.status),
timestamp: String::from(&notification.timestamp),
server_name: get_server_name(),
}
}
}
impl From<&NotificationMessage> for DiscordWebHookBody {
fn from(event: &NotificationMessage) -> Self {
Self::new(event)
let discord_file = load_discord();
let mut handlebars = Handlebars::new();
let default_event = body_template();
let discord_event = &discord_file
.events
.get(&event.event_type.name.as_str().to_lowercase())
.unwrap_or(&default_event);
let source = serde_json::to_string(&discord_event).unwrap();
debug!("Discord Notification Template: {}", &source);
handlebars
.register_template_string("notification", source)
.unwrap();
let values = IncomingNotification::from(event);
debug!(
"Discord Notification Values: {}",
serde_json::to_string(&values).unwrap()
);
let rendered = match handlebars.render("notification", &values) {
Ok(value) => {
debug!("Discord Notification Parsed: \n{}", value);
value
}
Err(msg) => panic!("{}", msg.to_string()),
};
serde_json::from_str(&rendered).unwrap()
}
}

View File

@@ -1,28 +1,31 @@
use std::env;
use crate::utils::get_server_name;
use crate::{
notifications::{
discord::{is_discord_webhook, DiscordWebHookBody},
enums::{
event_status::EventStatus,
notification_event::{EventType, NotificationEvent},
},
},
utils::environment::fetch_var,
};
use chrono::prelude::*;
use inflections::case::{to_constant_case, to_title_case};
use inflections::case::to_title_case;
use log::{debug, error, info, warn};
use reqwest::blocking::RequestBuilder;
use reqwest::StatusCode;
use reqwest::{blocking::RequestBuilder, StatusCode, Url};
use serde::{Deserialize, Serialize};
use crate::notifications::discord::{is_discord_webhook, DiscordWebHookBody};
use crate::notifications::enums::event_status::EventStatus;
use crate::notifications::enums::notification_event::{EventType, NotificationEvent};
use crate::utils::environment::fetch_var;
use reqwest::Url;
mod discord;
pub mod discord;
pub mod enums;
pub const WEBHOOK_URL: &str = "WEBHOOK_URL";
#[derive(Deserialize, Serialize)]
pub struct NotificationMessage {
event_type: EventType,
event_message: String,
timestamp: String,
pub(crate) author: String,
pub(crate) event_type: EventType,
pub(crate) event_message: String,
pub(crate) timestamp: String,
}
fn fetch_webhook_url() -> String {
@@ -48,17 +51,10 @@ fn is_webhook_enabled() -> bool {
false
}
fn parse_webhook_env_var(event_type: EventType) -> String {
if event_type.name.to_lowercase().eq("broadcast") {
to_constant_case(format!("WEBHOOK_{}_MESSAGE", event_type.name).as_str())
} else {
to_constant_case(format!("WEBHOOK_{}_{}_MESSAGE", event_type.name, event_type.status).as_str())
}
}
impl NotificationEvent {
fn create_notification_message(&self) -> NotificationMessage {
NotificationMessage {
author: format!("Notification: {}", get_server_name()),
event_type: self.to_event_type(),
event_message: format!(
"Server Status: {}",
@@ -94,15 +90,16 @@ impl NotificationEvent {
debug!("Webhook URL: {}", webhook_url);
client.post(webhook_url)
}
pub fn send_custom_notification(&self, webhook_url: &str, message: &str) {
let mut notification = self.create_notification_message();
notification.event_message = message.to_string();
pub fn send_custom_notification(&self, webhook_url: &str, notification: &NotificationMessage) {
debug!("Webhook enabled, sending notification {}", self.to_string());
debug!(
"Event Received: {}",
serde_json::to_string_pretty(&notification).unwrap()
);
let mut req = self.build_request(webhook_url);
req = if is_discord_webhook(webhook_url) {
info!("Sending discord notification <3");
req.json(&DiscordWebHookBody::from(&notification))
req.json(&DiscordWebHookBody::from(notification))
} else {
debug!(
"Webhook Payload: {}",
@@ -113,12 +110,17 @@ impl NotificationEvent {
self.handle_request(req);
}
pub fn send_notification(&self) {
debug!("Checking for notification information...");
if is_webhook_enabled() {
debug!("Webhook found! Starting notification process...");
let event = self.create_notification_message();
let env_var_name = parse_webhook_env_var(event.event_type);
let notification_message = env::var(env_var_name).unwrap_or(event.event_message);
self.send_custom_notification(fetch_webhook_url().as_str(), notification_message.as_str());
let enabled_var = format!("WEBHOOK_STATUS_{}", event.event_type.status).to_uppercase();
debug!("Checking ENV Var: {}", &enabled_var);
if fetch_var(&enabled_var, "0").eq("1") {
self.send_custom_notification(&fetch_webhook_url(), &event);
} else {
debug!("Skipping notification, {} is set to 0", enabled_var);
}
} else {
debug!("Skipping notification, no webhook supplied!");
}

View File

@@ -4,12 +4,14 @@ use log::{debug, error, info};
use std::{io, process::Child};
use crate::mods::bepinex::BepInExEnvironment;
use crate::notifications::enums::event_status::EventStatus;
use crate::notifications::enums::notification_event::NotificationEvent;
use crate::utils::common_paths::{game_directory, saves_directory};
use crate::utils::environment::fetch_var;
use crate::{
constants,
executable::create_execution,
files::{create_file, ValheimArguments},
files::{config::ValheimArguments, create_file},
messages,
utils::environment,
};
@@ -29,13 +31,14 @@ pub fn start_daemonized(config: ValheimArguments) -> Result<CommandResult, Daemo
.exit_action(|| {
let bepinex_env = BepInExEnvironment::new();
if bepinex_env.is_installed() {
info!("Server has been started with BepInEx! Keep in mind this may cause errors!!");
info!(target: "server_startup","Server has been started with BepInEx! Keep in mind this may cause errors!!");
messages::modding_disclaimer();
debug!("{:#?}", bepinex_env);
debug!(target: "server_startup","{:#?}", bepinex_env);
}
info!("Server has been started and Daemonized. It should be online shortly!");
info!("Keep an eye out for 'Game server connected' in the log!");
info!("(this indicates its online without any errors.)")
info!(target: "server_startup","Server has been started and Daemonized. It should be online shortly!");
info!(target: "server_startup","Keep an eye out for 'Game server connected' in the log!");
NotificationEvent::Start(EventStatus::Successful).send_notification();
info!(target: "server_startup","(this indicates its online without any errors.)")
})
.privileged_action(move || start(&config))
.start()
@@ -43,12 +46,12 @@ pub fn start_daemonized(config: ValheimArguments) -> Result<CommandResult, Daemo
pub fn start(config: &ValheimArguments) -> CommandResult {
let mut command = create_execution(&config.command);
info!("--------------------------------------------------------------------------------------------------------------");
info!(target: "server_startup","--------------------------------------------------------------------------------------------------------------");
let ld_library_path_value = environment::fetch_multiple_var(
constants::LD_LIBRARY_PATH_VAR,
format!("{}/linux64", game_directory()).as_str(),
);
debug!("Setting up base command");
debug!(target: "server_startup","Setting up base command");
let mut base_command = command
// Extra launch arguments
.arg(fetch_var(
@@ -75,27 +78,27 @@ pub fn start(config: &ValheimArguments) -> CommandResult {
// If no password env variable
if !is_public && !is_vanilla && no_password {
debug!("No password found, skipping password flag.")
debug!(target: "server_startup","No password found, skipping password flag.")
} else if no_password && (is_public || is_vanilla) {
error!("Cannot run you server with no password! PUBLIC must be 0 and cannot be a Vanilla type server.");
exit(1)
} else {
debug!("Password found, adding password flag.");
debug!(target: "server_startup","Password found, adding password flag.");
base_command = base_command.args(&["-password", &config.password.as_str()]);
}
// Tack on save dir at the end.
base_command = base_command.args(&["-savedir", &saves_directory()]);
info!("Executable: {}", &config.command);
info!("Launching Command...");
info!(target: "server_startup","Executable: {}", &config.command);
info!(target: "server_startup","Launching Command...");
let bepinex_env = BepInExEnvironment::new();
if bepinex_env.is_installed() {
info!("BepInEx detected! Switching to run with BepInEx...");
debug!("BepInEx Environment: \n{:#?}", bepinex_env);
info!(target: "server_startup","BepInEx detected! Switching to run with BepInEx...");
debug!(target: "server_startup","BepInEx Environment: \n{:#?}", bepinex_env);
bepinex_env.launch(base_command)
} else {
info!("Everything looks good! Running normally!");
info!(target: "server_startup","Everything looks good! Running normally!");
base_command
.env(constants::LD_LIBRARY_PATH_VAR, ld_library_path_value)
.spawn()

View File

@@ -7,6 +7,8 @@ use std::env;
use std::path::Path;
use crate::constants;
use crate::files::config::{config_file, read_config};
use crate::files::FileManager;
use reqwest::Url;
pub fn get_working_dir() -> String {
@@ -16,6 +18,23 @@ pub fn get_working_dir() -> String {
)
}
pub fn get_server_name() -> String {
// Some contexts currently don't get passed in $NAME so fall back to reading from the config
// if it's missing or invalid UTF-8
match env::var("NAME") {
Ok(name) if !name.is_empty() => name,
_ => {
let config_file = config_file();
debug!(
"Empty or missing $NAME. Falling back to reading from {}",
config_file.path()
);
let config = read_config(config_file);
config.name
}
}
}
pub fn parse_arg_variable(args: &ArgMatches, name: &str, default: &str) -> String {
debug!("Checking env for {}", name);
if let Ok(env_val) = env::var(name.to_uppercase()) {

View File

@@ -5,9 +5,11 @@
export NAME="$(sed -e 's/^"//' -e 's/"$//' <<<"$NAME")"
export WORLD="$(sed -e 's/^"//' -e 's/"$//' <<<"$WORLD")"
export PASSWORD="$(sed -e 's/^"//' -e 's/"$//' <<<"$PASSWORD")"
export ODIN_CONFIG_FILE="${ODIN_CONFIG_FILE:-"${GAME_LOCATION}/config.json"}"
export ODIN_DISCORD_FILE="${ODIN_DISCORD_FILE:-"${GAME_LOCATION}/discord.json"}"
# Set up timezone
ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ >/etc/timezone
ln -snf "/usr/share/zoneinfo/$TZ" /etc/localtime && echo "$TZ" >/etc/timezone
# shellcheck disable=SC2039
if [ "${EUID}" -ne 0 ]; then
@@ -41,7 +43,7 @@ check_version() {
clean_up() {
echo "Safely shutting down..." >>/home/steam/output.log
if [[ -n $CRON_PID ]]; then
kill $CRON_PID
kill "$CRON_PID"
fi
}
@@ -56,6 +58,7 @@ setup_cron() {
PRESET_ENV="
DEBUG_MODE=${DEBUG_MODE:=0}
ODIN_CONFIG_FILE=${ODIN_CONFIG_FILE}
ODIN_DISCORD_FILE=${ODIN_DISCORD_FILE}
ODIN_WORKING_DIR=${ODIN_WORKING_DIR}
SAVE_LOCATION=${SAVE_LOCATION}
MODS_LOCATION=${MODS_LOCATION}
@@ -66,7 +69,10 @@ setup_cron() {
PORT=${PORT}
PUBLIC=${PUBLIC}
UPDATE_ON_STARTUP=${UPDATE_ON_STARTUP}
WEBHOOK_URL=${WEBHOOK_URL:-""}
WEBHOOK_STATUS_SUCCESSFUL=${WEBHOOK_STATUS_SUCCESSFUL:-"1"}
WEBHOOK_STATUS_FAILED=${WEBHOOK_STATUS_FAILED:-"1"}
AUTO_UPDATE=${AUTO_UPDATE}
AUTO_UPDATE_PAUSE_WITH_PLAYERS=${AUTO_UPDATE_PAUSE_WITH_PLAYERS}
@@ -81,16 +87,17 @@ setup_cron() {
CRON_ENV="${PRESET_ENV} ${4}"
CRON_ENV="$(echo "${CRON_ENV}" | tr '\n' " " )"
LOG_LOCATION="/home/steam/valheim/logs/$CRON_NAME.out"
[ -f "$LOG_LOCATION" ] && rm $LOG_LOCATION
mkdir -p "/home/steam/valheim/logs"
[ -f "$LOG_LOCATION" ] && rm "$LOG_LOCATION"
printf "%s %s /usr/sbin/gosu steam /bin/bash %s >> %s 2>&1" \
"${CRON_SCHEDULE}" \
"${CRON_ENV}" \
"${SCRIPT_PATH}" \
"${LOG_LOCATION}" \
>/etc/cron.d/${CRON_NAME}
echo "" >>/etc/cron.d/${CRON_NAME}
> "/etc/cron.d/${CRON_NAME}"
echo "" >> "/etc/cron.d/${CRON_NAME}"
# Give execution rights on the cron job
chmod 0644 /etc/cron.d/${CRON_NAME}
chmod 0644 "/etc/cron.d/${CRON_NAME}"
set +f
}
@@ -100,18 +107,18 @@ setup_filesystem() {
STEAM_GID=${PGID:=1000}
# Save Files
mkdir -p ${SAVE_LOCATION}
mkdir -p "${SAVE_LOCATION}"
# Mod staging location
mkdir -p ${MODS_LOCATION}
mkdir -p "${MODS_LOCATION}"
# Backups
mkdir -p ${BACKUP_LOCATION}
mkdir -p "${BACKUP_LOCATION}"
# Valheim Server
mkdir -p ${GAME_LOCATION}
mkdir -p ${GAME_LOCATION}/logs
chown -R ${STEAM_UID}:${STEAM_GID} ${GAME_LOCATION}
mkdir -p "${GAME_LOCATION}"
mkdir -p "${GAME_LOCATION}/logs"
chown -R ${STEAM_UID}:${STEAM_GID} "${GAME_LOCATION}"
cp /home/steam/steamcmd/linux64/steamclient.so /home/steam/valheim

View File

@@ -28,10 +28,10 @@ cleanup() {
/bin/bash /home/steam/scripts/auto_backup.sh "shutdown"
fi
if [[ -n $TAIL_PID ]]; then
kill $TAIL_PID
kill "$TAIL_PID"
fi
if [[ -n $ODIN_HTTP_SERVER_PID ]]; then
kill $ODIN_HTTP_SERVER_PID
kill "$ODIN_HTTP_SERVER_PID"
fi
}
@@ -141,7 +141,10 @@ log "Herding Graydwarfs..."
log_names=("valheim_server.log" "valheim_server.err" "output.log" "auto-update.out" "auto-backup.out")
log_files=("${log_names[@]/#/\/home\/steam\/valheim\/logs/}")
touch "${log_files[@]}" # Destroy logs on start up, this can be changed later to roll logs or archive them.
# shellcheck disable=SC2086
tail -F ${log_files[*]} &
export TAIL_PID=$!
# Waiting for logs.
wait $TAIL_PID

5050
yarn.lock

File diff suppressed because it is too large