92 Commits
1.0.2 ... main

Author SHA1 Message Date
gyurix
a46e1d69e2 Bump version to 1.1.5 in entrypoint script
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-09-02 14:07:37 +02:00
gyurix
62a9bdc65d Update install script to replace local-proxy with local-loadbalancer
All checks were successful
continuous-integration/drone/push Build is passing
2025-09-02 12:03:08 +02:00
gyurix
4361299a0e Update system list in entrypoint script to include local-loadbalancer.json
All checks were successful
continuous-integration/drone/push Build is passing
2025-09-02 11:56:06 +02:00
gyurix
982b00acff Set working directory for scheduler entrypoint and upgrade functions
All checks were successful
continuous-integration/drone/push Build is passing
2025-09-02 11:49:17 +02:00
gyurix
41c7c73abf Refactor upgrade function to ensure .json extension is appended correctly
All checks were successful
continuous-integration/drone/push Build is passing
2025-09-02 11:40:50 +02:00
gyurix
2bd803c4b5 Update upgrade function to append .json extension if missing
All checks were successful
continuous-integration/drone/push Build is passing
2025-09-02 09:25:55 +02:00
gyurix
85dd5aa4ad Refactor backup service parameters and add default SSH host value
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-08-26 12:32:36 +02:00
gyurix
81dd98b952 Add default values for SSH parameters in backup service functions
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-08-26 09:39:33 +02:00
gyurix
7125f17b22 Fix JSON formatting and add TYPE field in backup configuration
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-08-26 08:05:40 +02:00
bd0753b4db json b64 debug removed
All checks were successful
continuous-integration/drone/tag Build is passing
2025-08-14 17:44:16 +00:00
gyurix
779f28a5f7 Clean temporary files in shared volume during task execution
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-08-13 09:22:38 +02:00
gyurix
9878fa9ce3 Bump version to 1.1.3 in entrypoint script
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-08-09 09:54:36 +02:00
gyurix
aeb679a898 Add condition to check for upgrade-framework.json before starting webserver
All checks were successful
continuous-integration/drone/push Build is passing
2025-08-09 09:39:59 +02:00
e40b900750 Update scripts/scheduler/entrypoint.sh
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-08-08 09:43:21 +00:00
ae83abef53 upgrade STATUS
All checks were successful
continuous-integration/drone/push Build is passing
2025-08-08 09:35:12 +00:00
0e05eaa531 version fix
All checks were successful
continuous-integration/drone/push Build is passing
2025-08-08 09:01:06 +00:00
gyurix
8c345020f7 Refactor debug mode handling in entrypoint script for improved clarity
All checks were successful
continuous-integration/drone/push Build is passing
2025-08-07 16:48:26 +02:00
gyurix
10a6bc704c Enhance entrypoint script with debug mode and Docker registry environment variables
All checks were successful
continuous-integration/drone/push Build is passing
2025-08-07 16:45:46 +02:00
gyurix
ba921a53a2 Log the Docker run command in the entrypoint script for debugging purposes
All checks were successful
continuous-integration/drone/push Build is passing
2025-08-07 16:38:25 +02:00
gyurix
7bb96a1863 Add support for Docker registry credentials in entrypoint script
All checks were successful
continuous-integration/drone/push Build is passing
2025-08-07 16:27:39 +02:00
gyurix
d0f65b8841 Add Docker registry login functionality to entrypoint script
All checks were successful
continuous-integration/drone/push Build is passing
2025-08-07 16:22:57 +02:00
gyurix
72e14d7199 Check for the existence of JSON file before creating it in add_json_target function
All checks were successful
continuous-integration/drone/push Build is passing
2025-08-07 16:08:55 +02:00
gyurix
566aed3752 Prevent overwriting existing JSON files in add_json_target function
All checks were successful
continuous-integration/drone/push Build is passing
2025-08-07 16:06:41 +02:00
3ab1b3ff54 do not create upgrade.json
All checks were successful
continuous-integration/drone/push Build is passing
2025-08-07 13:49:40 +00:00
dafd8f345a git pull log
All checks were successful
continuous-integration/drone/push Build is passing
2025-08-07 13:35:52 +00:00
gyurix
56e2f6f05d Silence output of git pull in entrypoint script to reduce log clutter
All checks were successful
continuous-integration/drone/push Build is passing
2025-08-07 15:28:26 +02:00
gyurix
e343bc2cb5 Enhance entrypoint script to log completion of git pull operation
All checks were successful
continuous-integration/drone/push Build is passing
2025-08-07 15:07:53 +02:00
gyurix
6d267dfd04 Merge branch 'main' of https://git.format.hu/safebox/framework-scheduler
All checks were successful
continuous-integration/drone/push Build is passing
2025-08-07 13:21:42 +02:00
gyurix
b049833844 Add logging for git pull operation in entrypoint script 2025-08-07 13:21:40 +02:00
fbf15c52b5 Update scripts/scheduler/entrypoint.sh
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-08-07 08:21:45 +00:00
3e1fee6022 create repo json
All checks were successful
continuous-integration/drone/push Build is passing
2025-08-06 14:05:02 +00:00
gyurix
27c5be7964 Merge branch 'main' of https://git.format.hu/safebox/framework-scheduler
All checks were successful
continuous-integration/drone/push Build is passing
2025-08-06 15:57:39 +02:00
gyurix
7b2ac2e56a Create backup server secrets directory and update JSON output path 2025-08-06 15:57:37 +02:00
9407f7caaf Merge branch 'main' of https://git.format.hu/safebox/framework-scheduler
All checks were successful
continuous-integration/drone/push Build is passing
2025-08-06 10:11:27 +00:00
12cfe301e3 VPN_STATUS swap 2025-08-06 10:10:47 +00:00
gyurix
c2a1fbd9d8 Set default SSH port and user credentials in backup client function
All checks were successful
continuous-integration/drone/push Build is passing
2025-08-06 10:49:02 +02:00
gyurix
d43536b67b Enhance backup server configuration by adding default SSH credentials and updating directory structure for backups
All checks were successful
continuous-integration/drone/push Build is passing
2025-08-06 10:18:48 +02:00
gyurix
da5521295d Refactor entrypoint script to set installed version number and add debug mode handling for input directory
All checks were successful
continuous-integration/drone/push Build is passing
2025-07-31 11:45:19 +02:00
gyurix
e39e1033c6 Add versioning output to entrypoint script and simplify JSON handling
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-07-29 14:25:48 +02:00
gyurix
ec9d4c6e4f Add backup server configuration and versioning to entrypoint script
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-07-29 13:51:05 +02:00
6876c9351b Merge branch 'main' of https://git.format.hu/safebox/framework-scheduler
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-07-24 08:35:27 +00:00
4df278bb0b save_vpn changes 2025-07-24 08:35:10 +00:00
gyurix
550661c205 Add debug mode environment variable handling in upgrade_scheduler function
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-07-24 10:18:54 +02:00
gyurix
131982b37f Refactor upgrade_scheduler function to streamline debug mode handling and simplify Docker start command 2025-07-24 10:17:23 +02:00
gyurix
e6e772055e Pass task name to add_json_target function for improved JSON file naming
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-07-23 18:09:02 +02:00
gyurix
2af0300c5a Add function to create JSON target file for task output in entrypoint script
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-07-23 17:36:46 +02:00
gyurix
5b2e3436f5 Enhance upgrade function to handle webserver service separately and improve logging
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-07-23 16:59:46 +02:00
gyurix
274e9456ea Enhance framework scheduler upgrade process with temporary file cleanup and reduced sleep duration
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-07-23 16:30:49 +02:00
gyurix
c1717a06e7 Add debug messages for framework scheduler upgrade and container removal
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-07-23 16:26:09 +02:00
gyurix
e28d6e47a8 Update entrypoint script to specify full path for Docker pull command and add debug message for framework scheduler upgrade
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-07-23 16:13:01 +02:00
gyurix
2e0129402d Refactor upgrade function to simplify web-installer handling and streamline service start process
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-07-23 15:57:52 +02:00
gyurix
3c1e396b19 Update framework scheduler name format and append random suffix in upgrade function
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-07-23 13:12:33 +02:00
gyurix
96c9dddf89 Update framework scheduler name and streamline upgrade process in entrypoint script
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-07-23 10:10:33 +02:00
62f9ff63a8 deployment apps tree space fix
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-07-22 15:08:17 +00:00
gyurix
0102fc9241 Add debug mode support to Docker run command in entrypoint script
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-07-22 14:45:55 +02:00
gyurix
5601eaf8b7 Refactor Docker start command in entrypoint script for cleaner execution
All checks were successful
continuous-integration/drone/push Build is passing
2025-07-22 14:42:40 +02:00
gyurix
33d154eccc Refactor entrypoint script to improve error handling and restore firewall rule startup logic
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-07-22 13:29:35 +02:00
gyurix
77079a019c Enhance entrypoint script with backup service functions and debug logging
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-07-22 11:58:15 +02:00
gyurix
adb579572c Add debug logging and start service execution in entrypoint script
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-07-22 11:46:13 +02:00
gyurix
b535f52493 Update entrypoint script to modify JSON configuration for domain certificates
All checks were successful
continuous-integration/drone/push Build is passing
2025-07-22 11:42:48 +02:00
gyurix
33014294b0 Merge branch 'main' of https://git.format.hu/safebox/framework-scheduler
All checks were successful
continuous-integration/drone/push Build is passing
2025-07-21 14:01:01 +02:00
gyurix
b9eab36585 Implement backup service management and network scanning functionality in entrypoint and backup challenge scripts 2025-07-21 14:00:56 +02:00
hael
279c886c07 Update scripts/scheduler/entrypoint.sh
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-07-21 10:41:29 +00:00
hael
415d65ac1c Update scripts/scheduler/entrypoint.sh
All checks were successful
continuous-integration/drone/push Build is passing
2025-07-21 08:35:09 +00:00
gyurix
6d00aefb21 Initialize JSON_TARGET variable for deployment edit action in entrypoint script
All checks were successful
continuous-integration/drone/push Build is passing
2025-06-03 15:41:43 +02:00
gyurix
2c782808ed Refactor deployment logic to stop service before editing in entrypoint script
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-05-29 12:14:02 +02:00
gyurix
ba8af23f42 Merge branch 'main' of https://git.format.hu/safebox/framework-scheduler
All checks were successful
continuous-integration/drone/push Build is passing
2025-05-29 12:00:27 +02:00
gyurix
c5da839c5b Add support for editing deployments in entrypoint script 2025-05-29 12:00:25 +02:00
a575bcbf46 app icon
All checks were successful
continuous-integration/drone/push Build is passing
2025-05-29 09:43:38 +00:00
gyurix
06658b59d3 Remove installation of empty file and simplify deployment structure in entrypoint script
All checks were successful
continuous-integration/drone/push Build is passing
2025-05-29 11:22:57 +02:00
2f914cf8d9 app icon
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-05-28 16:13:35 +00:00
gyurix
b362f2e37f Filter out SHARED volumes during cleanup in entrypoint script
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-05-26 13:23:42 +02:00
gyurix
8eb3d1eef1 Filter out USER and SYSTEM volumes during cleanup in entrypoint script
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-05-26 13:17:32 +02:00
gyurix
2b91706d86 Remove redundant service stop command and add cleanup for environment files in entrypoint script
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-05-26 12:42:43 +02:00
gyurix
933d182244 Reorder upgrade calls in entrypoint script for framework and web-installer
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-05-21 11:07:50 +02:00
153249211a Update Dockerfile
All checks were successful
continuous-integration/drone/push Build is passing
2025-05-21 09:07:24 +00:00
c5765ca952 Merge branch 'main' of https://git.format.hu/safebox/framework-scheduler
All checks were successful
continuous-integration/drone/push Build is passing
2025-05-15 13:25:48 +00:00
2bcf430dfd upgrade debug 2025-05-15 13:25:02 +00:00
gyurix
bc7d30ea59 Reorder service stop command in entrypoint script for clarity during removal process
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-05-15 14:43:02 +02:00
gyurix
e23001223c Refactor service removal process in entrypoint script to streamline deletion of directories, files, and Docker volumes
All checks were successful
continuous-integration/drone/push Build is passing
2025-05-15 14:06:09 +02:00
gyurix
4a7a854f6f Comment out service file removal in entrypoint script
All checks were successful
continuous-integration/drone/push Build is passing
2025-05-15 13:54:12 +02:00
gyurix
5804346e42 Fix volume destination filtering in removal process
All checks were successful
continuous-integration/drone/push Build is passing
2025-05-15 13:51:31 +02:00
gyurix
43fcc62014 Filter destinations by service name in removal process
All checks were successful
continuous-integration/drone/push Build is passing
2025-05-15 13:28:27 +02:00
gyurix
a9ba3698bd Enhance entrypoint script to delete both volume destinations and Docker volumes during service removal
All checks were successful
continuous-integration/drone/push Build is passing
2025-05-15 13:20:31 +02:00
gyurix
8e3a28334e Remove temporary firewall and domain files during service removal
All checks were successful
continuous-integration/drone/push Build is passing
2025-05-15 13:01:30 +02:00
f808a394aa uninsall fix
All checks were successful
continuous-integration/drone/push Build is passing
2025-05-15 11:00:15 +00:00
gyurix
97398388d6 Merge branch 'main' of https://git.format.hu/safebox/framework-scheduler
All checks were successful
continuous-integration/drone/push Build is passing
2025-05-15 11:24:08 +02:00
gyurix
0839a78d41 Enhance entrypoint script to clean up environment files, volume destinations, firewall rules, and domains during service removal 2025-05-15 11:24:06 +02:00
43b529d2d0 PID
All checks were successful
continuous-integration/drone/push Build is passing
2025-05-15 08:46:33 +00:00
81cc2b14ab upgrade check_pid
All checks were successful
continuous-integration/drone/push Build is passing
2025-05-15 08:25:44 +00:00
gyurix
42c3275e19 Improve git clone and pull commands with timeout settings; update JSON_TARGET handling in task execution
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2025-05-13 13:12:16 +02:00
283b42bce1 rename letsencrypt task
All checks were successful
continuous-integration/drone/push Build is passing
2025-05-07 16:16:40 +00:00
4 changed files with 677 additions and 103 deletions

View File

@@ -20,4 +20,4 @@ COPY scripts/scheduler/*.sh /scripts/
RUN find ./scripts -name "*.sh" | xargs dos2unix
RUN ["chmod", "+x", "-R", "/scripts/"]
ENTRYPOINT ["/scripts/entrypoint.sh"]
CMD /scripts/entrypoint.sh

View File

@@ -0,0 +1,89 @@
# Get MY IP
# Get VPN network if exists
# Define port
# Define local IP range
# Define VPN IP range
# Store results
scan_network(){
MyIP=$(ifconfig ${Interface}|grep inet |awk '{ print $2 }');
TargetIP=$(echo $MyIP|cut -d . -f1-3);
X=0
OpenIP=""
for i in $(seq 1 255); do
nc -w 1 -z $TargetIP.$i 60022;
if [ $? -eq 0 ]
then
if [ $MyIP != $TargetIP.$i ]
then
if [ $X = 1 ]
then
# more than one open IP found
echo "Found more than one open IP address"
echo "SEND MAIL"
echo "">OpenIP.txt;
# TODO send mail to the appropriate recipient
exit 1;
else
OpenIP=$TargetIP.$i;
fi
X=1;
fi
fi
done
if [ $X = 1 ]
then
echo $OpenIP>OpenIP.txt;
echo "start LVM SYNC";
echo "OpenIP mukodik = "$OpenIP;
lvm_sync_create $OpenIP;
else
echo "No available local IP address found!"
try_target_VPN;
fi
}
try_target_IP(){
MyIP=$(ifconfig ${Interface}|grep inet |awk '{ print $2 }');
nc -w 1 -z $OpenIP 60022;
if [ $? -eq 0 ]
then
if [ $MyIP = $OpenIP ]
then
echo "Only own IP address found = "$OpenIP
scan_network;
fi
else
scan_network;
fi
}
try_target_VPN(){
nc -w 1 -z $VPN 60022;
if [ $? -eq 0 ]
then
for i in {0..99}; do
MyVPN=$(ifconfig tun$i 2>/dev/null |grep inet |awk '{ print $2 }');
echo "My VPN="$MyVPN;
echo "Found VPN="$VPN;
if [ $VPN != $MyVPN ]
then
echo "VPN accessible="$VPN;
lvm_sync_create $VPN;
else
echo "Only own VPN accessible="$VPN;
exit 1;
fi
done
else
echo "No available server"
fi
}
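A minimal driver sketch for the helpers above, assuming the surrounding script exports Interface and VPN and defines lvm_sync_create elsewhere; the interface name, VPN address and the reuse of OpenIP.txt are illustrative assumptions, not part of the committed code.
# Hypothetical entry point: re-check a previously discovered peer, otherwise rescan.
Interface=${Interface:-eth0}     # assumed LAN interface
VPN=${VPN:-10.8.0.1}             # assumed VPN peer address
if [ -s OpenIP.txt ]; then
OpenIP=$(cat OpenIP.txt)
try_target_IP                    # falls back to scan_network if the stored IP is unreachable
else
scan_network                     # probes $TargetIP.1-255 on port 60022
fi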

View File

@@ -3,8 +3,23 @@
cd /scripts
DEBUG_MODE=${DEBUG_MODE:-false}
VERSION="1.1.5"
#DOCKER_REGISTRY_URL=${DOCKER_REGISTRY_URL:-registry.format.hu}
DOCKER_REGISTRY_URL=${DOCKER_REGISTRY_URL:-safebox}
DOCKER_REGISTRY_USERNAME=$DOCKER_REGISTRY_USERNAME
DOCKER_REGISTRY_PASSWORD=$DOCKER_REGISTRY_PASSWORD
if [ -n "$DOCKER_REGISTRY_USERNAME" ] && [ -n "$DOCKER_REGISTRY_PASSWORD" ]; then
echo "Logging in to Docker registry $DOCKER_REGISTRY_URL"
echo "$DOCKER_REGISTRY_PASSWORD" | docker login $DOCKER_REGISTRY_URL --username $DOCKER_REGISTRY_USERNAME --password-stdin
DOCKER_REGISTRY_ENVS="--env DOCKER_REGISTRY_USERNAME=$DOCKER_REGISTRY_USERNAME --env DOCKER_REGISTRY_PASSWORD=$DOCKER_REGISTRY_PASSWORD"
else
echo "No Docker registry credentials provided, skipping login."
fi
USER_INIT_PATH=$USER_INIT_PATH
GLOBAL_VERSION=${GLOBAL_VERSION:-latest}
SERVICE_DIR=${SERVICE_DIR:-/etc/user/config/services}
@@ -19,6 +34,14 @@ FRAMEWORK_SCHEDULER_NETWORK_SUBNET=${FRAMEWORK_SCHEDULER_NETWORK_SUBNET:-"172.19
FRAMEWORK_SCHEDULER_VERSION=${FRAMEWORK_SCHEDULER_VERSION:-latest}
RUN_FORCE=${RUN_FORCE:-false}
if [ "$DEBUG_MODE" == "true" ]; then
DOCKER_START="--entrypoint=sh $DOCKER_REGISTRY_URL/$FRAMEWORK_SCHEDULER_IMAGE:$FRAMEWORK_SCHEDULER_VERSION -c 'sleep 86400'"
SET_DEBUG_MODE="--env DEBUG_MODE=true"
else
DOCKER_START="$DOCKER_REGISTRY_URL/$FRAMEWORK_SCHEDULER_IMAGE:$FRAMEWORK_SCHEDULER_VERSION"
SET_DEBUG_MODE=""
fi
WEB_SERVER=${WEB_SERVER:-webserver}
WEB_IMAGE=${WEB_IMAGE:-web-installer}
WEBSERVER_PORT=${WEBSERVER_PORT:-8080}
@@ -80,6 +103,9 @@ $VOLUME_MOUNTS \
--env SETUP_VERSION=$SETUP_VERSION \
--env GLOBAL_VERSION=$GLOBAL_VERSION \
--env HOST_FILE=$HOST_FILE \
-w /etc/user/config/services/ \
$SET_DEBUG_MODE \
$DOCKER_REGISTRY_ENVS \
$DOCKER_REGISTRY_URL$SETUP:$SETUP_VERSION"
DEBUG=1
@@ -91,14 +117,305 @@ debug() {
fi
}
## Start previously deployed firewall rules; depends on the framework scheduler having started up for the first time
add_json_target(){
if [ -d /etc/user/config/services ]; then
cd /etc/user/config/services
for FIREWALL in $(ls firewall*.json); do
$service_exec $FIREWALL start &
done
fi
local TASK_NAME=$1
if [ -n "$TASK_NAME" ]; then
TASK="upgrade-$TASK_NAME"
fi
if [ ! -f $SHARED/output/$TASK.json ]; then
install -m 664 -g 65534 /dev/null $SHARED/output/$TASK.json
fi
echo $JSON_TARGET | base64 -d >$SHARED/output/$TASK.json
}
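A hedged illustration of how add_json_target is driven: execute_task sets TASK and a base64-encoded JSON_TARGET before calling it, and the optional argument switches the output file to the upgrade-<name>.json variant. The task name and payload below are placeholders.
SHARED=${SHARED:-/var/tmp/shared}                 # assumed shared volume path
TASK="status"                                     # placeholder task name
DATE=$(date +%F-%H-%M-%S)
JSON_TARGET=$(echo '{ "DATE": "'$DATE'", "STATUS": "0" }' | jq -r . | base64 -w0)
add_json_target            # decodes JSON_TARGET into $SHARED/output/status.json
add_json_target framework  # writes to $SHARED/output/upgrade-framework.json instead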
backup_query_state() {
echo "backup_query_state"
}
generate_backup_server_secrets () {
mkdir -p $SECRET_DIR/backup/server
echo '{
"backupserver":{
"SSH_HOST":"'$SSH_HOST'",
"SSH_USER":"'$SSH_USER'",
"SSH_PORT":"'$SSH_PORT'",
"SSH_PASSWORD":"'$SSH_PASSWORD'",
"BACKUP_PASSWORD":"'$BACKUP_PASSWORD'",
"PERIOD":"'$PERIOD'",
"COMPRESSION":"'$COMPRESSION'",
"DIRECTORIES":"'$DIRECTORIES'",
"SERVICES":"'$SERVICES'"
}
}' | jq -r . > $SECRET_DIR/backup/server/backup.json
}
defaulting_missing_paramaters() {
if [ "$SSH_HOST" == "" ] || [ "$SSH_HOST" == "null" ]; then
SSH_HOST="localhost"
fi
if [ "$SSH_PORT" == "" ] || [ "$SSH_PORT" == "null" ]; then
SSH_PORT="20022"
fi
if [ "$SSH_USER" == "" ] || [ "$SSH_USER" == "null" ]; then
SSH_USER="backup"
fi
if [ "$SSH_PASSWORD" == "" ] || [ "$SSH_PASSWORD" == "null" ]; then
SSH_PASSWORD="backup"
fi
}
create_backup_service () {
ADDITIONAL=""
ADDITIONAL='"EXTRA":"--rm","PRE_START":[],"DEPEND": [],"CMD":""'
BACKUP_SERVER='{
"main": {
"SERVICE_NAME": "backup-server"
},
"containers": [
{
"IMAGE": "alpine:latest",
"NAME": "backup-init",
"NETWORK": "host",
"UPDATE": "true",
"MEMORY": "64M",
"EXTRA": "--rm",
"VOLUMES":[
{
"SOURCE": "USER_DATA",
"DEST": "/etc/user/data/",
"TYPE": "rw"
}
],
"ENTRYPOINT": "sh -c",
"CMD": "mkdir -p /etc/user/data/backup/server/ssh",
"POST_START": []
},
{
"IMAGE": "safebox/backup-server:latest",
"NAME": "backupserver",
"NETWORK": "'$NETWORK'",
"UPDATE": "true",
"MEMORY": "64M",
"VOLUMES":[
{
"SOURCE": "USER_DATA",
"DEST": "/etc/user/data/",
"TYPE": "ro"
},
{
"SOURCE": "USER_CONFIG",
"DEST": "/etc/user/config/",
"TYPE": "ro"
},
{
"SOURCE": "USER_SECRET",
"DEST": "/etc/user/secret/",
"TYPE": "ro"
},
{
"SOURCE": "/etc/user/data/backup/server/ssh",
"DEST": "/home/'$SSH_USER'/",
"TYPE": "rw"
}
],
"ENV_FILES":["/etc/user/secret/backup/server/backup.json"],
'$ADDITIONAL',
"POST_START": []
}
]
}'
# create backup server secrets from variables
generate_backup_server_secrets
}
backup_set_service() {
local BACKUP_PASSWORD="$1"
local PERIOD="$2"
local COMPRESSION="$3"
local PLANNED_TIME="$(echo "$4" | base64 -d)"
local DIRECTORIES="$5"
local SERVICES="$6"
local SSH_HOST="$7"
local VPN="$8"
local SSH_PORT="$9"
local SSH_USER="${10}"
local SSH_PASSWORD="${11}"
local OPERATION="${12}"
defaulting_missing_paramaters
if [ "$OPERATION" == "DELETE" ]; then
sed -i '/service-backup/d' /etc/user/data/cron/crontab.txt
# delete service
rm -f /etc/user/config/services/service-backup-server*
rm -rf /etc/user/data/backup/server
rm -rf /etc/user/secret/backup/server
debug "Service backup server service deleted."
elif [ "$OPERATION" == "MODIFY" ]; then
# modify only the backup server secrets; the change takes effect at the next cron job
generate_backup_server_secrets
else
if [ -z "$SSH_PORT" ] ; then
SSH_PORT=20022
fi
if [ "$VPN" == "true" ]; then
NETWORK=$VPN_NETWORK
create_backup_service
else
NETWORK="host"
create_backup_service
fi
fi
if [ -n "$PLANNED_TIME" ]; then
if [ "$VPN" == "true" ]; then
if [ -n "$BACKUP_SERVER" ] ; then
echo "$BACKUP_SERVER" | jq -r . >/etc/user/config/services/service-backup-server-vpn.json
fi
echo "'$PLANNED_TIME' service service-backup-server-vpn" >> /etc/user/data/cron/crontab.txt
else
if [ -n "$BACKUP_SERVER" ] ; then
echo "$BACKUP_SERVER" | jq -r . >/etc/user/config/services/service-backup-server-local.json
fi
echo "'$PLANNED_TIME' service service-backup-server-local" >> /etc/user/data/cron/crontab.txt
fi
fi
}
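Since backup_set_service takes twelve positional parameters, a sketch of a call with placeholder values may help; the argument order mirrors the invocation in the backup task handler further below, and only PLANNED_TIME is passed base64-encoded.
# Order: BACKUP_PASSWORD PERIOD COMPRESSION PLANNED_TIME(b64) DIRECTORIES SERVICES
#        SSH_HOST VPN SSH_PORT SSH_USER SSH_PASSWORD OPERATION   (all values are placeholders)
PLANNED_TIME_B64=$(echo "0 3 * * *" | base64 -w0)
backup_set_service "backup-pw" "daily" "gzip" "$PLANNED_TIME_B64" \
"/etc/user/data" "myservice" "localhost" "false" "20022" "backup" "backup" "CREATE"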
backup_set_client() {
local NAME="$1"
local SIZE="$2"
local VPN="$3"
local SSH_PORT="$4"
local SSH_USER="$5"
local SSH_PASSWORD="$6"
local OPERATION="$7"
local VPN_KEY="$8"
defaulting_missing_paramaters
if [ "$OPERATION" == "DELETE" ]; then
# delete service
if [ -f "/etc/user/config/services/service-backup-client-$NAME.json" ]; then
debug "service-backup-client-$NAME.json stop force dns-remove"
$service_exec service-backup-client-$NAME.json stop force dns-remove
rm -f /etc/user/config/services/service-backup-client-$NAME.json
debug "Service backup client $NAME deleted."
fi
else
if [ -z "$SSH_PORT" ] ; then
SSH_PORT=20022
fi
if [ "$VPN" == "true" ]; then
NETWORK=$NAME
PORT='"PORTS": [{"SOURCE":"null","DEST":"'$SSH_PORT'","TYPE":"tcp"}],'
else
NETWORK="host"
PORT='"PORTS": [{"SOURCE":"'$SSH_PORT'","DEST":"'$SSH_PORT'","TYPE":"tcp"}],'
fi
ADDITIONAL=""
ADDITIONAL='"EXTRA":"--restart=always","PRE_START":[],"DEPEND":[],"CMD": ""'
ENVS='"ENVS":[{"SSH_USER":"'$SSH_USER'"},{"SSH_PORT":"'$SSH_PORT'"},{"SSH_PASSWORD":"'$SSH_PASSWORD'"},{"VPN_CLIENT_KEY":"'$VPN_KEY'"}],'
echo '{
"main": {
"SERVICE_NAME": "'$NAME'"
},
"containers": [
{
"IMAGE": "alpine:latest",
"NAME": "'$NAME'-init",
"NETWORK": "host",
"UPDATE": "true",
"MEMORY": "64M",
"EXTRA": "--rm",
"VOLUMES":[
{
"SOURCE": "USER_DATA",
"DEST": "/etc/user/data/",
"TYPE": "rw"
}
],
"ENTRYPOINT": "sh -c",
"CMD": "mkdir -p /etc/user/data/backup/clients/'$NAME'/backup && mkdir -p /etc/user/data/backup/clients/'$NAME'/ssh && chmod -R '$SSH_USER':'$SSH_USER' /etc/user/data/backup/clients/'$NAME'",
"POST_START": []
},
{
"IMAGE": "safebox/backup-client:latest",
"NAME": "'$NAME'",
"UPDATE": "true",
"MEMORY": "64M",
"NETWORK": "'$NETWORK'",
'$ADDITIONAL',
'$ENVS'
'$PORT'
"VOLUMES":[
{
"SOURCE": "/etc/user/data/backup/clients/'$NAME'/backup",
"DEST": "/home/'$SSH_USER'/backup",
"TYPE": "rw"
},
{
"SOURCE": "/etc/user/data/backup/clients/'$NAME'/ssh",
"DEST": "/home/'$SSH_USER'/.ssh",
"TYPE": "rw"
}
],
"POST_START": []
}
]
}' | jq -r . >/etc/user/config/services/service-backup-client-$NAME.json
debug "service-backup-client-$NAME.json start info"
$service_exec service-backup-client-$NAME.json start info &
fi
}
backup_challenge_clients() {
echo "backup_challenge_clients"
}
restore_from_backup() {
echo "restore_from_backup"
}
create_htpasswd_file() {
@@ -111,8 +428,6 @@ create_htpasswd_file() {
fi
}
install -m 664 -g 65534 /dev/null
deploy_additionals() {
local DIR="$1"
@@ -160,14 +475,80 @@ remove_additionals() {
debug "UNINSTALL: $NAME"
# delete firewall rules
FIREWALLS=""
FIREWALLS="$(ls $SERVICE_DIR/firewall-*.json | grep $NAME)"
for FIREWALL in $(echo $FIREWALLS); do
cat $FIREWALL | jq '.containers[] |= (
if (.ENVS | map(has("OPERATION")) | any) then
# If any entry has OPERATION key, update it
.ENVS = [.ENVS[] | if has("OPERATION") then {"OPERATION": "DELETE"} else . end]
else
# If no entry has OPERATION key, add new entry
.ENVS += [{"OPERATION": "DELETE"}]
end
)' >$FIREWALL.tmp
debug "$service_exec $FIREWALL.tmp start info"
$service_exec $FIREWALL.tmp start info
rm $FIREWALL.tmp
done
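To make the jq transform above easier to follow, here is the same filter applied to a made-up single-container firewall definition; the input JSON is illustrative only.
echo '{"containers":[{"NAME":"fw","ENVS":[{"OPERATION":"CREATE"},{"PORT":"80"}]}]}' | jq '.containers[] |= (
if (.ENVS | map(has("OPERATION")) | any) then
.ENVS = [.ENVS[] | if has("OPERATION") then {"OPERATION": "DELETE"} else . end]
else
.ENVS += [{"OPERATION": "DELETE"}]
end
)'
# Existing OPERATION entries are rewritten to DELETE; other ENVS entries are kept:
# "ENVS": [ { "OPERATION": "DELETE" }, { "PORT": "80" } ]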
# delete domains
DOMAINS=""
DOMAINS="$(ls $SERVICE_DIR/domain-*.json | grep $NAME)"
for DOMAIN in $(echo $DOMAINS); do
cat $DOMAIN | jq '.containers[] |= (
if (.ENVS | map(has("OPERATION")) | any) then
# If any entry has OPERATION key, update it
.ENVS = [.ENVS[] | if has("OPERATION") then {"OPERATION": "DELETE"} else . end]
else
# If no entry has OPERATION key, add new entry
.ENVS += [{"OPERATION": "DELETE"}]
end
)' >$DOMAIN.tmp
debug "$service_exec $DOMAIN.tmp start info"
$service_exec $DOMAIN.tmp start info
rm $DOMAIN.tmp
done
# remove related directories and files
# get volume destinations
DESTINATIONS=""
VOLUMES=""
DESTINATIONS=$(cat $SERVICE_DIR/service-$NAME.json | jq -r '[.containers[] | select(has("VOLUMES")) | .VOLUMES[] | select(.SHARED != "true") | .SOURCE] | unique[]' | grep $NAME)
for DESTINATION in $(echo $DESTINATIONS); do
if [ -d "$DESTINATION" ] || [ -f "$DESTINATION" ]; then
rm -rf $DESTINATION
debug "deleted directory or file: $DESTINATION"
fi
done
ENV_FILES=$(cat $SERVICE_DIR/service-$NAME.json | jq -r '[.containers[] | select(has("ENV_FILES")) | .ENV_FILES[]] | unique[]')
for ENV_FILE in $(echo $ENV_FILES); do
if [ -f "$ENV_FILE" ]; then
rm -rf $ENV_FILE
debug "deleted enviroment file: $ENV_FILE"
fi
done
VOLUMES=$(cat $SERVICE_DIR/service-$NAME.json | jq -r '[.containers[] | select(has("VOLUMES")) | .VOLUMES[] | select(.SHARED != "true") | .SOURCE] | unique[]' | grep -vE 'USER|SYSTEM')
# stop service
# force - remove stopped container, docker rm
debug "$service_exec service-$NAME.json stop force dns-remove"
$service_exec service-$NAME.json stop force dns-remove
for VOLUME in $(echo $VOLUMES | grep -vE 'USER|SYSTEM|SHARED'); do
if [ "$(echo $VOLUME | cut -d '/' -f1)" ]; then
docker volume rm $VOLUME
debug "deleted volume: $VOLUME"
fi
done
# remove service files
rm $SERVICE_DIR/*"-"$NAME.json # service, domain, etc.
rm $SECRET_DIR/$NAME/$NAME.json
}
get_repositories() {
@@ -177,15 +558,20 @@ get_repositories() {
local TREES=""
local REPO
if [ ! -f "/etc/user/config/repositories.json" ]; then
create_repositories_json
fi
REPOS=$(jq -r .repositories[] /etc/user/config/repositories.json) # list of repos, delimiter by space
for REPO in $REPOS; do
BASE=$(basename $REPO | cut -d '.' -f1)
if [ ! -d "/tmp/$BASE" ]; then
git clone $REPO /tmp/$BASE >/dev/null
GIT_HTTP_CONNECT_TIMEOUT=10 GIT_HTTP_TIMEOUT=30 git clone $REPO /tmp/$BASE >/dev/null
else
cd /tmp/$BASE
git pull >/dev/null
date >> /tmp/pull.log
GIT_HTTP_CONNECT_TIMEOUT=10 GIT_HTTP_TIMEOUT=30 git pull >> /tmp/pull.log
fi
if [ -f "/tmp/$BASE/applications-tree.json" ]; then
TREES=$TREES" /tmp/$BASE/applications-tree.json"
@@ -201,6 +587,9 @@ check_volumes() {
if [ ! -d "/var/tmp/shared" ]; then
/usr/bin/docker volume create SHARED
RET=0
else
rm -rf /var/tmp/shared/input/*
rm -rf /var/tmp/shared/output/*
fi
if [ ! -d "/etc/system/data/" ]; then
@@ -511,7 +900,7 @@ check_update() {
debug "$REMOTE_URL not accessible, http error code: $CURL_CHECK_CODE"
echo "Force image pull has started without digest check..."
DOCKER_PULL="docker pull $IMAGE"
DOCKER_PULL="/usr/bin/docker pull $IMAGE"
eval $DOCKER_PULL
STATUS=$?
debug "PULL STATUS: $STATUS"
@@ -525,9 +914,14 @@ check_update() {
upgrade_scheduler() {
DOCKER_START="--entrypoint=sh $DOCKER_REGISTRY_URL/$FRAMEWORK_SCHEDULER_IMAGE:$FRAMEWORK_SCHEDULER_VERSION -c '/scripts/upgrade.sh'"
# Upgrading framework scheduler
debug "Upgrading framework scheduler..."
/usr/bin/docker pull "$DOCKER_REGISTRY_URL/$FRAMEWORK_SCHEDULER_IMAGE:$FRAMEWORK_SCHEDULER_VERSION"
FRAMEWORK_SCHEDULER_NAME="$FRAMEWORK_SCHEDULER_NAME-$(head /dev/urandom | tr -dc '0-9' | head -c 6)"
DOCKER_RUN="/usr/bin/docker run -d \
-w /etc/user/config/services/ \
-v SHARED:/var/tmp/shared \
-v /var/run/docker.sock:/var/run/docker.sock \
-v SYSTEM_DATA:/etc/system/data \
@@ -537,29 +931,39 @@ upgrade_scheduler() {
-v USER_CONFIG:/etc/user/config \
-v USER_SECRET:/etc/user/secret \
--restart=always \
--name $FRAMEWORK_SCHEDULER_NAME \
$DOCKER_REGISTRY_ENVS \
$SET_DEBUG_MODE \
--env WEBSERVER_PORT=$WEBSERVER_PORT \
--network $FRAMEWORK_SCHEDULER_NETWORK \
--env RUN_FORCE=$RUN_FORCE \
--env DOCKER_REGISTRY_URL=$DOCKER_REGISTRY_URL \
$DOCKER_START"
echo "DOCKER RUN: $DOCKER_RUN"
eval "$DOCKER_RUN"
}
upgrade() {
local NAME=$1
if [ "$NAME" == "web-installer" ]; then
debug "$service_exec service-framework.containers.webserver start info"
$service_exec service-framework.containers.webserver stop force
$service_exec service-framework.containers.webserver start info &
if [ "$NAME" == "webserver" ]; then
debug "$service_exec service-framework.containers.$NAME stop force"
$service_exec service-framework.containers.$NAME stop force
debug "$service_exec service-framework.containers.$NAME start info"
$service_exec service-framework.containers.$NAME start info &
else
if ! echo "$NAME" | grep -q '\.'; then
NAME="$NAME.json"
fi
debug "$service_exec $NAME stop force"
$service_exec $NAME stop force
debug "$service_exec $NAME start info"
$service_exec $NAME start info &
debug "$service_exec $NAME.json start info"
$service_exec $NAME.json stop force
$service_exec $NAME.json start info &
fi
PID=$!
}
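A hedged summary of the two code paths in upgrade(), with placeholder service names:
upgrade webserver        # special case: restarts only service-framework.containers.webserver
upgrade myservice        # ".json" is appended, then "stop force" and a backgrounded "start info"
upgrade myservice.json   # name already contains a dot, so it is used as-is
# $PID afterwards holds the PID of the backgrounded "start info" call (consumed by check_pid.sh)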
execute_task() {
@@ -594,20 +998,26 @@ execute_task() {
#fi;
JSON_TARGET=$(echo '{ "DATE": "'$DATE'", "INSTALL_STATUS": "'$INSTALL_STATUS'" }' | jq -r . | base64 -w0)
elif [ "$TASK_NAME" == "letsencrypt" ]; then
elif [ "$TASK_NAME" == "request_letsencrypt" ]; then
DOMAINS=$(echo $B64_JSON | base64 -d | jq -r 'keys[]')
for DOMAIN in $(echo $DOMAINS); do
REQUEST=$(echo $B64_JSON | base64 -d | jq -r ".[\"$DOMAIN\"].status")
if [ "$REQUEST" == "requested" ]; then
echo "New certificate for $DOMAIN is requested."
touch /etc/system/data/ssl/keys/$DOMAIN/new_certificate
echo "Modifying $DOMAIN_FILE.json for $DOMAIN"
jq '.containers[0].ENVS |= map(if has("OPERATION") then .OPERATION = "MODIFY" else . end) | \
.containers[0].ENVS |= map(if has("DOMAIN") then .DOMAIN = "'$DOMAIN'" else . end)' \
/etc/user/config/services/$DOMAIN_FILE.json > /tmp/$DOMAIN_FILE.json && \
mv /tmp/$DOMAIN_FILE.json /etc/user/config/services/$DOMAIN_FILE.json
debug "$service_exec $DOMAIN_FILE.json start info"
$service_exec $DOMAIN_FILE.json start info &
fi
done
JSON_TARGET=$B64_JSON
elif [ "$TASK_NAME" == "system" ]; then
#SYSTEM_LIST="core-dns.json cron.json domain-local-backend.json firewall-letsencrypt.json firewall-local-backend.json firewall-localloadbalancer-dns.json firewall-localloadbalancer-to-smarthostbackend.json firewall-smarthost-backend-dns.json firewall-smarthost-loadbalancer-dns.json firewall-smarthost-to-backend.json firewall-smarthostloadbalancer-from-publicbackend.json letsencrypt.json local-backend.json local-proxy.json service-framework.json smarthost-proxy-scheduler.json smarthost-proxy.json"
SYSTEM_LIST="core-dns.json cron.json letsencrypt.json local-proxy.json service-framework.json smarthost-proxy-scheduler.json smarthost-proxy.json"
SYSTEM_LIST="core-dns.json cron.json letsencrypt.json local-loadbalancer.json service-framework.json smarthost-proxy-scheduler.json smarthost-proxy.json"
INSTALLED_SERVICES=$(ls /etc/user/config/services/*.json)
SERVICES=""
for SERVICE in $(echo $INSTALLED_SERVICES); do
@@ -647,7 +1057,7 @@ execute_task() {
JSON_TARGET=$(echo '{ "DATE": "'$DATE'", "INSTALL_STATUS": "'$INSTALL_STATUS'", "INSTALLED_SERVICES": {'$SERVICES'} }' | jq -r . | base64 -w0)
elif [ "$TASK_NAME" == "services" ]; then
SYSTEM_LIST="core-dns.json cron.json letsencrypt.json local-proxy.json service-framework.json smarthost-proxy-scheduler.json smarthost-proxy.json"
SYSTEM_LIST="core-dns.json cron.json letsencrypt.json local-loadbalancer.json service-framework.json smarthost-proxy-scheduler.json smarthost-proxy.json"
INSTALLED_SERVICES=$(ls /etc/user/config/services/*.json)
SERVICES=""
for SERVICE in $(echo $INSTALLED_SERVICES); do
@@ -730,18 +1140,21 @@ execute_task() {
elif [ "$TASK_NAME" == "deployments" ]; then
DEPLOYMENTS=""
TREES=$(get_repositories)
for TREE in $TREES; do
for TREE in "$TREES"; do
APPS=$(jq -rc '.apps[]' $TREE)
for APP in $APPS; do
#for APP in "$APPS"; do #space problem
while IFS= read -r APP; do
APP_NAME=$(echo "$APP" | jq -r '.name')
APP_SUBTITLE="$(echo "$APP" | jq -r '.subtitle')"
APP_VERSION=$(echo "$APP" | jq -r '.version')
APP_ICON=$(echo "$APP" | jq -r '.icon')
if [ "$DEPLOYMENTS" != "" ]; then
SEP=","
else
SEP=""
fi
DEPLOYMENTS=$DEPLOYMENTS$SEP'"'$APP_NAME'": "'$APP_VERSION'"'
done
DEPLOYMENTS="$DEPLOYMENTS"$SEP'"'$APP_NAME'":{"subtitle":"'"$APP_SUBTITLE"'","version":"'"$APP_VERSION"'","icon":"'"$APP_ICON"'"}'
done < <(echo "$APPS") # preserve DEPLOYMENTS variable
done
if [ "$DEPLOYMENTS" == "" ]; then
DEPLOYMENTS='"deployments": "NONE"'
@@ -775,20 +1188,22 @@ execute_task() {
for TREE in $TREES; do
APPS=$(jq -rc '.apps[]' $TREE)
for APP in $APPS; do
#for APP in $APPS; do
while IFS= read -r APP; do
APP_NAME=$(echo "$APP" | jq -r '.name' | awk '{print tolower($0)}')
APP_SUBTITLE=$(echo "$APP" | jq -r '.subtitle')
APP_VERSION=$(echo "$APP" | jq -r '.version')
APP_DIR=$(dirname $TREE)"/"$APP_NAME
debug "$APP_TEMPLATE"
if [ "$APP_NAME" == "$DEPLOY_NAME" ]; then
if [ "$DEPLOY_ACTION" == "ask" ]; then
APP_TEMPLATE=$APP_DIR"/template.json"
TEMPLATE=$(cat $APP_TEMPLATE | base64 -w0)
JSON_TARGET=$(echo '{ "DATE": "'$DATE'", "STATUS": "0", "TEMPLATE": "'$TEMPLATE'" }' | jq -r . | base64 -w0)
debug "$APP_TEMPLATE"
JSON_TARGET=$(echo '{"DATE":"'$DATE'","STATUS": "0","TEMPLATE":"'$TEMPLATE'"}' | jq -r . | base64 -w0)
elif [ "$DEPLOY_ACTION" == "reinstall" ]; then
APP_TEMPLATE=$APP_DIR"/template.json"
TEMPLATE=$(cat $APP_TEMPLATE)
for LINE in $(cat $SERVICE_DIR/service-$DEPLOY_NAME.json | jq -rc '.containers[].ENVS[] | to_entries[]'); do
for LINE in $(cat $SERVICE_DIR/service-$DEPLOY_NAME.json | jq -rc '.containers[].ENVS[] | to_entries[]' 2>/dev/null); do
KEY=$(echo $LINE | jq -r .key)
VALUE=$(echo $LINE | jq -r .value)
debug "$KEY: $VALUE"
@@ -797,14 +1212,14 @@ execute_task() {
TEMPLATE=$(echo "$TEMPLATE" | jq -r '.fields |= map(if .key == "'$KEY'" then .value = "'$VALUE'" else . end)')
done
# write ENV value from domain file to template value by key name
for LINE in $(cat $SERVICE_DIR/domain-$DEPLOY_NAME.json | jq -rc '.containers[].ENVS[] | to_entries[]'); do
for LINE in $(cat $SERVICE_DIR/domain-$DEPLOY_NAME.json | jq -rc '.containers[].ENVS[] | to_entries[]' 2>/dev/null); do
KEY=$(echo $LINE | jq -r .key)
VALUE=$(echo $LINE | jq -r .value)
debug "$KEY: $VALUE"
TEMPLATE=$(echo "$TEMPLATE" | jq -r '.fields |= map(if .key == "'$KEY'" then .value = "'$VALUE'" else . end)')
done
# write ENV value from secret file to template value by key name
for LINE in $(cat $SECRET_DIR/$DEPLOY_NAME/$DEPLOY_NAME.json | jq -rc '.[] | to_entries[]'); do
for LINE in $(cat $SECRET_DIR/$DEPLOY_NAME/$DEPLOY_NAME.json | jq -rc '.[] | to_entries[]' 2>/dev/null); do
KEY=$(echo $LINE | jq -r .key)
VALUE=$(echo $LINE | jq -r .value)
debug "$KEY: $VALUE"
@@ -830,6 +1245,18 @@ execute_task() {
DEPLOY_PAYLOAD=$(echo "$JSON" | jq -r .PAYLOAD) # base64 list of key-value pairs in JSON
deploy_additionals "$APP_DIR" "$DEPLOY_NAME" "$DEPLOY_PAYLOAD"
sh /scripts/check_pid.sh "$PID" "$SHARED" "deploy-$DEPLOY_NAME" "$DATE" "$DEBUG" &
elif [ "$DEPLOY_ACTION" == "edit" ]; then
JSON_TARGET=""
DEPLOY_PAYLOAD=$(echo "$JSON" | jq -r .PAYLOAD) # base64 list of key-value pairs in JSON
# stop service before edit
debug "$service_exec service-$DEPLOY_NAME.json stop force"
$service_exec service-$DEPLOY_NAME.json stop force
deploy_additionals "$APP_DIR" "$DEPLOY_NAME" "$DEPLOY_PAYLOAD"
sh /scripts/check_pid.sh "$PID" "$SHARED" "deploy-$DEPLOY_NAME" "$DATE" "$DEBUG" &
elif [ "$DEPLOY_ACTION" == "uninstall" ]; then
remove_additionals "$APP_DIR" "$DEPLOY_NAME"
# uninstall has finished
@@ -839,7 +1266,7 @@ execute_task() {
JSON_TARGET=""
fi
fi
done
done < <(echo "$APPS") # preserve variables
done
elif [ "$TASK_NAME" == "repositories" ]; then
@@ -866,20 +1293,25 @@ execute_task() {
VPN_STATUS="0"
VPN_RESULT=""
CONTAINERS=$(docker ps -a --format '{{.Names}} {{.Status}}' | grep -w wireguardproxy)
if [ "$CONTAINERS" != "" ]; then
UP=$(echo $CONTAINERS | grep -w 'Up')
if [ "$UP" != "" ]; then
VPN_STATUS="2"
else
VPN_STATUS="1"
if [ -f $SECRET_DIR/vpn-proxy/wg0.conf ]; then
CONTAINERS=$(docker ps -a --format '{{.Names}} {{.Status}}' | grep -w wireguardproxy)
if [ "$CONTAINERS" != "" ]; then
UP=$(echo $CONTAINERS | grep -w 'Up')
if [ "$UP" != "" ]; then
VPN_STATUS="1"
else
VPN_STATUS="2"
fi
VPN_RESULT=$(echo "$CONTAINERS" | base64 -w0)
fi
VPN_RESULT=$(echo "$CONTAINERS" | base64 -w0)
fi
JSON_TARGET=$(echo '{ "DATE": "'$DATE'", "STATUS": "'$VPN_STATUS'", "RESULT": "'$VPN_RESULT'" }' | jq -r . | base64 -w0)
elif [ "$TASK_NAME" == "save_vpn" ]; then
JSON_TARGET=$(echo '{ "DATE": "'$DATE'", "STATUS": "0", "RESULT": "" }' | jq -r . | base64 -w0)
add_json_target
VPN_PROXY_REPO="wireguard-proxy-client"
if [ ! -d "/tmp/$VPN_PROXY_REPO" ]; then
git clone https://git.format.hu/safebox/$VPN_PROXY_REPO.git /tmp/$VPN_PROXY_REPO >/dev/null
@@ -897,31 +1329,101 @@ execute_task() {
# install vpn only
sh /scripts/install.sh "$B64_JSON" "$service_exec" "vpn" "$GLOBAL_VERSION"
JSON_TARGET=$(echo '{ "DATE": "'$DATE'", "STATUS": "'$VPN_STATUS'", "RESULT": "'$VPN_RESULT'" }' | jq -r . | base64 -w0)
JSON_TARGET=$(echo '{ "DATE": "'$DATE'", "STATUS": "1", "RESULT": "'$VPN_RESULT'" }' | jq -r . | base64 -w0)
elif [ "$TASK_NAME" == "containers" ]; then # not in use
CONTAINERS=$(docker ps -a --format '{{.Names}} {{.Status}}' | grep -v framework-scheduler)
RESULT=$(echo "$CONTAINERS" | base64 -w0)
JSON_TARGET=$(echo '{ "DATE": "'$DATE'", "RESULT": "'$RESULT'" }' | jq -r . | base64 -w0)
elif [ "$TASK_NAME" == "backup" ]; then
TASK_TYPE=$(echo $B64_JSON | base64 -d | jq -r '.TASK_TYPE')
if [ "$TASK_TYPE" == "backup_query_state" ]; then
echo "task type is backup_query_state"
elif [ "$TASK_TYPE" == "backup_set_service" ]; then
BACKUP_PASSWORD="$(echo $B64_JSON | base64 -d | jq -r '.BACKUP_SERVER_PASSWORD')"
PERIOD="$(echo $B64_JSON | base64 -d | jq -r '.BACKUP_PERIOD')"
COMPRESSION="$(echo $B64_JSON | base64 -d | jq -r '.BACKUP_COMPRESSION')"
PLANNED_TIME="$(echo $B64_JSON | base64 -d | jq -r '.BACKUP_PLANNED_TIME')"
DIRECTORIES="$(echo $B64_JSON | base64 -d | jq -r '.BACKUP_DIRECTORIES')"
SERVICES="$(echo $B64_JSON | base64 -d | jq -r '.SERVICES')"
BACKUP_LOCAL_CLIENTS="$(echo $B64_JSON | base64 -d | jq -r '.BACKUP_LOCAL_CLIENTS')"
BACKUP_VPN_CLIENTS="$(echo $B64_JSON | base64 -d | jq -r '.BACKUP_VPN_CLIENTS')"
VPN="$(echo $B64_JSON | base64 -d | jq -r '.VPN')"
SSH_HOST="$(echo $B64_JSON | base64 -d | jq -r '.SSH_HOST')"
SSH_PORT="$(echo $B64_JSON | base64 -d | jq -r '.SSH_PORT')"
SSH_USER="$(echo $B64_JSON | base64 -d | jq -r '.SSH_USER')"
SSH_PASSWORD="$(echo $B64_JSON | base64 -d | jq -r '.SSH_PASSWORD')"
OPERATION="$(echo $B64_JSON | base64 -d | jq -r '.OPERATION')"
echo "task type is backup_set_service"
backup_set_service "$BACKUP_PASSWORD" "$PERIOD" "$COMPRESSION" "$PLANNED_TIME" "$DIRECTORIES" "$SERVICES" "$SSH_HOST" "$VPN" "$SSH_PORT" "$SSH_USER" "$SSH_PASSWORD" "$OPERATION"
elif [ "$TASK_TYPE" == "backup_set_client" ]; then
NAME="$(echo $B64_JSON | base64 -d | jq -r '.BACKUP_CLIENT_NAME')"
SIZE="$(echo $B64_JSON | base64 -d | jq -r '.BACKUP_CLIENT_SIZE')"
VPN="$(echo $B64_JSON | base64 -d | jq -r '.BACKUP_CLIENT_VPN')"
SSH_PORT="$(echo $B64_JSON | base64 -d | jq -r '.BACKUP_CLIENT_SSH_PORT')"
SSH_USER="$(echo $B64_JSON | base64 -d | jq -r '.BACKUP_CLIENT_SSH_USER')"
SSH_PASSWORD="$(echo $B64_JSON | base64 -d | jq -r '.BACKUP_CLIENT_SSH_PASSWORD')"
OPERATION="$(echo $B64_JSON | base64 -d | jq -r '.BACKUP_CLIENT_OPERATION')"
debug "task type is backup_set_client for $NAME"
backup_set_client "$NAME" "$SIZE" "$VPN" "$SSH_PORT" "$SSH_USER" "$SSH_PASSWORD" "$OPERATION"
elif [ "$TASK_TYPE" == "backup_challenge_clients" ]; then
echo "task type is backup_challenge_clients"
elif [ "$TASK_TYPE" == "restore_from_backup" ]; then
echo "task type is restore_from_backup"
else
echo "Unknown task type: $TASK_TYPE"
fi
RESULT=$(echo "$CONTAINERS" | base64 -w0)
JSON_TARGET=$(echo '{ "DATE": "'$DATE'", "RESULT": "'$RESULT'" }' | jq -r . | base64 -w0)
elif [ "$TASK_NAME" == "upgrade" ]; then
JSON="$(echo $B64_JSON | base64 -d)"
NAME=$(echo "$JSON" | jq -r .NAME | awk '{print tolower($0)}')
if [ "$NAME" == "framework" ]; then
JSON_TARGET=$(echo '{"DATE":"'$DATE'","STATUS":1}' | jq -r . | base64 -w0)
add_json_target $NAME
echo "Upgrading service: webserver"
upgrade webserver
echo "Upgrading framework scheduler..."
upgrade_scheduler
upgrade "web-installer"
echo "Removing old framework scheduler container..."
JSON_TARGET=$(echo '{"DATE":"'$DATE'","STATUS":2,"VERSION":"'$VERSION'"}' | jq -r . | base64 -w0)
add_json_target $NAME
sleep 1
/usr/bin/docker rm -f $HOSTNAME
JSON_TARGET="" # do not create upgrade.json
#CONTAINERS=$(docker ps -a --format '{{.Names}} {{.Status}}' | grep -E 'framework-scheduler|webserver')
else
echo "Upgrading service: $NAME"
upgrade "$NAME"
#CONTAINERS=$(docker ps -a --format '{{.Names}} {{.Status}}' | grep -w "$NAME")
fi
#RESULT=$(echo "$CONTAINERS" | base64 -w0)
sh /scripts/check_pid.sh "$PID" "$SHARED" "$TASK_NAME-$NAME" "$DATE" "$DEBUG" &
fi
if [ "$TASK_NAME" != "check_vpn" ]; then
debug "JSON_TARGET: $JSON_TARGET"
fi
# if [ "$TASK_NAME" != "check_vpn" ]; then
# debug "JSON_TARGET: $JSON_TARGET"
# fi
if [ "$JSON_TARGET" != "" ]; then
#redis-cli -h $REDIS_SERVER -p $REDIS_PORT SET $TASK "$JSON_TARGET"
install -m 664 -g 65534 /dev/null $SHARED/output/$TASK.json
echo $JSON_TARGET | base64 -d >$SHARED/output/$TASK.json
add_json_target
fi
}
@@ -991,39 +1493,15 @@ check_redis_availability() {
done
}
start_framework_scheduler() {
if [ "$DEBUG_MODE" == "true" ]; then
DOCKER_START="--entrypoint=sh $DOCKER_REGISTRY_URL/$FRAMEWORK_SCHEDULER_IMAGE:$FRAMEWORK_SCHEDULER_VERSION -c 'sleep 86400'"
else
DOCKER_START="$DOCKER_REGISTRY_URL/$FRAMEWORK_SCHEDULER_IMAGE:$FRAMEWORK_SCHEDULER_VERSION"
fi
DOCKER_RUN="/usr/bin/docker run -d \
-v SHARED:/var/tmp/shared \
-v /var/run/docker.sock:/var/run/docker.sock \
-v SYSTEM_DATA:/etc/system/data \
-v SYSTEM_CONFIG:/etc/system/config \
-v SYSTEM_LOG:/etc/system/log \
-v USER_DATA:/etc/user/data \
-v USER_CONFIG:/etc/user/config \
-v USER_SECRET:/etc/user/secret \
--restart=always \
--name $FRAMEWORK_SCHEDULER_NAME \
--env WEBSERVER_PORT=$WEBSERVER_PORT \
--network $FRAMEWORK_SCHEDULER_NETWORK \
--env RUN_FORCE=$RUN_FORCE \
--env DOCKER_REGISTRY_URL=$DOCKER_REGISTRY_URL \
$DOCKER_START"
eval "$DOCKER_RUN"
}
### SYSTEM INITIALIZATION ###
## Start previously deployed firewall rules; depends on the framework scheduler having started up for the first time
## DOCKER NETWORK VARIABLES
## FILESYSTEM VARIABLES
## PORTS VARIABLES
### RESTART SCHEDULER IF NEEDED
if [ -d /etc/user/config/services ]; then
cd /etc/user/config/services
for FIREWALL in $(ls firewall*.json); do
$service_exec $FIREWALL start &
done
fi
SN=$(check_subnets)
if [ "$SN" != "1" ]; then
@@ -1040,7 +1518,7 @@ fi
VOL=$(check_volumes)
if [ "$VOL" != "1" ]; then
start_framework_scheduler
upgrade_scheduler
/usr/bin/docker rm -f $HOSTNAME
fi
@@ -1052,15 +1530,12 @@ if [ "$DF" != "1" ]; then
fi
#RS=$(docker ps | grep redis-server)
WS=$(docker ps | grep webserver)
#if [[ "$WS" == "" && "$RS" == "" ]]; then
if [ "$WS" == "" ]; then
WS=$(/usr/bin/docker ps | grep -o webserver)
if [ "$WS" == "" ] && [ ! -f $SHARED/output/upgrade-framework.json ]; then
# START SERVICES
#$service_exec service-framework.containers.redis-server start &
echo "Starting webserver"
$service_exec service-framework.containers.webserver start &
sleep 5
fi
@@ -1072,7 +1547,13 @@ DATE=$(date +%F-%H-%M-%S)
DIR=$SHARED/input
# Triggers by certificate or domain config changes
# Set installed version number
echo '{}' | jq --arg VERSION "$VERSION" '.VERSION = $VERSION' > $SHARED/output/version.json
############################
if [ "$DEBUG_MODE" == "true" ]; then
rm $DIR/*
fi
unset IFS
inotifywait --exclude "\.(swp|tmp)" -m -e CREATE,CLOSE_WRITE,DELETE,MOVED_TO -r $DIR |

View File

@@ -200,14 +200,18 @@ elif [ "$FIRST_INSTALL" == "vpn" ]; then
get_vpn_key
edit_user_json $LETSENCRYPT_MAIL $LETSENCRYPT_SERVERNAME
if [ "$VPN_PROXY" != "no" ]; then
$SERVICE_EXEC vpn-proxy stop force
$SERVICE_EXEC vpn-proxy start
echo "$INIT_SERVICE_PATH/vpn-proxy.json" >>$AUTO_START_SERVICES/.init_services
echo "$INIT_SERVICE_PATH/firewall-vpn-smarthost-loadbalancer" >>$AUTO_START_SERVICES/.init_services
echo "$INIT_SERVICE_PATH/firewall-vpn-proxy-postrouting" >>$AUTO_START_SERVICES/.init_services
echo "$INIT_SERVICE_PATH/firewall-vpn-proxy-prerouting" >>$AUTO_START_SERVICES/.init_services
edit_user_json $LETSENCRYPT_MAIL $LETSENCRYPT_SERVERNAME
$SERVICE_EXEC vpn-proxy stop force
$SERVICE_EXEC vpn-proxy start
echo "$INIT_SERVICE_PATH/vpn-proxy.json" >>$AUTO_START_SERVICES/.init_services
echo "$INIT_SERVICE_PATH/firewall-vpn-smarthost-loadbalancer" >>$AUTO_START_SERVICES/.init_services
echo "$INIT_SERVICE_PATH/firewall-vpn-proxy-postrouting" >>$AUTO_START_SERVICES/.init_services
echo "$INIT_SERVICE_PATH/firewall-vpn-proxy-prerouting" >>$AUTO_START_SERVICES/.init_services
fi;
exit
@@ -251,7 +255,7 @@ if [ "$INIT" == "true" ]; then
if [ "$SMARTHOST_PROXY" == "yes" ]; then
$SERVICE_EXEC smarthost-proxy start
$SERVICE_EXEC smarthost-proxy-scheduler start
$SERVICE_EXEC local-proxy start
$SERVICE_EXEC local-loadbalancer start
echo "$INIT_SERVICE_PATH/smarthost-proxy.json" >>$AUTO_START_SERVICES/.init_services
echo "$INIT_SERVICE_PATH/firewall-smarthost-loadbalancer-dns.json" >>$AUTO_START_SERVICES/.init_services
@@ -260,7 +264,7 @@ if [ "$INIT" == "true" ]; then
echo "$INIT_SERVICE_PATH/firewall-smarthost-backend-dns.json" >>$AUTO_START_SERVICES/.init_services
echo "$INIT_SERVICE_PATH/firewall-smarthost-to-backend.json" >>$AUTO_START_SERVICES/.init_services
echo "$INIT_SERVICE_PATH/smarthost-proxy-scheduler.json" >>$AUTO_START_SERVICES/.init_services
echo "$INIT_SERVICE_PATH/local-proxy.json" >>$AUTO_START_SERVICES/.init_services
echo "$INIT_SERVICE_PATH/local-loadbalancer.json" >>$AUTO_START_SERVICES/.init_services
if [ "$LOCAL_BACKEND" == "yes" ]; then
$SERVICE_EXEC local-backend start