forked from dataesr/bso-ui
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathservice.sh
More file actions
118 lines (99 loc) · 4.08 KB
/
service.sh
File metadata and controls
118 lines (99 loc) · 4.08 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
#!/bin/bash
# service.sh — manage the bso-ui Docker container and rotate/ship its logs.
#
# Usage: ./service.sh {startbsoui|stopbsoui|logrotationbsoui}
#
# Configuration is loaded from ./.env.josm, which must define:
#   IMAGE_NAME, CONTAINER_NAME, NETWORK_NAME, PORT, LOG_FILE,
#   LOG_DIRS, WASABI_BUCKET, S3_BUCKT_PREFIX_BACKUP_LOG,
#   UPLOAD_LOG_DAYS, DELETE_LOG_DAYS
#
# pipefail added: rotate_logs relies on multi-stage pipelines (aws | grep |
# awk | sort); without it a failing upstream stage would go unnoticed.
set -eo pipefail

# Fail with an explicit message when the env file is missing — under plain
# `set -e` a failed `source` exits with only bash's terse error.
if [ ! -f ./.env.josm ]; then
    echo "Missing ./.env.josm — cannot load configuration." >&2
    exit 1
fi
source ./.env.josm

PROJECT_DIR=$(pwd)
TARGET=$1  # requested action; dispatched at the bottom of the script
# start application
# Build the image and launch the application container detached, with its
# stdout/stderr shipped to the host syslog daemon (facility local0).
function start_application() {
    echo "Starting Application..."

    # Build the Docker image (production build).
    # For a development build, use instead:
    #   docker build --target development -t "$IMAGE_NAME" "$PROJECT_DIR"
    docker build -t "$IMAGE_NAME" "$PROJECT_DIR"

    # If a container with the same name already exists, remove it first:
    #   docker stop "$CONTAINER_NAME" 2>/dev/null
    #   docker rm "$CONTAINER_NAME" 2>/dev/null

    # Collect the run options in an array so each flag stays readable and
    # quoting is preserved when they are expanded.
    local run_opts=(
        -d
        --name "$CONTAINER_NAME"
        --network "$NETWORK_NAME"
        -p "$PORT:3000"
        -v "$PROJECT_DIR:/app"
        --log-driver=syslog
        --log-opt syslog-address=udp://localhost:514
        --log-opt syslog-facility=local0
        --log-opt tag="bso-ui"
    )
    docker run "${run_opts[@]}" "$IMAGE_NAME"

    echo "Application started. Logs can be found in $LOG_FILE"
}
# stop application
# Stop and remove the application container if one is currently running.
function stop_application() {
    echo "Stopping Application..."
    # BUG FIX: the previous check was `if docker ps -q -f name=… >/dev/null`,
    # which tests the exit status of `docker ps` — that is 0 even when no
    # container matches, so the "running" branch was always taken. Test for
    # non-empty output instead. The name filter is also anchored, because a
    # bare `name=foo` filter matches any container whose name contains "foo".
    if [ -n "$(docker ps -q -f "name=^${CONTAINER_NAME}$")" ]; then
        # Container is running: stop it, then remove it with its volumes.
        docker stop "$CONTAINER_NAME"
        docker rm -v "$CONTAINER_NAME"
        echo "Container $CONTAINER_NAME stopped."
    else
        echo "Container $CONTAINER_NAME is not running."
    fi
}
# rotate logs and upload logs to wasabi
# For each directory listed in LOG_DIRS (comma-separated, under /var/log):
#   1. gzip *.log files dated yesterday or earlier,
#   2. upload *.log.gz archives not yet present in the Wasabi bucket once
#      they are older than UPLOAD_LOG_DAYS,
#   3. delete local archives older than DELETE_LOG_DAYS.
# File names are expected to start with a YYYYMMDD date followed by '_'
# (e.g. 20240101_app.log) — TODO confirm against the syslog config.
function rotate_logs() {
    # Validate configuration up front — previously this check sat inside the
    # per-directory loop, after compression had already run, so a
    # misconfigured host did half the work before aborting.
    if [ -z "$UPLOAD_LOG_DAYS" ]; then
        echo "UPLOAD_LOG_DAYS is not set. Exiting."
        exit 1
    elif [ -z "$DELETE_LOG_DAYS" ]; then
        echo "DELETE_LOG_DAYS is not set. Exiting."
        exit 1
    fi

    # The date thresholds are loop-invariant; compute them once instead of
    # shelling out to `date` on every file.
    local COMPRESS_BEFORE UPLOAD_BEFORE DELETE_BEFORE
    COMPRESS_BEFORE=$(date -d "1 days ago" "+%Y%m%d")
    UPLOAD_BEFORE=$(date -d "$UPLOAD_LOG_DAYS days ago" "+%Y%m%d")
    DELETE_BEFORE=$(date -d "$DELETE_LOG_DAYS days ago" "+%Y%m%d")

    local LOG_DIR LOG_FILE LOG_DATE
    IFS=',' read -r -a LOG_DIRS <<< "$LOG_DIRS"
    for LOG_DIR in "${LOG_DIRS[@]}"; do
        # --- 1. compress log files dated yesterday or earlier ---
        NOT_COMPRESSED_LOG_FILES=($(find "/var/log/${LOG_DIR}/" -name "*.log" -exec basename {} \; | sort))
        for LOG_FILE in "${NOT_COMPRESSED_LOG_FILES[@]}"; do
            LOG_DATE=$(basename "$LOG_FILE" | awk -F'_' '{print $1}')
            # Skip names without a leading all-digit date: an empty or
            # non-numeric operand makes `[ … -le … ]` a runtime error.
            case "$LOG_DATE" in ''|*[!0-9]*) continue ;; esac
            if [ "$LOG_DATE" -le "$COMPRESS_BEFORE" ]; then
                # `gzip -f` replaces the original with the .gz archive —
                # equivalent to the previous `gzip -fk … && rm -f …` pair.
                gzip -f "/var/log/${LOG_DIR}/${LOG_FILE}"
            fi
        done

        # --- 2. upload archives that are not yet in the bucket ---
        # (NB: S3_BUCKT_PREFIX_BACKUP_LOG spelling is the .env contract.)
        ALREADY_UPLOADED_LOGS=($(aws s3 ls "s3://${WASABI_BUCKET}/${S3_BUCKT_PREFIX_BACKUP_LOG}/${LOG_DIR}/" | grep -v 'PRE' | awk '{print $4}' | sort))
        pattern=$(printf "%s\n" "${ALREADY_UPLOADED_LOGS[@]}")
        LOG_FILES=($(find "/var/log/${LOG_DIR}/" -name "*.log.gz" -exec basename {} \; | sort))
        # Keep only local archives whose exact name is absent from the bucket.
        NOT_UPLOADED_LOG_FILES=($(printf "%s\n" "${LOG_FILES[@]}" | grep -Fxv -e "$pattern"))
        for LOG_FILE in "${NOT_UPLOADED_LOG_FILES[@]}"; do
            LOG_DATE=$(basename "$LOG_FILE" | awk -F'_' '{print $1}')
            case "$LOG_DATE" in ''|*[!0-9]*) continue ;; esac
            if [ "$LOG_DATE" -le "$UPLOAD_BEFORE" ]; then
                aws s3 cp "/var/log/${LOG_DIR}/${LOG_FILE}" "s3://${WASABI_BUCKET}/${S3_BUCKT_PREFIX_BACKUP_LOG}/${LOG_DIR}/"
            fi
        done

        # --- 3. delete archives past the retention window ---
        for LOG_FILE in "${LOG_FILES[@]}"; do
            LOG_DATE=$(basename "$LOG_FILE" | awk -F'_' '{print $1}')
            case "$LOG_DATE" in ''|*[!0-9]*) continue ;; esac
            if [ "$LOG_DATE" -le "$DELETE_BEFORE" ]; then
                rm -f "/var/log/${LOG_DIR}/${LOG_FILE}" && echo "Deleted: ${LOG_DIR}/${LOG_FILE}"
            else
                # LOG_FILES is sorted by its date prefix, so every file after
                # the first kept one is newer — stop early.
                break
            fi
        done
    done
}
# implement branching logic
# Dispatch on the requested target action supplied as $1.
case "$TARGET" in
    startbsoui)
        echo "Selected: Start Application"
        start_application
        ;;
    stopbsoui)
        echo "Selected: Stop Application"
        stop_application
        ;;
    logrotationbsoui)
        echo "Selected: rotate logs and upload logs to wasabi"
        rotate_logs
        ;;
    *)
        echo "Invalid target."
        ;;
esac