Initial commit
This commit is contained in:
33
image_files/fsroot/docker-entrypoint.d/30-prepare-env.sh
Normal file
33
image_files/fsroot/docker-entrypoint.d/30-prepare-env.sh
Normal file
@@ -0,0 +1,33 @@
|
||||
# #############################################################################
# Docker Entrypoint script - Prepare Application Environment
# #############################################################################

# Default application port; APP_PORT overrides it.
export PORT=${APP_PORT:-3000}


# ----- Setup the Environment

if [ "${ENABLE_DUMMY_DAEMON}" = "true" ]; then
    # Assume it is a development environment if ENABLE_DUMMY_DAEMON=true
    export APP_ENV="development"
    export NODE_ENV="development"
    export LOG_LEVEL="debug"

else
    # Starting as normal, so it should not be a development environment.
    # It should be a service for Production, UAT, or even demo.

    export APP_ENV=${APP_ENV:-production}

    # Because the Price Logging framework does not support setting
    # the JSON format explicitly, we have to set NODE_ENV="production"
    # in order to log in JSON format, even when APP_ENV is not "production".
    export NODE_ENV="production"

    # -z already matches the empty string, so a single test suffices;
    # the original '\( -z \) -o \( = "" \)' form was redundant and used
    # the deprecated/ambiguous -o operator.
    if [ -z "${LOG_LEVEL}" ]; then
        if [ "${APP_ENV}" = "production" ]; then
            export LOG_LEVEL="info"
        else
            export LOG_LEVEL="debug"
        fi
    fi
fi
|
||||
@@ -0,0 +1,16 @@
|
||||
# #############################################################################
# Docker Entrypoint script - Prepare the shell environment
# #############################################################################

# For Bash
cat "/docker-entrypoint.d/files/dot-bashrc.append" >> /home/node/.bashrc

if [ "${ENABLE_DUMMY_DAEMON}" = "true" ]; then
    # ENABLE_DUMMY_DAEMON=true implies a development environment, so
    # persist the current settings for interactive shell sessions.
    {
        echo "export APP_ENV=${APP_ENV}"
        echo "export NODE_ENV=${NODE_ENV}"
        echo "export LOG_LEVEL=${LOG_LEVEL}"
        echo "export APP_PORT=${APP_PORT}"
        echo "unset LOG_IN_JSON_FMT"
    } >> /home/node/.bashrc
fi
|
||||
17
image_files/fsroot/docker-entrypoint.d/90-start-app.sh
Normal file
17
image_files/fsroot/docker-entrypoint.d/90-start-app.sh
Normal file
@@ -0,0 +1,17 @@
|
||||
# Start application

# By-pass starting the node.js app if ENABLE_DUMMY_DAEMON=true
if [ ! "${ENABLE_DUMMY_DAEMON}" = "true" ]; then

    # Change to the APP root folder to run the app.
    # Quote the path and fail fast: the original unquoted `cd ${APP_ROOT}`
    # would silently fall back to $HOME when APP_ROOT is unset or empty.
    cd "${APP_ROOT:?APP_ROOT must be set}" || exit 1

    # Start NodeJS App - 'start:prod'
    # Note: Don't run 'start:dev' for development environment
    #       instead, set environment variable: APP_ENV=development
    #       Optionally, you can override the LOG_LEVEL as needed.

    # TODO: Handle env properly
    # dumb-init yarn start:prod
    dumb-init yarn -s start:prod
fi
|
||||
@@ -0,0 +1,6 @@
|
||||
# customisation for bash

# aliases
# ll: long listing; la: include dotfiles (except . and ..); l: columns with type suffix
alias ll='ls -l'
alias la='ls -A'
alias l='ls -CF'
|
||||
36
image_files/fsroot/docker-entrypoint.sh
Normal file
36
image_files/fsroot/docker-entrypoint.sh
Normal file
@@ -0,0 +1,36 @@
|
||||
#!/bin/sh

# This script uses sh. Since sh may behave differently across distributions,
# stick to classic, maximally portable techniques only.
# Example: plain if-conditions, the '.' (source) command.

# Exit immediately if a command exits with a non-zero status.
set -e
# (same as set -o errexit)

# Two separate [ ] tests joined with || replace the deprecated and
# ambiguous -o operator (POSIX marks -a/-o obsolescent).
if [ "${LOG_IN_JSON_FMT}" = "false" ] || [ "${LOG_IN_JSON_FMT}" = "0" ]; then
    unset LOG_IN_JSON_FMT
    # Otherwise, any other non-empty value will enable Log in JSON
fi

. /usr/local/bin/script_utils.sh

# Run (startup) scripts for the entrypoint if any:
if [ $# -eq 0 ]; then
    if [ -d '/docker-entrypoint.d' ]; then
        for f in /docker-entrypoint.d/*.sh; do
            # Guard the glob: with no *.sh files the pattern stays literal
            # and sourcing it would abort the entrypoint under set -e.
            [ -e "$f" ] || continue
            . "$f"
        done
    fi
    # If the daemon script does not work as a daemon,
    # use the following command to keep the container running as a daemon
    if [ "${ENABLE_DUMMY_DAEMON}" = "true" ]; then
        logging_warning "[!] Running as dummy daemon"
        tail -f /dev/null
    else
        # Run the shell
        /bin/sh
    fi
else
    exec "$@"
fi
|
||||
49
image_files/fsroot/usr/local/bin/script_utils.sh
Normal file
49
image_files/fsroot/usr/local/bin/script_utils.sh
Normal file
@@ -0,0 +1,49 @@
|
||||
#!/bin/sh

# Utility functions for shell scripts.


# Very simple 'logger' for shell scripts, with optional JSON output.
# To turn on JSON output, set env LOG_IN_JSON_FMT=1
# ---------------------

# Log a message at INFO level.
logging_info () {
    logging_msg "INFO" "${1}"
}

# Log a message at WARNING level.
logging_warning () {
    logging_msg "WARNING" "${1}"
}

# Log a message at ERROR level.
logging_error () {
    logging_msg "ERROR" "${1}"
}

# Core dispatcher: $1 = level, $2 = message.
# The previous version round-tripped its arguments through 'eval', which
# re-evaluates message content as shell code (an injection hazard), and it
# hard-coded "INFO" when calling echo_json_log, so WARNING/ERROR records
# were mislabelled in JSON mode. Both are fixed here.
logging_msg () {
    level="${1}"
    msg="${2}"
    # TODO: handle the 4 envs properly, or detect if running under k8s
    if [ -n "${LOG_IN_JSON_FMT}" ]; then
        echo_json_log "${level}" "${msg}"
    else
        # printf is safer than echo for arbitrary message content
        printf '%s\n' "${msg}"
    fi
}

# Emit one JSON log record on stdout: $1 = level, $2 = message.
echo_json_log () {
    level="${1}"
    msg="${2}"

    # Escape backslashes and double quotes, then fold real newlines into
    # literal \n. (The old "${msg//...}" substitution was a bashism in a
    # #!/bin/sh file; sed + awk keep this POSIX-portable.)
    msg=$(printf '%s' "${msg}" \
        | sed -e 's/\\/\\\\/g' -e 's/"/\\"/g' \
        | awk 'NR > 1 { printf "\\n" } { printf "%s", $0 }')

    # Replace %N with %3N for milliseconds, %6N for micro-seconds...
    # now=$(date '+%FT%T.%3N%:z')
    now=$(date '+%FT%T%z')
    printf '{"time": "%s", "level": "%s", "message": "%s"}\n' "${now}" "${level}" "${msg}"
}
|
||||
Reference in New Issue
Block a user