aiflow / run.sh
AZLABS's picture
Update run.sh
cc9ee69 verified
raw
history blame
1.96 kB
#!/bin/bash
# Container entrypoint: start PostgreSQL, import the database, configure n8n,
# then launch n8n in the foreground.
#
# Required env vars:
#   WORKDIR                              - directory containing import-db.sh
# Optional env vars (used only by the disabled WebDAV logging section below):
#   WEBDAV_URL, WEBDAV_USER, WEBDAV_PASSWORD

# Fail fast: abort on errors, unset variables, and pipeline failures.
set -euo pipefail

# WebDAV settings come from the environment; default to empty so `set -u`
# does not abort when they are not provided (they are only referenced by the
# commented-out log-upload section below).
WEBDAV_URL="${WEBDAV_URL:-}"
WEBDAV_USER="${WEBDAV_USER:-}"
WEBDAV_PASSWORD="${WEBDAV_PASSWORD:-}"

# Timestamp for the (currently disabled) deployment-log entry.
current_time=$(date +"%Y-%m-%d %H:%M:%S")

# Log file path
# LOG_FILE="${WORKDIR}/buildlog.txt"

# Start PostgreSQL in the background via the official image entrypoint.
/usr/local/bin/docker-entrypoint.sh postgres &

# Block until PostgreSQL accepts connections.
echo "Waiting for PostgreSQL service to start..."
until pg_isready -h localhost; do
  sleep 3
done
echo "PostgreSQL service started!"

# Run the database import script. WORKDIR must be set, otherwise abort with a
# clear message instead of attempting to execute /import-db.sh.
echo "Running database import script..."
"${WORKDIR:?WORKDIR must be set}/import-db.sh"

# --- n8n configuration ------------------------------------------------------
# NOTE(review): hard-coded encryption key — should be injected as a secret.
# Left unchanged here because rotating it invalidates credentials already
# encrypted with it.
export N8N_ENCRYPTION_KEY="n8n8n8n"
# Allow use of all built-in Node modules in Function nodes
# (quoted so the '*' is never subject to pathname expansion).
export NODE_FUNCTION_ALLOW_BUILTIN="*"
# Allow use of all external npm modules in Function nodes
export NODE_FUNCTION_ALLOW_EXTERNAL="*"
# Activate automatic execution-data pruning
export EXECUTIONS_DATA_PRUNE=true
# Number of hours after execution that n8n deletes data
export EXECUTIONS_DATA_MAX_AGE=36
# Maximum number of finished executions to keep
export EXECUTIONS_DATA_PRUNE_MAX_COUNT=1000
# Save executions ending in errors
export EXECUTIONS_DATA_SAVE_ON_ERROR=all
# Save successful executions
export EXECUTIONS_DATA_SAVE_ON_SUCCESS=all
# Don't save node progress for each execution
export EXECUTIONS_DATA_SAVE_ON_PROGRESS=false
# Don't save manually launched executions
export EXECUTIONS_DATA_SAVE_MANUAL_EXECUTIONS=false

# Check if log file exists on WebDAV (disabled)
# echo "Checking if ${WEBDAV_URL}buildlog.txt exists"
# if curl -u "$WEBDAV_USER:$WEBDAV_PASSWORD" -f -o "$LOG_FILE" "${WEBDAV_URL}buildlog.txt"; then
#   echo "Log file downloaded and written to deployment time: ${LOG_FILE}"
#   echo "hf deployment time: ${current_time}" >> "${LOG_FILE}"
# else
#   echo "WebDAV log file does not exist, creating new log file: ${LOG_FILE}"
#   touch "${LOG_FILE}"
#   echo "hf deployment time: ${current_time}" >> "${LOG_FILE}"
# fi

# Replace the shell with n8n so the container's signals (e.g. SIGTERM on
# shutdown) reach n8n directly. Output is discarded, as in the original;
# remove the redirects when debugging.
exec n8n > /dev/null 2>&1