@diegofcornejo · Last active February 24, 2025 13:46
Cronicle Docker Compose

A self-hosted Cronicle setup in five parts: the Cronicle config.json, a Docker Compose service definition, the Dockerfile that builds the image, its entrypoint script, and a shell script that backs up Postgres Docker volumes to Cloudflare R2. First, the config.json that gets bind-mounted into the container:
{
  "base_app_url": "http://localhost:3012",
  "email_from": "admin@localhost",
  "smtp_hostname": "localhost",
  "smtp_port": 2587,
  "secret_key": "ee3012911426641dcfcde322f3a8e330",
  "mail_options": {
    "secure": true,
    "auth": { "user": "fsmith", "pass": "12345" },
    "connectionTimeout": 10000,
    "greetingTimeout": 10000,
    "socketTimeout": 10000
  },
  "log_dir": "logs",
  "log_filename": "[component].log",
  "log_columns": ["hires_epoch", "date", "hostname", "pid", "component", "category", "code", "msg", "data"],
  "log_archive_path": "logs/archives/[yyyy]/[mm]/[dd]/[filename]-[yyyy]-[mm]-[dd].log.gz",
  "log_crashes": true,
  "copy_job_logs_to": "",
  "queue_dir": "queue",
  "pid_file": "logs/cronicled.pid",
  "debug_level": 9,
  "maintenance": "04:00",
  "list_row_max": 10000,
  "job_data_expire_days": 180,
  "child_kill_timeout": 10,
  "dead_job_timeout": 120,
  "master_ping_freq": 20,
  "master_ping_timeout": 60,
  "udp_broadcast_port": 3014,
  "scheduler_startup_grace": 10,
  "universal_web_hook": "",
  "track_manual_jobs": false,
  "max_jobs": 0,
  "server_comm_use_hostnames": false,
  "web_direct_connect": false,
  "web_socket_use_hostnames": false,
  "job_memory_max": 1073741824,
  "job_memory_sustain": 0,
  "job_cpu_max": 0,
  "job_cpu_sustain": 0,
  "job_log_max_size": 0,
  "job_env": {},
  "web_hook_text_templates": {
    "job_start": "Job started on [hostname]: [event_title] [job_details_url]",
    "job_complete": "Job completed successfully on [hostname]: [event_title] [job_details_url]",
    "job_failure": "Job failed on [hostname]: [event_title]: Error [code]: [description] [job_details_url]",
    "job_launch_failure": "Failed to launch scheduled event: [event_title]: [description] [edit_event_url]"
  },
  "client": {
    "name": "Cronicle",
    "debug": 1,
    "default_password_type": "password",
    "privilege_list": [
      { "id": "admin", "title": "Administrator" },
      { "id": "create_events", "title": "Create Events" },
      { "id": "edit_events", "title": "Edit Events" },
      { "id": "delete_events", "title": "Delete Events" },
      { "id": "run_events", "title": "Run Events" },
      { "id": "abort_events", "title": "Abort Events" },
      { "id": "state_update", "title": "Toggle Scheduler" }
    ],
    "new_event_template": {
      "enabled": 1,
      "params": {},
      "timing": { "minutes": [0] },
      "max_children": 1,
      "timeout": 3600,
      "catch_up": 0,
      "queue_max": 1000
    }
  },
  "Storage": {
    "engine": "Filesystem",
    "list_page_size": 50,
    "concurrency": 4,
    "log_event_types": { "get": 1, "put": 1, "head": 1, "delete": 1, "expire_set": 1 },
    "transactions": true,
    "trans_auto_recover": true,
    "Filesystem": {
      "base_dir": "data",
      "key_namespaces": 1
    }
  },
  "WebServer": {
    "http_port": 3012,
    "http_htdocs_dir": "htdocs",
    "http_max_upload_size": 104857600,
    "http_static_ttl": 3600,
    "http_static_index": "index.html",
    "http_server_signature": "Cronicle 1.0",
    "http_gzip_text": true,
    "http_timeout": 30,
    "http_regex_json": "(text|javascript|js|json)",
    "http_response_headers": {
      "Access-Control-Allow-Origin": "*"
    },
    "https": false,
    "https_port": 3013,
    "https_cert_file": "conf/ssl.crt",
    "https_key_file": "conf/ssl.key",
    "https_force": false,
    "https_timeout": 30,
    "https_header_detect": {
      "Front-End-Https": "^on$",
      "X-Url-Scheme": "^https$",
      "X-Forwarded-Protocol": "^https$",
      "X-Forwarded-Proto": "^https$",
      "X-Forwarded-Ssl": "^on$"
    }
  },
  "User": {
    "session_expire_days": 30,
    "max_failed_logins_per_hour": 5,
    "max_forgot_passwords_per_hour": 3,
    "free_accounts": false,
    "sort_global_users": true,
    "use_bcrypt": true,
    "email_templates": {
      "welcome_new_user": "conf/emails/welcome_new_user.txt",
      "changed_password": "conf/emails/changed_password.txt",
      "recover_password": "conf/emails/recover_password.txt"
    },
    "default_privileges": {
      "admin": 0,
      "create_events": 1,
      "edit_events": 1,
      "delete_events": 1,
      "run_events": 0,
      "abort_events": 0,
      "state_update": 0
    }
  }
}
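
Any of the keys above can also be overridden at runtime through environment variables: Cronicle maps variables prefixed with CRONICLE_ onto config keys, with a double underscore descending into nested objects. The Dockerfile and entrypoint below rely on exactly this mechanism. A minimal sketch (the values here are illustrative):

# Override top-level and nested config keys without editing config.json
export CRONICLE_base_app_url='http://localhost:3012'
export CRONICLE_WebServer__http_port=3012
export CRONICLE_Storage__Filesystem__base_dir=/opt/cronicle/data

The Compose service below bind-mounts this file to /opt/cronicle/conf/config.json inside the container.
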
version: '3.8'

x-logging: &default-logging
  driver: "loki"
  options:
    loki-url: "http://localhost:3100/loki/api/v1/push"

services:
  cronicle:
    image: cronicle:0.9.73
    container_name: cronicle
    hostname: cronicle
    environment:
      - TZ=America/Guatemala
    ports:
      - "127.0.0.1:3012:3012"
    volumes:
      - /tmp:/tmp
      - /tools/aws:/tools/aws
      - /var/run/docker.sock:/var/run/docker.sock
      - ./data/cronicle/data:/opt/cronicle/data
      - ./data/cronicle/logs:/opt/cronicle/logs
      - ./data/cronicle/plugins:/opt/cronicle/plugins
      - ./data/cronicle/workloads/app:/app
      - ./data/cronicle/config.json:/opt/cronicle/conf/config.json
    restart: always
    healthcheck:
      test: ["CMD-SHELL", "wget --no-verbose --tries=1 --spider localhost:3012/api/app/ping || exit 1"]
      interval: 5s
      timeout: 1s
      retries: 3
    logging: *default-logging
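
A quick way to bring the stack up and probe the same endpoint the healthcheck uses (a sketch; it assumes this file is saved as docker-compose.yml in the working directory, and that the Loki logging driver is already installed on the host under the alias "loki", e.g. via docker plugin install grafana/loki-docker-driver --alias loki --grant-all-permissions):

docker compose up -d
docker compose ps                             # healthcheck should report "healthy"
curl -s http://127.0.0.1:3012/api/app/ping    # same ping endpoint the healthcheck hits

Note that cronicle:0.9.73 is a local tag; it is built from the Dockerfile that follows.
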
FROM docker:27.0

LABEL maintainer="BlueT - Matthew Lien - 練喆明 <[email protected]>"

# Docker defaults
ENV CRONICLE_VERSION=0.9.73
ENV CRONICLE_base_app_url='http://localhost:3012'
ENV CRONICLE_WebServer__http_port=3012
ENV CRONICLE_WebServer__https_port=443
ENV EDITOR=nano

RUN apk add --no-cache nodejs npm git curl wget perl bash perl-pathtools tar procps nano tini python3

RUN mkdir -p /opt/cronicle \
  && cd /opt/cronicle \
  && curl -L https://github.com/jhuckaby/Cronicle/archive/v${CRONICLE_VERSION}.tar.gz | tar zxvf - --strip-components 1 \
  && npm install \
  && node bin/build.js dist \
  && rm -Rf /root/.npm

# Runtime user
# RUN adduser cronicle -D -h /opt/cronicle
# RUN adduser cronicle docker

WORKDIR /opt/cronicle/

COPY entrypoint.sh /entrypoint.sh

EXPOSE 3012

# data volume is also configured in entrypoint.sh
VOLUME ["/opt/cronicle/data", "/opt/cronicle/logs", "/opt/cronicle/plugins"]

ENTRYPOINT ["/sbin/tini", "--"]
CMD ["sh", "/entrypoint.sh"]
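
Since the Compose file references the local tag cronicle:0.9.73, the image must be built from this Dockerfile first. A minimal sketch, assuming the Dockerfile and the entrypoint.sh shown next sit in the current directory:

# Build the image the Compose file expects
docker build -t cronicle:0.9.73 .

Basing the image on docker:27.0 and mounting /var/run/docker.sock in the Compose file is what lets Cronicle jobs run docker commands, as the backup script at the end of this gist does.
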
#!/bin/bash

ROOT_DIR=/opt/cronicle
CONF_DIR=$ROOT_DIR/conf
BIN_DIR=$ROOT_DIR/bin
# DATA_DIR needs to be the same as the exposed Docker volume in Dockerfile
DATA_DIR=$ROOT_DIR/data
# LOGS_DIR needs to be the same as the exposed Docker volume in Dockerfile
LOGS_DIR=$ROOT_DIR/logs
# PLUGINS_DIR needs to be the same as the exposed Docker volume in Dockerfile
PLUGINS_DIR=$ROOT_DIR/plugins

# The env variables below are needed for Docker and cannot be overwritten
export CRONICLE_Storage__Filesystem__base_dir=${DATA_DIR}
export NODE_EXTRA_CA_CERTS=/etc/ssl/certs/ca-certificates.crt
export CRONICLE_echo=1
export CRONICLE_foreground=1
export CRONICLE_color=1

# Only run setup when setup needs to be done
if [ ! -f "$DATA_DIR/.setup_done" ]; then
  cp "$CONF_DIR/config.json" "$CONF_DIR/config.json.origin"
  if [ -f "$DATA_DIR/config.json.import" ]; then
    # Move in custom configuration
    cp "$DATA_DIR/config.json.import" "$CONF_DIR/config.json"
  fi
  "$BIN_DIR/control.sh" setup
  # Create plugins directory
  mkdir -p "$PLUGINS_DIR"
  # Mark setup as done
  touch "$DATA_DIR/.setup_done"
fi

# Run cronicle with unprivileged user
# chown -R cronicle:cronicle data/ logs/

# Remove old lock file; resolves #9
PID_FILE=$LOGS_DIR/cronicled.pid
if [ -f "$PID_FILE" ]; then
  echo "Removing old PID file: $PID_FILE"
  rm -f "$PID_FILE"
fi

if [ -n "$1" ]; then
  # if [ "${1#-}" != "${1}" ] || [ -z "$(command -v "${1}")" ]; then
  #   set -- cronicle "$@"
  # fi
  exec "$@"
else
  # exec su cronicle -c "/opt/cronicle/bin/control.sh start"
  /opt/cronicle/bin/control.sh start
fi
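
Because the setup block above only runs while $DATA_DIR/.setup_done is absent, and picks up $DATA_DIR/config.json.import if present, a custom config can be seeded through the data volume instead of the bind-mounted conf file. A sketch using the host paths from the Compose file (my-config.json is a hypothetical file name):

# Seed a custom config before the first start; the entrypoint copies it into conf/
cp my-config.json ./data/cronicle/data/config.json.import
docker compose up -d

# To make the entrypoint run setup again, remove the marker file
rm ./data/cronicle/data/.setup_done

Finally, the backup script that the /tools/aws and docker.sock mounts exist to support:
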
#!/bin/bash

# Constants
S3_BUCKET="backups-01"
HOST_NAME="$(hostname)"
DATE=$(date +%F-%H%M%S) # Format as 'YYYY-MM-DD-HHMMSS'
MAX_BACKUPS=5 # Note: currently unused; retention is not implemented below
AWS_PROFILE="cloudflare-r2"

# AWS CLI command setup for Cloudflare R2
AWS_CLI="docker run --rm -v /tools/aws:/root/.aws -v $(pwd):/aws -v /tmp:/tmp -e AWS_PROFILE=${AWS_PROFILE} amazon/aws-cli"
R2_ENDPOINT="https://<cfr2id>.r2.cloudflarestorage.com"

# Upload an archive to S3-compatible storage and remove the local temp dir
upload_and_cleanup() {
  local temp_dir=$1
  local volume_name=$2
  local archive_name=$3
  local archive_path="${temp_dir}/${archive_name}"
  echo "Archive path: ${archive_path}"

  # Define S3 key
  local s3_key="${HOST_NAME}/docker/${volume_name}/${archive_name}"
  echo "S3 key: ${s3_key}"

  # Upload to S3
  echo "Uploading ${archive_name} to S3..."
  $AWS_CLI s3api put-object --bucket "${S3_BUCKET}" --key "${s3_key}" --body "${archive_path}" --endpoint-url "${R2_ENDPOINT}" --checksum-algorithm CRC32

  # Cleanup local files
  rm -rf "${temp_dir}"
}

# List all Docker volumes with "postgres" in the name
volumes=$(docker volume ls -q | grep postgres)

for volume in $volumes; do
  echo "Processing volume: ${volume}"
  temp_dir=$(mktemp -d)
  docker run --rm -v "${volume}":/volume -v "${temp_dir}":/backup busybox tar czvf "/backup/${volume}-${DATE}.tar.gz" -C /volume ./
  # Fix permissions for the tar file (it is created as root inside the container)
  docker run --rm -v "${temp_dir}":/backup busybox chown "$(id -u):$(id -g)" "/backup/${volume}-${DATE}.tar.gz"
  # Upload the backup to S3 and remove the temp dir
  upload_and_cleanup "${temp_dir}" "${volume}" "${volume}-${DATE}.tar.gz"
done

echo "Backup process completed."
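
Restoring is the inverse of the tar step in the loop above. A hedged sketch (the volume name and archive file are placeholders, and the archive is assumed to have been downloaded from R2 already):

# Unpack a downloaded archive into a fresh or existing named volume
docker run --rm -v my_postgres_volume:/volume -v /tmp:/backup busybox \
  tar xzvf /backup/my_postgres_volume-2025-01-01-000000.tar.gz -C /volume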