diff --git a/.woodpecker/db.yml b/.woodpecker/db.yml
index fef1dbe..8edb654 100644
--- a/.woodpecker/db.yml
+++ b/.woodpecker/db.yml
@@ -48,4 +48,5 @@ steps:
       - . ./ci/prod/.env.runtime
       - . ./.env.version
       - set +a
-      - bash ci/prod/scripts/deploy-db.sh
+      - bash ci/prod/scripts/bootstrap/network.sh
+      - bash ci/prod/scripts/deploy/db.sh
diff --git a/.woodpecker/nats.yml b/.woodpecker/nats.yml
new file mode 100644
index 0000000..0f6a620
--- /dev/null
+++ b/.woodpecker/nats.yml
@@ -0,0 +1,52 @@
+when:
+  - event: push
+    branch: main
+
+steps:
+  - name: version
+    image: alpine:latest
+    commands:
+      - set -euo pipefail
+      - apk add --no-cache git
+      - GIT_REV="$(git rev-parse --short HEAD)"
+      - BUILD_BRANCH="$(git rev-parse --abbrev-ref HEAD)"
+      - APP_V="$(cat version)"
+      - printf "GIT_REV=%s\nBUILD_BRANCH=%s\nAPP_V=%s\n" "$GIT_REV" "$BUILD_BRANCH" "$APP_V" | tee .env.version
+
+  - name: secrets
+    image: alpine:latest
+    depends_on: [ version ]
+    environment:
+      VAULT_ADDR: https://vault.sendico.io
+      VAULT_ROLE_ID: { from_secret: VAULT_APP_ROLE }
+      VAULT_SECRET_ID: { from_secret: VAULT_SECRET_ID }
+    commands:
+      - set -euo pipefail
+      - apk add --no-cache bash coreutils openssh-keygen curl sed python3
+      - mkdir -p secrets
+      - ./ci/vlt kv_to_file kv ops/deploy/ssh_key private_b64 secrets/SSH_KEY.b64 600
+      - base64 -d secrets/SSH_KEY.b64 > secrets/SSH_KEY
+      - chmod 600 secrets/SSH_KEY
+      - ssh-keygen -y -f secrets/SSH_KEY >/dev/null
+
+  - name: deploy
+    image: alpine:latest
+    depends_on: [ secrets ]
+    environment:
+      VAULT_ADDR: https://vault.sendico.io
+      VAULT_ROLE_ID: { from_secret: VAULT_APP_ROLE }
+      VAULT_SECRET_ID: { from_secret: VAULT_SECRET_ID }
+    commands:
+      - set -euo pipefail
+      - apk add --no-cache bash openssh-client rsync coreutils curl sed python3
+      - mkdir -p /root/.ssh
+      - install -m 600 secrets/SSH_KEY /root/.ssh/id_rsa
+      - sed -i 's/\r$//' ./ci/prod/.env.runtime
+      - set -a
+      - . ./ci/prod/.env.runtime
+      - . ./.env.version
+      - set +a
+      - export NATS_USER="$(./ci/vlt kv_get kv sendico/nats user)"
+      - export NATS_PASSWORD="$(./ci/vlt kv_get kv sendico/nats password)"
+      - bash ci/prod/scripts/bootstrap/network.sh
+      - bash ci/prod/scripts/deploy/nats.sh
diff --git a/ci/prod/.env.runtime b/ci/prod/.env.runtime
index fdd4852..4f236a9 100644
--- a/ci/prod/.env.runtime
+++ b/ci/prod/.env.runtime
@@ -14,4 +14,11 @@ PBM_S3_BUCKET=backup
 SSH_HOST=178.57.67.248
 SSH_USER=cloud
 REMOTE_BASE=/srv/sendico
-DB_DIR=db
\ No newline at end of file
+DB_DIR=db
+
+# NATS deployment
+NATS_DIR=nats
+NATS_HOST=sendico-nats
+NATS_PORT=4222
+NATS_MONITORING_PORT=8222
+NATS_PROMETHEUS_PORT=7777
diff --git a/ci/prod/compose/db.yml b/ci/prod/compose/db.yml
index 4219a5f..3ce3870 100644
--- a/ci/prod/compose/db.yml
+++ b/ci/prod/compose/db.yml
@@ -23,6 +23,11 @@ volumes:
       device: tmpfs
       o: size=16m,uid=0,gid=0,mode=0750
 
+networks:
+  sendico-net:
+    external: true
+    name: sendico-net
+
 services:
   vault-agent-sendico:
     <<: *common-env
@@ -56,6 +61,8 @@ services:
       timeout: 3s
       retries: 30
       start_period: 5s
+    networks:
+      - sendico-net
 
   sendico_db1:
     <<: *common-env
@@ -80,6 +87,8 @@ services:
      retries: 10
      start_period: 30s
    ports: [ "0.0.0.0:${MONGO_PORT}:${MONGO_PORT}" ]
+    networks:
+      - sendico-net
 
   sendico_db2:
     <<: *common-env
@@ -103,6 +112,8 @@ services:
       timeout: 5s
       retries: 10
       start_period: 30s
+    networks:
+      - sendico-net
 
   sendico_db3:
     <<: *common-env
@@ -126,6 +137,8 @@ services:
       timeout: 5s
       retries: 10
       start_period: 30s
+    networks:
+      - sendico-net
 
   mongo_setup:
     <<: *common-env
@@ -158,6 +171,8 @@ services:
       EOJS
       '
     restart: "no"
+    networks:
+      - sendico-net
 
   pbm-agent-1:
     <<: *common-env
@@ -180,6 +195,8 @@ services:
       export PBM_MONGODB_URI="mongodb://$${U}:$${P}@sendico_db1:${MONGO_PORT}/?authSource=${MONGO_AUTH_SOURCE}&replicaSet=${MONGO_REPLICA_SET}"
       exec pbm-agent --config=/etc/backup/pbm-config.yaml
       '
+    networks:
+      - sendico-net
 
   pbm-agent-2:
     <<: *common-env
@@ -202,6 +219,8 @@ services:
       export PBM_MONGODB_URI="mongodb://$${U}:$${P}@sendico_db2:${MONGO_PORT}/?authSource=${MONGO_AUTH_SOURCE}&replicaSet=${MONGO_REPLICA_SET}"
       exec pbm-agent --config=/etc/backup/pbm-config.yaml
       '
+    networks:
+      - sendico-net
 
   pbm-agent-3:
     <<: *common-env
@@ -224,3 +243,5 @@ services:
       export PBM_MONGODB_URI="mongodb://$${U}:$${P}@sendico_db3:${MONGO_PORT}/?authSource=${MONGO_AUTH_SOURCE}&replicaSet=${MONGO_REPLICA_SET}"
       exec pbm-agent --config=/etc/backup/pbm-config.yaml
       '
+    networks:
+      - sendico-net
diff --git a/ci/prod/compose/nats.yml b/ci/prod/compose/nats.yml
new file mode 100644
index 0000000..09eeffc
--- /dev/null
+++ b/ci/prod/compose/nats.yml
@@ -0,0 +1,71 @@
+# Compose v2 - NATS stack
+
+x-common-env: &common-env
+  env_file:
+    - ../env/.env.runtime
+
+volumes:
+  nats_data: {}
+
+networks:
+  sendico-net:
+    external: true
+    name: sendico-net
+
+services:
+  sendico_nats:
+    <<: *common-env
+    image: docker.io/library/nats:alpine  # alpine variant: the healthcheck below needs a shell and wget
+    container_name: sendico-nats
+    restart: unless-stopped
+    command:
+      - --jetstream
+      - --http_port=${NATS_MONITORING_PORT}
+      - --server_name=sendico-nats
+      - --user=${NATS_USER}
+      - --pass=${NATS_PASSWORD}
+      - --port=${NATS_PORT}
+      - --store_dir=/data
+    environment:
+      NATS_SERVER_OPTS: --jetstream
+    volumes:
+      - nats_data:/data
+    ports:
+      - "0.0.0.0:${NATS_PORT}:${NATS_PORT}"
+      - "0.0.0.0:${NATS_MONITORING_PORT}:${NATS_MONITORING_PORT}"
+    healthcheck:
+      test: ["CMD-SHELL","wget -qO- http://localhost:${NATS_MONITORING_PORT}/healthz >/dev/null"]
+      interval: 15s
+      timeout: 5s
+      retries: 5
+      start_period: 10s
+    networks:
+      - sendico-net
+
+  sendico_nats_exporter:
+    <<: *common-env
+    image: natsio/prometheus-nats-exporter:latest
+    container_name: sendico-nats-exporter
+    restart: unless-stopped
+    depends_on:
+      sendico_nats:
+        condition: service_healthy
+    environment:
+      NATS_URL: http://sendico-nats:${NATS_MONITORING_PORT}
+    ports:
+      - "0.0.0.0:${NATS_PROMETHEUS_PORT}:${NATS_PROMETHEUS_PORT}"
+    command:
+      - -varz
+      - -connz
+      - -routez
+      - -subz
+      - -leafz
+      - -gatewayz
+      - -healthz
+      - -accstatz
+      - -jsz=all
+      - -addr=0.0.0.0
+      - -port=${NATS_PROMETHEUS_PORT}
+      - http://sendico-nats:${NATS_MONITORING_PORT}
+    networks:
+      - sendico-net
diff --git a/ci/prod/scripts/bootstrap/network.sh b/ci/prod/scripts/bootstrap/network.sh
new file mode 100755
index 0000000..5077f15
--- /dev/null
+++ b/ci/prod/scripts/bootstrap/network.sh
@@ -0,0 +1,28 @@
+#!/usr/bin/env bash
+set -euo pipefail
+[[ "${DEBUG_DEPLOY:-0}" = "1" ]] && set -x
+trap 'echo "[bootstrap-shared-network] error at line $LINENO" >&2' ERR
+
+: "${SSH_USER:?missing SSH_USER}"
+: "${SSH_HOST:?missing SSH_HOST}"
+
+REMOTE_TARGET="${SSH_USER}@${SSH_HOST}"
+DOCKER_SHARED_NETWORK="${DOCKER_SHARED_NETWORK:-sendico-net}"
+
+SSH_OPTS=(
+  -i /root/.ssh/id_rsa
+  -o StrictHostKeyChecking=no
+  -o UserKnownHostsFile=/dev/null
+  -o LogLevel=ERROR
+  -q
+)
+if [[ "${DEBUG_DEPLOY:-0}" = "1" ]]; then
+  SSH_OPTS=("${SSH_OPTS[@]/-q/}" -vv)
+fi
+
+ssh "${SSH_OPTS[@]}" "$REMOTE_TARGET" \
+  DOCKER_SHARED_NETWORK="$DOCKER_SHARED_NETWORK" bash -s <<'EOSSH'
+set -euo pipefail
+docker network inspect "$DOCKER_SHARED_NETWORK" >/dev/null 2>&1 || \
+  docker network create "$DOCKER_SHARED_NETWORK"
+EOSSH
diff --git a/ci/prod/scripts/deploy-db.sh b/ci/prod/scripts/deploy/db.sh
similarity index 96%
rename from ci/prod/scripts/deploy-db.sh
rename to ci/prod/scripts/deploy/db.sh
index 67385cb..c819f3b 100755
--- a/ci/prod/scripts/deploy-db.sh
+++ b/ci/prod/scripts/deploy/db.sh
@@ -49,12 +49,11 @@ ssh "${SSH_OPTS[@]}" "$REMOTE_TARGET" \
 set -euo pipefail
 cd "${REMOTE_DIR}/compose"
 set -a; . ../env/.env.runtime; set +a
 
-# Run with ephemeral AppRole env (scoped only to these commands)
 VAULT_ROLE_ID="${VAULT_ROLE_ID}" VAULT_SECRET_ID="${VAULT_SECRET_ID}" docker compose -f db.yml pull --quiet 2>/dev/null || \
   VAULT_ROLE_ID="${VAULT_ROLE_ID}" VAULT_SECRET_ID="${VAULT_SECRET_ID}" docker compose -f db.yml pull
-VAULT_ROLE_ID="${VAULT_ROLE_ID}" VAULT_SECRET_ID="${VAULT_SECRET_ID}" docker compose -f db.yml up -d --remove-orphans --force-recreate
+VAULT_ROLE_ID="${VAULT_ROLE_ID}" VAULT_SECRET_ID="${VAULT_SECRET_ID}" docker compose -f db.yml up -d --remove-orphans
 
 docker compose -f db.yml ps
 date -Is > .last_deploy
 
diff --git a/ci/prod/scripts/deploy/nats.sh b/ci/prod/scripts/deploy/nats.sh
new file mode 100755
index 0000000..0f0668c
--- /dev/null
+++ b/ci/prod/scripts/deploy/nats.sh
@@ -0,0 +1,53 @@
+#!/usr/bin/env bash
+set -euo pipefail
+[[ "${DEBUG_DEPLOY:-0}" = "1" ]] && set -x
+trap 'echo "[deploy-nats] error at line $LINENO" >&2' ERR
+
+: "${REMOTE_BASE:?missing REMOTE_BASE}"
+: "${NATS_DIR:?missing NATS_DIR}"
+: "${SSH_USER:?missing SSH_USER}"
+: "${SSH_HOST:?missing SSH_HOST}"
+: "${NATS_USER:?missing NATS_USER}"
+: "${NATS_PASSWORD:?missing NATS_PASSWORD}"
+
+REMOTE_DIR="${REMOTE_BASE%/}/${NATS_DIR}"
+REMOTE_TARGET="${SSH_USER}@${SSH_HOST}"
+
+SSH_OPTS=(
+  -i /root/.ssh/id_rsa
+  -o StrictHostKeyChecking=no
+  -o UserKnownHostsFile=/dev/null
+  -o LogLevel=ERROR
+  -q
+)
+if [[ "${DEBUG_DEPLOY:-0}" = "1" ]]; then
+  SSH_OPTS=("${SSH_OPTS[@]/-q/}" -vv)
+fi
+
+RSYNC_FLAGS=(-az --delete)
+[[ "${DEBUG_DEPLOY:-0}" = "1" ]] && RSYNC_FLAGS=(-avz --delete)
+
+ssh "${SSH_OPTS[@]}" "$REMOTE_TARGET" "mkdir -p ${REMOTE_DIR}/{compose,env}"
+
+rsync "${RSYNC_FLAGS[@]}" -e "ssh ${SSH_OPTS[*]}" ci/prod/compose/ "$REMOTE_TARGET:${REMOTE_DIR}/compose/"
+rsync "${RSYNC_FLAGS[@]}" -e "ssh ${SSH_OPTS[*]}" ci/prod/.env.runtime "$REMOTE_TARGET:${REMOTE_DIR}/env/.env.runtime"
+
+ssh "${SSH_OPTS[@]}" "$REMOTE_TARGET" \
+  REMOTE_DIR="$REMOTE_DIR" \
+  NATS_USER="$NATS_USER" \
+  NATS_PASSWORD="$NATS_PASSWORD" \
+  bash -s <<'EOSSH'
+set -euo pipefail
+cd "${REMOTE_DIR}/compose"
+set -a; . ../env/.env.runtime; set +a
+: "${NATS_USER:?missing NATS_USER}"
+: "${NATS_PASSWORD:?missing NATS_PASSWORD}"
+export NATS_USER NATS_PASSWORD
+
+docker compose -f nats.yml pull --quiet 2>/dev/null || docker compose -f nats.yml pull
+docker compose -f nats.yml up -d --remove-orphans
+
+docker compose -f nats.yml ps
+date -Is > .last_deploy
+logger -t deploy-nats "nats deployed at $(date -Is) in ${REMOTE_DIR}"
+EOSSH