backup_tfstate.sh
#!/bin/bash
#
# Terraform Cloud State Backup
# Downloads all tfstate files from an organization's workspaces
# Usage: ./backup-tfstate.sh <API_TOKEN> <ORG_NAME> [MAX_PARALLEL]
#
set -euo pipefail

if [ $# -lt 2 ]; then
  echo "Usage: $0 <API_TOKEN> <ORG_NAME> [MAX_PARALLEL]" >&2
  exit 1
fi
command -v jq >/dev/null || { echo "Error: jq is required" >&2; exit 1; }

API_TOKEN="$1"
ORG_NAME="$2"
MAX_PARALLEL="${3:-10}"   # cap on concurrent state downloads
readonly API_BASE="https://app.terraform.io/api/v2"
BACKUP_DIR="./tfstate-backups-${ORG_NAME}-$(date +%Y%m%d-%H%M%S)"
mkdir -p "$BACKUP_DIR"

# Scratch files shared with the background download jobs (cleaned up at exit).
RESULTS_FILE="$BACKUP_DIR/.results"
WORKSPACES_FILE="$BACKUP_DIR/.workspaces"
: > "$RESULTS_FILE"
: > "$WORKSPACES_FILE"

echo "Fetching workspaces for: $ORG_NAME"
page=1
page_file="$BACKUP_DIR/.page.json"
while true; do
  # page[number]/page[size] pre-encoded so curl passes them through untouched
  url="${API_BASE}/organizations/${ORG_NAME}/workspaces?page%5Bnumber%5D=${page}&page%5Bsize%5D=100"
  http_code=$(curl -s -o "$page_file" -w "%{http_code}" \
    -H "Authorization: Bearer $API_TOKEN" \
    -H "Content-Type: application/vnd.api+json" "$url") || http_code="000"

  # Fail loudly on auth/API errors instead of silently reporting 0 workspaces.
  if [ "$http_code" != "200" ]; then
    echo "Error: workspace listing failed (HTTP $http_code, page $page)" >&2
    exit 1
  fi

  # One "id<TAB>name" line per workspace; .data[]? tolerates missing/empty .data
  jq -r '.data[]? | "\(.id)\t\(.attributes.name)"' "$page_file" >> "$WORKSPACES_FILE"

  next=$(jq -r '.meta.pagination["next-page"] // empty' "$page_file")
  if [ -z "$next" ]; then
    break
  fi
  page=$next
done
rm -f "$page_file"

total=$(wc -l < "$WORKSPACES_FILE" | tr -d ' ')
echo "Found $total workspaces"
echo "Downloading states (max $MAX_PARALLEL parallel)..."
echo ""
#######################################
# Download the current state version of one workspace into BACKUP_DIR.
# Globals:   API_TOKEN, API_BASE, BACKUP_DIR, ORG_NAME (read);
#            RESULTS_FILE (appended: "success"/"skip"/"fail")
# Arguments: $1 - workspace ID, $2 - workspace name
# Outputs:   one progress line per workspace to stdout
#######################################
download_state() {
  local ws_id="$1"
  local ws_name="$2"
  local safe_name
  safe_name=$(echo "$ws_name" | tr '/ ' '__')   # make the name filesystem-safe
  local output_file="${BACKUP_DIR}/${ORG_NAME}_${safe_name}.tfstate"

  # mktemp instead of a predictable /tmp/state_<id>.json path: avoids
  # collisions between concurrent runs and symlink attacks on shared /tmp.
  local tmp_file
  tmp_file=$(mktemp) || {
    echo "[fail:mktemp] $ws_name"
    echo "fail" >> "$RESULTS_FILE"
    return
  }

  # Declaration split from assignment so a curl transport failure is not
  # masked by 'local'; "000" marks "no HTTP response at all".
  local state_http
  state_http=$(curl -s -o "$tmp_file" -w "%{http_code}" \
    -H "Authorization: Bearer $API_TOKEN" -H "Content-Type: application/vnd.api+json" \
    "${API_BASE}/workspaces/${ws_id}/current-state-version") || state_http="000"

  if [ "$state_http" != "200" ]; then
    # e.g. 404 for a workspace that has never had an apply
    echo "[skip:$state_http] $ws_name"
    echo "skip" >> "$RESULTS_FILE"
    rm -f "$tmp_file"
    return
  fi

  local download_url
  download_url=$(jq -r '.data.attributes["hosted-state-download-url"] // empty' "$tmp_file")
  rm -f "$tmp_file"

  if [ -z "$download_url" ]; then
    echo "[skip:no-url] $ws_name"
    echo "skip" >> "$RESULTS_FILE"
    return
  fi

  local http_code
  http_code=$(curl -s -L -o "$output_file" -w "%{http_code}" \
    -H "Authorization: Bearer $API_TOKEN" "$download_url") || http_code="000"

  if [ "$http_code" = "200" ]; then
    echo "[done] $ws_name"
    echo "success" >> "$RESULTS_FILE"
  else
    echo "[fail:$http_code] $ws_name"
    echo "fail" >> "$RESULTS_FILE"
    rm -f "$output_file"   # don't leave a partial/empty state file behind
  fi
}
# Export the worker and its globals so background subshells can use them.
export -f download_state
export API_TOKEN API_BASE BACKUP_DIR RESULTS_FILE ORG_NAME

# Fan out downloads, keeping at most MAX_PARALLEL jobs in flight.
while IFS=$'\t' read -r ws_id ws_name; do
  download_state "$ws_id" "$ws_name" &

  # Throttle: poll until a job slot frees up.
  while [ "$(jobs -r | wc -l)" -ge "$MAX_PARALLEL" ]; do
    sleep 0.1
  done
done < "$WORKSPACES_FILE"
wait   # barrier: let every in-flight download finish before tallying

# Tally results. -F -x counts exact literal lines only, so one marker can
# never match inside another; grep exits 1 on zero matches, hence the ||.
success_count=$(grep -Fxc "success" "$RESULTS_FILE" 2>/dev/null) || success_count=0
skip_count=$(grep -Fxc "skip" "$RESULTS_FILE" 2>/dev/null) || skip_count=0
fail_count=$(grep -Fxc "fail" "$RESULTS_FILE" 2>/dev/null) || fail_count=0

echo ""
echo "Complete: $BACKUP_DIR"
echo " Total: $total"
echo " Success: $success_count"
echo " Skipped: $skip_count"
echo " Failed: $fail_count"
rm -f "$RESULTS_FILE" "$WORKSPACES_FILE"