REST APIs are the backbone of modern infrastructure — cloud providers, monitoring platforms, communication tools, and external services all expose APIs. Shell scripts that interact with APIs via curl, parse JSON with jq, and handle authentication and rate limiting give you programmatic access to any external service.
1. Robust API client in bash
BASH
#!/usr/bin/env bash
# api_client.sh — Robust REST API client with auth, retry, and rate limiting
set -euo pipefail
# ── Configuration ─────────────────────────────────────────
# Base URL and bearer token come from the environment — credentials are
# never hardcoded in the script. `:?` aborts with a message if unset.
# (API_BASE/API_KEY stay writable: helpers below override them per call.)
API_BASE="${API_URL:?API_URL required}"
API_KEY="${API_KEY:?API_KEY required}"
readonly API_TIMEOUT=30      # per-request curl timeout, seconds
readonly API_MAX_RETRIES=3   # attempts before giving up on 429/5xx
# Rate-limit state file, one per API: a short hash of the base URL keeps
# state for different APIs separate across runs.
# NOTE(review): /tmp is world-writable and the name is predictable —
# consider "${TMPDIR:-/tmp}/.api_rate_$(id -u)_<hash>" for multi-user hosts.
API_RATE_LIMIT_FILE="/tmp/.api_rate_$(echo "${API_BASE}" | md5sum | cut -c1-8)"
# ── Rate limiting tracker ─────────────────────────────────
# rate_limit_check MAX_PER_MINUTE
# Client-side sliding-window rate limiter. Keeps one epoch timestamp per
# request in ${API_RATE_LIMIT_FILE}; when the last 60 seconds already hold
# MAX_PER_MINUTE requests, sleeps until the oldest entry ages out, then
# records the current request and persists the pruned window.
rate_limit_check() {
  local max_per_minute="${1:-60}"
  local now; now=$(date +%s)

  # Load previously recorded timestamps (file may not exist on first run).
  local requests=()
  [[ -f "${API_RATE_LIMIT_FILE}" ]] && \
    mapfile -t requests < "${API_RATE_LIMIT_FILE}"

  # Keep only timestamps inside the 60-second window.
  # ${requests[@]+...} guards the empty-array case: bash < 4.4 treats an
  # empty-array expansion as an unbound variable under `set -u`.
  local recent=() ts
  for ts in ${requests[@]+"${requests[@]}"}; do
    (( ts >= now - 60 )) && recent+=("${ts}")
  done

  if (( ${#recent[@]} >= max_per_minute )); then
    local oldest="${recent[0]}"
    local wait=$(( 60 - (now - oldest) + 1 ))
    echo " Rate limit: waiting ${wait}s" >&2
    sleep "${wait}"
    # Re-filter against the post-sleep clock instead of discarding the
    # whole history (the original reset recent=() and recorded a stale
    # timestamp, under-counting requests still inside the window).
    now=$(date +%s)
    local still=()
    for ts in "${recent[@]}"; do
      (( ts >= now - 60 )) && still+=("${ts}")
    done
    recent=(${still[@]+"${still[@]}"})
  fi

  # Record this request and persist the window for the next invocation.
  recent+=("${now}")
  printf '%s\n' "${recent[@]}" > "${API_RATE_LIMIT_FILE}"
}
# ── Core API call function ────────────────────────────────
# api_call METHOD ENDPOINT [JSON_BODY]
# Core request wrapper: bearer auth, JSON headers, client-side rate
# limiting, and retries for 429/5xx. Prints the response body on success.
# Returns 0 on HTTP 200/201/204; 1 on auth failure, not-found, unexpected
# status, or when retries are exhausted.
api_call() {
  local method="${1:-GET}"
  local endpoint="${2:?endpoint required}"
  local data="${3:-}"
  local attempt=0

  while (( attempt < API_MAX_RETRIES )); do
    # NB: `(( attempt++ ))` evaluates to 0 on the first pass, returns
    # status 1, and aborts the whole script under `set -e` — use plain
    # arithmetic assignment instead.
    attempt=$(( attempt + 1 ))
    rate_limit_check 60

    local headers_file
    headers_file=$(mktemp)
    local args=(
      -s
      --max-time "${API_TIMEOUT}"
      -H "Authorization: Bearer ${API_KEY}"
      -H "Content-Type: application/json"
      -H "Accept: application/json"
      -D "${headers_file}"   # dump response headers (for Retry-After on 429)
      -w '\n%{http_code}'    # append the status code as the final line
      -X "${method}"
      "${API_BASE}${endpoint}"
    )
    [[ -n "${data}" ]] && args+=(-d "${data}")

    # No -f: with -f curl exits non-zero AND suppresses the body on
    # HTTP >= 400, so the error-handling below would never run (the
    # command substitution would kill the script under `set -e`).
    # `|| true` keeps transport errors from aborting; they fall through
    # to the default case below.
    local response
    response=$(curl "${args[@]}" 2>/dev/null) || true

    # Last line is the status code; everything before it is the body.
    # (Parameter expansion avoids the GNU-only `head -n-1`.)
    local http_code="${response##*$'\n'}"
    local body="${response%$'\n'*}"

    case "${http_code}" in
      200|201|204)
        rm -f "${headers_file}"
        echo "${body}"
        return 0 ;;
      429)
        # Server-side rate limit: honor Retry-After from THIS response
        # instead of issuing a second request just to read the header.
        local retry_after
        retry_after=$(grep -i '^Retry-After:' "${headers_file}" | awk '{print $2}' | tr -d '\r')
        rm -f "${headers_file}"
        sleep "${retry_after:-30}"
        continue ;;
      401)
        rm -f "${headers_file}"
        echo "API authentication failed" >&2
        return 1 ;;
      404)
        rm -f "${headers_file}"
        echo "Not found: ${endpoint}" >&2
        return 1 ;;
      5??)
        rm -f "${headers_file}"
        echo "Server error ${http_code} — retry ${attempt}/${API_MAX_RETRIES}" >&2
        sleep $(( attempt * 5 ))   # linear backoff: 5s, 10s, 15s
        continue ;;
      *)
        rm -f "${headers_file}"
        echo "Unexpected HTTP ${http_code}" >&2
        return 1 ;;
    esac
  done

  echo "API call failed after ${API_MAX_RETRIES} attempts" >&2
  return 1
}
# ── Convenience wrappers ──────────────────────────────────
# ── Convenience wrappers ──────────────────────────────────
# One thin wrapper per HTTP verb; GET/DELETE take an endpoint only,
# POST/PUT take an endpoint plus a JSON body.
api_get() {
  api_call GET "$1"
}
api_post() {
  api_call POST "$1" "$2"
}
api_put() {
  api_call PUT "$1" "$2"
}
api_del() {
  api_call DELETE "$1"
}
# ── Example: GitHub API integration ───────────────────────
# github_create_release OWNER/REPO TAG TITLE NOTES
# Creates a (non-draft) GitHub release and prints its html_url.
# Requires GITHUB_TOKEN in the environment.
github_create_release() {
  local repo="$1" tag="$2" title="$3" notes="$4"
  local payload
  payload=$(jq -n \
    --arg tag_name "${tag}" \
    --arg name "${title}" \
    --arg body "${notes}" \
    '{tag_name:$tag_name, name:$name, body:$body, draft:false}')
  # `local` scopes the override to this call (bash's dynamic scoping makes
  # it visible inside api_post) instead of clobbering the script-wide
  # API_BASE/API_KEY for every subsequent API call, as plain assignment did.
  local API_BASE="https://api.github.com"
  local API_KEY="${GITHUB_TOKEN:?GITHUB_TOKEN required}"
  local result
  result=$(api_post "/repos/${repo}/releases" "${payload}")
  echo "${result}" | jq -r '.html_url'
}
# ── Example: PagerDuty alert ──────────────────────────────
# send_pagerduty_alert SUMMARY [SEVERITY]
# Triggers a PagerDuty Events v2 alert (severity defaults to "critical").
# Requires PAGERDUTY_ROUTING_KEY in the environment.
send_pagerduty_alert() {
  local summary="$1" severity="${2:-critical}"
  # Scoped overrides — don't clobber the script-wide config.
  local API_BASE="https://events.pagerduty.com"
  local API_KEY="${PAGERDUTY_ROUTING_KEY:?PAGERDUTY_ROUTING_KEY required}"
  # Pass every value via --arg: the original spliced shell variables
  # directly into the jq program, which breaks (or injects JSON) as soon
  # as a summary or hostname contains a quote.
  api_post "/v2/enqueue" \
    "$(jq -n \
        --arg rk "${PAGERDUTY_ROUTING_KEY}" \
        --arg s "${summary}" \
        --arg sev "${severity}" \
        --arg src "$(hostname)" \
        '{routing_key:$rk,
          event_action:"trigger",
          payload:{summary:$s,severity:$sev,source:$src}}')"
}
2. Practical API integrations
BASH
#!/usr/bin/env bash
# api_integrations.sh — Common API integration patterns
# Strict mode, matching api_client.sh: exit on error, unset vars, pipe fails.
set -euo pipefail
# ── Cloudflare: purge cache after deploy ──────────────────
# cf_purge_cache URL...
# Purges the given URLs from Cloudflare's cache; prints "true" on success.
# Requires CF_ZONE_ID and CF_API_TOKEN in the environment.
cf_purge_cache() {
  local zone_id="${CF_ZONE_ID:?CF_ZONE_ID required}"
  # Guard the empty case: the original's printf|jq -R round-trip turned
  # zero URLs into ["" ] and sent a bogus purge request.
  (( $# > 0 )) || { echo "cf_purge_cache: no URLs given" >&2; return 1; }
  # --args + $ARGS.positional maps the argument list straight to a JSON
  # array — no intermediate printf / jq -R / jq -s pipeline needed.
  local payload
  payload=$(jq -n --args '{files: $ARGS.positional}' "$@")
  curl -s -X POST \
    "https://api.cloudflare.com/client/v4/zones/${zone_id}/purge_cache" \
    -H "Authorization: Bearer ${CF_API_TOKEN:?CF_API_TOKEN required}" \
    -H "Content-Type: application/json" \
    -d "${payload}" | jq -r '.success'
}
# ── Slack: file upload ────────────────────────────────────
# slack_upload_file CHANNEL FILE [TITLE]
# Uploads FILE to a Slack channel via files.upload and prints the API's
# "ok" field. Requires SLACK_BOT_TOKEN in the environment.
slack_upload_file() {
  local channel="${1}" file="${2}" title="${3:-Report}"
  # Build the multipart form as an array so each field stays one argument.
  local form=(
    -F "file=@${file}"
    -F "channels=${channel}"
    -F "title=${title}"
  )
  curl -s "${form[@]}" \
    -H "Authorization: Bearer ${SLACK_BOT_TOKEN}" \
    "https://slack.com/api/files.upload" | jq -r '.ok'
}
# ── AWS S3: upload backup ─────────────────────────────────
# s3_upload LOCAL_FILE S3_PATH
# Uploads LOCAL_FILE to s3://$S3_BUCKET/S3_PATH with server-side
# encryption and the infrequent-access storage class, then echoes the
# destination. Requires S3_BUCKET and a configured AWS CLI.
s3_upload() {
  local local_file="$1" s3_path="$2"
  # Fail fast with a clear message instead of a cryptic CLI error.
  [[ -f "${local_file}" ]] || {
    echo "s3_upload: no such file: ${local_file}" >&2
    return 1
  }
  aws s3 cp "${local_file}" "s3://${S3_BUCKET}/${s3_path}" \
    --storage-class STANDARD_IA \
    --sse AES256 \
    --quiet
  echo "Uploaded: s3://${S3_BUCKET}/${s3_path}"
}
# ── Datadog: send custom metric ───────────────────────────
# dd_metric METRIC VALUE [TAGS]
# Posts a single metric point (current timestamp) to Datadog's v1 series
# API and prints the response status. VALUE must be numeric — it is passed
# with --argjson. Requires DD_API_KEY in the environment.
dd_metric() {
  local metric="$1" value="$2" tags="${3:-host:$(hostname)}"
  local payload
  # Fix: the point value is the $v argument itself. The original wrote
  # `.[$v]`, which indexes jq's null input and always sent null instead
  # of the metric value.
  payload=$(jq -n \
    --arg m "${metric}" \
    --argjson v "${value}" \
    --arg t "${tags}" \
    '{series:[{metric:$m,points:[[now,$v]],tags:[$t]}]}')
  curl -s -X POST "https://api.datadoghq.com/api/v1/series" \
    -H "DD-API-KEY: ${DD_API_KEY}" \
    -H "Content-Type: application/json" \
    -d "${payload}" \
    | jq -r '.status'
}
# After each deploy:
# Example post-deploy pipeline. Requires CF_ZONE_ID/CF_API_TOKEN,
# SLACK_BOT_TOKEN, S3_BUCKET (plus AWS credentials), and DD_API_KEY
# in the environment.
cf_purge_cache "https://myapp.example.com/" "https://myapp.example.com/api"
slack_upload_file "#deployments" "/tmp/deploy_log.txt" "Deploy Log $(date +%Y-%m-%d)"
s3_upload "/backups/myapp_$(date +%Y%m%d).sql.gz" "backups/mysql/"
dd_metric "myapp.deploy.duration_seconds" "117" "env:production"
✔ API integration rules — Always handle rate limiting — both client-side (track your own request rate) and server-side (respect Retry-After headers). Use
jq for JSON construction and parsing — never build JSON by string concatenation. Add retry logic with exponential backoff for 5xx errors. Store API keys in environment variables or a secrets manager, never in scripts. Log every API call with timestamp and response code for debugging.