Exporting data in the right format is a daily database operations task — feeding analytics pipelines, delivering reports, exchanging data with partners, and creating snapshots. Shell scripts that combine database clients with format tools produce cleaner, more auditable exports than application-level code.
1
CSV exports from MySQL, PostgreSQL, and SQLite
BASH
# ── MySQL to CSV ──────────────────────────────────────────
# Method 1: MySQL client batch mode (-B tab-separated, -N no header,
# -s silent) converted to CSV with sed. NOTE: this naive conversion
# breaks if values themselves contain tabs, commas, or quotes — fine
# for simple columns; use a real CSV writer otherwise.
mysql --defaults-file=/etc/myapp/mysql.conf myapp -BNs <<'SQL' | sed 's/\t/,/g'
SELECT id,name,email,created_at FROM users WHERE active=1;
SQL
# Method 2: MySQL SELECT INTO OUTFILE (file is written on the DB
# server's filesystem, so this needs FILE privilege and local access).
mysql --defaults-file=/etc/myapp/mysql.conf myapp <<'SQL'
SELECT id,name,email
INTO OUTFILE '/tmp/users_export.csv'
FIELDS TERMINATED BY ',' OPTIONALLY ENCLOSED BY '"'
LINES TERMINATED BY '\n'
FROM users WHERE active=1;
SQL
# Method 3: mysqldump tab-delimited dump, then tab→comma conversion
# (also strip any trailing CR for Windows-generated rows).
mysqldump --defaults-file=/etc/myapp/mysql.conf --tab=/tmp/export/ myapp users
sed 's/\t/,/g; s/\r$//' /tmp/export/users.txt > users.csv
# ── PostgreSQL to CSV ─────────────────────────────────────
# \COPY runs client-side, so the output path is on THIS machine.
PGPASSWORD="${DB_PASS}" psql -h "${DB_HOST}" -U "${DB_USER}" -d myapp -c "\COPY (SELECT id,name,email FROM users WHERE active=true) TO '/tmp/users.csv' CSV HEADER"
# Or streaming via stdout (no server filesystem access needed)
PGPASSWORD="${DB_PASS}" psql -h "${DB_HOST}" -U "${DB_USER}" -d myapp --csv -c "SELECT id,name,email FROM users WHERE active=true" > users.csv
# ── SQLite to CSV ─────────────────────────────────────────
sqlite3 -csv -header mydb.db "SELECT id,name,email FROM users WHERE active=1" > users.csv
# ── Add timestamp to export filename ─────────────────────
EXPORT="users_$(date +%Y%m%d_%H%M%S).csv"
sqlite3 -csv -header mydb.db "SELECT * FROM users" > "${EXPORT}"
gzip "${EXPORT}"
echo "Exported: ${EXPORT}.gz ($(du -sh "${EXPORT}.gz" | cut -f1))"
2
JSON exports and transformations
BASH
# ── MySQL to JSON (manual construction) ───────────────────
# Build one JSON object per row with CONCAT, then let jq -s wrap the
# stream into an array. CAVEAT: CONCAT does not escape quotes or
# backslashes inside data; on MySQL 5.7+ prefer JSON_OBJECT('id', id, ...)
# which produces correctly escaped JSON.
mysql --defaults-file=/etc/myapp/mysql.conf myapp -BNs <<'SQL' | jq -s '.' > users.json
SELECT CONCAT('{"id":', id, ',"name":"', name, '","email":"', email, '"}')
FROM users WHERE active=1;
SQL
# ── PostgreSQL native JSON output ─────────────────────────
# json_agg/row_to_json escape values correctly on the server side.
PGPASSWORD="${DB_PASS}" psql -h "${DB_HOST}" -U "${DB_USER}" -d myapp -tAc "SELECT json_agg(row_to_json(u)) FROM (SELECT id,name,email FROM users WHERE active=true) u" | jq '.' > users.json
# ── SQLite native JSON output ─────────────────────────────
sqlite3 -json mydb.db "SELECT id,name,email FROM users WHERE active=1" > users.json
# ── NDJSON (newline-delimited JSON) for streaming ─────────
# One object per line; same escaping caveat as the CONCAT method above.
mysql --defaults-file=/etc/myapp/mysql.conf -BNs myapp -e "SELECT id,name,email FROM users" \
  | awk -F'\t' '{printf "{\"id\":%s,\"name\":\"%s\",\"email\":\"%s\"}\n", $1, $2, $3}' > users.ndjson
# ── CSV to JSON (Python csv module handles quoting/escapes) ─
# Reads CSV with header and converts each row to a JSON object.
python3 -c "
import csv, json, sys
r = csv.DictReader(sys.stdin)
print(json.dumps(list(r), indent=2))
" < users.csv > users.json
3
XML export and validation
BASH
# ── Generate XML from database rows ──────────────────────
# CAVEAT: values are not XML-escaped; rows containing & < > " will
# produce invalid XML — run the xmllint validation below before use.
mysql --defaults-file=/etc/myapp/mysql.conf -BNs myapp -e "SELECT id,name,email FROM users WHERE active=1" | awk -F'\t' '
BEGIN { print "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"; print "<users>" }
{ printf "  <user id=\"%s\"><name>%s</name><email>%s</email></user>\n", $1, $2, $3 }
END { print "</users>" }' > users.xml
# ── Validate XML with xmllint ─────────────────────────────
xmllint --noout users.xml 2>&1 && echo "Valid XML" || echo "Invalid XML"
# ── Pretty-print XML ──────────────────────────────────────
xmllint --format users.xml > users_formatted.xml
# ── Extract values from XML with xmllint XPath ────────────
xmllint --xpath "//user[@id='42']/name/text()" users.xml
# ── Complete export script with format selection ──────────
# Export the users table from mydb.db in the requested format.
# Arguments: $1 - output format: csv (default), json, or xml
# Outputs:   formatted rows on stdout; error message on stderr
# Returns:   0 on success, 1 on unknown format
export_users() {
  local format="${1:-csv}"
  case "${format}" in
    csv)  sqlite3 -csv -header mydb.db "SELECT * FROM users" ;;
    json) sqlite3 -json mydb.db "SELECT * FROM users" ;;
    # $'\t' gives a literal tab separator; awk splits on it to build XML.
    # NOTE: values are not XML-escaped — validate before shipping.
    xml)  sqlite3 -separator $'\t' mydb.db "SELECT id,name FROM users" \
            | awk -F'\t' 'BEGIN{print "<users>"} {printf "  <user id=\"%s\">%s</user>\n", $1, $2} END{print "</users>"}' ;;
    *)    echo "Unknown format: ${format}" >&2; return 1 ;;
  esac
}
✔ Export rules — For MySQL CSV, use -BNs tab-separated output and convert with sed 's/\t/,/g', or use SELECT INTO OUTFILE for large tables (note it writes on the database server's filesystem). For PostgreSQL, \COPY ... TO STDOUT CSV HEADER streams directly to the client without touching the server filesystem. Compress exports with gzip immediately after writing them. Validate JSON with jq empty and XML with xmllint --noout before transferring to consumers.