#!/usr/bin/env bash
#
# Import MongoDB index definitions from a JSON input file into a target
# database via a generated mongosh script.
#
# Fail fast: exit on any error, on use of unset variables, and on
# failures anywhere in a pipeline.
set -euo pipefail
|
|
|
|
|
|
|
|
|
|
# Print CLI usage to stdout and terminate with a non-zero status.
usage() {
  cat <<USAGE
Usage: $0 --uri <uri> --db <dbname> --input <file> [--dry-run]

 --uri MongoDB connection URI (e.g. mongodb://user:pass@host:27017/admin)
 --db Database name
 --input Path to input JSON file
 --dry-run Show planned actions without executing them
USAGE
  exit 1
}
|
|
|
|
|
|
|
|
|
|
# Defaults for all CLI parameters; the first three are required.
URI=""
DB=""
INPUT=""
DRY_RUN="false"

# Parse command-line arguments.
# ${2:?...} gives a clear error when an option's value is missing; without
# it, `set -u` / `shift 2` would die with a cryptic message instead.
while [[ $# -gt 0 ]]; do
  case "$1" in
    --uri) URI="${2:?--uri requires a value}"; shift 2 ;;
    --db) DB="${2:?--db requires a value}"; shift 2 ;;
    --input) INPUT="${2:?--input requires a value}"; shift 2 ;;
    --dry-run) DRY_RUN="true"; shift ;;
    # Diagnostics belong on stderr so they don't pollute captured output.
    *) echo "Unknown argument: $1" >&2; usage ;;
  esac
done
|
|
|
|
|
|
|
|
|
|
# Validate required arguments before doing any work.
# Error messages go to stderr so stdout stays clean for mongosh output.
[[ -z "$URI" ]] && echo "Error: --uri is required" >&2 && usage
[[ -z "$DB" ]] && echo "Error: --db is required" >&2 && usage
[[ -z "$INPUT" ]] && echo "Error: --input is required" >&2 && usage

[[ ! -f "$INPUT" ]] && echo "Error: input file not found: $INPUT" >&2 && exit 1
|
|
|
|
|
|
|
|
|
|
# Write the mongosh program to a temp file; clean it up on any exit.
TMP_JS=$(mktemp /tmp/mongo-import-XXXXXX.js)
trap 'rm -f "$TMP_JS"' EXIT

# The heredoc delimiter is quoted so the JS reaches mongosh verbatim;
# all shell-side values are handed over via environment variables instead
# of being interpolated into the source.
cat > "$TMP_JS" <<'EOF'
// Configuration passed in from the wrapper script via the environment.
const dbName = process.env.MONGO_DB;
const dryRun = process.env.DRY_RUN === "true";
const data = JSON.parse(process.env.INDEXES_JSON);

// `db` is the implicit mongosh connection object.
const targetDb = db.getSiblingDB(dbName);
const existingCollections = new Set(targetDb.getCollectionNames());
|
|
|
// Canonical string form of an index key pattern, used as a lookup key.
//
// Field order is significant in MongoDB compound indexes
// ({ a: 1, b: 1 } is NOT the same index as { b: 1, a: 1 }), so the
// entries are serialized in their original order. The previous
// implementation sorted the field names, which made two genuinely
// different compound indexes compare equal.
function normalizeKey(key) {
  return JSON.stringify(Object.entries(key));
}
// Canonical string form of a full index definition, used for equality checks.
//
// Server-managed / deprecated fields (v, ns, background) are excluded so
// they never cause a spurious rebuild.
//
// BUG FIX: the previous implementation passed the sorted top-level key
// names as an array replacer to JSON.stringify. An array replacer filters
// property names at EVERY nesting level, so the fields inside `key`
// (never listed at the top level) were silently dropped — indexes with
// identical names/options but different keys serialized identically.
function normalizeIndex(idx) {
  const { v, ns, background, key, ...options } = idx;
  // Recursively sort option keys so property order never affects the result.
  const sortDeep = (value) => {
    if (Array.isArray(value)) return value.map(sortDeep);
    if (value !== null && typeof value === "object") {
      const out = {};
      for (const k of Object.keys(value).sort()) out[k] = sortDeep(value[k]);
      return out;
    }
    return value;
  };
  // Key field order is significant, so keep it via entries.
  return JSON.stringify({ key: Object.entries(key ?? {}), options: sortDeep(options) });
}
// True when two index definitions are equivalent after canonicalization.
function indexesEqual(a, b) {
  const left = normalizeIndex(a);
  const right = normalizeIndex(b);
  return left === right;
}
// Announce read-only mode up front so subsequent log lines are unambiguous.
if (dryRun) {
  print("[dry-run] No changes will be made.\n");
}
// Reconcile every collection listed in the input file: create missing
// collections, create/rebuild/rename desired indexes, drop obsolete ones.
for (const [collName, indexes] of Object.entries(data)) {
  print(`Collection: ${collName}`);

  const coll = targetDb.getCollection(collName);

  // Create the collection if it does not exist yet (dry-run only reports).
  if (!existingCollections.has(collName)) {
    if (dryRun) {
      print(` [dry-run] Would create collection: ${collName}`);
    } else {
      targetDb.createCollection(collName);
      print(` Created collection`);
    }
  }

  // Snapshot current secondary indexes, addressable by name and by key.
  const existingByName = {};
  const existingByKey = {};
  if (existingCollections.has(collName)) {
    for (const idx of coll.getIndexes()) {
      if (idx.name !== "_id_") {
        existingByName[idx.name] = idx;
        existingByKey[normalizeKey(idx.key)] = idx;
      }
    }
  }

  // Existing indexes matched by some desired index; anything left over
  // is dropped as obsolete at the end.
  const consumedExistingNames = new Set();

  for (const idx of indexes) {
    if (idx.name === "_id_") continue; // the _id index always exists

    // Split the desired definition into key pattern and createIndex
    // options. Strip fields that must not be sent back to the server:
    // `ns` is rejected by modern servers, `background` is deprecated,
    // and `v` is server-managed. (Input files produced by getIndexes()
    // dumps commonly contain all three.)
    const { key, name, v, ns, background, ...rest } = idx;
    const options = { name, ...rest };

    if (existingByName[name]) {
      // An index with this name exists: keep it if equivalent,
      // otherwise drop and rebuild it.
      consumedExistingNames.add(name);
      if (indexesEqual(existingByName[name], idx)) {
        print(` Index "${name}" is up to date, skipping`);
      } else if (dryRun) {
        print(` [dry-run] Would drop and recreate index "${name}" (definition changed)`);
      } else {
        coll.dropIndex(name);
        coll.createIndex(key, options);
        print(` Dropped and recreated index "${name}" (definition changed)`);
      }
    } else {
      // No name match — check for the same key under a different name
      // (a rename), which requires drop + create.
      const conflicting = existingByKey[normalizeKey(key)];
      if (conflicting) {
        consumedExistingNames.add(conflicting.name);
        if (dryRun) {
          print(` [dry-run] Would drop index "${conflicting.name}" and create "${name}" (same key, name changed)`);
        } else {
          coll.dropIndex(conflicting.name);
          coll.createIndex(key, options);
          print(` Dropped "${conflicting.name}" and created "${name}" (same key, name changed)`);
        }
      } else if (dryRun) {
        print(` [dry-run] Would create index "${name}"`);
      } else {
        coll.createIndex(key, options);
        print(` Created index "${name}"`);
      }
    }
  }

  // Drop existing indexes that no desired index claimed.
  for (const name of Object.keys(existingByName)) {
    if (!consumedExistingNames.has(name)) {
      if (dryRun) {
        print(` [dry-run] Would drop obsolete index "${name}"`);
      } else {
        coll.dropIndex(name);
        print(` Dropped obsolete index "${name}"`);
      }
    }
  }
}
|
|
|
|
|
|
|
|
|
|
// Final confirmation, suppressed in dry-run mode.
if (!dryRun) {
  print("\nImport complete.");
}
EOF
|
|
|
|
|
|
|
|
|
|
# Execute the generated script. Configuration travels via environment
# variables so nothing has to be escaped into the JS source; --quiet
# suppresses the mongosh startup banner.
MONGO_DB="$DB" \
  DRY_RUN="$DRY_RUN" \
  INDEXES_JSON="$(< "$INPUT")" \
  mongosh --quiet "$URI" --file "$TMP_JS"
|