Simple S3 Object Storage File Migrator
import { S3Client, ListObjectsV2Command, GetObjectCommand, PutObjectCommand } from "@aws-sdk/client-s3";

// Source S3 client config (replace with your source credentials and endpoint)
const s3 = new S3Client({
  region: 'nbg1', // e.g. "us-east-1" for AWS, "nbg1" for Hetzner Object Storage
  endpoint: 'https://nbg1.your-objectstorage.com', // changed endpoint to match SSL cert
  forcePathStyle: true, // SDK v3 option name (v2's "s3ForcePathStyle" is ignored by v3)
  credentials: {
    accessKeyId: 'ACCESS_KEY',
    secretAccessKey: 'SECRET_ACCESS_KEY'
  }
});

// Destination S3 client config (replace with your destination credentials and endpoint)
const destS3 = new S3Client({
  region: 'nbg1', // change as needed
  endpoint: 'https://nbg1.your-objectstorage.com', // change as needed
  forcePathStyle: true,
  credentials: {
    accessKeyId: 'ACCESS_KEY',
    secretAccessKey: 'SECRET_ACCESS_KEY'
  }
});
// List every object key in the source bucket, following ListObjectsV2 pagination
async function listAllFiles(bucketName, prefix = "") {
  let isTruncated = true;
  let continuationToken = undefined;
  const allFiles = [];

  while (isTruncated) {
    const command = new ListObjectsV2Command({
      Bucket: bucketName,
      Prefix: prefix, // optional
      ContinuationToken: continuationToken,
    });

    const response = await s3.send(command);

    if (response.Contents) {
      allFiles.push(...response.Contents.map(item => item.Key));
    }

    isTruncated = response.IsTruncated;
    continuationToken = response.NextContinuationToken;
  }

  return allFiles;
}
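// Optional alternative (a sketch, not part of the original script): @aws-sdk/client-s3 also
// ships a paginator, paginateListObjectsV2, which handles ContinuationToken for you.
// The function name listAllFilesPaginated is illustrative and is not called below.
import { paginateListObjectsV2 } from "@aws-sdk/client-s3";

async function listAllFilesPaginated(bucketName, prefix = "") {
  const keys = [];
  for await (const page of paginateListObjectsV2({ client: s3 }, { Bucket: bucketName, Prefix: prefix })) {
    for (const item of page.Contents ?? []) {
      keys.push(item.Key);
    }
  }
  return keys;
}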
// Collect a readable stream into a single Buffer
async function streamToBuffer(stream) {
  return new Promise((resolve, reject) => {
    const chunks = [];
    stream.on('data', chunk => chunks.push(chunk));
    stream.on('end', () => resolve(Buffer.concat(chunks)));
    stream.on('error', reject);
  });
}
async function copyFileBetweenS3(sourceBucket, destBucket, key) {
  // Get the object from the source S3
  const getObj = await s3.send(new GetObjectCommand({ Bucket: sourceBucket, Key: key }));

  // Buffer the body
  const bodyBuffer = await streamToBuffer(getObj.Body);

  // Prepare upload params
  const uploadParams = {
    Bucket: destBucket,
    Key: key,
    Body: bodyBuffer
  };
  if (getObj.ContentLength !== undefined) {
    uploadParams.ContentLength = getObj.ContentLength;
  }
  if (getObj.ContentType) {
    uploadParams.ContentType = getObj.ContentType;
  }

  await destS3.send(new PutObjectCommand(uploadParams));
  console.log(`Copied: ${key}`);
}
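// Optional alternative (a sketch, assuming the separate "@aws-sdk/lib-storage" package is
// installed): stream the source object straight into a multipart upload instead of buffering
// it fully in memory, which matters for large files. The function name streamCopyBetweenS3
// is illustrative and is not called by the script below.
import { Upload } from "@aws-sdk/lib-storage";

async function streamCopyBetweenS3(sourceBucket, destBucket, key) {
  const getObj = await s3.send(new GetObjectCommand({ Bucket: sourceBucket, Key: key }));
  const upload = new Upload({
    client: destS3,
    params: {
      Bucket: destBucket,
      Key: key,
      Body: getObj.Body, // pass the stream through without buffering
      ContentType: getObj.ContentType
    }
  });
  await upload.done();
  console.log(`Copied (streamed): ${key}`);
}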
// Copy every object from the source bucket to the destination bucket, one at a time
async function migrateAllFiles(sourceBucket, destBucket) {
  const files = await listAllFiles(sourceBucket);
  for (const key of files) {
    await copyFileBetweenS3(sourceBucket, destBucket, key);
  }
  console.log('Migration complete!');
}

// Example usage
migrateAllFiles("old-bucket-name", "new-bucket-name").catch(err => {
  console.error('Migration failed:', err);
  process.exit(1);
});