Node.js Quickstart
Learn how to use Node.js with Rabata.io for managing your object storage using the AWS SDK for JavaScript.
Installation
To use Rabata.io with Node.js, you’ll need to install the AWS SDK for JavaScript.
Install AWS SDK
$ npm install @aws-sdk/client-s3
$ npm install @aws-sdk/s3-request-presigner
If you’re starting a new project, initialize it first and then install the packages:
$ mkdir my-rabata-project
$ cd my-rabata-project
$ npm init -y
$ npm install @aws-sdk/client-s3 @aws-sdk/s3-request-presigner
Configuration
There are several ways to configure the AWS SDK to work with Rabata.io.
Method 1: Using AWS Credentials File
If you’ve already configured the AWS CLI as shown in the AWS CLI Quickstart, the SDK will automatically use those credentials.
Method 2: Explicit Configuration in Code
You can explicitly configure the S3 client in your code:
// CommonJS: configure an S3 client that talks to the Rabata.io endpoint.
const { S3Client } = require('@aws-sdk/client-s3');

// Credentials and endpoint are supplied explicitly here rather than read
// from the shared AWS config files.
const clientConfig = {
  region: 'eu-west-1',
  endpoint: 'https://s3.eu-west-1.rabata.io',
  credentials: {
    accessKeyId: 'YOUR_ACCESS_KEY',
    secretAccessKey: 'YOUR_SECRET_KEY',
  },
};

const s3Client = new S3Client(clientConfig);
Using ES modules:
// ES modules: same explicit configuration, using import syntax.
import { S3Client } from '@aws-sdk/client-s3';

// Replace with your Rabata.io access keys.
const credentials = {
  accessKeyId: 'YOUR_ACCESS_KEY',
  secretAccessKey: 'YOUR_SECRET_KEY',
};

// Create an S3 client pointed at the Rabata.io endpoint.
const s3Client = new S3Client({
  region: 'eu-west-1',
  endpoint: 'https://s3.eu-west-1.rabata.io',
  credentials,
});
Method 3: Using Environment Variables
You can set environment variables to configure the SDK:
# Set these environment variables before running your Node.js script
export AWS_ACCESS_KEY_ID=YOUR_ACCESS_KEY
export AWS_SECRET_ACCESS_KEY=YOUR_SECRET_KEY
export AWS_REGION=eu-west-1
Then in your code:
const { S3Client } = require('@aws-sdk/client-s3');

// Credentials and region are picked up from the AWS_* environment
// variables, so only the Rabata.io endpoint needs to be set here.
const RABATA_ENDPOINT = 'https://s3.eu-west-1.rabata.io';

const s3Client = new S3Client({ endpoint: RABATA_ENDPOINT });
Basic Operations
Here are some common operations you can perform with the AWS SDK and Rabata.io.
Bucket Operations
List All Buckets
const { S3Client, ListBucketsCommand } = require('@aws-sdk/client-s3');

// S3 client pointed at the Rabata.io endpoint with explicit credentials.
const s3Client = new S3Client({
  region: 'eu-west-1',
  endpoint: 'https://s3.eu-west-1.rabata.io',
  credentials: {
    accessKeyId: 'YOUR_ACCESS_KEY',
    secretAccessKey: 'YOUR_SECRET_KEY',
  },
});

/**
 * List all buckets in the account and print their names.
 * @returns {Promise<object>} The ListBuckets response.
 * @throws Rethrows any SDK error after logging it, so callers can
 *   distinguish failure from an empty result.
 */
async function listBuckets() {
  try {
    const data = await s3Client.send(new ListBucketsCommand({}));
    console.log("Buckets:");
    // data.Buckets is undefined when the account has no buckets.
    (data.Buckets ?? []).forEach((bucket) => {
      console.log(` ${bucket.Name}`);
    });
    return data;
  } catch (err) {
    // Log and rethrow (the previous version swallowed the error and
    // silently resolved to undefined).
    console.error("Error", err);
    throw err;
  }
}

listBuckets().catch(() => {
  process.exitCode = 1;
});
Create a Bucket
const { CreateBucketCommand } = require('@aws-sdk/client-s3');

/**
 * Create a bucket with the given name.
 * Assumes `s3Client` (configured earlier) is in scope.
 * @param {string} bucketName - Name of the bucket to create.
 * @returns {Promise<object|undefined>} The SDK response, or undefined on error.
 */
async function createBucket(bucketName) {
  const command = new CreateBucketCommand({ Bucket: bucketName });
  try {
    const data = await s3Client.send(command);
    console.log(`Bucket created: ${bucketName}`);
    return data;
  } catch (err) {
    console.error("Error", err);
  }
}

createBucket('my-bucket-name');
Delete a Bucket
const { DeleteBucketCommand } = require('@aws-sdk/client-s3');

/**
 * Delete a bucket by name.
 * Assumes `s3Client` (configured earlier) is in scope.
 * @param {string} bucketName - Name of the bucket to delete.
 * @returns {Promise<object|undefined>} The SDK response, or undefined on error.
 */
async function deleteBucket(bucketName) {
  const command = new DeleteBucketCommand({ Bucket: bucketName });
  try {
    const data = await s3Client.send(command);
    console.log(`Bucket deleted: ${bucketName}`);
    return data;
  } catch (err) {
    console.error("Error", err);
  }
}

deleteBucket('my-bucket-name');
Note: The bucket must be empty before it can be deleted.
Object Operations
List Objects in a Bucket
const { ListObjectsV2Command } = require('@aws-sdk/client-s3');

/**
 * Print every object in a bucket along with its size in bytes.
 * Assumes `s3Client` (configured earlier) is in scope.
 * @param {string} bucketName - Bucket whose contents to list.
 * @returns {Promise<object|undefined>} The SDK response, or undefined on error.
 */
async function listObjects(bucketName) {
  try {
    const command = new ListObjectsV2Command({ Bucket: bucketName });
    const data = await s3Client.send(command);
    console.log(`Objects in bucket ${bucketName}:`);
    // Contents is absent for an empty bucket; iterate nothing in that case.
    for (const object of data.Contents ?? []) {
      console.log(` ${object.Key} (${object.Size} bytes)`);
    }
    return data;
  } catch (err) {
    console.error("Error", err);
  }
}

listObjects('my-bucket-name');
Upload a File
const { PutObjectCommand } = require('@aws-sdk/client-s3');
const fs = require('fs');

/**
 * Upload a local file to a bucket.
 * Assumes `s3Client` (configured earlier) is in scope.
 * @param {string} bucketName - Destination bucket.
 * @param {string} key - Object key to store the file under.
 * @param {string} filePath - Path of the local file to upload.
 * @returns {Promise<object|undefined>} The SDK response, or undefined on error.
 */
async function uploadFile(bucketName, key, filePath) {
  try {
    // Read the whole file into memory; fine for quickstart-sized files.
    const body = fs.readFileSync(filePath);
    const command = new PutObjectCommand({
      Bucket: bucketName,
      Key: key,
      Body: body,
    });
    const data = await s3Client.send(command);
    console.log(`File uploaded successfully to ${bucketName}/${key}`);
    return data;
  } catch (err) {
    console.error("Error", err);
  }
}

uploadFile('my-bucket-name', 'remote-file.txt', 'local-file.txt');
Download a File
const { GetObjectCommand } = require('@aws-sdk/client-s3');
const fs = require('fs');

/**
 * Download an object and write it to a local file.
 * Assumes `s3Client` (configured earlier) is in scope.
 * @param {string} bucketName - Bucket holding the object.
 * @param {string} key - Key of the object to download.
 * @param {string} filePath - Local destination path.
 * @returns {Promise<object|undefined>} The SDK response, or undefined on error.
 */
async function downloadFile(bucketName, key, filePath) {
  try {
    const command = new GetObjectCommand({ Bucket: bucketName, Key: key });
    const data = await s3Client.send(command);

    // data.Body is a readable stream; gather its chunks into one Buffer.
    const chunks = [];
    for await (const chunk of data.Body) {
      chunks.push(chunk);
    }
    fs.writeFileSync(filePath, Buffer.concat(chunks));

    console.log(`File downloaded successfully to ${filePath}`);
    return data;
  } catch (err) {
    console.error("Error", err);
  }
}

downloadFile('my-bucket-name', 'remote-file.txt', 'local-file.txt');
Delete a File
const { DeleteObjectCommand } = require('@aws-sdk/client-s3');

/**
 * Delete a single object from a bucket.
 * Assumes `s3Client` (configured earlier) is in scope.
 * @param {string} bucketName - Bucket holding the object.
 * @param {string} key - Key of the object to delete.
 * @returns {Promise<object|undefined>} The SDK response, or undefined on error.
 */
async function deleteFile(bucketName, key) {
  const command = new DeleteObjectCommand({ Bucket: bucketName, Key: key });
  try {
    const data = await s3Client.send(command);
    console.log(`File deleted successfully: ${bucketName}/${key}`);
    return data;
  } catch (err) {
    console.error("Error", err);
  }
}

deleteFile('my-bucket-name', 'file-to-delete.txt');
Advanced Operations
Generate a Presigned URL
const { GetObjectCommand } = require('@aws-sdk/client-s3');
const { getSignedUrl } = require('@aws-sdk/s3-request-presigner');

/**
 * Create a time-limited download URL for a private object.
 * Assumes `s3Client` (configured earlier) is in scope.
 * @param {string} bucketName - Bucket holding the object.
 * @param {string} key - Key of the object to share.
 * @param {number} [expirationSeconds=3600] - URL lifetime in seconds.
 * @returns {Promise<string|undefined>} The presigned URL, or undefined on error.
 */
async function generatePresignedUrl(bucketName, key, expirationSeconds = 3600) {
  try {
    const getObject = new GetObjectCommand({ Bucket: bucketName, Key: key });
    const url = await getSignedUrl(s3Client, getObject, {
      expiresIn: expirationSeconds,
    });
    console.log(`Presigned URL: ${url}`);
    return url;
  } catch (err) {
    console.error("Error", err);
  }
}

generatePresignedUrl('my-bucket-name', 'private-file.txt', 3600);