This repository was archived by the owner on Mar 25, 2025. It is now read-only.

Fixes fs.js s3 read/write issues introduced by switching clients #282

Merged · 5 commits · Feb 23, 2023
Changes from 1 commit
32 changes: 25 additions & 7 deletions pkg/lang/javascript/aws_runtime/fs.js.tmpl
@@ -73,7 +73,7 @@ exports.saveParametersToS3 = saveParametersToS3;
 async function s3_writeFile(...args) {
     const bucketParams = {
         Bucket: bucketName,
-        Key: `${args[0]}`,
+        Key: stripLeadingSlashes(`${args[0]}`),
         Body: args[1],
     };
     try {
@@ -88,15 +88,24 @@ async function s3_writeFile(...args) {
 async function s3_readFile(...args) {
     const bucketParams = {
         Bucket: bucketName,
-        Key: `${args[0]}`,
+        Key: stripLeadingSlashes(`${args[0]}`),
     };
     try {
         // Get the object from the Amazon S3 bucket. It is returned as a ReadableStream.
         const data = await s3Client.send(new client_s3_1.GetObjectCommand(bucketParams));
         if (data.Body) {
-            return await streamToString(data.Body);
+            if (args[1]?.encoding) {
+                return await streamToString(data.Body);
+            }
+            return new Promise((resolve, reject) => {
+                const stream = data.Body;
+                const chunks = [];
+                stream.on('data', chunk => chunks.push(chunk))
+                stream.once('end', () => resolve(Buffer.concat(chunks)))
+                stream.once('error', reject)
+            });
Contributor:
nit: make this reusable? Looks similar to streamToString as well, but maybe nothing to pull out between the two.

Contributor (Author):
Simple enough. I hadn't even looked at the content of streamToString.
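
For illustration, the shared piece might look something like this (just a sketch; `streamToBuffer` is a hypothetical name, and `streamToString` is assumed to be the template's existing helper):

```js
// Hypothetical shared helper (not part of this PR): collect a readable stream into a Buffer.
function streamToBuffer(stream) {
    return new Promise((resolve, reject) => {
        const chunks = [];
        stream.on('data', (chunk) => chunks.push(chunk));
        stream.once('end', () => resolve(Buffer.concat(chunks)));
        stream.once('error', reject);
    });
}

// s3_readFile could then reduce to something like:
//   const buf = await streamToBuffer(data.Body);
//   return args[1]?.encoding ? buf.toString(args[1].encoding) : buf;
```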

         }
-        return '';
+        return Promise.resolve();
     }
     catch (err) {
         console.log('Error', err);
@@ -106,7 +115,7 @@ async function s3_readFile(...args) {
 async function s3_readdir(path) {
     const bucketParams = {
         Bucket: bucketName,
-        Prefix: `${path}`,
+        Prefix: stripLeadingSlashes(`${path}`),
     };
     try {
         const data = await s3Client.send(new client_s3_1.ListObjectsCommand(bucketParams));
@@ -122,7 +131,10 @@
     }
 }
 async function s3_exists(fpath) {
-    const bucketParams = { Bucket: bucketName, Key: `${path}` };
+    const bucketParams = {
+        Bucket: bucketName,
+        Key: stripLeadingSlashes(`${path}`)
+    };
     try {
         const data = await s3Client.send(new client_s3_1.HeadObjectCommand(bucketParams));
         console.debug('Success. Object deleted.', data);
@@ -134,7 +146,10 @@
     }
 }
 async function s3_deleteFile(fpath) {
-    const bucketParams = { Bucket: bucketName, Key: `${path}` };
+    const bucketParams = {
+        Bucket: bucketName,
+        Key: stripLeadingSlashes(`${path}`)
+    };
     try {
         const data = await s3Client.send(new client_s3_1.DeleteObjectCommand(bucketParams));
         console.debug('Success. Object deleted.', data);
@@ -145,6 +160,9 @@
         throw err;
     }
 }
+function stripLeadingSlashes(path) {
+    return path.replace(/^\/+/, "");
+}
 exports.fs = {
     writeFile: s3_writeFile,
     readFile: s3_readFile,
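
For reference, a usage sketch of the resulting shim (the import path is hypothetical; the template generates the actual module). Without an options object, `readFile` now resolves to a Buffer; with an `encoding` option it resolves to a string via `streamToString`, and leading slashes are stripped from keys on every call:

```js
// Usage sketch only; './fs' stands in for wherever the generated module lives.
const { fs } = require('./fs');

async function demo() {
    // Both calls target the S3 key "tmp/data.json": the leading "/" is stripped.
    await fs.writeFile('/tmp/data.json', JSON.stringify({ ok: true }));

    const asBuffer = await fs.readFile('/tmp/data.json');                        // Buffer
    const asString = await fs.readFile('/tmp/data.json', { encoding: 'utf-8' }); // string
    console.log(Buffer.isBuffer(asBuffer), typeof asString); // true 'string'
}

demo().catch(console.error);
```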