by

Where communities thrive


  • Join over 1.5M+ people
  • Join over 100K+ communities
  • Free without limits
  • Create your own community
People
Repo info
Activity
  • 03:12
    aws-sdk-js-automation commented #3398
  • 03:05
    JakeGinnivan opened #3398
  • 00:21
    github-actions[bot] unlabeled #3005
  • 00:21
    github-actions[bot] unlabeled #3005
  • Aug 14 21:46
    aws-sdk-js-automation commented #3397
  • Aug 14 21:40
    priyajeet edited #3397
  • Aug 14 21:40
    priyajeet synchronize #3397
  • Aug 14 21:38
    priyajeet edited #3397
  • Aug 14 21:36
    aws-sdk-js-automation commented #3397
  • Aug 14 21:35
    priyajeet edited #3397
  • Aug 14 21:30
    priyajeet opened #3397
  • Aug 14 18:59
    pnikhil commented #3180
  • Aug 14 18:44
    deepakpc commented #3180
  • Aug 14 18:32
    pnikhil commented #3180
  • Aug 14 18:18

    aws-sdk-js-automation on v2.734.0

    (compare)

  • Aug 14 18:17

    aws-sdk-js-automation on master

    Updates SDK to v2.734.0 (compare)

  • Aug 14 17:02

    AllanFly120 on master

    fix UnknownEndpoint exception n… (compare)

  • Aug 14 17:02
    AllanFly120 closed #3396
  • Aug 14 17:02
    AllanFly120 closed #3393
  • Aug 14 06:57
    khacminh commented #3005
jfrshd
@jfrshd
thank you for your help
jfrshd
@jfrshd
hello
{
"Version": "2012-10-17",
"Statement": [
{
"Action": [
"s3:ListBucket"
],
"Effect": "Allow",
"Resource": [
"arn:aws:s3:::com.mybucket.123.mybucket"
],
"Condition": {
"StringLike": {
"s3:prefix": [
"${cognito-identity.amazonaws.com:sub}"
]
}
}
},
{
"Action": [
"s3:GetObject",
"s3:PutObject"
],
"Effect": "Allow",
"Resource": [
"arn:aws:s3:::com.mybucket.123.mybucket/${cognito-identity.amazonaws.com:sub}/*"
]
}
]
}
this is my policy code

i am creating a folder in s3 like this

region + ":" + session.getIdToken().payload.sub

i think i am facing a problem where "sub" of js isn't matching the one of the policy so it keeps getting denied
i tried putting in statically in the policy and it worked
region + ":" + session.getIdToken().payload.sub isn't the same as cognito-identity.amazonaws.com:sub
am i right?
Ronique Ricketts
@RoniqueRicketts
Is it normal to get calls from +206 922 0130 stating that it's amazon service requesting a pin? I never initiate a pin.
Randy H
@randy-halim
most likely a scammer who is trying to get into your account — even if not, I would avoid giving the pin unless you are in a support session (as in, you initiated it)
Ronique Ricketts
@RoniqueRicketts
I didn’t send the pin
kes333
@kes333
Hi, I am trying to access a PNG file from S3 using a presigned URL but I am consistently getting access denied. PDF files work as expected. Anybody has faced this issue? I am generating the pre-signed URL via AWS lambda for an S3 event. There are other issues also, like when the pdf file is downloaded, it is downloading as response.pdf rather than the actual name which is in the URL. I cannot also download PDF files with multiple white spaces, although I am able to do so with a single space by replacing the filename with .replace("+", " "). I am sure this will not work with other file types or special characters as well in the file name. First priority is to make the other file types like PNG work. Then work with multiple spaces in the filename. Function to generate the presigned URL is as below (Removed the API URL)

const AWS = require('aws-sdk');
var querystring = require('querystring');
var https = require('https');

//var fs = require('fs');

// if you are using an eu region, you will have to set the signature
// version to v4 by passing this into the S3 constructor -
// {signatureVersion: 'v4' }
var s3 = new AWS.S3({signatureVersion: 'v4'});

exports.handler = function (event, context,callback) {

const bucket = event.Records[0].s3.bucket.name;
const key = event.Records[0].s3.object.key. replace("+", " ");
console.log( "File Name" + key. replace("+", " "));

if (!bucket) {
console.log('bucket not set:');
context.done(new Error(S3 bucket not set));
}

if (!key) {
console.log('key missing:');
context.done(new Error('S3 object key missing'));
return;
}

const params = {
'Bucket': bucket,
'Key': key,
Expires: 60 * 5

//ACL: 'public-read'

};

var url = s3.getSignedUrl('getObject', params);
url.replace(/+/g, '%2B');
console.log('The URL is', url);
ResponseContentDisposition: "attachment;filename=key";

//POST to external URL

var post_data = querystring.stringify(
url //TODO here we can get data fro cognito and then instead of obj need to set post_data into call
);

   console.log('post_data='+post_data);

       //TODO need to get data fom cognito
      var obj = {
        "url":url

   };

  console.log(JSON.stringify(obj));

var post_options = {
host:
//host: 'dummy.restapiexample.com',
path:'/v1/documenturl',
//path: '/api/v1/create',
method: 'POST',

};

// Set up the request
var post_req = https.request(post_options, function(res) {
res.setEncoding('utf8');
res.on('data', function (chunk) {
console.log('Response: ' + chunk);
context.succeed();
});
res.on('error', function (e) {
console.log("Got error: " + e.message);
context.done(null, 'FAILURE');
});

});
console.log('post_req'+post_req);

// post the data
post_req.write(JSON.stringify(obj));//TODO post_data
post_req.end();

};

BertrandMarechal
@BertrandMarechal
can you please reformat your code ?
kes333
@kes333
JPG files are working now . Would be great if someone can help with multiple spaces file names as well as downloading the file with actual name rather than just "response.pdf"
BertrandMarechal
@BertrandMarechal
you can use a regex instead of a simple string :
.replace(/\+/g, " ")
kes333
@kes333
@BertrandMarechal Thanks that works ! The other issue is with the file name download, it downloads as response.jpg / pdf and not the actual filename
BertrandMarechal
@BertrandMarechal
where do you download from ? is the S3 key correct ?
yeah, I mean, the file on s3 is not called response.jpg or pdf ?
kes333
@kes333
Nope, it has a specific filename
BertrandMarechal
@BertrandMarechal
so that's your download code then
kes333
@kes333
I upload a file in S3, an event is fired and then a lambda function generated a pre signed URL for the document and posts to an external document management API. I built this API with backend dynamodb and AWS API gateway. I then copy the URL from the DB and then fire a postman GET request which fetches the file from S3 . The URL is valid for 5 mins after which it expires
The issue is with the file name, which gets replaced with "response" every time
BertrandMarechal
@BertrandMarechal
hum
if you paste this url in the browser, what is the file downloaded as ?
it seems to be a postman thing :)
kes333
@kes333
If I post the file in browser, it is rendered on page
BertrandMarechal
@BertrandMarechal
ok. but yeah, it's a postman thing. if you had a Frontend, you would choose your file name
kes333
@kes333
I added the content disposition in the code : ResponseContentDisposition: "attachment;filename=key"; as suggested in the post you forwarded
BertrandMarechal
@BertrandMarechal
yeah, does it work ?
I was just reading through
kes333
@kes333
nope, I already had that
BertrandMarechal
@BertrandMarechal
there seem to be a way when generating the presigned url
kes333
@kes333
ok, let me try that
kes333
@kes333
Excellent, it works :-) Thanks for your help @BertrandMarechal ! Appreciate it
I just added content disposition as a parameter object
// Presign with a Content-Disposition override so the browser saves the file
// under its real key instead of "response.<ext>".
var url = s3.getSignedUrl('getObject', {
  Bucket: bucket,
  Key: key,
  Expires: 60 * 5,
  // Template literal requires backticks — as pasted (bare attachment; ...)
  // this line was a syntax error.
  ResponseContentDisposition: `attachment; filename="${key}"`
});
BertrandMarechal
@BertrandMarechal
nice :)
Gareth Denny
@gpdenny

Howdy :bow:
Wanted to drop in in case I'm missing anything obvious before asking in an issue.

I have an ECS Task which is using s3.upload to put files of varying sizes in S3.
When the object is put using PutObject it works as expected, with the correct ECS Task Role.
When the object is put using CreateMultipartUpload it seems to somehow end up with the ECS EC2 Instance Role, not the Task Role, so fails to upload with AccessDenied.
Am I missing something?
Much Thanks!

Prashant Singh
@prashant1k99
Hi everyone, I am facing cors err: Access to XMLHttpRequest at '<lambda-url>' from origin 'http://localhost:8080' has been blocked by CORS policy: Response to preflight request doesn't pass access control check: No 'Access-Control-Allow-Origin' header is present on the requested resource.
I have specifically enabled cors in my API Gateway for that lambda and also set the response header. But facing the issue
BertrandMarechal
@BertrandMarechal
@prashant1k99 did you figure it out ?
Michał Kłoda
@pyemkey
Hi everyone. I have dynamodb table and I would like to run post deployment script to populate table by some data. Any thoughts how it can be done?
Prashant Singh
@prashant1k99

Hi everyone, I want to create image manipulation on the image fetched from the AWS S3 and would like to perform manipulation action on it. I am using stream to solve the problem of loading big files.

import AWS from 'aws-sdk'
import sharp from 'sharp'

const s3 = new AWS.S3()

// Build a fresh sharp pipeline that resizes to width `w`, streams the
// resized bytes into `res`, and forwards failures to `next`.
const transformer = (w, res, next) =>
    sharp()
        .resize(w)
        .on('data', (data) => {
            console.log(data)
            res.write(data, 'binary')
        })
        .on('error', (err) => next(err))
        .on('end', () => {
            console.log('finished')
            res.status(200).end()
        })

/**
 * Stream a resized copy of test.jpg from S3 into the response.
 *
 * The read stream MUST be created per call: a stream can only be consumed
 * once, so the original module-level stream worked for the first request
 * and then fed an ended stream to sharp on the second request, producing
 * "Error: Input buffer contains unsupported image format".
 */
const getImage = (w, res, next) => {
    const readStream = s3
        .getObject({
            Bucket: process.env.UPLOAD_BUCKET_NAME,
            Key: 'test.jpg'
        })
        .createReadStream()

    readStream.pipe(transformer(w, res, next))
    readStream.on('error', (err) => next(err))
}

export default getImage

I am calling the getImage method on the route and this function serves images for the first time but when the second time the Image is requested, it throws Error: Input buffer contains unsupported image format

Jairo Campos Vargas
@jcamposv
Hi, guys, I have a question I am using getCostandusage js SDK, I wanted to get all the charges with this filter "Other out-of-cycle charges" but I did not find the machine name to put into the RECORD_TYPE filter do you know what is this name ?
Jairo Campos Vargas
@jcamposv
ok I found it, it's Other