S3 bucket with all available permissions - READ and WRITE are mandatory.

I need to have some generated S3 bucket with all available permissions - READ and WRITE are mandatory.

I tried so far to make it using this code:

// Create a publicly readable/writable bucket.
// NOTE: 'GrantFullControl' expects a grantee (e.g. 'id="<canonical-user-id>"'
// or 'uri="<group-uri>"'), NOT a permission name like 'FULL_CONTROL' —
// passing a permission name there is invalid and was removed. The canned
// ACL below already grants public READ and WRITE on the bucket.
var createBucketParams = {Bucket: bucketName, ACL: 'public-read-write'};
S3.createBucket(createBucketParams, function(err, data) {
if (err) {
console.log("Error while calling createBucket() - Error: " + err);
} else {
console.log("Successfully Bucket created.");
}
});
My problem is that the permissions are not working well — I'm getting AccessDenied while trying to call getObject(). Any ideas on how to solve it?

EDIT: This is my current code with all the latest changes:

before(() => {
console.log("Mocking AWS.DynamoDB.DocumentClient API");

// Stub DynamoDB writes so the handler never touches a real table.
AWSMock.mock('DynamoDB.DocumentClient', 'put', (params, callback) => {
    callback(null, "Mock: successfully put object in DynamoDB");
});

console.log("Mocking AWS.S3 API");

// Register one stub per S3 operation the test exercises; each mock
// immediately succeeds with its canned message.
const s3Stubs = [
    ['createBucket', "successfully bucket created in S3"],
    ['putObject', "successfully put item in S3"],
    ['getObject', "successfully get item in S3"],
    ['putBucketPolicy', "successfully putBucketPolicy in S3"],
];
for (const [operation, reply] of s3Stubs) {
    AWSMock.mock('S3', operation, (params, callback) => {
        callback(null, reply);
    });
}

});

it('Writing a file to S3 with user metadata - when data is valid JSON and updating the DB is Succeed',
function(done) {

    var bucketName = 'my.unique.bucket.name';
    var fileName = 'fileName.csv';
    // Bucket policy granting read/write on every object in the bucket.
    var s3Policy = {
        "Version":"2012-10-17",
        "Id":"http referer policy example",
        "Statement":[
            {
                "Sid":"Allow get requests originating from www.example.com and example.com.",
                "Effect": "Allow",
                "Principal": "*",
                // FIX: the original had a single comma-joined string
                // "s3:GetObject,s3:PutObject", which names no valid action,
                // so the policy granted nothing. Each action must be its
                // own array element.
                "Action": ["s3:GetObject", "s3:PutObject"],
                "Resource": "arn:aws:s3:::" + bucketName + "/*",
                // FIX: the original combined IpAddress and NotIpAddress for
                // the SAME address (127.0.0.1); no request can satisfy both,
                // so every call was denied (the observed AccessDenied).
                "Condition": {
                    "IpAddress": {"aws:SourceIp": "127.0.0.1"}
                }
            }
        ]
    };

    var S3 = new AWS.S3();
    // FIX: "FULL_CONTROL" is not a valid canned ACL (valid values include
    // 'private', 'public-read', 'public-read-write', 'authenticated-read'),
    // and 'Region' is not a createBucket parameter — the region is supplied
    // via CreateBucketConfiguration.LocationConstraint.
    var createBucketParams = {
        Bucket: bucketName,
        ACL: "public-read-write",
        CreateBucketConfiguration: {LocationConstraint: "us-west-2"}
    };
    // NOTE(review): these three calls are not sequenced; that only works
    // here because the aws-sdk-mock stubs invoke their callbacks
    // synchronously. Against real AWS, chain them (or use promises).
    S3.createBucket(createBucketParams, function(err, data) {
        if (err) {
            console.log("Error while calling createBucket() - Error: " + err);
        } else {
            console.log("Successfully Bucket created.");
        }
    });

    var putBucketPolicyParams = {
        Bucket: bucketName,
        Policy: JSON.stringify(s3Policy)
    };
    S3.putBucketPolicy(putBucketPolicyParams, function(err, data) {
        if (err) console.log(err, err.stack);
        else     console.log(data);
    });

    // Seed the bucket with the test fixture plus the user metadata the
    // handler reads back via getObject.
    var putObjectParams = {Bucket: bucketName,
        Key: fileName,
        Body: 'Hello!',
        Metadata: {startDate: "2016-12-12T12:34:56.000Z", endDate:"2016-12-31T12:34:56.000Z",
        userName:"someUser",
    originalFileName:"fileName.csv"}};
    S3.putObject(putObjectParams, function(err, data) {
        if (err) {
            console.log(err, err.stack)
        } else {
            console.log("Successfully put a file to bucket");
        }
    });

    // Run the Lambda handler against the fixture event and assert success.
    LambdaTester(myHandler)
        .event(JSON.parse(JSON.stringify(require('./testcases/single_record_with_user_metadata.json'))))
        .expectSucceed(function(result) {

            expect(result.valid).to.be.true;
         })
        .verify(done);
});

And the usage in the js file:

// Fetch the object and extract its user metadata.
// NOTE(review): this is a fragment — `s3FileParams`, `fullFileName`,
// `bucketName`, `userMetaDataJson` and `resolve` come from an enclosing
// scope (presumably a Promise executor) not shown here; confirm at the
// call site.
S3.getObject(s3FileParams, function(err, data) {
if (err) {
var message = "Error while trying to get file object " + fullFileName + " from bucket " + bucketName + ". Make sure they exist and your bucket is in the same region as this function. Error: " + err;
console.error(message);
// console.log(err, err.stack);
console.log(JSON.stringify(err, null, 2));
} else {
// Round-trips Metadata through JSON; with a real S3 response Metadata is
// already a plain string map, so this is likely a defensive copy.
userMetaDataJson = JSON.parse(JSON.stringify(data.Metadata));
}
// NOTE(review): resolves even on error — the caller sees success either way.
resolve();
})

Tagged:

Comments

  • S3 offers two types of policy:

    . resource-based
    . user-based
    You're (correctly) trying to apply a resource-based policy. For resource-based policies, you have a further two options:

    . Access Control Lists (ACL)
    . bucket policy
    You're trying to apply an ACL in which case you should read the 'Mapping of ACL Permissions and Access Policy Permissions' section in the documentation. Specifically, when you grant the READ ACL on a bucket, you are permitting the following:

    . s3:ListBucket
    . s3:ListBucketVersions
    . s3:ListBucketMultipartUploads
    Note specifically that you are not permitting s3:GetObject. To do that, you would supply the READ ACL on each object when you put objects into the bucket.

    OK, all that ACL stuff aside, you should probably be using a bucket policy instead of ACLs. Bucket policies supplement, and in many cases replace, ACL-based access policies. Here is an example policy that grants the s3:GetObject permission to all users. You will also have to add PutObject to allow them to upload (and other actions as necessary for list, delete etc. as appropriate). You can set a bucket policy using putBucketPolicy.

Sign In or Register to comment.