Uploading to S3 directly from the browser is very useful, but it requires authorization: the right permissions on the bucket, and a POST request that has been approved and signed before it is sent to the bucket, which is a pain to arrange from a plain old browser.
All the implementations I have found require a server-side component to sign the upload request, but I want to do it SERVERLESS!
So I ended up adapting Leonid Shevtsov’s solution to work with Lambda – here come serverless S3 uploads, and I hope it helps you too 🙂
Make sure you have an S3 bucket, and an IAM user whose credentials will be used to sign the upload request.
The user needs an AWS ACCESS_KEY and SECRET_KEY that will be used in the Lambda function, and s3:PutObject permission on the bucket you created:
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Action": [
        "s3:PutObject",
        "s3:PutObjectAcl"
      ],
      "Effect": "Allow",
      "Resource": "arn:aws:s3:::mybucket/*"
    }
  ]
}
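If you manage that user from the command line, one way to attach the policy above is as an inline user policy (the user name, policy name and file name below are just placeholders):
aws iam put-user-policy \
  --user-name s3-upload-signer \
  --policy-name s3-upload-inline-policy \
  --policy-document file://upload-policy.json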
I did not run into CORS problems on the S3 bucket itself, so I did not add a special CORS configuration to it, but I did enable CORS for the signing Lambda – specifically on its API Gateway endpoint.
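If you do run into CORS errors on the bucket itself (for example, if you later switch from the plain form POST to an AJAX upload), a minimal bucket CORS configuration along these lines should cover it; treat it as a starting point and lock the allowed origin down for real use:
<CORSConfiguration>
  <CORSRule>
    <AllowedOrigin>*</AllowedOrigin>
    <AllowedMethod>POST</AllowedMethod>
    <AllowedHeader>*</AllowedHeader>
  </CORSRule>
</CORSConfiguration>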

So the HTML code performs two operations: sign and POST.
When a file is selected, jQuery sends an AJAX GET to the signing Lambda; the Lambda generates a new policy that allows the file upload and signs it.
The form is then populated with the returned policy, signature and related fields, and the submit button is revealed – so the form can POST to the S3 bucket directly from the browser!
The HTML looks like this:
<html>
  <head>
    <meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
    <link rel="stylesheet" href="https://unpkg.com/purecss@1.0.0/build/base-min.css">
    <script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>
  </head>
  <body>
    <article style="width: 50%; margin: 6% auto;">
      <h1>S3 Uploader</h1>
      <div class="result"></div>
      <form action="http://mybucket.s3.amazonaws.com/" method="post" enctype="multipart/form-data">
        <input type="hidden" name="key" value="${filename}" />
        <input type="hidden" name="acl" value="public-read" />
        <input type="hidden" name="success_action_redirect" value="http://anotherbucket-public.s3.amazonaws.com/successful_upload.html" />
        <input type="hidden" name="Content-Type" value="image/jpg" />
        <!-- These fields are filled in from the signer's response -->
        <input type="hidden" name="X-Amz-Credential" value="" />
        <input type="hidden" name="X-Amz-Algorithm" value="" />
        <input type="hidden" name="X-Amz-Date" value="" />
        <br />
        <input type="hidden" name="Policy" value="" />
        <input type="hidden" name="X-Amz-Signature" value="" />
        File:
        <input type="file" name="file" />
        <br /><br />
        <span class="filename"></span><br>
        <input type="submit" name="submit" value="Upload to Amazon S3" hidden />
      </form>
    </article>
  </body>
  <script>
    $(function() {
      $("input:file").change(function () {
        var fileName = $(this).val().split("\\").slice(-1)[0];
        $(".filename").html(fileName);
        let getUrl = 'https://myapigw.execute-api.us-east-1.amazonaws.com/dev/object/signer' + '?filename=' + fileName;
        $.get(getUrl, function (data) {
          console.log('data:', data);
          $(".result").html(JSON.stringify(data));
          $('input[name="X-Amz-Credential"]').val(data.params['x-amz-credential']);
          $('input[name="X-Amz-Algorithm"]').val(data.params['x-amz-algorithm']);
          $('input[name="X-Amz-Date"]').val(data.params['x-amz-date']);
          $('input[name="Policy"]').val(data.params['policy']);
          $('input[name="X-Amz-Signature"]').val(data.params['x-amz-signature']);
          // Only reveal the submit button once the signature fields are populated
          $('input[type="submit"]').removeAttr('hidden');
        });
      });
    });
  </script>
</html>
And the Lambda function:
const crypto = require('crypto');

var myDateString;

exports.handler = async (event) => {
  console.log("EVENT:", event);
  // AWS creds of the signing user - placeholders here; better to use environment
  // variables to keep these out of the code (see the serverless.yaml note below)
  let config = {
    bucket: 'mybucket',
    region: 'us-east-1',
    accessKey: 'AKIAEXAMPLE',
    secretKey: '+secretkey+'
  };
  let filename = event.queryStringParameters.filename;
  myDateString = dateString();
  console.log('config:', config); // note: this also logs the secret key - drop it outside of debugging
  console.log('filename:', filename);
  var sig = s3Credentials(config, filename);
  const response = {
    statusCode: 200,
    headers: {
      'Access-Control-Allow-Origin': '*',
      'Access-Control-Allow-Credentials': true,
    },
    body: JSON.stringify(sig)
  };
  return response;
};
function s3Credentials(config, filename) {
  return {
    endpoint_url: "https://" + config.bucket + ".s3.amazonaws.com",
    params: s3Params(config, filename)
  };
}

// Returns the parameters that must be passed to the API call
function s3Params(config, filename) {
  const credential = amzCredential(config);
  const policy = s3UploadPolicy(config, filename, credential);
  const policyBase64 = Buffer.from(JSON.stringify(policy)).toString('base64');
  return {
    key: filename,
    acl: 'public-read',
    success_action_status: '201', // not used by the form above, which relies on success_action_redirect
    original_policy: JSON.stringify(policy), // handy for debugging what was signed
    policy: policyBase64,
    'x-amz-algorithm': 'AWS4-HMAC-SHA256',
    'x-amz-credential': credential,
    'x-amz-date': myDateString + 'T000000Z',
    'x-amz-signature': s3UploadSignature(config, policyBase64, credential)
  };
}
function dateString() {
  var date = new Date().toISOString();
  return date.substr(0, 4) + date.substr(5, 2) + date.substr(8, 2);
}

function amzCredential(config) {
  return [config.accessKey, myDateString, config.region, 's3/aws4_request'].join('/');
}
// Constructs the policy
function s3UploadPolicy(config, filename, credential) {
  return {
    // 500 minutes into the future (fine for debugging; for real-world use maybe ~10 minutes)
    expiration: new Date((new Date).getTime() + (100 * 5 * 60 * 1000)).toISOString(),
    conditions: [
      { bucket: config.bucket },
      { key: filename },
      { acl: 'public-read' },
      //{ success_action_status: "201" },
      { success_action_redirect: "http://anotherbucket-public.s3.amazonaws.com/successful_upload.html" },
      // Optionally control content type and file size
      ['starts-with', '$Content-Type', 'image'],
      // 10 MB limit
      ['content-length-range', 0, 10000000],
      { 'x-amz-algorithm': 'AWS4-HMAC-SHA256' },
      { 'x-amz-credential': credential },
      { 'x-amz-date': myDateString + 'T000000Z' }
    ],
  };
}
function hmac(key, string) {
  return crypto.createHmac('sha256', key).update(string).digest();
}

// Signs the policy with the credential (standard AWS Signature Version 4 key derivation)
function s3UploadSignature(config, policyBase64, credential) {
  let dateKey = hmac('AWS4' + config.secretKey, myDateString);
  let dateRegionKey = hmac(dateKey, config.region);
  let dateRegionServiceKey = hmac(dateRegionKey, 's3');
  let signingKey = hmac(dateRegionServiceKey, 'aws4_request');
  return hmac(signingKey, policyBase64).toString('hex');
}
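Once deployed (see the serverless.yaml below), you can sanity-check the signer with a plain GET before touching the browser side. The endpoint here is the same example API Gateway URL used in the HTML above:
curl "https://myapigw.execute-api.us-east-1.amazonaws.com/dev/object/signer?filename=test.jpg"
The response should be the JSON built by s3Credentials: an endpoint_url plus the params object that the page copies into the hidden form fields.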
This is pretty much it. Then there’s the serverless.yaml for deploying it all; here’s the relevant snippet:
provider:
  name: aws
  runtime: nodejs8.10
  stage: dev
  region: us-east-1

functions:
  s3_upload_signer:
    handler: s3_upload_signer/index.handler
    role: 'arn:aws:iam::1111111111:role/lambda_with_permissions' # use your own role here
    events:
      - http:
          path: object/signer
          method: get
          cors: true
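One small improvement, as hinted in the handler’s comment: instead of hard-coding the access key and secret, you can inject them through the function’s environment in serverless.yaml and read them with process.env. This is just a sketch, and the ACCESS_KEY / SECRET_KEY variable names are my own choice:
functions:
  s3_upload_signer:
    handler: s3_upload_signer/index.handler
    environment:
      ACCESS_KEY: ${env:SIGNER_ACCESS_KEY}
      SECRET_KEY: ${env:SIGNER_SECRET_KEY}
and in the Lambda’s config object:
accessKey: process.env.ACCESS_KEY,
secretKey: process.env.SECRET_KEY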
Hope this helps 🙂
Final thoughts: authentication and authorization of the users who upload files to your bucket is not discussed here – you can add it before creating the policy if you like. My approach is to let anyone upload an image (with a size limit) to the bucket; further processing of the image is done by a bucket trigger (sketched below), and images that are not going to be used in the system are immediately deleted.
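For reference, the bucket trigger I mention can be declared in the same serverless.yaml. This is only a rough sketch: the image_processor name is made up, and depending on your Serverless Framework version you may need the existing flag to attach the trigger to a bucket that already exists:
functions:
  image_processor:
    handler: image_processor/index.handler
    events:
      - s3:
          bucket: mybucket
          event: s3:ObjectCreated:*
          existing: true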