mirror of
https://github.com/fnproject/fn.git
synced 2022-10-28 21:29:17 +03:00
Lambda docs (#264)
* Add aws.md and s3 example Signed-off-by: Seif Lotfy <seif.lotfy@gmail.com> * minor fix
This commit is contained in:
committed by
C Cirello
parent
0cc946d937
commit
ba65220127
114
docs/lambda/aws.md
Normal file
114
docs/lambda/aws.md
Normal file
@@ -0,0 +1,114 @@
|
|||||||
|
Interacting with AWS Services
|
||||||
|
=============================
|
||||||
|
|
||||||
|
The node.js and Python stacks include SDKs to interact with other AWS services.
|
||||||
|
For Java you will need to include any such SDK in the JAR file.
|
||||||
|
|
||||||
|
## Credentials
|
||||||
|
|
||||||
|
Running Lambda functions outside of AWS means that we cannot automatically get
|
||||||
|
access to other AWS resources based on Lambda subsuming the execution role
|
||||||
|
specified with the function. Instead, when using the AWS APIs inside your
|
||||||
|
Lambda function (for example, to access S3 buckets), you will need to pass
|
||||||
|
these credentials explicitly.
|
||||||
|
|
||||||
|
### Using environment variables for the credentials
|
||||||
|
|
||||||
|
The easiest way to do this is to pass the `AWS_ACCESS_KEY_ID` and
|
||||||
|
`AWS_SECRET_ACCESS_KEY` environment variables while creating or importing the lambda function from AWS.
|
||||||
|
|
||||||
|
This can be done as follows:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
export aws_access_key_id=<access-key>
|
||||||
|
export aws_secret_access_key=<secret_key>
|
||||||
|
|
||||||
|
./fnctl lambda create-function <user>/s3 nodejs example.run examples/s3/example.js examples/s3/example-payload.json --config aws_access_key_id --config aws_secret_access_key
|
||||||
|
```
|
||||||
|
|
||||||
|
or
|
||||||
|
|
||||||
|
```sh
|
||||||
|
./fnctl lambda create-function <user>/s3 nodejs example.run ../../lambda/examples/s3/example.js ../../lambda/examples/s3/example-payload.json --config aws_access_key_id=<access-key> --config aws_secret_access_key=<secret_key>
|
||||||
|
```
|
||||||
|
|
||||||
|
The various AWS SDKs will automatically pick these up.
|
||||||
|
|
||||||
|
## Example: Reading and writing to S3 Bucket
|
||||||
|
|
||||||
|
This example demonstrates modifying S3 buckets and using the included
|
||||||
|
ImageMagick tools in a node.js function. Our function will fetch an image
|
||||||
|
stored in a key specified by the event, resize it to a width of 1024px and save
|
||||||
|
it to another key.
|
||||||
|
|
||||||
|
The code for this example is located [here](../../examples/s3/example.js).
|
||||||
|
|
||||||
|
The event will look like:
|
||||||
|
|
||||||
|
```js
|
||||||
|
{
|
||||||
|
"bucket": "iron-lambda-demo-images",
|
||||||
|
"srcKey": "waterfall.jpg",
|
||||||
|
"dstKey": "waterfall-1024.jpg"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
The setup, imports and SDK initialization.
|
||||||
|
|
||||||
|
```js
|
||||||
|
var im = require('imagemagick');
|
||||||
|
var fs = require('fs');
|
||||||
|
var AWS = require('aws-sdk');
|
||||||
|
|
||||||
|
exports.run = function(event, context) {
|
||||||
|
var bucketName = event['bucket']
|
||||||
|
var srcImageKey = event['srcKey']
|
||||||
|
var dstImageKey = event['dstKey']
|
||||||
|
|
||||||
|
var s3 = new AWS.S3();
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
First we retrieve the source and write it to a local file so ImageMagick can
|
||||||
|
work with it.
|
||||||
|
|
||||||
|
```js
|
||||||
|
s3.getObject({
|
||||||
|
Bucket: bucketName,
|
||||||
|
Key: srcImageKey
|
||||||
|
}, function (err, data) {
|
||||||
|
|
||||||
|
if (err) throw err;
|
||||||
|
|
||||||
|
var fileSrc = '/tmp/image-src.dat';
|
||||||
|
var fileDst = '/tmp/image-dst.dat'
|
||||||
|
fs.writeFileSync(fileSrc, data.Body)
|
||||||
|
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
The actual resizing involves using the identify function to get the current
|
||||||
|
size (we only resize if the image is wider than 1024px), then doing the actual
|
||||||
|
conversion to `fileDst`. Finally we upload to S3.
|
||||||
|
|
||||||
|
```js
|
||||||
|
im.identify(fileSrc, function(err, features) {
|
||||||
|
resizeIfRequired(err, features, fileSrc, fileDst, function(err, resized) {
|
||||||
|
if (err) throw err;
|
||||||
|
if (resized) {
|
||||||
|
s3.putObject({
|
||||||
|
Bucket:bucketName,
|
||||||
|
Key: dstImageKey,
|
||||||
|
Body: fs.createReadStream(fileDst),
|
||||||
|
ContentType: 'image/jpeg',
|
||||||
|
ACL: 'public-read',
|
||||||
|
}, function (err, data) {
|
||||||
|
if (err) throw err;
|
||||||
|
context.done()
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
context.done();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
```
|
||||||
@@ -36,8 +36,7 @@ Assuming you have a lambda with the following arn `arn:aws:lambda:us-west-2:1231
|
|||||||
fnctl lambda aws-import arn:aws:lambda:us-west-2:123141564251:function:my-function us-east-1 user/my-function
|
fnctl lambda aws-import arn:aws:lambda:us-west-2:123141564251:function:my-function us-east-1 user/my-function
|
||||||
```
|
```
|
||||||
|
|
||||||
will import the function code from the region `us-east-1` to a directory called `./my-function`. It will
|
will import the function code from the region `us-east-1` to a directory called `./user/my-function`. Inside the directory you will find the `function.yml`, `Dockerfile`, and all the files needed for running the function.
|
||||||
then create a docker image called `my-function`.
|
|
||||||
|
|
||||||
Using Lambda with Docker Hub and IronFunctions requires that the Docker image be
|
Using Lambda with Docker Hub and IronFunctions requires that the Docker image be
|
||||||
named `<Docker Hub username>/<image name>`. This is used to uniquely identify
|
named `<Docker Hub username>/<image name>`. This is used to uniquely identify
|
||||||
@@ -48,3 +47,9 @@ If you only want to download the code, pass the `--download-only` flag. The
|
|||||||
`--profile` flag is available similar to the `aws` tool to help
|
`--profile` flag is available similar to the `aws` tool to help
|
||||||
you tweak the settings on a command level. Finally, you can import a different version of your lambda function than the latest one
|
you tweak the settings on a command level. Finally, you can import a different version of your lambda function than the latest one
|
||||||
by passing `--version <version>.`
|
by passing `--version <version>.`
|
||||||
|
|
||||||
|
You can then publish the imported lambda as follows:
|
||||||
|
```
|
||||||
|
./fnctl publish -d ./user/my-function
|
||||||
|
```
|
||||||
|
Now the function can be reached via `http://$HOSTNAME/r/user/my-function`
|
||||||
5
examples/s3/Dockerfile
Normal file
5
examples/s3/Dockerfile
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
FROM iron/lambda-nodejs
|
||||||
|
|
||||||
|
ADD example.js ./example.js
|
||||||
|
|
||||||
|
CMD ["example.run"]
|
||||||
7
examples/s3/Makefile
Normal file
7
examples/s3/Makefile
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
IMAGE=iron/lambda-node-aws-example
|
||||||
|
|
||||||
|
create: Dockerfile
|
||||||
|
docker build -t $(IMAGE) .
|
||||||
|
|
||||||
|
test:
|
||||||
|
docker run --rm -it -e PAYLOAD_FILE=/mnt/example-payload.json -e AWS_ACCESS_KEY_ID=change-here -e AWS_SECRET_ACCESS_KEY=change-here -v `pwd`:/mnt $(IMAGE)
|
||||||
2
examples/s3/README.md
Normal file
2
examples/s3/README.md
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
Example of how to use AWS S3 in a Lambda function.
|
||||||
|
|
||||||
5
examples/s3/example-payload.json
Normal file
5
examples/s3/example-payload.json
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
{
|
||||||
|
"bucket": "iron-lambda-demo-images",
|
||||||
|
"srcKey": "waterfall.jpg",
|
||||||
|
"dstKey": "waterfall-1024.jpg"
|
||||||
|
}
|
||||||
70
examples/s3/example.js
Normal file
70
examples/s3/example.js
Normal file
@@ -0,0 +1,70 @@
|
|||||||
|
var im = require('imagemagick');
|
||||||
|
var fs = require('fs');
|
||||||
|
var AWS = require('aws-sdk');
|
||||||
|
|
||||||
|
// Resize `fileSrc` into `fileDst` when it is wider than 1024px.
// cb(err, resized) is called with resized === true only if a resize happened.
function resizeIfRequired(err, features, fileSrc, fileDst, cb) {
  // Propagate an upstream identify() failure without touching ImageMagick.
  if (err) {
    cb(err, false);
    return;
  }

  var targetWidth = 1024;
  if (features.width > targetWidth) {
    im.resize({
      srcPath: fileSrc,
      dstPath: fileDst,
      width: targetWidth,
      format: 'jpg'
    }, function(resizeErr) {
      if (resizeErr) {
        cb(resizeErr, false);
      } else {
        cb(null, true);
      }
    });
    return;
  }

  // Image is already narrow enough — nothing to do.
  cb(null, false);
}
|
||||||
|
|
||||||
|
exports.run = function(event, context) {
|
||||||
|
var bucketName = event['bucket']
|
||||||
|
var srcImageKey = event['srcKey']
|
||||||
|
var dstImageKey = event['dstKey']
|
||||||
|
|
||||||
|
var s3 = new AWS.S3();
|
||||||
|
|
||||||
|
s3.getObject({
|
||||||
|
Bucket: bucketName,
|
||||||
|
Key: srcImageKey
|
||||||
|
}, function (err, data) {
|
||||||
|
|
||||||
|
if (err) throw err;
|
||||||
|
|
||||||
|
var fileSrc = '/tmp/image-src.dat';
|
||||||
|
var fileDst = '/tmp/image-dst.dat'
|
||||||
|
fs.writeFileSync(fileSrc, data.Body)
|
||||||
|
|
||||||
|
im.identify(fileSrc, function(err, features) {
|
||||||
|
resizeIfRequired(err, features, fileSrc, fileDst, function(err, resized) {
|
||||||
|
if (err) throw err;
|
||||||
|
if (resized) {
|
||||||
|
s3.putObject({
|
||||||
|
Bucket:bucketName,
|
||||||
|
Key: dstImageKey,
|
||||||
|
Body: fs.createReadStream(fileDst),
|
||||||
|
ContentType: 'image/jpeg',
|
||||||
|
ACL: 'public-read',
|
||||||
|
}, function (err, data) {
|
||||||
|
if (err) throw err;
|
||||||
|
context.succeed("Image updated");
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
context.succeed("Image not updated");
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
Reference in New Issue
Block a user