diff --git a/.circleci/config.yml b/.circleci/config.yml new file mode 100644 index 0000000..471369a --- /dev/null +++ b/.circleci/config.yml @@ -0,0 +1,90 @@ +version: 2.1 +tagged_build_filters: &tagged_build_filters + branches: + ignore: /.*/ + tags: + only: /v[0-9]+\.[0-9]+\.[0-9]+/ +test_build_filters: &test_build_filters + branches: + only: /.*/ + tags: + ignore: /v[0-9]+\.[0-9]+\.[0-9]+/ +jobs: + test: + docker: + - image: circleci/node:10 + steps: + - checkout + - run: npm install + - run: npm run lint + build: + docker: + - image: circleci/node:10 + steps: + - checkout + - run: + name: Check Tagged Push + command: | + PKG_VERSION=$(cat ./package.json | grep version | cut -d '"' -f4) + if [[ "${CIRCLE_TAG}" != "v${PKG_VERSION}" ]]; then + echo "There is mismatch:" + echo " TAG_VERSION: ${CIRCLE_TAG}" + echo " PKG_VERSION: v${PKG_VERSION}" + exit 1 + fi + - run: + name: Build S3 Lambda + command: | + npm install --production + zip logdna-s3.zip -r node_modules/ index.js package.json + - persist_to_workspace: + root: . + paths: + - ./logdna-s3.zip + approve: + machine: true + steps: + - attach_workspace: + at: . + - persist_to_workspace: + root: . + paths: + - ./logdna-s3.zip + release: + docker: + - image: circleci/golang:1.12 + steps: + - attach_workspace: + at: . 
+ - run: go get -u github.com/tcnksm/ghr + - run: + name: Create a Release + command: | + ghr \ + -n "LogDNA S3 Lambda Function ${CIRCLE_TAG}" \ + -t ${GITHUB_TOKEN} \ + -u ${CIRCLE_PROJECT_USERNAME} \ + -r ${CIRCLE_PROJECT_REPONAME} \ + -draft ${CIRCLE_TAG} ${CIRCLE_WORKING_DIRECTORY} +workflows: + update: + jobs: + - test: + filters: *tagged_build_filters + - build: + requires: + - test + filters: *tagged_build_filters + - approve: + type: approval + requires: + - build + filters: *tagged_build_filters + - release: + requires: + - approve + filters: *tagged_build_filters + test: + jobs: + - test: + filters: *test_build_filters diff --git a/.eslintrc b/.eslintrc new file mode 100644 index 0000000..c4f2c5b --- /dev/null +++ b/.eslintrc @@ -0,0 +1,207 @@ +{ + "env": { + "browser": true, + "es6": true, + "node": true + }, + "globals": { + "debug": false + }, + "rules": { + "accessor-pairs": 0, + "array-bracket-spacing": [2, "never"], + "array-callback-return": 2, + "arrow-parens": ["error", "as-needed", { + "requireForBlockBody": true + }], + "block-scoped-var": 2, + "block-spacing": [2, "always"], + "brace-style": [2, "1tbs", { + "allowSingleLine": true + }], + "class-methods-use-this": [0, { + "exceptMethods": [] + }], + "comma-dangle": [2, "never"], + "comma-spacing": [2, { + "before": false, + "after": true + }], + "comma-style": [2, "first"], + "complexity": [0, 11], + "consistent-return": 0, + "consistent-this": [2, "that"], + "curly": [2, "multi-line"], + "default-case": 0, + "dot-location": [2, "property"], + "dot-notation": [1, { + "allowKeywords": true + }], + "eol-last": 2, + "eqeqeq": [2, "always", { + "null": "ignore" + }], + "guard-for-in": 2, + "indent": [1, 4, { + "FunctionDeclaration": { + "parameters": "first" + }, + "FunctionExpression": { + "parameters": "first" + }, + "SwitchCase": 1 + }], + "key-spacing": [0, { + "align": "colon", + "afterColon": true + }], + "keyword-spacing": [2, { + "overrides": { + "else": { + "before": true + }, + "while": 
{ + "before": true + }, + "catch": { + "before": true + } + } + }], + "max-len": [1, 160, 2, { + "ignoreComments": true + }], + "no-alert": 1, + "no-caller": 2, + "no-case-declarations": 2, + "no-cond-assign": [2, "except-parens"], + "no-const-assign": "error", + "no-div-regex": 0, + "no-else-return": 0, + "no-empty-function": [2, { + "allow": ["arrowFunctions", "functions", "methods"] + }], + "no-empty-pattern": 2, + "no-eq-null": 0, + "no-eval": 2, + "no-extend-native": 2, + "no-extra-bind": 2, + "no-extra-label": 2, + "no-fallthrough": 1, + "no-floating-decimal": 2, + "no-global-assign": [2, { + "exceptions": [] + }], + "no-implicit-coercion": [0, { + "boolean": false, + "number": true, + "string": true, + "allow": [] + }], + "no-implicit-globals": 0, + "no-implied-eval": 2, + "no-invalid-this": 0, + "no-iterator": 2, + "no-labels": [2, { + "allowLoop": false, + "allowSwitch": false + }], + "no-lone-blocks": 2, + "no-loop-func": 1, + "no-magic-numbers": [1, { + "ignore": [0, 1, -1, 2, 3, 4, 5, 6, 7, 8, 9, + 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, + 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, + 32, 50, 80, 100, 443, 8080, 8090, + 1024, 2048, 4096, 8192, + 200, 201, 202, 204, 301, 302, 400, 401, 403, 404, 405, 409, 410, 415, 500, 505, + 30, 60, 120, 180, 240, 300, 900, 1800, 3600, 7200, 14400, 43200, 86400, 172800, + 1000, 2000, 2500, 3000, 5000, 10000, 30000, 32000, 60000, 120000, 180000, 240000, 300000, 900000, 1800000, 3600000, 7200000, 14400000, 43200000, 86400000, 172800000], + "ignoreArrayIndexes": true, + "enforceConst": false, + "detectObjects": false + }], + "no-mixed-spaces-and-tabs": 2, + "no-multi-spaces": 2, + "no-multi-str": 2, + "no-native-reassign": 0, + "no-new": 0, + "no-new-func": 2, + "no-new-wrappers": 2, + "no-octal": 2, + "no-octal-escape": 2, + "no-param-reassign": 0, + "no-proto": 2, + "no-redeclare": 2, + "no-restricted-properties": [2, { + "object": "arguments", + "property": "callee", + "message": "arguments.callee is deprecated," + }, { + 
"property": "__defineGetter__", + "message": "Please use Object.defineProperty instead." + }, { + "property": "__defineSetter__", + "message": "Please use Object.defineProperty instead." + }], + "no-return-assign": 2, + "no-script-url": 2, + "no-self-assign": 2, + "no-self-compare": 2, + "no-sequences": 2, + "no-spaced-func": 2, + "no-throw-literal": 2, + "no-trailing-spaces": 2, + "no-undef": 2, + "no-unmodified-loop-condition": 0, + "no-unused-expressions": [2, { + "allowShortCircuit": false, + "allowTernary": false + }], + "no-unused-labels": 2, + "no-unused-vars": [1, { + "vars": "local", + "args": "none" + }], + "no-use-before-define": [2, { + "functions": false + }], + "no-useless-call": 0, + "no-useless-concat": 2, + "no-useless-escape": 2, + "no-void": 2, + "no-warning-comments": [0, { + "terms": ["todo", "fixme", "xxx"], + "location": "start" + }], + "no-with": 2, + "one-var": [0, { + "uninitialized": "always", + "initialized": "never" + }], + "operator-linebreak": [2, "after"], + "padded-blocks": [0, "never"], + "quote-props": [1, "consistent-as-needed"], + "quotes": [1, "single"], + "radix": 0, + "semi": [2, "always"], + "semi-spacing": [2, { + "before": false, + "after": true + }], + "space-before-blocks": [2, "always"], + "space-before-function-paren": [2, "never"], + "space-in-parens": [2, "never"], + "space-infix-ops": 2, + "space-unary-ops": [2, { + "words": false, + "nonwords": false + }], + "spaced-comment": [2, "always"], + "vars-on-top": 0, + "wrap-iife": [2, "outside", { + "functionPrototypeMethods": false + }], + "yoda": 0 + } +} diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..c63e8f5 --- /dev/null +++ b/.gitignore @@ -0,0 +1,16 @@ +# No Package Lock +package-lock.json + +# No Temporary File +*.swp +.DS_Store + +# No NPM Modules +node_modules + +# No ZIP Files +*.zip + +# No Testing Materials +data +generateData.js \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 
0000000..22c60e1 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,6 @@ +# CHANGELOG + +This file documents all notable changes in the `LogDNA S3 Lambda Function`. The release numbering uses [semantic versioning](http://semver.org). + +## v1.0.0 - Released on January 24, 2020 +* Initial Release \ No newline at end of file diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..d27c902 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,15 @@ +# Contributing + +## Github Workflow + +Contributions are always welcome! Be sure to follow the [github workflow](https://guides.github.com/introduction/flow/) when contributing to this project: + +* Create an issue, or comment on an issue to indicate what you are working on. This avoids work duplication. +* Fork the repository and clone to your local machine +* You should already be on the default branch `master` - if not, check it out (`git checkout master`) +* Create a new branch for your feature/fix `git checkout -b my-new-feature`) +* Write your feature/fix +* Stage the changed files for a commit (`git add .`) +* Commit your files with a *useful* commit message ([example](https://github.com/Azure/azure-quickstart-templates/commit/53699fed9983d4adead63d9182566dec4b8430d4)) (`git commit`) +* Push your new branch to your GitHub Fork (`git push origin my-new-feature`) +* Visit this repository in GitHub and create a Pull Request. diff --git a/LICENSE b/LICENSE new file mode 100755 index 0000000..bd95903 --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2019 LogDNA, Inc. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/README.md b/README.md new file mode 100644 index 0000000..1fac533 --- /dev/null +++ b/README.md @@ -0,0 +1,63 @@ +# LogDNA S3 + +The LogDNA Amazon S3 integration relies on [AWS Lambda](https://docs.aws.amazon.com/lambda/index.html) to route your logs from [S3](https://docs.aws.amazon.com/AmazonS3/latest/dev/Welcome.html) to LogDNA. + +## Configure the LogDNA AWS Lambda function +1. Create a [new AWS Lambda function](https://console.aws.amazon.com/lambda/home) and select `Author from scratch` +2. For the basic information: + * Function Name: `logdna_s3` (you can choose what to name it) + * Runtime: `Node.js.10.x` +3. 
Click on the Lambda function to edit the details: + * Code entry type: `Upload a .ZIP file` + * Upload our LogDNA Lambda function [.ZIP File](https://github.com/logdna/logdna-s3/releases/latest/download/logdna-s3.zip) + * Handler: `index.handler` + * Runtime: `Node.js.10.x` + * Environment variables: + * `LOGDNA_KEY`: `YOUR_INGESTION_KEY_HERE` *(Required)* + * `LOGDNA_HOSTNAME`: Alternative Host Name *(Optional)* + * `LOGDNA_TAGS`: Comma-separated Tags *(Optional)* + * `LOGDNA_URL`: Custom Ingestion URL *(Optional)* +4. For Execution role, assign a role that has the following policies: + * [`AmazonS3ReadOnlyAccess`](https://gist.github.com/bernadinm/6f68bfdd015b3f3e0a17b2f00c9ea3f8#file-all_aws_managed_policies-json-L4392-L4417) + * [`AWSLambdaBasicExecutionRole`](https://gist.github.com/bernadinm/6f68bfdd015b3f3e0a17b2f00c9ea3f8#file-all_aws_managed_policies-json-L1447-L1473) +![Policies](https://raw.githubusercontent.com/logdna/artwork/master/logdna-s3/permissions.png) +5. It is recommended to set the `Timeout` to 30 seconds if the function is going to be used to stream the logs from `gzipped` files. You may change `Memory (MB)` limit as well depending on how heavy those files are going to be + +### Configure your AWS S3 Bucket +You have the option of connecting your Amazon S3 Bucket within the S3 Lambda function console or in your S3 console. + +### In the S3 Lambda Function +1. Add S3 as a Trigger and click it to Configure: +![Configure](https://raw.githubusercontent.com/logdna/artwork/master/logdna-s3/designer.png) +2. Select the `Bucket` and `Event type` to stream the logs from to LogDNA + a. *Optional* You can also specify `Prefix` and `Suffix` for the files to capture +3. Make sure you check `Enable trigger` box: +![Trigger](https://raw.githubusercontent.com/logdna/artwork/master/logdna-s3/trigger.png) +4. 
Repeat steps 1-3 to add multiple buckets
+
+### Optional Environment Variables
+The following variables can be used to tune this S3 Lambda function for specific use cases.
+
+* **LOGDNA_BATCH_INTERVAL**: How frequently (in `milliseconds`) to flush the batch of logs, *Optional*
+  * **Default**: 50
+* **LOGDNA_BATCH_LIMIT**: The maximum number of logs in one batch, *Optional*
+  * **Default**: 25
+* **LOGDNA_FREE_SOCKET_TIMEOUT**: How long (in `milliseconds`) to wait for inactivity before timing out on the free socket, *Optional*
+  * **Default**: 300000
+  * **Source**: [agentkeepalive#agentoptions](https://github.com/node-modules/agentkeepalive/blob/master/README.md#new-agentoptions)
+* **LOGDNA_MAX_LINE_LENGTH**: The maximum character length for each line, *Optional*
+  * **Default**: 32000
+* **LOGDNA_MAX_REQUEST_TIMEOUT**: Time limit (in `milliseconds`) for requests made by this HTTP Client, *Optional*
+  * **Default**: 30000
+* **LOGDNA_MAX_REQUEST_RETRIES**: The maximum number of retries for sending a line when there are network failures, *Optional*
+  * **Default**: 5
+* **LOGDNA_REQUEST_RETRY_INTERVAL**: How frequently (in `milliseconds`) to retry for sending a line when there are network failures, *Optional*
+  * **Default**: 100
+
+### Notes
+* This S3 Lambda function and S3 Bucket should be in the same availability zone
+* You can specify a bucket only in one trigger and/or S3 Lambda function since a bucket accepts only one subscription
+* `Node.js.10.x` is the minimum runtime requirement for successfully running this S3 Lambda function
+
+## Contributing
+Contributions are always welcome. See the [contributing guide](/CONTRIBUTING.md) to learn how you can help. Build instructions for the agent are also in the guide.
diff --git a/index.js b/index.js new file mode 100644 index 0000000..f882aa5 --- /dev/null +++ b/index.js @@ -0,0 +1,255 @@ +// External Libraries +const agent = require('agentkeepalive'); +const async = require('async'); +const aws = require('aws-sdk'); +const request = require('request'); +const zlib = require('zlib'); + +// Constants +const BATCH_INTERVAL_MS = parseInt(process.env.LOGDNA_BATCH_INTERVAL) || 50; +const BATCH_LIMIT = parseInt(process.env.LOGDNA_BATCH_LIMIT) || 25; +const FREE_SOCKET_TIMEOUT_MS = parseInt(process.env.LOGDNA_FREE_SOCKET_TIMEOUT) || 300000; +const LOGDNA_URL = process.env.LOGDNA_URL || 'https://logs.logdna.com/logs/ingest'; +const MAX_REQUEST_RETRIES = parseInt(process.env.LOGDNA_MAX_REQUEST_RETRIES) || 5; +const MAX_REQUEST_TIMEOUT_MS = parseInt(process.env.LOGDNA_MAX_REQUEST_TIMEOUT) || 30000; +const REQUEST_RETRY_INTERVAL_MS = parseInt(process.env.LOGDNA_REQUEST_RETRY_INTERVAL) || 100; +const INTERNAL_SERVER_ERROR = 500; +const DEFAULT_HTTP_ERRORS = [ + 'ECONNRESET' + , 'EHOSTUNREACH' + , 'ETIMEDOUT' + , 'ESOCKETTIMEDOUT' + , 'ECONNREFUSED' + , 'ENOTFOUND' +]; + +// RegExp: +const DOT_REGEXP = /\./g; + +// Initializations +const s3 = new aws.S3({ + apiVersion: '2006-03-01' +}); + +// Get Configuration from Environment Variables +const getConfig = (event) => { + const pkg = require('./package.json'); + let config = { + eventlog: false + , filelog: true + , hostname: event.Records[0].s3.bucket.name.replace(DOT_REGEXP, '_') + , UserAgent: `${pkg.name}/${pkg.version}` + }; + + if (process.env.LOGDNA_KEY) config.key = process.env.LOGDNA_KEY; + if (process.env.LOGDNA_HOSTNAME) config.hostname = process.env.LOGDNA_HOSTNAME; + if (process.env.LOGDNA_TAGS) { + config.tags = process.env.LOGDNA_TAGS.split(',').map(tag => tag.trim()).join(','); + } + + if (process.env.LOGDNA_EVENTLOG) { + config.eventlog = process.env.LOGDNA_EVENTLOG.toLowerCase(); + config.eventlog = config.eventlog.indexOf('yes') > -1 || config.eventlog.indexOf('true') > -1; 
+ } + + if (process.env.LOGDNA_FILELOG) { + config.eventlog = process.env.LOGDNA_FILELOG.toLowerCase(); + config.eventlog = config.eventlog.indexOf('yes') > -1 || config.eventlog.indexOf('true') > -1; + } + + return config; +}; + +// Prepare Datetime +const formatTime = (datetime) => { + const components = (new Date(datetime)).toString().split(' '); + var timeString = `[${components[2]}/${components[1]}/${components[3]}:`; + return `${timeString}${components[4]} ${components[5].split('GMT').pop()}]`; +}; + +// Prepare Message +const formatMessage = (record) => { + let message = `${record.bucket.owner} ${record.s3.region} ${record.bucket.name}`; + message = `${message} ${formatTime(record.event.time)} ${record.request.sourceIPAddress}`; + return `${message} ${record.event.name} ${record.object.key} ${record.object.size}`; +}; + +// Check File Format +const checkFormat = (key) => { + return { + json: key && key.indexOf('.json') >= 0 && key.indexOf('.jsonl') === -1 + , gz: key && key.indexOf('.gz') >= 0 + }; +}; + +// Parse Event Payload +const parseEvent = (event) => { + event.Records[0].s3.object.key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' ')); + return { + event: { + version: event.Records[0].eventVersion + , source: event.Records[0].eventSource + , time: event.Records[0].eventTime + , name: event.Records[0].eventName + } + , s3: { + region: event.Records[0].awsRegion + , user: event.Records[0].userIdentity.principalId + , schema: event.Records[0].s3.s3SchemaVersion + , configuration: event.Records[0].s3.configurationId + } + , bucket: { + name: event.Records[0].s3.bucket.name + , owner: event.Records[0].s3.bucket.ownerIdentity.principalId + , arn: event.Records[0].s3.bucket.arn + }, object: event.Records[0].s3.object + }; +}; + +// Process Event Data After Parsing +const processEvent = (eventData) => { + return { + file: `${eventData.bucket.name}/${eventData.object.key}` + , timestamp: (new Date(eventData.event.time)).getTime() + , 
meta: { + bucket: eventData.bucket + , object: eventData.object + , region: eventData.s3.region + , source: eventData.event.source + , user: eventData.event.user + } + }; +}; + +// Prepare the Messages and Options +const prepareEvent = (eventData) => { + return Object.assign({}, eventData, { + line: formatMessage(eventData) + }); +}; + +// Gett the Logs from File +const getLogs = (params, callback) => { + const keyFormat = checkFormat(params.Key); + return s3.getObject(params, (error, data) => { + if (error) return callback(error); + data = keyFormat.gz ? zlib.gunzipSync(data.Body) : data.Body; + data = data.toString('ascii'); + + if (keyFormat.json) { + try { + data = JSON.parse(data); + return callback(null, Array.isArray(data) ? data.map(entry => JSON.stringify(entry)) : JSON.stringify(data)); + } catch (e) { + return callback(null, data.split('\n').map(line => line.trim()).filter((line) => { + try { + return line === JSON.stringify(JSON.parse(line)); + } catch (e) { + return false; + } + })); + } + } + + return callback(null, data.split('\n')); + }); +}; + +// Batchify +const batchify = (logs) => { + let batches = [], batch = [], batch_size = 0; + logs.forEach((log) => { + batch.push(log); + batch_size += 1; + if (batch_size >= BATCH_LIMIT) { + batches.push(batch); + batch = []; + batch_size = 0; + } + }); + + if (batch_size > 0) { batches.push(batch); } + return batches; +}; + +// Prepare the Logs +const prepareLogs = (logs, eventData) => { + return logs.filter(log => log !== '').map((log) => { + return Object.assign({}, eventData, { + line: log + }); + }); +}; + +// Ship the Logs +const send = (payload, config, callback) => { + // Check for Ingestion Key + if (!config.key) return callback('Missing LogDNA Ingestion Key'); + + // Prepare HTTP Request Options + const options = { + url: LOGDNA_URL + , qs: config.tags ? 
{ + tags: config.tags + , hostname: config.hostname + } : { + hostname: config.hostname + }, method: 'POST' + , body: JSON.stringify({ + e: 'ls' + , ls: payload + }), auth: { + username: config.key + }, headers: { + 'Content-Type': 'application/json; charset=UTF-8' + , 'user-agent': config.UserAgent + }, timeout: MAX_REQUEST_TIMEOUT_MS + , withCredentials: false + , agent: new agent.HttpsAgent({ + freeSocketTimeout: FREE_SOCKET_TIMEOUT_MS + }) + }; + + // Flush the Log + async.retry({ + times: MAX_REQUEST_RETRIES + , interval: retryCount => REQUEST_RETRY_INTERVAL_MS * Math.pow(2, retryCount) + , errorFilter: errCode => DEFAULT_HTTP_ERRORS.includes(errCode) || errCode === 'INTERNAL_SERVER_ERROR' + }, (reqCallback) => { + return request(options, (error, response, body) => { + if (error) return reqCallback(error.code); + if (response.statusCode >= INTERNAL_SERVER_ERROR) return reqCallback('INTERNAL_SERVER_ERROR'); + return reqCallback(null, body); + }); + }, callback); +}; + +// Main Handler +exports.handler = (event, context, callback) => { + const config = getConfig(event) + , eventData = processEvent(parseEvent(event)) + , payload = config.eventlog ? 
[prepareEvent(eventData)] : [] + , s3params = { + Bucket: eventData && eventData.meta && eventData.meta.bucket && eventData.meta.bucket.name || undefined + , Key: eventData && eventData.meta && eventData.meta.object && eventData.meta.object.key || undefined + }; + + if (config.filelog) { + return getLogs(s3params, (error, lines) => { + if (error) { + if (config.eventlog) return send(payload, config, callback); + return callback(error); + } + + return async.everySeries(batchify(payload.concat(prepareLogs(lines, eventData))), (batch, next) => { + setTimeout(() => { + return send(batch, config, next); + }, BATCH_INTERVAL_MS); + }, callback); + }); + } else if (!config.eventlog) { + return callback('None of file and event logging has been enabled!'); + } + + return send(payload, config, callback); +}; diff --git a/package.json b/package.json new file mode 100644 index 0000000..28e052c --- /dev/null +++ b/package.json @@ -0,0 +1,26 @@ +{ + "name": "logdna-s3", + "version": "1.0.0", + "description": "Lambda Function to Stream Logs from AWS S3 to LogDNA", + "main": "index.js", + "scripts": { + "lint": "./node_modules/.bin/eslint -c .eslintrc *.js" + }, + "dependencies": { + "agentkeepalive": "^4.0.2", + "async": "^2.6.2", + "request": "^2.88.0" + }, + "devDependencies": { + "aws-sdk": "^2.590.0", + "eslint": "^6.7.2" + }, + "keywords": [ + "lambda", + "logdna", + "aws", + "s3" + ], + "author": "LogDNA", + "license": "MIT" +}