Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Issue #931 fix for not allowing caching #1050

Merged
merged 5 commits into from Aug 27, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
1 change: 1 addition & 0 deletions README.md
Expand Up @@ -135,6 +135,7 @@ All CLI options are optional:
--useDocker Run handlers in a docker container.
--layersDir The directory layers should be stored in. Default: ${codeDir}/.serverless-offline/layers'
--dockerReadOnly Marks if the docker code layer should be read only. Default: true
--allowCache Allows the code of lambda functions to be cached, if supported.
```

Any of the CLI options can be added to your `serverless.yml`. For example:
Expand Down
27 changes: 25 additions & 2 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions package.json
Expand Up @@ -158,6 +158,7 @@
"aws-sdk": "^2.624.0",
"boxen": "^4.2.0",
"chalk": "^3.0.0",
"clear-module": "^4.1.1",
"cuid": "^2.1.8",
"execa": "^4.0.0",
"extend": "^3.0.2",
Expand Down
3 changes: 3 additions & 0 deletions src/config/commandOptions.js
Expand Up @@ -99,4 +99,7 @@ export default {
functionCleanupIdleTimeSeconds: {
usage: 'Number of seconds until an idle function is eligible for cleanup',
},
allowCache: {
usage: 'Allows the code of lambda functions to cache if supported',
},
}
1 change: 1 addition & 0 deletions src/config/defaultOptions.js
Expand Up @@ -28,4 +28,5 @@ export default {
layersDir: null,
dockerReadOnly: true,
functionCleanupIdleTimeSeconds: 60,
allowCache: false,
}
18 changes: 12 additions & 6 deletions src/lambda/handler-runner/HandlerRunner.js
Expand Up @@ -21,7 +21,12 @@ export default class HandlerRunner {
}

async _loadRunner() {
const { useDocker, useChildProcesses, useWorkerThreads } = this.#options
const {
useDocker,
useChildProcesses,
useWorkerThreads,
allowCache,
} = this.#options

const {
functionKey,
Expand All @@ -48,7 +53,7 @@ export default class HandlerRunner {
const { default: ChildProcessRunner } = await import(
'./child-process-runner/index.js'
)
return new ChildProcessRunner(this.#funOptions, this.#env)
return new ChildProcessRunner(this.#funOptions, this.#env, allowCache)
}

if (useWorkerThreads) {
Expand All @@ -58,7 +63,7 @@ export default class HandlerRunner {
const { default: WorkerThreadRunner } = await import(
'./worker-thread-runner/index.js'
)
return new WorkerThreadRunner(this.#funOptions, this.#env)
return new WorkerThreadRunner(this.#funOptions, this.#env, allowCache)
}

const { default: InProcessRunner } = await import(
Expand All @@ -70,22 +75,23 @@ export default class HandlerRunner {
handlerName,
this.#env,
timeout,
allowCache,
)
}

if (supportedPython.has(runtime)) {
const { default: PythonRunner } = await import('./python-runner/index.js')
return new PythonRunner(this.#funOptions, this.#env)
return new PythonRunner(this.#funOptions, this.#env, allowCache)
}

if (supportedRuby.has(runtime)) {
const { default: RubyRunner } = await import('./ruby-runner/index.js')
return new RubyRunner(this.#funOptions, this.#env)
return new RubyRunner(this.#funOptions, this.#env, allowCache)
}

if (supportedJava.has(runtime)) {
const { default: JavaRunner } = await import('./java-runner/index.js')
return new JavaRunner(this.#funOptions, this.#env)
return new JavaRunner(this.#funOptions, this.#env, allowCache)
}

// TODO FIXME
Expand Down
Expand Up @@ -9,15 +9,17 @@ export default class ChildProcessRunner {
#handlerName = null
#handlerPath = null
#timeout = null
#allowCache = false

constructor(funOptions, env) {
constructor(funOptions, env, allowCache) {
const { functionKey, handlerName, handlerPath, timeout } = funOptions

this.#env = env
this.#functionKey = functionKey
this.#handlerName = handlerName
this.#handlerPath = handlerPath
this.#timeout = timeout
this.#allowCache = allowCache
}

// no-op
Expand All @@ -37,6 +39,7 @@ export default class ChildProcessRunner {
childProcess.send({
context,
event,
allowCache: this.#allowCache,
timeout: this.#timeout,
})

Expand Down
Expand Up @@ -23,7 +23,7 @@ process.on('uncaughtException', (err) => {
const [, , functionKey, handlerName, handlerPath] = process.argv

process.on('message', async (messageData) => {
const { context, event, timeout } = messageData
const { context, event, allowCache, timeout } = messageData

// TODO we could probably cache this in the module scope?
const inProcessRunner = new InProcessRunner(
Expand All @@ -32,6 +32,7 @@ process.on('message', async (messageData) => {
handlerName,
process.env,
timeout,
allowCache,
)

let result
Expand Down
@@ -1,18 +1,21 @@
import { performance } from 'perf_hooks'
import clearModule from 'clear-module'

export default class InProcessRunner {
#env = null
#functionKey = null
#handlerName = null
#handlerPath = null
#timeout = null
#allowCache = false

constructor(functionKey, handlerPath, handlerName, env, timeout) {
constructor(functionKey, handlerPath, handlerName, env, timeout, allowCache) {
this.#env = env
this.#functionKey = functionKey
this.#handlerName = handlerName
this.#handlerPath = handlerPath
this.#timeout = timeout
this.#allowCache = allowCache
}

// no-op
Expand All @@ -36,7 +39,9 @@ export default class InProcessRunner {
Object.assign(process.env, this.#env)

// lazy load handler with first usage

if (!this.#allowCache) {
clearModule(this.#handlerPath)
}
const { [this.#handlerName]: handler } = await import(this.#handlerPath)

if (typeof handler !== 'function') {
Expand Down
5 changes: 4 additions & 1 deletion src/lambda/handler-runner/java-runner/JavaRunner.js
Expand Up @@ -10,8 +10,9 @@ export default class JavaRunner {
#functionName = null
#handler = null
#deployPackage = null
#allowCache = false

constructor(funOptions, env) {
constructor(funOptions, env, allowCache) {
const {
functionName,
handler,
Expand All @@ -23,6 +24,7 @@ export default class JavaRunner {
this.#functionName = functionName
this.#handler = handler
this.#deployPackage = functionPackage || servicePackage
this.#allowCache = allowCache
}

// no-op
Expand Down Expand Up @@ -71,6 +73,7 @@ export default class JavaRunner {
function: this.#functionName,
jsonOutput: true,
serverlessOffline: true,
allowCache: this.#allowCache,
})

const httpOptions = {
Expand Down
5 changes: 4 additions & 1 deletion src/lambda/handler-runner/python-runner/PythonRunner.js
Expand Up @@ -13,14 +13,16 @@ export default class PythonRunner {
#handlerName = null
#handlerPath = null
#runtime = null
#allowCache = false

constructor(funOptions, env) {
constructor(funOptions, env, allowCache) {
const { handlerName, handlerPath, runtime } = funOptions

this.#env = env
this.#handlerName = handlerName
this.#handlerPath = handlerPath
this.#runtime = platform() === 'win32' ? 'python.exe' : runtime
this.#allowCache = allowCache

if (process.env.VIRTUAL_ENV) {
const runtimeDir = platform() === 'win32' ? 'Scripts' : 'bin'
Expand Down Expand Up @@ -96,6 +98,7 @@ export default class PythonRunner {
const input = stringify({
context,
event,
allowCache: this.#allowCache,
})

const onErr = (data) => {
Expand Down
5 changes: 4 additions & 1 deletion src/lambda/handler-runner/ruby-runner/RubyRunner.js
Expand Up @@ -10,13 +10,15 @@ export default class RubyRunner {
#env = null
#handlerName = null
#handlerPath = null
#allowCache = false

constructor(funOptions, env) {
constructor(funOptions, env, allowCache) {
const { handlerName, handlerPath } = funOptions

this.#env = env
this.#handlerName = handlerName
this.#handlerPath = handlerPath
this.#allowCache = allowCache
}

// no-op
Expand Down Expand Up @@ -64,6 +66,7 @@ export default class RubyRunner {
const input = stringify({
context: _context,
event,
allowCache: this.#allowCache,
})

// console.log(input)
Expand Down
Expand Up @@ -5,12 +5,14 @@ const workerThreadHelperPath = resolve(__dirname, './workerThreadHelper.js')

export default class WorkerThreadRunner {
#workerThread = null
#allowCache = false

constructor(funOptions /* options */, env) {
constructor(funOptions /* options */, env, allowCache) {
// this._options = options

const { functionKey, handlerName, handlerPath, timeout } = funOptions

this.#allowCache = allowCache
this.#workerThread = new Worker(workerThreadHelperPath, {
// don't pass process.env from the main process!
env,
Expand Down Expand Up @@ -51,6 +53,7 @@ export default class WorkerThreadRunner {
{
context,
event,
allowCache: this.#allowCache,
// port2 is part of the payload, for the other side to answer messages
port: port2,
},
Expand Down
Expand Up @@ -4,7 +4,7 @@ import InProcessRunner from '../in-process-runner/index.js'
const { functionKey, handlerName, handlerPath } = workerData

parentPort.on('message', async (messageData) => {
const { context, event, port, timeout } = messageData
const { context, event, port, timeout, allowCache } = messageData

// TODO we could probably cache this in the module scope?
const inProcessRunner = new InProcessRunner(
Expand All @@ -13,6 +13,7 @@ parentPort.on('message', async (messageData) => {
handlerName,
process.env,
timeout,
allowCache,
)

let result
Expand Down
4 changes: 4 additions & 0 deletions tests/integration/lambda-invoke/serverless.yml
Expand Up @@ -51,3 +51,7 @@ functions:

invokedAsyncHandler:
handler: lambdaInvokeAsyncHandler.invokedAsyncHandler

custom:
serverless-offline:
allowCache: true