diff --git a/.flake8 b/.flake8
index 142c1bea0..f9dc06797 100644
--- a/.flake8
+++ b/.flake8
@@ -6,10 +6,12 @@
ignore = W503,E402,E731
exclude = .git, __pycache__, build, dist, .eggs, .github, .local, docs/,
- Samples, azure_functions_worker/protos/, proxy_worker/protos/,
- azure_functions_worker/_thirdparty/typing_inspect.py,
- tests/unittests/test_typing_inspect.py,
- tests/unittests/broken_functions/syntax_error/main.py,
- .env*, .vscode, venv*, *.venv*
+ Samples, workers/azure_functions_worker/protos/, workers/proxy_worker/protos/,
+ workers/azure_functions_worker/_thirdparty/typing_inspect.py,
+ workers/tests/unittests/test_typing_inspect.py,
+ workers/tests/unittests/broken_functions/syntax_error/main.py,
+ .env*, .vscode, venv*, *.venv*,
+ azure_functions_worker_v2/tests/protos/*,
+ azure_functions_worker_v2/azure_functions_worker_v2/utils/typing_inspect.py
max-line-length = 88
diff --git a/.gitignore b/.gitignore
index 0baffcaf8..0f4ea09c3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -132,3 +132,8 @@ prof/
tests/**/host.json
tests/**/bin
tests/**/extensions.csproj
+
+# Azurite related files
+__blobstorage__/*
+__queuestorage__/*
+__azurite*
\ No newline at end of file
diff --git a/azure_functions_worker_v2/README.md b/azure_functions_worker_v2/README.md
new file mode 100644
index 000000000..5d1bb195c
--- /dev/null
+++ b/azure_functions_worker_v2/README.md
@@ -0,0 +1,71 @@
+# Azure Functions Python Worker
+
+| Branch | Build Status | CodeCov | Test Status |
+|--------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| dev | [](https://azfunc.visualstudio.com/public/_build/latest?definitionId=658&branchName=dev) | [](https://codecov.io/gh/Azure/azure-functions-python-worker) | [](https://azfunc.visualstudio.com/public/_build/latest?definitionId=658&branchName=dev) |
+
+Python support for Azure Functions is based on Python 3.13 serverless hosting on Linux and the Functions 4.0 runtime.
+
+Here is the current status of Python in Azure Functions:
+
+What are the supported Python versions?
+
+| Azure Functions Runtime | Python 3.13 |
+|----------------------------------|-------------|
+| Azure Functions 4.0 | ✔ |
+
+For information about Azure Functions Runtime, please refer to [Azure Functions runtime versions overview](https://docs.microsoft.com/en-us/azure/azure-functions/functions-versions) page.
+
+### What's available?
+
+- Build, test, debug, and publish using Azure Functions Core Tools (CLI) or Visual Studio Code
+- Deploy Python Function project onto consumption, dedicated, elastic premium, or flex consumption plan.
+- Deploy Python Function project in a custom docker image onto dedicated or elastic premium plan.
+- Triggers / Bindings : Blob, Cosmos DB, Event Grid, Event Hub, HTTP, Kafka, MySQL, Queue, ServiceBus, SQL, Timer, and Warmup
+- Triggers / Bindings : Custom binding support
+
+### What's new?
+
+- [SDK Type Bindings for Blob](https://techcommunity.microsoft.com/t5/azure-compute-blog/azure-functions-sdk-type-bindings-for-azure-blob-storage-with/ba-p/4146744)
+- [HTTP Streaming](https://techcommunity.microsoft.com/t5/azure-compute-blog/azure-functions-support-for-http-streams-in-python-is-now-in/ba-p/4146697)
+
+### Get Started
+
+- [Create your first Python function](https://docs.microsoft.com/en-us/azure/azure-functions/functions-create-first-function-python)
+- [Developer guide](https://docs.microsoft.com/en-us/azure/azure-functions/functions-reference-python)
+- [Binding API reference](https://docs.microsoft.com/en-us/python/api/azure-functions/azure.functions?view=azure-python)
+- [Develop using VS Code](https://docs.microsoft.com/en-us/azure/azure-functions/functions-create-first-function-vs-code)
+- [Create a Python Function on Linux using a custom docker image](https://docs.microsoft.com/en-us/azure/azure-functions/functions-create-function-linux-custom-image)
+
+# Give Feedback
+
+Issues and feature requests are tracked in a variety of places. To report this feedback, please file an issue to the relevant repository below:
+
+| Item | Description | Link |
+|---------------|----------------------------------------------|--------------------------------------------------------------------------------|
+| Python Worker | Programming Model, Triggers & Bindings | [File an Issue](https://github.com/Azure/azure-functions-python-worker/issues) |
+| Runtime | Script Host & Language Extensibility | [File an Issue](https://github.com/Azure/azure-functions-host/issues) |
+| VSCode | VSCode Extension for Azure Functions | [File an Issue](https://github.com/microsoft/vscode-azurefunctions/issues) |
+| Core Tools | Command Line Interface for Local Development | [File an Issue](https://github.com/Azure/azure-functions-core-tools/issues) |
+| Templates | Code Issues with Creation Template | [File an Issue](https://github.com/Azure/azure-functions-templates/issues) |
+
+# Contribute
+
+This project welcomes contributions and suggestions. Most contributions require you to agree to a
+Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us
+the rights to use your contribution. For details, visit https://cla.microsoft.com.
+
+Here are some pointers to get started:
+
+- [Language worker architecture](https://github.com/Azure/azure-functions-python-worker/wiki/Worker-Architecture)
+- [Setting up the development environment](https://github.com/Azure/azure-functions-python-worker/wiki/Contributor-Guide)
+- [Adding support for a new binding](https://github.com/Azure/azure-functions-python-worker/wiki/Adding-support-for-a-new-binding-type)
+- [Release instructions](https://github.com/Azure/azure-functions-python-worker/wiki/Release-Instructions)
+
+When you submit a pull request, a CLA-bot will automatically determine whether you need to provide
+a CLA and decorate the PR appropriately (e.g., label, comment). Simply follow the instructions
+provided by the bot. You will only need to do this once across all repos using our CLA.
+
+This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
+For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or
+contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
\ No newline at end of file
diff --git a/azure_functions_worker_v2/azure_functions_worker_v2/__init__.py b/azure_functions_worker_v2/azure_functions_worker_v2/__init__.py
new file mode 100644
index 000000000..51d969720
--- /dev/null
+++ b/azure_functions_worker_v2/azure_functions_worker_v2/__init__.py
@@ -0,0 +1,13 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+from .handle_event import (worker_init_request,
+ functions_metadata_request,
+ function_environment_reload_request,
+ invocation_request,
+ function_load_request)
+
+__all__ = ('worker_init_request',
+ 'functions_metadata_request',
+ 'function_environment_reload_request',
+ 'invocation_request',
+ 'function_load_request')
diff --git a/azure_functions_worker_v2/azure_functions_worker_v2/bindings/context.py b/azure_functions_worker_v2/azure_functions_worker_v2/bindings/context.py
new file mode 100644
index 000000000..eb5229ca4
--- /dev/null
+++ b/azure_functions_worker_v2/azure_functions_worker_v2/bindings/context.py
@@ -0,0 +1,66 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import threading
+
+from .retrycontext import RetryContext
+from .tracecontext import TraceContext
+
+
+class Context:
+ def __init__(self,
+ func_name: str,
+ func_dir: str,
+ invocation_id: str,
+ thread_local_storage: threading.local,
+ trace_context: TraceContext,
+ retry_context: RetryContext) -> None:
+ self.__func_name = func_name
+ self.__func_dir = func_dir
+ self.__invocation_id = invocation_id
+ self.__thread_local_storage = thread_local_storage
+ self.__trace_context = trace_context
+ self.__retry_context = retry_context
+
+ @property
+ def invocation_id(self) -> str:
+ return self.__invocation_id
+
+ @property
+ def thread_local_storage(self) -> threading.local:
+ return self.__thread_local_storage
+
+ @property
+ def function_name(self) -> str:
+ return self.__func_name
+
+ @property
+ def function_directory(self) -> str:
+ return self.__func_dir
+
+ @property
+ def trace_context(self) -> TraceContext:
+ return self.__trace_context
+
+ @property
+ def retry_context(self) -> RetryContext:
+ return self.__retry_context
+
+
+def get_context(invoc_request, name: str,
+ directory: str) -> Context:
+ """ For more information refer:
+ https://aka.ms/azfunc-invocation-context
+ """
+ trace_context = TraceContext(
+ invoc_request.trace_context.trace_parent,
+ invoc_request.trace_context.trace_state,
+ invoc_request.trace_context.attributes)
+
+ retry_context = RetryContext(
+ invoc_request.retry_context.retry_count,
+ invoc_request.retry_context.max_retry_count,
+ invoc_request.retry_context.exception)
+
+ return Context(
+ name, directory, invoc_request.invocation_id,
+ threading.local(), trace_context, retry_context)
diff --git a/azure_functions_worker_v2/azure_functions_worker_v2/bindings/datumdef.py b/azure_functions_worker_v2/azure_functions_worker_v2/bindings/datumdef.py
new file mode 100644
index 000000000..d4dbc31cf
--- /dev/null
+++ b/azure_functions_worker_v2/azure_functions_worker_v2/bindings/datumdef.py
@@ -0,0 +1,232 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import json
+import logging
+
+from datetime import datetime
+from typing import Any, List, Optional
+
+from .nullable_converters import (
+ to_nullable_bool,
+ to_nullable_double,
+ to_nullable_string,
+ to_nullable_timestamp,
+)
+
+try:
+ from http.cookies import SimpleCookie
+except ImportError:
+ from Cookie import SimpleCookie # type: ignore
+
+
+class Datum:
+ def __init__(self, value, type):
+ self.value = value
+ self.type = type
+
+ @property
+ def python_value(self) -> Any:
+ if self.value is None or self.type is None:
+ return None
+ elif self.type in ('bytes', 'string', 'int', 'double'):
+ return self.value
+ elif self.type == 'json':
+ return json.loads(self.value)
+ elif self.type == 'collection_string':
+ return [v for v in self.value.string]
+ elif self.type == 'collection_bytes':
+ return [v for v in self.value.bytes]
+ elif self.type == 'collection_double':
+ return [v for v in self.value.double]
+ elif self.type == 'collection_sint64':
+ return [v for v in self.value.sint64]
+ else:
+ return self.value
+
+ @property
+ def python_type(self) -> type:
+ return type(self.python_value)
+
+ def __eq__(self, other):
+ if not isinstance(other, type(self)):
+ return False
+
+ return self.value == other.value and self.type == other.type
+
+ def __hash__(self):
+ return hash((type(self), (self.value, self.type)))
+
+ def __repr__(self):
+ val_repr = repr(self.value)
+ if len(val_repr) > 10:
+ val_repr = val_repr[:10] + '...'
+        return '<Datum {} type={}>'.format(val_repr, self.type)
+
+ @classmethod
+ def from_typed_data(cls, protos):
+ try:
+ td = protos.TypedData
+ except Exception:
+ td = protos
+ tt = td.WhichOneof('data')
+ if tt == 'http':
+ http = td.http
+ val = dict(
+ method=Datum(http.method, 'string'),
+ url=Datum(http.url, 'string'),
+ headers={
+ k: Datum(v, 'string') for k, v in http.headers.items()
+ },
+ body=(
+ Datum.from_typed_data(http.body)
+ or Datum(type='bytes', value=b'')
+ ),
+ params={
+ k: Datum(v, 'string') for k, v in http.params.items()
+ },
+ query={
+ k: Datum(v, 'string') for k, v in http.query.items()
+ },
+ )
+ elif tt == 'string':
+ val = td.string
+ elif tt == 'bytes':
+ val = td.bytes
+ elif tt == 'json':
+ val = td.json
+ elif tt == 'collection_bytes':
+ val = td.collection_bytes
+ elif tt == 'collection_string':
+ val = td.collection_string
+ elif tt == 'collection_sint64':
+ val = td.collection_sint64
+ elif tt == 'model_binding_data':
+ val = td.model_binding_data
+ elif tt == 'collection_model_binding_data':
+ val = td.collection_model_binding_data
+ elif tt is None:
+ return None
+ else:
+ raise NotImplementedError(
+ 'unsupported TypeData kind: %s' % tt
+ )
+
+ return cls(val, tt)
+
+
+def datum_as_proto(datum: Datum, protos):
+ if datum.type == 'string':
+ return protos.TypedData(string=datum.value)
+ elif datum.type == 'bytes':
+ return protos.TypedData(bytes=datum.value)
+ elif datum.type == 'json':
+ return protos.TypedData(json=datum.value)
+ elif datum.type == 'http':
+ return protos.TypedData(http=protos.RpcHttp(
+ status_code=datum.value['status_code'].value,
+ headers={
+ k: v.value
+ for k, v in datum.value['headers'].items()
+ },
+ cookies=parse_to_rpc_http_cookie_list(datum.value.get('cookies'), protos),
+ enable_content_negotiation=False,
+ body=datum_as_proto(datum.value['body'], protos),
+ ))
+ elif datum.type is None:
+ return None
+ elif datum.type == 'dict':
+ # TypedData doesn't support dict, so we return it as json
+ return protos.TypedData(json=json.dumps(datum.value))
+ elif datum.type == 'list':
+ # TypedData doesn't support list, so we return it as json
+ return protos.TypedData(json=json.dumps(datum.value))
+ elif datum.type == 'int':
+ return protos.TypedData(int=datum.value)
+ elif datum.type == 'double':
+ return protos.TypedData(double=datum.value)
+ elif datum.type == 'bool':
+ # TypedData doesn't support bool, so we return it as an int
+ return protos.TypedData(int=int(datum.value))
+ else:
+ raise NotImplementedError(
+ 'unexpected Datum type: %s' % datum.type
+ )
+
+
+def parse_to_rpc_http_cookie_list(cookies: Optional[List[SimpleCookie]], protos):
+ if cookies is None:
+ return cookies
+
+ rpc_http_cookies = []
+
+ for cookie in cookies:
+ for name, cookie_entity in cookie.items():
+ rpc_http_cookies.append(
+ protos.RpcHttpCookie(name=name,
+ value=cookie_entity.value,
+ domain=to_nullable_string(
+ cookie_entity['domain'],
+ 'cookie.domain',
+ protos),
+ path=to_nullable_string(
+ cookie_entity['path'],
+ 'cookie.path',
+ protos),
+ expires=to_nullable_timestamp(
+ parse_cookie_attr_expires(
+ cookie_entity), 'cookie.expires',
+ protos),
+ secure=to_nullable_bool(
+ bool(cookie_entity['secure']),
+ 'cookie.secure',
+ protos),
+ http_only=to_nullable_bool(
+ bool(cookie_entity['httponly']),
+ 'cookie.httpOnly',
+ protos),
+ same_site=parse_cookie_attr_same_site(
+ cookie_entity, protos),
+ max_age=to_nullable_double(
+ cookie_entity['max-age'],
+ 'cookie.maxAge',
+ protos)))
+
+ return rpc_http_cookies
+
+
+def parse_cookie_attr_expires(cookie_entity):
+ expires = cookie_entity['expires']
+
+ if expires is not None and len(expires) != 0:
+ try:
+ return datetime.strptime(expires, "%a, %d %b %Y %H:%M:%S GMT")
+ except ValueError:
+ logging.error(
+ "Can not parse value %s of expires in the cookie "
+ "due to invalid format.", expires)
+ raise
+ except OverflowError:
+ logging.error(
+ "Can not parse value %s of expires in the cookie "
+ "because the parsed date exceeds the largest valid C "
+ "integer on your system.", expires)
+ raise
+
+ return None
+
+
+def parse_cookie_attr_same_site(cookie_entity, protos):
+ same_site = getattr(protos.RpcHttpCookie.SameSite, "None")
+ try:
+ raw_same_site_str = cookie_entity['samesite'].lower()
+
+ if raw_same_site_str == 'lax':
+ same_site = protos.RpcHttpCookie.SameSite.Lax
+ elif raw_same_site_str == 'strict':
+ same_site = protos.RpcHttpCookie.SameSite.Strict
+ elif raw_same_site_str == 'none':
+ same_site = protos.RpcHttpCookie.SameSite.ExplicitNone
+ except Exception:
+ return same_site
+
+ return same_site
diff --git a/azure_functions_worker_v2/azure_functions_worker_v2/bindings/generic.py b/azure_functions_worker_v2/azure_functions_worker_v2/bindings/generic.py
new file mode 100644
index 000000000..cb64ac760
--- /dev/null
+++ b/azure_functions_worker_v2/azure_functions_worker_v2/bindings/generic.py
@@ -0,0 +1,75 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import typing
+
+from typing import Any, Optional
+
+from .datumdef import Datum
+
+
+class GenericBinding:
+
+ @classmethod
+ def has_trigger_support(cls) -> bool:
+ return False
+
+ @classmethod
+ def check_input_type_annotation(cls, pytype: type) -> bool:
+ return issubclass(pytype, (str, bytes))
+
+ @classmethod
+ def check_output_type_annotation(cls, pytype: type) -> bool:
+ return issubclass(pytype, (str, bytes, bytearray))
+
+ @classmethod
+ def encode(cls, obj: Any, *,
+ expected_type: Optional[type]) -> Datum:
+ if isinstance(obj, str):
+ return Datum(type='string', value=obj)
+
+ elif isinstance(obj, (bytes, bytearray)):
+ return Datum(type='bytes', value=bytes(obj))
+ elif obj is None:
+ return Datum(type=None, value=obj)
+ elif isinstance(obj, dict):
+ return Datum(type='dict', value=obj)
+ elif isinstance(obj, list):
+ return Datum(type='list', value=obj)
+ elif isinstance(obj, int):
+ return Datum(type='int', value=obj)
+ elif isinstance(obj, float):
+ return Datum(type='double', value=obj)
+ elif isinstance(obj, bool):
+ return Datum(type='bool', value=obj)
+ else:
+ raise NotImplementedError
+
+ @classmethod
+ def decode(cls, data: Datum, *, trigger_metadata) -> typing.Any:
+ # Enabling support for Dapr bindings
+ # https://github.com/Azure/azure-functions-python-worker/issues/1316
+ if data is None:
+ return None
+ data_type = data.type
+
+ if data_type == 'string':
+ result = data.value
+ elif data_type == 'bytes':
+ result = data.value
+ elif data_type == 'json':
+ result = data.value
+ elif data_type is None:
+ result = None
+ else:
+ raise ValueError(
+ 'unexpected type of data received for the "generic" binding ',
+ repr(data_type)
+ )
+
+ return result
+
+ @classmethod
+ def has_implicit_output(cls, bind_name: Optional[str]) -> bool:
+ if bind_name == 'durableClient':
+ return False
+ return True
diff --git a/azure_functions_worker_v2/azure_functions_worker_v2/bindings/meta.py b/azure_functions_worker_v2/azure_functions_worker_v2/bindings/meta.py
new file mode 100644
index 000000000..fc7c111d2
--- /dev/null
+++ b/azure_functions_worker_v2/azure_functions_worker_v2/bindings/meta.py
@@ -0,0 +1,288 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+# mypy: disable-error-code="attr-defined"
+import os
+import sys
+
+from typing import Any, Dict, Optional, Union
+
+from .datumdef import Datum, datum_as_proto
+from .generic import GenericBinding
+
+from ..http_v2 import HttpV2Registry
+from ..logging import logger
+from ..utils.constants import (
+ CUSTOMER_PACKAGES_PATH,
+ HTTP,
+ HTTP_TRIGGER,
+)
+from ..utils.helpers import set_sdk_version
+
+
+PB_TYPE = 'rpc_data'
+PB_TYPE_DATA = 'data'
+PB_TYPE_RPC_SHARED_MEMORY = 'rpc_shared_memory'
+
+BINDING_REGISTRY = None
+DEFERRED_BINDING_REGISTRY = None
+
+
+def _check_http_input_type_annotation(bind_name: str, pytype: type,
+ is_deferred_binding: bool) -> bool:
+ if HttpV2Registry.http_v2_enabled():
+ return HttpV2Registry.ext_base().RequestTrackerMeta \
+ .check_type(pytype)
+
+ binding = get_binding(bind_name, is_deferred_binding)
+ return binding.check_input_type_annotation(pytype)
+
+
+def _check_http_output_type_annotation(bind_name: str, pytype: type) -> bool:
+ if HttpV2Registry.http_v2_enabled():
+ return HttpV2Registry.ext_base().ResponseTrackerMeta.check_type(pytype)
+
+ binding = get_binding(bind_name)
+ return binding.check_output_type_annotation(pytype)
+
+
+INPUT_TYPE_CHECK_OVERRIDE_MAP = {
+ HTTP_TRIGGER: _check_http_input_type_annotation
+}
+
+OUTPUT_TYPE_CHECK_OVERRIDE_MAP = {
+ HTTP: _check_http_output_type_annotation
+}
+
+
+def load_binding_registry() -> None:
+ """
+ Tries to load azure-functions from the customer's BYO. If it's
+ not found, it loads the builtin. If the BINDING_REGISTRY is None,
+ azure-functions hasn't been loaded in properly.
+
+ Tries to load the base extension.
+ """
+
+ func = sys.modules.get('azure.functions')
+
+ if func is None:
+ import azure.functions as func
+
+ set_sdk_version(func.__version__) # type: ignore
+
+ global BINDING_REGISTRY
+ BINDING_REGISTRY = func.get_binding_registry() # type: ignore
+
+ if BINDING_REGISTRY is None:
+ raise AttributeError('BINDING_REGISTRY is None. azure-functions '
+ 'library not found. Sys Path: %s. '
+ 'Sys Modules: %s. '
+ 'python-packages Path exists: %s.',
+ sys.path, sys.modules,
+ os.path.exists(CUSTOMER_PACKAGES_PATH))
+
+ try:
+ import azurefunctions.extensions.base as clients
+ global DEFERRED_BINDING_REGISTRY
+ DEFERRED_BINDING_REGISTRY = clients.get_binding_registry()
+ except ImportError:
+ logger.debug('Base extension not found. '
+ 'Python version: 3.%s, Sys path: %s, '
+ 'Sys Module: %s, python-packages Path exists: %s.',
+ sys.version_info.minor, sys.path,
+ sys.modules, os.path.exists(CUSTOMER_PACKAGES_PATH))
+
+
+def get_binding(bind_name: str,
+ is_deferred_binding: Optional[bool] = False)\
+ -> object:
+ """
+ First checks if the binding is a non-deferred binding. This is
+ the most common case.
+ Second checks if the binding is a deferred binding.
+ If the binding is neither, it's a generic type.
+ """
+ binding = None
+ if binding is None and not is_deferred_binding:
+ binding = BINDING_REGISTRY.get(bind_name) # type: ignore
+ if binding is None and is_deferred_binding:
+ binding = DEFERRED_BINDING_REGISTRY.get(bind_name) # type: ignore
+ if binding is None:
+ binding = GenericBinding
+ return binding
+
+
+def is_trigger_binding(bind_name: str) -> bool:
+ binding = get_binding(bind_name)
+ return binding.has_trigger_support()
+
+
+def check_input_type_annotation(bind_name: str,
+ pytype: type,
+ is_deferred_binding: bool) -> bool:
+ global INPUT_TYPE_CHECK_OVERRIDE_MAP
+ if bind_name in INPUT_TYPE_CHECK_OVERRIDE_MAP:
+ return INPUT_TYPE_CHECK_OVERRIDE_MAP[bind_name](bind_name, pytype,
+ is_deferred_binding)
+
+ binding = get_binding(bind_name, is_deferred_binding)
+
+ return binding.check_input_type_annotation(pytype)
+
+
+def check_output_type_annotation(bind_name: str, pytype: type) -> bool:
+ global OUTPUT_TYPE_CHECK_OVERRIDE_MAP
+ if bind_name in OUTPUT_TYPE_CHECK_OVERRIDE_MAP:
+ return OUTPUT_TYPE_CHECK_OVERRIDE_MAP[bind_name](bind_name, pytype)
+
+ binding = get_binding(bind_name)
+ return binding.check_output_type_annotation(pytype)
+
+
+def has_implicit_output(bind_name: str) -> bool:
+ binding = get_binding(bind_name)
+
+ # Need to pass in bind_name to exempt Durable Functions
+ if binding is GenericBinding:
+ return (getattr(binding, 'has_implicit_output', lambda: False)
+ (bind_name)) # type: ignore
+
+ else:
+ # If the binding does not have metaclass of meta.InConverter
+ # The implicit_output does not exist
+ return getattr(binding, 'has_implicit_output', lambda: False)()
+
+
+def from_incoming_proto(
+ binding: str,
+ pb, *,
+ pytype: Optional[type],
+ trigger_metadata: Optional[Dict[str, Any]],
+ function_name: str,
+ is_deferred_binding: Optional[bool] = False) -> Any:
+ binding_obj = get_binding(binding, is_deferred_binding)
+ if trigger_metadata:
+ metadata = {
+ k: Datum.from_typed_data(v)
+ for k, v in trigger_metadata.items()
+ }
+ else:
+ metadata = {}
+
+ pb_type = pb.WhichOneof(PB_TYPE)
+ if pb_type == PB_TYPE_DATA:
+ val = pb.data
+ datum = Datum.from_typed_data(val)
+ else:
+ raise TypeError('Unknown ParameterBindingType: %s' % pb_type)
+
+ try:
+ # if the binding is an sdk type binding
+ if is_deferred_binding:
+ return deferred_bindings_decode(binding=binding_obj,
+ pb=pb,
+ pytype=pytype,
+ datum=datum,
+ metadata=metadata,
+ function_name=function_name)
+ return binding_obj.decode(datum, trigger_metadata=metadata)
+ except NotImplementedError:
+ # Binding does not support the data.
+ dt = val.WhichOneof('data')
+ raise TypeError(
+ 'unable to decode incoming TypedData: '
+ 'unsupported combination of TypedData field %s '
+ 'and expected binding type %s' % (repr(dt), binding_obj))
+
+
+def get_datum(binding: str, obj: Any,
+ pytype: Optional[type]) -> Union[Datum, None]:
+ """
+ Convert an object to a datum with the specified type.
+ """
+ binding_obj = get_binding(binding)
+ try:
+ datum = binding_obj.encode(obj, expected_type=pytype)
+ except NotImplementedError:
+ # Binding does not support the data.
+ raise TypeError(
+ 'unable to encode outgoing TypedData: '
+ 'unsupported type "%s" for '
+ 'Python type "%s"' % (binding, type(obj).__name__))
+ return datum
+
+
+def _does_datatype_support_caching(datum: Datum):
+ supported_datatypes = ('bytes', 'string')
+ return datum.type in supported_datatypes
+
+
+def to_outgoing_proto(binding: str, obj: Any, *,
+ pytype: Optional[type],
+ protos):
+ datum = get_datum(binding, obj, pytype)
+ return datum_as_proto(datum, protos) # type: ignore
+
+
+def to_outgoing_param_binding(binding: str, obj: Any, *,
+ pytype: Optional[type],
+ out_name: str,
+ protos):
+ datum = get_datum(binding, obj, pytype)
+ # If not, send it as part of the response message over RPC
+ # rpc_val can be None here as we now support a None return type
+ rpc_val = datum_as_proto(datum, protos) # type: ignore
+ return protos.ParameterBinding(
+ name=out_name,
+ data=rpc_val)
+
+
+def deferred_bindings_decode(binding: Any,
+ pb: Any, *,
+ pytype: Optional[type],
+ datum: Any,
+ metadata: Any,
+ function_name: str):
+ """
+ The extension manages a cache for clients (ie. BlobClient, ContainerClient)
+ That have already been created, so that the worker can reuse the
+ previously created type without creating a new one.
+
+ For async types, the function_name is needed as a key to differentiate.
+ This prevents a known SDK issue where reusing a client across functions
+ can lose the session context and cause an error.
+
+ """
+
+ deferred_binding_type = binding.decode(datum,
+ trigger_metadata=metadata,
+ pytype=pytype)
+
+ return deferred_binding_type
+
+
+def check_deferred_bindings_enabled(param_anno: Union[type, None],
+ deferred_bindings_enabled: bool) -> Any:
+ """
+ Checks if deferred bindings is enabled at fx and single binding level
+
+ The first bool represents if deferred bindings is enabled at a fx level
+ The second represents if the current binding is deferred binding
+ """
+ if (DEFERRED_BINDING_REGISTRY is not None
+ and DEFERRED_BINDING_REGISTRY.check_supported_type(param_anno)):
+ return True, True
+ else:
+ return deferred_bindings_enabled, False
+
+
+def get_deferred_raw_bindings(indexed_function, input_types):
+ """
+ Calls a method from the base extension that generates the raw bindings
+ for a given function. It also returns logs for that function including
+ the defined binding type and if deferred bindings is enabled for that
+ binding.
+ """
+ raw_bindings, bindings_logs = DEFERRED_BINDING_REGISTRY.get_raw_bindings(
+ indexed_function, input_types)
+ return raw_bindings, bindings_logs
diff --git a/azure_functions_worker_v2/azure_functions_worker_v2/bindings/nullable_converters.py b/azure_functions_worker_v2/azure_functions_worker_v2/bindings/nullable_converters.py
new file mode 100644
index 000000000..51bf3f18e
--- /dev/null
+++ b/azure_functions_worker_v2/azure_functions_worker_v2/bindings/nullable_converters.py
@@ -0,0 +1,114 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+from datetime import datetime
+from typing import Optional, Union
+
+
+def to_nullable_string(nullable: Optional[str], property_name: str, protos):
+ """Converts string input to an 'NullableString' to be sent through the
+ RPC layer. Input that is not a string but is also not null or undefined
+ logs a function app level warning.
+
+ :param nullable Input to be converted to an NullableString if it is a
+ valid string
+ :param property_name The name of the property that the caller will
+ assign the output to. Used for debugging.
+ :param: protos The protos object used for returning the appropriate value
+ """
+ if isinstance(nullable, str):
+ return protos.NullableString(value=nullable)
+
+ if nullable is not None:
+ raise TypeError(
+ "A 'str' type was expected instead of a '%s' "
+ "type. Cannot parse value %s of '%s'."
+ % (type(nullable), nullable, property_name))
+
+ return None
+
+
+def to_nullable_bool(nullable: Optional[bool], property_name: str, protos):
+ """Converts boolean input to an 'NullableBool' to be sent through the
+ RPC layer. Input that is not a boolean but is also not null or undefined
+ logs a function app level warning.
+
+ :param nullable Input to be converted to an NullableBool if it is a
+ valid boolean
+ :param property_name The name of the property that the caller will
+ assign the output to. Used for debugging.
+ :param protos The protos object used for returning the appropriate value
+ """
+ if isinstance(nullable, bool):
+ return protos.NullableBool(value=nullable)
+
+ if nullable is not None:
+ raise TypeError(
+ "A 'bool' type was expected instead of a '%s' "
+ "type. Cannot parse value %s of '%s'."
+ % (type(nullable), nullable, property_name))
+
+ return None
+
+
+def to_nullable_double(nullable: Optional[Union[str, int, float]],
+ property_name: str, protos):
+ """Converts int or float or str that parses to a number to an
+ 'NullableDouble' to be sent through the RPC layer. Input that is not a
+ valid number but is also not null or undefined logs a function app level
+ warning.
+ :param nullable Input to be converted to an NullableDouble if it is a
+ valid number
+ :param property_name The name of the property that the caller will
+ assign the output to. Used for debugging.
+ :param protos The protos object used for returning the appropriate value
+ """
+ if isinstance(nullable, int) or isinstance(nullable, float):
+ return protos.NullableDouble(value=nullable)
+ elif isinstance(nullable, str):
+ if len(nullable) == 0:
+ return None
+
+ try:
+ return protos.NullableDouble(value=float(nullable))
+ except Exception:
+ raise TypeError(
+ "Cannot parse value %s of '%s' to "
+ "float." % (nullable, property_name))
+
+ if nullable is not None:
+ raise TypeError(
+ "A 'int' or 'float'"
+ " type was expected instead of a '%s' "
+ "type. Cannot parse value %s of '%s'."
+ % (type(nullable), nullable, property_name))
+
+ return None
+
+
+def to_nullable_timestamp(date_time: Optional[Union[datetime, int]],
+ property_name: str, protos):
+ """Converts Date or number input to an 'NullableTimestamp' to be sent
+ through the RPC layer. Input that is not a Date or number but is also
+ not null or undefined logs a function app level warning.
+
+ :param date_time Input to be converted to an NullableTimestamp if it is
+ valid input
+ :param property_name The name of the property that the caller will
+ assign the output to. Used for debugging.
+ :param protos The protos object used for returning the appropriate value
+ """
+ if date_time is not None:
+ try:
+ time_in_seconds = date_time if isinstance(date_time,
+ int) else \
+ date_time.timestamp()
+
+ return protos.NullableTimestamp(
+ value=protos.Timestamp(seconds=int(time_in_seconds)))
+ except Exception:
+ raise TypeError(
+ "A 'datetime' or 'int'"
+ " type was expected instead of a '%s' "
+ "type. Cannot parse value %s of '%s'."
+ % (type(date_time), date_time, property_name))
+ return None
diff --git a/azure_functions_worker_v2/azure_functions_worker_v2/bindings/out.py b/azure_functions_worker_v2/azure_functions_worker_v2/bindings/out.py
new file mode 100644
index 000000000..99632167e
--- /dev/null
+++ b/azure_functions_worker_v2/azure_functions_worker_v2/bindings/out.py
@@ -0,0 +1,15 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+from typing import Optional
+
+
class Out:
    """Holder for a single output-binding value.

    The user function assigns a value via set(); the worker later reads
    it back via get() when building the outgoing bindings.
    """

    def __init__(self) -> None:
        # No value until the function explicitly sets one.
        self.__value = None

    def set(self, val):
        """Record *val* as the output value."""
        self.__value = val

    def get(self) -> Optional[str]:
        """Return the stored value, or None if set() was never called."""
        return self.__value
diff --git a/azure_functions_worker_v2/azure_functions_worker_v2/bindings/retrycontext.py b/azure_functions_worker_v2/azure_functions_worker_v2/bindings/retrycontext.py
new file mode 100644
index 000000000..a7c53507a
--- /dev/null
+++ b/azure_functions_worker_v2/azure_functions_worker_v2/bindings/retrycontext.py
@@ -0,0 +1,47 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+from dataclasses import dataclass
+from enum import Enum
+
+
class RpcException:
    """Read-only snapshot of an exception carried across the RPC layer.

    Captures where the exception originated, its formatted traceback,
    and its message.
    """

    def __init__(self,
                 source: str,
                 stack_trace: str,
                 message: str) -> None:
        self._source = source
        self._stack_trace = stack_trace
        self._message = message

    @property
    def source(self) -> str:
        """Origin of the exception."""
        return self._source

    @property
    def stack_trace(self) -> str:
        """Formatted traceback text."""
        return self._stack_trace

    @property
    def message(self) -> str:
        """Human-readable exception message."""
        return self._message
+
+
class RetryPolicy(Enum):
    """Setting names under which retry-policy values are surfaced.

    Each member's value is the key used in the host's retry-context
    payload for the function invocation.
    """

    MAX_RETRY_COUNT = "max_retry_count"
    STRATEGY = "strategy"
    DELAY_INTERVAL = "delay_interval"
    MINIMUM_INTERVAL = "minimum_interval"
    MAXIMUM_INTERVAL = "maximum_interval"
+
+
@dataclass
class RetryContext:
    """Retry information for the current function invocation.

    Mirrors the retry-context payload provided by the Functions host.
    Note: the original version used bare strings between fields; those
    are statements (the first silently became the class docstring), so
    the fields are documented with comments instead.
    """

    # Current retry attempt number for this invocation.
    retry_count: int

    # Maximum number of retries allowed by the retry policy.
    max_retry_count: int

    # Details of the exception that caused the retry.
    rpc_exception: RpcException
diff --git a/azure_functions_worker_v2/azure_functions_worker_v2/bindings/tracecontext.py b/azure_functions_worker_v2/azure_functions_worker_v2/bindings/tracecontext.py
new file mode 100644
index 000000000..120f47ffd
--- /dev/null
+++ b/azure_functions_worker_v2/azure_functions_worker_v2/bindings/tracecontext.py
@@ -0,0 +1,43 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+from typing import Dict
+
+
class TraceContext:
    """W3C distributed-tracing context.

    See https://www.w3.org/TR/trace-context/ for the format of the
    traceparent/tracestate values.
    """

    def __init__(self, trace_parent: str,
                 trace_state: str, attributes: Dict[str, str]) -> None:
        self._parent = trace_parent
        self._state = trace_state
        self._attrs = attributes

    @property
    def Tracestate(self) -> str:
        """Get trace state from trace-context (deprecated)."""
        return self._state

    @property
    def Traceparent(self) -> str:
        """Get trace parent from trace-context (deprecated)."""
        return self._parent

    @property
    def Attributes(self) -> Dict[str, str]:
        """Get trace-context attributes (deprecated)."""
        return self._attrs

    @property
    def trace_state(self) -> str:
        """Get trace state from trace-context"""
        return self._state

    @property
    def trace_parent(self) -> str:
        """Get trace parent from trace-context"""
        return self._parent

    @property
    def attributes(self) -> Dict[str, str]:
        """Get trace-context attributes"""
        return self._attrs
diff --git a/azure_functions_worker_v2/azure_functions_worker_v2/functions.py b/azure_functions_worker_v2/azure_functions_worker_v2/functions.py
new file mode 100644
index 000000000..0557f35dd
--- /dev/null
+++ b/azure_functions_worker_v2/azure_functions_worker_v2/functions.py
@@ -0,0 +1,435 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import inspect
+import operator
+import pathlib
+import typing
+import uuid
+
+from .logging import logger
+
+from .bindings.meta import (has_implicit_output,
+ check_deferred_bindings_enabled,
+ check_output_type_annotation,
+ check_input_type_annotation)
+from .utils.constants import HTTP_TRIGGER
+from .utils.typing_inspect import is_generic_type, get_origin, get_args # type: ignore
+
+
class ParamTypeInfo(typing.NamedTuple):
    """Binding/type metadata for a single function parameter."""

    # Binding type name (e.g. "httpTrigger", "blob", or "generic").
    binding_name: str
    # The parameter's Python type annotation, if one was declared.
    pytype: typing.Optional[type]
    # True when the parameter is resolved via deferred (SDK-type) bindings.
    deferred_bindings_enabled: typing.Optional[bool] = False
+
+
class FunctionInfo(typing.NamedTuple):
    """Immutable metadata record for one indexed function."""

    # The user's callable to invoke.
    func: typing.Callable

    # Function name as declared in the app.
    name: str
    # Directory containing the function's source file.
    directory: str
    # Unique id used by the host to refer to this function.
    function_id: str
    # True when the function declares a 'context' parameter.
    requires_context: bool
    # True when the user function is a coroutine function.
    is_async: bool
    # True when the function has an explicit ($return) or implicit return.
    has_return: bool
    # True when the trigger is an HTTP trigger.
    is_http_func: bool
    # True when any parameter uses deferred (SDK-type) bindings.
    deferred_bindings_enabled: bool

    # Parameter name -> binding/type info for inputs and outputs.
    input_types: typing.Mapping[str, ParamTypeInfo]
    output_types: typing.Mapping[str, ParamTypeInfo]
    # Binding/type info for the return value, when one exists.
    return_type: typing.Optional[typing.Union[str, ParamTypeInfo]]

    # For HTTP functions: {"type": ..., "param_name": ...}; else None.
    trigger_metadata: typing.Optional[typing.Dict[str, typing.Any]]
+
+
class FunctionLoadError(RuntimeError):
    """Raised when a function fails load-time validation in the registry."""

    def __init__(self, function_name: str, msg: str) -> None:
        super().__init__(
            f"cannot load the {function_name} function: {msg}")
+
+
class Registry:
    """Registry of indexed functions, keyed by function id.

    Performs load-time validation of the declared bindings against the
    Python function signature and records per-function metadata
    (FunctionInfo) used at invocation time.
    """

    # function_id -> FunctionInfo for every successfully indexed function.
    _functions: typing.MutableMapping[str, FunctionInfo]
    # Becomes True once any registered function uses deferred bindings.
    _deferred_bindings_enabled: bool = False

    def __init__(self) -> None:
        self._functions = {}

    def get_function(self, function_id: str) -> typing.Union[FunctionInfo, None]:
        """Return the FunctionInfo for *function_id*, or None if unknown."""
        if function_id in self._functions:
            return self._functions[function_id]

        return None

    def deferred_bindings_enabled(self) -> bool:
        """True if any registered function uses deferred bindings."""
        return self._deferred_bindings_enabled

    @staticmethod
    def get_explicit_and_implicit_return(binding_name: str,
                                         binding,
                                         explicit_return: bool,
                                         implicit_return: bool,
                                         bound_params: dict) -> \
            typing.Tuple[bool, bool]:
        """Classify *binding* as an explicit ($return) or implicit return.

        Every non-$return binding is recorded in *bound_params* so it can
        later be matched against the function's signature.
        """
        if binding_name == '$return':
            explicit_return = True
        elif has_implicit_output(binding.type):
            implicit_return = True
            bound_params[binding_name] = binding
        else:
            bound_params[binding_name] = binding
        return explicit_return, implicit_return

    @staticmethod
    def get_return_binding(binding_name: str,
                           binding_type: str,
                           return_binding_name: str,
                           explicit_return_val_set: bool) \
            -> typing.Tuple[str, bool]:
        """Determine the binding type used for the function's return value.

        An explicit $return binding always wins over implicit-output
        bindings; once it is seen, later bindings are ignored.
        """
        # prioritize explicit return value
        if explicit_return_val_set:
            return return_binding_name, explicit_return_val_set
        if binding_name == "$return":
            return_binding_name = binding_type
            assert return_binding_name is not None
            explicit_return_val_set = True
        elif has_implicit_output(binding_type):
            return_binding_name = binding_type

        return return_binding_name, explicit_return_val_set

    @staticmethod
    def validate_binding_direction(binding_name: str,
                                   binding_direction: str,
                                   func_name: str,
                                   protos):
        """Reject unsupported binding directions.

        :raises FunctionLoadError: for "inout" bindings, or when $return
            is not declared as an "out" binding.
        """
        if binding_direction == protos.BindingInfo.inout:
            raise FunctionLoadError(
                func_name,
                '"inout" bindings are not supported')

        if binding_name == '$return' and \
                binding_direction != protos.BindingInfo.out:
            raise FunctionLoadError(
                func_name,
                '"$return" binding must have direction set to "out"')

    @staticmethod
    def is_context_required(params, bound_params: dict,
                            annotations: dict,
                            func_name: str) -> bool:
        """Detect an unbound 'context' parameter and validate its annotation.

        Removes 'context' from *params* (it is injected by the worker,
        not by a binding).

        :raises FunctionLoadError: when 'context' is annotated with
            anything other than azure.functions.Context.
        """
        requires_context = False
        if 'context' in params and 'context' not in bound_params:
            requires_context = True
            params.pop('context')
            if 'context' in annotations:
                ctx_anno = annotations.get('context')
                # Only the name is checked to avoid importing azure.functions.
                if (not isinstance(ctx_anno, type)
                        or ctx_anno.__name__ != 'Context'):
                    raise FunctionLoadError(
                        func_name,
                        'the "context" parameter is expected to be of '
                        'type azure.functions.Context, got "' + repr(ctx_anno) + '"')
        return requires_context

    @staticmethod
    def validate_function_params(params: dict, bound_params: dict,
                                 annotations: dict, func_name: str,
                                 protos):
        """Cross-check signature parameters against declared bindings.

        Returns (input_types, output_types, deferred_bindings_enabled).

        :raises FunctionLoadError: on any mismatch between a parameter's
            annotation and its binding declaration.
        """
        logger.debug("Params: %s, BoundParams: %s, Annotations: %s, FuncName: %s",
                     params, bound_params, annotations, func_name)
        # Parameters present in the Python signature with no binding
        # declared for them.
        extra_params = set(params) - set(bound_params)
        if extra_params:
            raise FunctionLoadError(
                func_name,
                'Extra parameters in function signature — the following parameters '
                'are present in the Python function definition but are not declared '
                'as bindings: ' + repr(extra_params))

        # Bindings declared in the decorators with no matching parameter.
        # BUG FIX: the original reported set(params) - set(bound_params)
        # here, which is provably empty once the first check passes.
        missing_params = set(bound_params) - set(params)
        if missing_params:
            raise FunctionLoadError(
                func_name,
                'Function parameter mismatch — the following trigger/input bindings '
                'are declared in the function decorators but missing from the '
                'Python function signature: ' + repr(missing_params))

        input_types: typing.Dict[str, ParamTypeInfo] = {}
        output_types: typing.Dict[str, ParamTypeInfo] = {}
        fx_deferred_bindings_enabled = False

        for param in params.values():
            binding = bound_params[param.name]
            logger.debug("Param %s, binding: %s", param, binding)

            param_has_anno = param.name in annotations
            param_anno = annotations.get(param.name)
            logger.debug("Param_has_anno %s, param_anno: %s",
                         param_has_anno, param_anno)

            # Check if deferred bindings is enabled
            fx_deferred_bindings_enabled, is_deferred_binding = (
                check_deferred_bindings_enabled(
                    param_anno,
                    fx_deferred_bindings_enabled))

            # An "Out" annotation may appear as a bare class or as a
            # parameterized generic (func.Out[T]); check both shapes.
            if param_has_anno:
                if is_generic_type(param_anno):
                    param_anno_origin = get_origin(param_anno)
                    if param_anno_origin is not None:
                        is_param_out = (
                            isinstance(param_anno_origin, type)
                            and param_anno_origin.__name__ == 'Out'
                        )
                    else:
                        is_param_out = (
                            isinstance(param_anno, type)
                            and param_anno.__name__ == 'Out'
                        )
                else:
                    is_param_out = (
                        isinstance(param_anno, type)
                        and param_anno.__name__ == 'Out'
                    )
            else:
                is_param_out = False

            is_binding_out = binding.direction == protos.BindingInfo.out

            if is_param_out:
                param_anno_args = get_args(param_anno)
                if len(param_anno_args) != 1:
                    raise FunctionLoadError(
                        func_name,
                        'binding ' + param.name
                        + ' has invalid Out annotation ' + repr(param_anno))
                param_py_type = param_anno_args[0]

                # typing_inspect.get_args() returns a flat list,
                # so if the annotation was func.Out[typing.List[foo]],
                # we need to reconstruct it.
                if (isinstance(param_py_type, tuple)
                        and is_generic_type(param_py_type[0])):
                    param_py_type = operator.getitem(
                        param_py_type[0], *param_py_type[1:])
            else:
                param_py_type = param_anno

            logger.debug("Param_py_type %s", param_py_type)

            if (param_has_anno and not isinstance(param_py_type, type)
                    and not is_generic_type(param_py_type)):
                raise FunctionLoadError(
                    func_name,
                    'binding ' + param.name
                    + ' has invalid non-type annotation ' + repr(param_anno))

            if is_binding_out and param_has_anno and not is_param_out:
                raise FunctionLoadError(
                    func_name,
                    'binding ' + param.name + ' is declared to have the "out" '
                    'direction, but its annotation in Python is not '
                    'a subclass of azure.functions.Out')

            if not is_binding_out and is_param_out:
                raise FunctionLoadError(
                    func_name,
                    'binding ' + param.name + ' is declared to have the "in" '
                    'direction in the binding decorator, but its annotation '
                    'is azure.functions.Out in Python')

            # str/bytes annotations (without implicit output) fall back to
            # the 'generic' binding converter.
            if param_has_anno and param_py_type in (str, bytes) and (
                    not has_implicit_output(binding.type)):
                param_bind_type = 'generic'
            else:
                param_bind_type = binding.type

            logger.debug("param_bind_type %s", param_bind_type)

            if param_has_anno:
                if is_param_out:
                    checks_out = check_output_type_annotation(
                        param_bind_type, param_py_type)
                else:
                    checks_out = check_input_type_annotation(
                        param_bind_type, param_py_type, is_deferred_binding)

                logger.debug("checks_out: %s",
                             checks_out)

                if not checks_out:
                    if binding.data_type is not protos.BindingInfo.undefined:
                        raise FunctionLoadError(
                            func_name,
                            'binding type "' + repr(param.name)
                            + '" and dataType "' + binding.type + '" in '
                            'the binding decorator do not match the corresponding '
                            'function parameter\'s Python type '
                            'annotation ' + param_py_type.__name__)
                    else:
                        raise FunctionLoadError(
                            func_name,
                            'type of ' + param.name + ' binding in the binding '
                            'decorator "' + binding.type + '" does not match its '
                            'Python annotation "' + param_py_type.__name__ + '"')

            param_type_info = ParamTypeInfo(param_bind_type,
                                            param_py_type,
                                            is_deferred_binding)
            if is_binding_out:
                output_types[param.name] = param_type_info
            else:
                input_types[param.name] = param_type_info
        return input_types, output_types, fx_deferred_bindings_enabled

    @staticmethod
    def get_function_return_type(annotations: dict, has_explicit_return: bool,
                                 has_implicit_return: bool, binding_name: str,
                                 func_name: str):
        """Validate and build the ParamTypeInfo for the function's return.

        Returns None when the function has no return binding at all.

        :raises FunctionLoadError: on an Out-typed or non-type return
            annotation, or when the annotation does not match the binding.
        """
        return_pytype = None
        if has_explicit_return and 'return' in annotations:
            return_anno = annotations.get('return')
            if is_generic_type(
                    return_anno) and get_origin(
                    return_anno) is not None and get_origin(
                    return_anno).__name__ == 'Out':
                raise FunctionLoadError(
                    func_name,
                    'return annotation should not be azure.functions.Out')

            return_pytype = return_anno
            if not isinstance(return_pytype, type):
                raise FunctionLoadError(
                    func_name,
                    'has invalid non-type return '
                    'annotation ' + repr(return_pytype))

            # BUG FIX: the original used "is (str, bytes)", an identity
            # comparison against a tuple literal that is always False;
            # membership is what was intended.
            if return_pytype in (str, bytes):
                binding_name = 'generic'

            if not check_output_type_annotation(
                    binding_name, return_pytype):
                raise FunctionLoadError(
                    func_name,
                    'Python return annotation "' + return_pytype.__name__
                    + '" does not match binding type "' + binding_name + '"')

        if has_implicit_return and 'return' in annotations:
            return_pytype = annotations.get('return')

        return_type = None
        if has_explicit_return or has_implicit_return:
            return_type = ParamTypeInfo(binding_name, return_pytype)

        return return_type

    def add_func_to_registry_and_return_funcinfo(
            self, function,
            function_name: str,
            function_id: str,
            directory: str,
            requires_context: bool,
            has_explicit_return: bool,
            has_implicit_return: bool,
            deferred_bindings_enabled: bool,
            input_types: typing.Dict[str, ParamTypeInfo],
            output_types: typing.Dict[str, ParamTypeInfo],
            return_type: str):
        """Build the FunctionInfo, store it in the registry, and return it.

        Also latches the registry-wide deferred-bindings flag once any
        function enables it.
        """
        http_trigger_param_name = self._get_http_trigger_param_name(input_types)

        trigger_metadata = None
        is_http_func = False
        if http_trigger_param_name is not None:
            trigger_metadata = {
                "type": HTTP_TRIGGER,
                "param_name": http_trigger_param_name
            }
            is_http_func = True

        function_info = FunctionInfo(
            func=function,
            name=function_name,
            directory=directory,
            function_id=function_id,
            requires_context=requires_context,
            is_async=inspect.iscoroutinefunction(function),
            has_return=has_explicit_return or has_implicit_return,
            is_http_func=is_http_func,
            deferred_bindings_enabled=deferred_bindings_enabled,
            input_types=input_types,
            output_types=output_types,
            return_type=return_type,
            trigger_metadata=trigger_metadata)

        self._functions[function_id] = function_info

        # Once enabled for any function, the flag stays enabled.
        if not self._deferred_bindings_enabled:
            self._deferred_bindings_enabled = deferred_bindings_enabled

        return function_info

    def _get_http_trigger_param_name(self, input_types):
        """Return the name of the httpTrigger parameter, or None."""
        http_trigger_param_name = next(
            (input_type for input_type, type_info in input_types.items()
             if type_info.binding_name == HTTP_TRIGGER),
            None
        )
        return http_trigger_param_name

    def add_indexed_function(self, function, protos):
        """Validate an indexed function end-to-end and register it.

        :raises FunctionLoadError: on any binding/annotation mismatch.
        """
        func = function.get_user_function()
        func_name = function.get_function_name()
        # Deterministic id derived from the function name.
        function_id = str(uuid.uuid5(namespace=uuid.NAMESPACE_OID,
                                     name=func_name))
        return_binding_name: typing.Optional[str] = None
        explicit_return_val_set = False
        has_explicit_return = False
        has_implicit_return = False

        sig = inspect.signature(func)
        params = dict(sig.parameters)
        annotations = typing.get_type_hints(func)
        func_dir = str(pathlib.Path(inspect.getfile(func)).parent)

        bound_params = {}
        for binding in function.get_bindings():
            self.validate_binding_direction(binding.name,
                                            binding.direction,
                                            func_name, protos)

            has_explicit_return, has_implicit_return = \
                self.get_explicit_and_implicit_return(
                    binding.name, binding, has_explicit_return,
                    has_implicit_return, bound_params)

            return_binding_name, explicit_return_val_set = \
                self.get_return_binding(binding.name,
                                        binding.type,
                                        return_binding_name,
                                        explicit_return_val_set)

        requires_context = self.is_context_required(params, bound_params,
                                                    annotations,
                                                    func_name)

        (input_types, output_types,
         deferred_bindings_enabled) = self.validate_function_params(
            params,
            bound_params,
            annotations,
            func_name,
            protos)

        return_type = \
            self.get_function_return_type(annotations,
                                          has_explicit_return,
                                          has_implicit_return,
                                          return_binding_name,
                                          func_name)

        return \
            self.add_func_to_registry_and_return_funcinfo(
                func, func_name, function_id, func_dir,
                requires_context, has_explicit_return,
                has_implicit_return, deferred_bindings_enabled,
                input_types, output_types,
                return_type)
diff --git a/azure_functions_worker_v2/azure_functions_worker_v2/handle_event.py b/azure_functions_worker_v2/azure_functions_worker_v2/handle_event.py
new file mode 100644
index 000000000..783d93d44
--- /dev/null
+++ b/azure_functions_worker_v2/azure_functions_worker_v2/handle_event.py
@@ -0,0 +1,442 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import json
+import logging
+import os
+import sys
+
+from typing import List, MutableMapping, Optional
+
+from .functions import FunctionInfo, Registry
+from .http_v2 import (
+ HttpServerInitError,
+ HttpV2Registry,
+ http_coordinator,
+ initialize_http_server,
+ sync_http_request,
+)
+from .loader import index_function_app, process_indexed_function
+from .logging import logger
+from .otel import otel_manager, configure_opentelemetry, initialize_azure_monitor
+from .version import VERSION
+
+from .bindings.context import get_context
+from .bindings.meta import (from_incoming_proto,
+ is_trigger_binding,
+ load_binding_registry,
+ to_outgoing_param_binding,
+ to_outgoing_proto)
+from .bindings.out import Out
+from .utils.app_setting_manager import (get_app_setting,
+ get_python_appsetting_state,
+ is_envvar_true)
+from .utils.constants import (FUNCTION_DATA_CACHE,
+ HTTP_URI,
+ PYTHON_APPLICATIONINSIGHTS_ENABLE_TELEMETRY,
+ PYTHON_ENABLE_DEBUG_LOGGING,
+ PYTHON_ENABLE_OPENTELEMETRY,
+ PYTHON_SCRIPT_FILE_NAME,
+ PYTHON_SCRIPT_FILE_NAME_DEFAULT,
+ RAW_HTTP_BODY_BYTES,
+ REQUIRES_ROUTE_PARAMETERS,
+ RPC_HTTP_BODY_ONLY,
+ RPC_HTTP_TRIGGER_METADATA_REMOVED,
+ SHARED_MEMORY_DATA_TRANSFER,
+ TRUE,
+ TYPED_DATA_COLLECTION,
+ WORKER_OPEN_TELEMETRY_ENABLED,
+ WORKER_STATUS)
+from .utils.executor import get_current_loop, execute_async, run_sync_func
+from .utils.helpers import change_cwd, get_sdk_version, get_worker_metadata
+from .utils.tracing import serialize_exception
+from .utils.validators import validate_script_file_name
+
# Cached function-metadata results produced during indexing.
_metadata_result: Optional[List] = None
# Registry of indexed functions (annotation corrected: Registry is not a
# MutableMapping — it wraps one).
_functions: Registry = Registry()
# True when the host advertised the FunctionDataCache capability.
_function_data_cache_enabled: bool = False
# gRPC host address received from the proxy worker at init.
_host: str = ""
# Protos module handed over by the proxy worker; set during init/reload.
protos = None
+
+
async def worker_init_request(request):
    """Handle WorkerInitRequest: negotiate capabilities and index the app.

    Stores the host address and protos module supplied by the proxy
    worker, optionally enables OpenTelemetry/Azure Monitor, loads the
    binding registry, and indexes the function app. Returns a
    WorkerInitResponse with Success or Failure status.
    """
    logger.debug("V2 Library Worker: received WorkerInitRequest,"
                 "Version %s", VERSION)
    global _host, protos, _function_data_cache_enabled
    init_request = request.request.worker_init_request
    host_capabilities = init_request.capabilities
    # Host address and protos module are provided by the proxy worker.
    _host = request.properties.get("host")
    protos = request.properties.get("protos")
    if FUNCTION_DATA_CACHE in host_capabilities:
        val = host_capabilities[FUNCTION_DATA_CACHE]
        _function_data_cache_enabled = val == TRUE

    # Capabilities this worker advertises back to the host.
    capabilities = {
        RAW_HTTP_BODY_BYTES: TRUE,
        TYPED_DATA_COLLECTION: TRUE,
        RPC_HTTP_BODY_ONLY: TRUE,
        WORKER_STATUS: TRUE,
        RPC_HTTP_TRIGGER_METADATA_REMOVED: TRUE,
        SHARED_MEMORY_DATA_TRANSFER: TRUE,
    }
    if is_envvar_true(PYTHON_APPLICATIONINSIGHTS_ENABLE_TELEMETRY):
        initialize_azure_monitor()

    if is_envvar_true(PYTHON_ENABLE_OPENTELEMETRY):
        otel_manager.set_otel_libs_available(True)

    if (otel_manager.get_azure_monitor_available()
            or otel_manager.get_otel_libs_available()):
        capabilities[WORKER_OPEN_TELEMETRY_ENABLED] = TRUE

    # loading bindings registry and saving results to a static
    # dictionary which will be later used in the invocation request
    load_binding_registry()

    # Index in init by default. Fail if an exception occurs.
    try:
        load_function_metadata(
            init_request.function_app_directory,
            caller_info="worker_init_request")
        try:
            # HTTP streaming (v2) gets its own server; advertise its URI.
            if HttpV2Registry.http_v2_enabled():
                logger.debug("Streaming enabled.")
                capabilities[HTTP_URI] = \
                    initialize_http_server(_host)
                capabilities[REQUIRES_ROUTE_PARAMETERS] = TRUE
        except HttpServerInitError as ex:
            # Server start-up failure is reported as a failed init,
            # not a worker crash.
            return protos.WorkerInitResponse(
                capabilities=capabilities,
                worker_metadata=get_worker_metadata(protos),
                result=protos.StatusResult(
                    status=protos.StatusResult.Failure,
                    exception=serialize_exception(
                        ex, protos))
            )
    except Exception as ex:
        # Indexing failures are likewise surfaced via the response.
        return protos.WorkerInitResponse(
            capabilities=capabilities,
            worker_metadata=get_worker_metadata(protos),
            result=protos.StatusResult(
                status=protos.StatusResult.Failure,
                exception=serialize_exception(
                    ex, protos))
        )

    logger.debug("Successfully completed WorkerInitRequest")
    return protos.WorkerInitResponse(
        capabilities=capabilities,
        worker_metadata=get_worker_metadata(protos),
        result=protos.StatusResult(status=protos.StatusResult.Success)
    )
+
+
+# worker_status_request can be done in the proxy worker
+
async def functions_metadata_request(request):
    """Return the metadata collected during indexing.

    Indexing already ran in worker_init_request; this simply replies with
    the cached _metadata_result (None falls back to host-side indexing
    being disabled via use_default_metadata_indexing=False).
    """
    logger.debug("V2 Library Worker: received WorkerMetadataRequest."
                 " Metadata Result: %s,"
                 " azure-functions version: %s",
                 _metadata_result, get_sdk_version())

    return protos.FunctionMetadataResponse(
        use_default_metadata_indexing=False,
        function_metadata_results=_metadata_result,
        result=protos.StatusResult(
            status=protos.StatusResult.Success))
+
+
async def function_load_request(request):
    """Acknowledge a FunctionLoadRequest.

    Loading already happened during indexing, so the handler just echoes
    the function_id back with a Success status.
    """
    logger.debug("V2 Library Worker: received WorkerLoadRequest")
    load_request = request.request.function_load_request

    return protos.FunctionLoadResponse(
        function_id=load_request.function_id,
        result=protos.StatusResult(
            status=protos.StatusResult.Success))
+
+
async def invocation_request(request):
    """Execute one function invocation and build the InvocationResponse.

    Converts the incoming binding data into Python arguments, runs the
    user function (async directly on the loop, sync on the proxy-supplied
    threadpool), and marshals outputs and the return value back to protos.
    Any exception is returned as a Failure status result.
    """
    logger.debug("V2 Library Worker: received WorkerInvocationRequest")
    global protos
    invoc_request = request.request.invocation_request
    invocation_id = invoc_request.invocation_id
    function_id = invoc_request.function_id
    http_v2_enabled = False
    threadpool = request.properties.get("threadpool")
    logger.debug("All variables obtained from proxy worker."
                 " Invocation ID: %s, Function ID: %s, Threadpool: %s",
                 invocation_id, function_id, threadpool)

    try:
        fi: FunctionInfo = _functions.get_function(
            function_id)
        assert fi is not None
        logger.info("Function name: %s, Function Type: %s",
                    fi.name,
                    ("async" if fi.is_async else "sync"))

        args = {}

        # Reuse the FunctionInfo fetched above instead of re-querying the
        # registry for each property (original called get_function twice).
        http_v2_enabled = fi.is_http_func and \
            HttpV2Registry.http_v2_enabled()

        for pb in invoc_request.input_data:
            pb_type_info = fi.input_types[pb.name]
            # Trigger metadata is only meaningful for the trigger binding.
            if is_trigger_binding(pb_type_info.binding_name):
                trigger_metadata = invoc_request.trigger_metadata
            else:
                trigger_metadata = None

            args[pb.name] = from_incoming_proto(
                pb_type_info.binding_name,
                pb,
                trigger_metadata=trigger_metadata,
                pytype=pb_type_info.pytype,
                function_name=fi.name,
                is_deferred_binding=pb_type_info.deferred_bindings_enabled)

        if http_v2_enabled:
            # Streaming mode: swap the proto-derived request for the live
            # request held by the HTTP coordinator.
            http_request = await http_coordinator.get_http_request_async(
                invocation_id)

            trigger_arg_name = fi.trigger_metadata.get('param_name')
            func_http_request = args[trigger_arg_name]
            await sync_http_request(http_request, func_http_request)
            args[trigger_arg_name] = http_request

        fi_context = get_context(invoc_request, fi.name,
                                 fi.directory)

        # Use local thread storage to store the invocation ID
        # for a customer's threads
        fi_context.thread_local_storage.invocation_id = invocation_id
        if fi.requires_context:
            args['context'] = fi_context

        # Out() holders collect values the function assigns to output
        # bindings.
        if fi.output_types:
            for name in fi.output_types:
                args[name] = Out()

        if fi.is_async:
            if (otel_manager.get_azure_monitor_available()
                    or otel_manager.get_otel_libs_available()):
                configure_opentelemetry(fi_context)

            # Extensions are not supported
            call_result = await execute_async(fi.func, args)
        else:
            _loop = get_current_loop()
            call_result = await _loop.run_in_executor(
                threadpool,
                run_sync_func,
                invocation_id, fi_context, fi.func, args)

        if call_result is not None and not fi.has_return:
            # BUG FIX: the original passed a %-format string and the
            # argument as a tuple to RuntimeError (never formatted), and
            # the concatenated fragments lacked a separating space.
            raise RuntimeError(
                "function " + repr(fi.name) + " without a $return binding "
                "returned a non-None value")

        if http_v2_enabled:
            http_coordinator.set_http_response(invocation_id, call_result)

        output_data = []
        if fi.output_types:
            for out_name, out_type_info in fi.output_types.items():
                val = args[out_name].get()
                if val is None:
                    # Skip outputs the function never set.
                    continue

                param_binding = to_outgoing_param_binding(
                    out_type_info.binding_name, val,
                    pytype=out_type_info.pytype,
                    out_name=out_name,
                    protos=protos)
                output_data.append(param_binding)

        return_value = None
        if fi.return_type is not None and not http_v2_enabled:
            return_value = to_outgoing_proto(
                fi.return_type.binding_name,
                call_result,
                pytype=fi.return_type.pytype,
                protos=protos
            )

        # Actively flush customer print() function to console
        sys.stdout.flush()
        logger.debug("Successfully completed WorkerInvocationRequest.")
        return protos.InvocationResponse(
            invocation_id=invocation_id,
            return_value=return_value,
            result=protos.StatusResult(
                status=protos.StatusResult.Success),
            output_data=output_data)

    except Exception as ex:
        # Unblock any coordinator waiter, then report the failure.
        if http_v2_enabled:
            http_coordinator.set_http_response(invocation_id, ex)
        return protos.InvocationResponse(
            invocation_id=invocation_id,
            result=protos.StatusResult(
                status=protos.StatusResult.Failure,
                exception=serialize_exception(ex, protos)))
+
+
async def function_environment_reload_request(request):
    """Only runs on Linux Consumption placeholder specialization.
    This is called only when placeholder mode is true. On worker restarts
    worker init request will be called directly.
    """
    logger.debug("V2 Library Worker: received FunctionEnvironmentReloadRequest, "
                 "Version %s", VERSION)
    global _host, protos
    try:

        func_env_reload_request = \
            request.request.function_environment_reload_request
        directory = func_env_reload_request.function_app_directory

        # Make the customer's app directory importable.
        if func_env_reload_request.function_app_directory:
            sys.path.append(func_env_reload_request.function_app_directory)

        # Clear sys.path import cache, reload all module from new sys.path
        sys.path_importer_cache.clear()

        # Reload environment variables
        os.environ.clear()
        env_vars = func_env_reload_request.environment_variables
        for var in env_vars:
            os.environ[var] = env_vars[var]

        # TODO: Apply PYTHON_THREADPOOL_THREAD_COUNT

        if is_envvar_true(PYTHON_ENABLE_DEBUG_LOGGING):
            root_logger = logging.getLogger("azure.functions")
            root_logger.setLevel(logging.DEBUG)

        # calling load_binding_registry again since the
        # reload_customer_libraries call clears the registry
        load_binding_registry()

        capabilities = {}
        if is_envvar_true(PYTHON_ENABLE_OPENTELEMETRY):
            otel_manager.set_otel_libs_available(True)

        if is_envvar_true(PYTHON_APPLICATIONINSIGHTS_ENABLE_TELEMETRY):
            initialize_azure_monitor()

        if (otel_manager.get_azure_monitor_available()
                or otel_manager.get_otel_libs_available()):
            capabilities[WORKER_OPEN_TELEMETRY_ENABLED] = (
                TRUE)

        try:
            # Host address and protos come from the proxy worker, same as
            # in worker_init_request.
            _host = request.properties.get("host")
            protos = request.properties.get("protos")
            load_function_metadata(
                directory,
                caller_info="environment_reload_request")
            if HttpV2Registry.http_v2_enabled():
                capabilities[HTTP_URI] = \
                    initialize_http_server(_host)
                capabilities[REQUIRES_ROUTE_PARAMETERS] = TRUE
        except HttpServerInitError as ex:
            # HTTP server start-up failure is reported as a failed reload,
            # not a worker crash.
            return protos.FunctionEnvironmentReloadResponse(
                result=protos.StatusResult(
                    status=protos.StatusResult.Failure,
                    exception=serialize_exception(ex, protos)))

        # Change function app directory
        if getattr(func_env_reload_request,
                   'function_app_directory', None):
            change_cwd(
                func_env_reload_request.function_app_directory)

        logger.debug("Successfully completed WorkerEnvReloadRequest.")
        return protos.FunctionEnvironmentReloadResponse(
            capabilities=capabilities,
            worker_metadata=get_worker_metadata(protos),
            result=protos.StatusResult(
                status=protos.StatusResult.Success))

    except Exception as ex:
        return protos.FunctionEnvironmentReloadResponse(
            result=protos.StatusResult(
                status=protos.StatusResult.Failure,
                exception=serialize_exception(ex, protos)))
+
+
def load_function_metadata(function_app_directory, caller_info):
    """Index the functions in the function app directory and save the
    results in _metadata_result.

    If an exception occurs during the indexing, it will be caught
    in the worker_init_request and returned as a failure
    status result.

    (Fixes: docstring moved to the top of the function — it previously
    followed the ``global`` statement and so was an ordinary string
    expression, not a docstring; duplicate ``global _metadata_result``
    declarations merged into one; unused ``protos`` removed from the
    global list.)
    """
    global _metadata_result

    script_file_name = get_app_setting(
        setting=PYTHON_SCRIPT_FILE_NAME,
        default_value=PYTHON_SCRIPT_FILE_NAME_DEFAULT)

    logger.debug(
        'Received load_function_metadata request from %s, '
        'script_file_name: %s',
        caller_info, script_file_name)

    validate_script_file_name(script_file_name)
    function_path = os.path.join(function_app_directory,
                                 script_file_name)

    # For the V1 programming model the script file does not exist, so
    # indexing is skipped and the metadata result stays None.
    _metadata_result = (index_functions(function_path, function_app_directory)) \
        if os.path.exists(function_path) else None
+
+
def index_functions(function_path: str, function_dir: str):
    """Index the function app at *function_path* and register its functions.

    Logs a per-function summary of bindings (including deferred-binding
    details) and returns the function metadata results; returns None when
    nothing was indexed.
    """
    indexed_functions = index_function_app(function_path)

    if indexed_functions:
        fx_metadata_results, fx_bindings_logs = (
            process_indexed_function(
                protos,
                _functions,
                indexed_functions,
                function_dir))

        indexed_function_logs: List[str] = []
        indexed_function_bindings_logs = []
        for fx in indexed_functions:
            fx_binding_logs = fx_bindings_logs.get(fx)
            for binding in fx.get_bindings():
                # Empty string when no deferred-binding info was recorded.
                deferred_binding_info = (
                    fx_binding_logs.get(binding.name) or "")
                indexed_function_bindings_logs.append((
                    binding.type, binding.name, deferred_binding_info))

            indexed_function_logs.append(
                "Function Name: " + fx.get_function_name()
                + ", Function Binding: "
                + str(indexed_function_bindings_logs))

        log_data = {
            "message": "Successfully indexed function app.",
            "function_count": len(indexed_functions),
            "functions": " ".join(indexed_function_logs),
            "deferred_bindings_enabled": _functions.deferred_bindings_enabled(),
            "app_settings": get_python_appsetting_state(),
            "azure-functions version": get_sdk_version(),
        }
        logger.info(json.dumps(log_data))

        return fx_metadata_results
diff --git a/azure_functions_worker_v2/azure_functions_worker_v2/http_v2.py b/azure_functions_worker_v2/azure_functions_worker_v2/http_v2.py
new file mode 100644
index 000000000..9dad2f54d
--- /dev/null
+++ b/azure_functions_worker_v2/azure_functions_worker_v2/http_v2.py
@@ -0,0 +1,291 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import abc
+import asyncio
+import importlib
+import socket
+
+from typing import Any, Dict
+
+from azure_functions_worker_v2.logging import logger
+
+from azure_functions_worker_v2.utils.constants import (
+ X_MS_INVOCATION_ID,
+)
+
+
+# Http V2 Exceptions
class HttpServerInitError(Exception):
    """Raised when the HTTP v2 server fails to initialize."""
+
+
class MissingHeaderError(ValueError):
    """Raised when a required header is absent from an HTTP request."""
+
+
class BaseContextReference(abc.ABC):
    """Per-invocation state shared between the HTTP server and the worker.

    Holds the request/response objects plus events that signal when each
    becomes available. Assigning ``http_request`` or ``http_response``
    sets the corresponding availability event so waiters wake up.
    """

    def __init__(self, event_class, http_request=None, http_response=None,
                 function=None, fi_context=None, args=None,
                 http_trigger_param_name=None):
        self._http_request = http_request
        self._http_response = http_response
        # Pure data holders: no extra logic runs on get/set, so plain
        # attributes replace the original pass-through properties.
        self.function = function
        self.fi_context = fi_context
        self.args = args
        self.http_trigger_param_name = http_trigger_param_name
        self._http_request_available_event = event_class()
        self._http_response_available_event = event_class()

    @property
    def http_request(self):
        """The pending HTTP request (None once popped)."""
        return self._http_request

    @http_request.setter
    def http_request(self, value):
        # Wake any coroutine waiting for the request to arrive.
        self._http_request = value
        self._http_request_available_event.set()

    @property
    def http_response(self):
        """The pending HTTP response (None once popped)."""
        return self._http_response

    @http_response.setter
    def http_response(self, value):
        # Wake any coroutine waiting for the response to be ready.
        self._http_response = value
        self._http_response_available_event.set()

    @property
    def http_request_available_event(self):
        """Event set once http_request has been assigned."""
        return self._http_request_available_event

    @property
    def http_response_available_event(self):
        """Event set once http_response has been assigned."""
        return self._http_response_available_event
+
+
class AsyncContextReference(BaseContextReference):
    """Context reference whose availability events are asyncio.Event,
    suitable for awaiting from coroutines."""

    def __init__(self, http_request=None, http_response=None, function=None,
                 fi_context=None, args=None):
        super().__init__(event_class=asyncio.Event,
                         http_request=http_request,
                         http_response=http_response,
                         function=function,
                         fi_context=fi_context,
                         args=args)
        # Marks this reference as usable from async code paths.
        self.is_async = True
+
+
class SingletonMeta(type):
    """Metaclass that guarantees at most one instance per class."""

    # Class -> its single instance.
    _instances: Dict[Any, Any] = {}

    def __call__(cls, *args, **kwargs):
        # First instantiation is cached; later calls reuse it.
        instance = cls._instances.get(cls)
        if instance is None:
            instance = super().__call__(*args, **kwargs)
            cls._instances[cls] = instance
        return instance
+
+
class HttpCoordinator(metaclass=SingletonMeta):
    """Singleton that hands HTTP v2 requests and responses between the
    embedded web server and the function invocation, keyed by
    invocation id."""

    def __init__(self):
        self._context_references: Dict[str, BaseContextReference] = {}

    def set_http_request(self, invoc_id, http_request):
        """Record the request for *invoc_id*, creating its context lazily."""
        if invoc_id not in self._context_references:
            self._context_references[invoc_id] = AsyncContextReference()
        self._context_references[invoc_id].http_request = http_request

    def set_http_response(self, invoc_id, http_response):
        """Record the response for *invoc_id*; the context must exist."""
        if invoc_id not in self._context_references:
            raise KeyError("No context reference found for invocation %s"
                           % invoc_id)
        self._context_references[invoc_id].http_response = http_response

    async def get_http_request_async(self, invoc_id):
        """Wait until the request for *invoc_id* arrives, then pop it."""
        if invoc_id not in self._context_references:
            self._context_references[invoc_id] = AsyncContextReference()
        context_ref = self._context_references[invoc_id]
        await context_ref.http_request_available_event.wait()
        return self._pop_http_request(invoc_id)

    async def await_http_response_async(self, invoc_id):
        """Wait until the response for *invoc_id* is ready, then pop it."""
        if invoc_id not in self._context_references:
            raise KeyError("No context reference found for invocation %s"
                           % invoc_id)
        context_ref = self._context_references[invoc_id]
        await context_ref.http_response_available_event.wait()
        return self._pop_http_response(invoc_id)

    def _pop_http_request(self, invoc_id):
        """Return and clear the stored request; raise if none present."""
        context_ref = self._context_references.get(invoc_id)
        request = context_ref.http_request
        if request is None:
            raise ValueError("No http request found for invocation %s"
                             % invoc_id)
        context_ref.http_request = None
        return request

    def _pop_http_response(self, invoc_id):
        """Return and clear the stored response; raise if none present."""
        context_ref = self._context_references.get(invoc_id)
        response = context_ref.http_response
        if response is None:
            raise ValueError("No http response found for invocation %s"
                             % invoc_id)
        context_ref.http_response = None
        return response
+
+
def get_unused_tcp_port():
    """Ask the OS for a currently-free TCP port and return its number.

    The socket is closed before returning, so the port is not reserved:
    a small race window exists before the caller binds it.
    """
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as tcp_socket:
        # Port 0 asks the OS to pick any free ephemeral port.
        tcp_socket.bind(("", 0))
        return tcp_socket.getsockname()[1]
+
+
def initialize_http_server(host_addr, **kwargs):
    """
    Initialize HTTP v2 server for handling HTTP requests.

    Imports the web framework module registered by the base extension,
    installs a catch-all route that pairs each incoming request with its
    worker invocation via the x-ms-invocation-id header, and schedules
    the server on the current event loop.

    Returns the server's base address ("http://<host>:<port>");
    raises HttpServerInitError on any failure.
    """
    try:
        ext_base = HttpV2Registry.ext_base()
        # The base extension records which web framework module the app
        # uses; import it dynamically.
        web_extension_mod_name = ext_base.ModuleTrackerMeta.get_module()
        extension_module = importlib.import_module(web_extension_mod_name)
        web_app_class = extension_module.WebApp
        web_server_class = extension_module.WebServer

        unused_port = get_unused_tcp_port()

        app = web_app_class()
        request_type = ext_base.RequestTrackerMeta.get_request_type()

        @app.route
        async def catch_all(request: request_type):  # type: ignore
            # Correlate this HTTP request with a worker invocation; the
            # host is required to send the invocation id header.
            invoc_id = request.headers.get(X_MS_INVOCATION_ID)  # type: ignore
            if invoc_id is None:
                raise MissingHeaderError("Header %s not found" %
                                         X_MS_INVOCATION_ID)
            logger.info('Received HTTP request for invocation %s', invoc_id)
            http_coordinator.set_http_request(invoc_id, request)
            # Block until the function invocation produces a response.
            http_resp = \
                await http_coordinator.await_http_response_async(invoc_id)

            logger.info('Sending HTTP response for invocation %s', invoc_id)
            # If http_resp is a Python exception, re-raise it so the web
            # framework converts it into an error response.
            if isinstance(http_resp, Exception):
                raise http_resp

            return http_resp

        web_server = web_server_class(host_addr, unused_port, app)
        web_server_run_task = web_server.serve()

        # Run the server as a background task on the worker's event loop.
        loop = asyncio.get_event_loop()
        loop.create_task(web_server_run_task)

        web_server_address = "http://" + str(host_addr) + ":" + str(unused_port)
        logger.info('HTTP server starting on %s', web_server_address)

        return web_server_address

    except Exception as e:
        # Wrap every failure mode in a single, catchable init error.
        raise HttpServerInitError("Error initializing HTTP server: %s" % e) \
            from e
+
+
async def sync_http_request(http_request, func_http_request):
    """Copy the invocation request's route params onto *http_request*."""
    tracker = HttpV2Registry.ext_base().RequestTrackerMeta
    tracker.get_synchronizer().sync_route_params(
        http_request, func_http_request.route_params)
+
+
class HttpV2Registry:
    """
    HTTP v2 registry class for managing HTTP v2 states.

    Caches whether the azurefunctions.extensions.base package is
    installed and reports whether HTTP v2 (streaming) is enabled.
    """
    _http_v2_enabled = False
    _ext_base = None
    # Ensures the extension import is attempted only once per process.
    _http_v2_enabled_checked = False

    @classmethod
    def http_v2_enabled(cls, **kwargs):
        """Return True when the HTTP v2 feature is enabled."""
        # Check if HTTP/2 enablement has already been checked
        if not cls._http_v2_enabled_checked:
            # If not checked yet, mark as checked
            cls._http_v2_enabled_checked = True

            cls._http_v2_enabled = cls._check_http_v2_enabled()

        # Fix: previously logged _http_v2_enabled_checked, which is
        # always True at this point, instead of the actual state.
        logger.debug("Streaming enabled: %s", cls._http_v2_enabled)
        return cls._http_v2_enabled

    @classmethod
    def ext_base(cls):
        """Return the imported base extension module (None if absent)."""
        return cls._ext_base

    @classmethod
    def _check_http_v2_enabled(cls):
        """Import the base extension and query its HTTP v2 feature flag."""
        try:
            # Attempt to import the base extension module
            import azurefunctions.extensions.base as ext_base
            cls._ext_base = ext_base

            return cls._ext_base.HttpV2FeatureChecker.http_v2_enabled()
        except ImportError:
            logger.debug("ImportError when importing base extension.")
            return False
+
+
# Module-level singleton: all request/response hand-off goes through here.
http_coordinator = HttpCoordinator()
diff --git a/azure_functions_worker_v2/azure_functions_worker_v2/loader.py b/azure_functions_worker_v2/azure_functions_worker_v2/loader.py
new file mode 100644
index 000000000..ee72481c0
--- /dev/null
+++ b/azure_functions_worker_v2/azure_functions_worker_v2/loader.py
@@ -0,0 +1,201 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+import importlib
+import os.path
+import pathlib
+import sys
+import time
+
+from datetime import timedelta
+from typing import Dict, Optional, Union
+
+from .functions import Registry
+from .logging import logger
+
+from .bindings.meta import get_deferred_raw_bindings
+from .bindings.retrycontext import RetryPolicy
+from .utils.constants import (
+ CUSTOMER_PACKAGES_PATH,
+ METADATA_PROPERTIES_WORKER_INDEXED,
+ MODULE_NOT_FOUND_TS_URL,
+ PYTHON_LANGUAGE_RUNTIME,
+ PYTHON_SCRIPT_FILE_NAME,
+ PYTHON_SCRIPT_FILE_NAME_DEFAULT,
+ RETRY_POLICY,
+)
+from .utils.app_setting_manager import get_app_setting
+from .utils.wrappers import attach_message_to_exception
+
+
def convert_to_seconds(timestr: str):
    """Convert an 'HH:MM:SS' string into total whole seconds."""
    parsed = time.strptime(timestr, '%H:%M:%S')
    duration = timedelta(hours=parsed.tm_hour,
                         minutes=parsed.tm_min,
                         seconds=parsed.tm_sec)
    return int(duration.total_seconds())
+
+
def build_binding_protos(protos, indexed_function) -> Dict:
    """Map each binding name of *indexed_function* to a BindingInfo proto."""
    return {
        binding.name: protos.BindingInfo(
            type=binding.type,
            data_type=binding.data_type,
            direction=binding.direction)
        for binding in indexed_function.get_bindings()
    }
+
+
def build_retry_protos(protos, indexed_function) -> Union[Dict, None]:
    """Build RpcRetryOptions for the function, or None without a policy.

    Dispatches to the fixed-delay builder for the "fixed_delay"
    strategy and to the variable-interval (exponential backoff)
    builder otherwise.

    Fix: the strategy was previously fetched twice into two variables
    (`strategy` and `retry_strategy`); one lookup serves both the
    dispatch and the proto field.
    """
    retry = get_retry_settings(indexed_function)

    if not retry:
        return None

    retry_strategy = retry.get(RetryPolicy.STRATEGY.value)
    max_retry_count = int(retry.get(RetryPolicy.MAX_RETRY_COUNT.value))

    if retry_strategy == "fixed_delay":
        return build_fixed_delay_retry(protos, retry, max_retry_count,
                                       retry_strategy)
    return build_variable_interval_retry(protos, retry, max_retry_count,
                                         retry_strategy)
+
+
def get_retry_settings(indexed_function):
    """Return the function's retry settings dict, or None when the SDK
    object does not expose them."""
    try:
        settings = indexed_function.get_settings_dict(RETRY_POLICY)
    except AttributeError as e:
        # Older SDK objects may not implement get_settings_dict.
        logger.warning("AttributeError while loading retry policy. %s", e)
        return None
    return settings
+
+
def build_fixed_delay_retry(protos, retry, max_retry_count, retry_strategy):
    """Build RpcRetryOptions for a fixed-delay retry policy."""
    delay = retry.get(RetryPolicy.DELAY_INTERVAL.value)
    return protos.RpcRetryOptions(
        max_retry_count=max_retry_count,
        retry_strategy=retry_strategy,
        delay_interval=protos.Duration(seconds=convert_to_seconds(delay)),
    )
+
+
def build_variable_interval_retry(protos, retry, max_retry_count,
                                  retry_strategy):
    """Build RpcRetryOptions for a variable-interval (backoff) policy."""
    min_seconds = convert_to_seconds(
        retry.get(RetryPolicy.MINIMUM_INTERVAL.value))
    max_seconds = convert_to_seconds(
        retry.get(RetryPolicy.MAXIMUM_INTERVAL.value))
    return protos.RpcRetryOptions(
        max_retry_count=max_retry_count,
        retry_strategy=retry_strategy,
        minimum_interval=protos.Duration(seconds=min_seconds),
        maximum_interval=protos.Duration(seconds=max_seconds)
    )
+
+
def process_indexed_function(protos,
                             functions_registry: Registry,
                             indexed_functions, function_dir):
    """Register each indexed function and build its metadata proto.

    fx_metadata_results is a list of the RpcFunctionMetadata for
    all the functions in the particular app.

    fx_binding_logs represents a dictionary of each function in
    the app and its corresponding bindings. The raw bindings and
    binding logs are generated from the base extension if the
    function is using deferred bindings. If not, the raw bindings
    come from the azure-functions sdk and no additional binding
    logs are generated.
    """
    fx_metadata_results = []
    fx_bindings_logs = {}
    for indexed_function in indexed_functions:
        # Registration assigns the function an id and resolves its types.
        function_info = functions_registry.add_indexed_function(
            function=indexed_function, protos=protos)

        binding_protos = build_binding_protos(protos, indexed_function)
        retry_protos = build_retry_protos(protos, indexed_function)

        raw_bindings, bindings_logs = get_fx_raw_bindings(
            indexed_function=indexed_function,
            function_info=function_info)

        function_metadata = protos.RpcFunctionMetadata(
            name=function_info.name,
            function_id=function_info.function_id,
            managed_dependency_enabled=False,  # only enabled for PowerShell
            directory=function_dir,
            script_file=indexed_function.function_script_file,
            entry_point=function_info.name,
            is_proxy=False,  # not supported in V4
            language=PYTHON_LANGUAGE_RUNTIME,
            bindings=binding_protos,
            raw_bindings=raw_bindings,
            retry_options=retry_protos,
            properties={METADATA_PROPERTIES_WORKER_INDEXED: "True"})

        fx_bindings_logs.update({indexed_function: bindings_logs})
        fx_metadata_results.append(function_metadata)

    return fx_metadata_results, fx_bindings_logs
+
+
@attach_message_to_exception(
    expt_type=(ImportError, ModuleNotFoundError),
    message="Cannot find module. Please check the requirements.txt file for the "
    "missing module. For more info, please refer the troubleshooting guide: "
    + MODULE_NOT_FOUND_TS_URL + ". Current sys.path: " + " ".join(sys.path),
    debug_logs="Error when indexing function. Sys Path:" + " ".join(sys.path)
    + ", python-packages Path exists: "
    + str(os.path.exists(CUSTOMER_PACKAGES_PATH)))
def index_function_app(function_path: str):
    """Import the function app script and return its indexed functions.

    Imports the module named after *function_path*'s stem, locates
    exactly one top-level FunctionRegister instance, and returns its
    functions. Raises ValueError when zero or more than one instance
    is found; import errors are annotated by the decorator above.
    """
    module_name = pathlib.Path(function_path).stem
    imported_module = importlib.import_module(module_name)

    from azure.functions import FunctionRegister
    app: Optional[FunctionRegister] = None
    # Scan module attributes for FunctionRegister instances.
    for i in imported_module.__dir__():
        if isinstance(getattr(imported_module, i, None), FunctionRegister):
            if not app:
                app = getattr(imported_module, i, None)
            else:
                # A second instance is ambiguous: fail fast.
                raise ValueError(
                    "More than one %s or other top "
                    "level function app instances are defined."
                    % app.__class__.__name__)

    if not app:
        script_file_name = get_app_setting(
            setting=PYTHON_SCRIPT_FILE_NAME,
            default_value=PYTHON_SCRIPT_FILE_NAME_DEFAULT)
        raise ValueError("Could not find top level function app instances in %s."
                         % script_file_name)

    return app.get_functions()
+
+
def get_fx_raw_bindings(indexed_function, function_info):
    """Return (raw_bindings, bindings_logs) for one function.

    When deferred bindings are enabled for the function, the raw
    bindings and extra logs come from the base extension; otherwise
    the raw bindings come from the azure-functions SDK and the logs
    dict is empty.
    """
    if not function_info.deferred_bindings_enabled:
        return indexed_function.get_raw_bindings(), {}

    return get_deferred_raw_bindings(
        indexed_function, function_info.input_types)
diff --git a/azure_functions_worker_v2/azure_functions_worker_v2/logging.py b/azure_functions_worker_v2/azure_functions_worker_v2/logging.py
new file mode 100644
index 000000000..49be533f6
--- /dev/null
+++ b/azure_functions_worker_v2/azure_functions_worker_v2/logging.py
@@ -0,0 +1,16 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+import logging.handlers
+import traceback
+
+# Logging Prefixes
+SDK_LOG_PREFIX = "azure.functions"
+
+logger: logging.Logger = logging.getLogger(SDK_LOG_PREFIX)
+
+
def format_exception(exception: Exception) -> str:
    """Render *exception* as its message followed by the full traceback.

    Uses the three-argument form of traceback.format_exception so the
    helper also works on Python < 3.10 (the single-argument form was
    only added in 3.10).
    """
    msg = str(exception) + "\n"
    msg += ''.join(traceback.format_exception(
        type(exception), exception, exception.__traceback__))
    return msg
diff --git a/azure_functions_worker_v2/azure_functions_worker_v2/otel.py b/azure_functions_worker_v2/azure_functions_worker_v2/otel.py
new file mode 100644
index 000000000..1974a5c6d
--- /dev/null
+++ b/azure_functions_worker_v2/azure_functions_worker_v2/otel.py
@@ -0,0 +1,116 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import os
+
+from .logging import logger
+
+from .utils.app_setting_manager import get_app_setting
+from .utils.constants import (APPLICATIONINSIGHTS_CONNECTION_STRING,
+ PYTHON_APPLICATIONINSIGHTS_LOGGER_NAME,
+ PYTHON_APPLICATIONINSIGHTS_LOGGER_NAME_DEFAULT,
+ TRACESTATE, TRACEPARENT)
+from .utils.tracing import serialize_exception_as_str
+
+
class OTelManager:
    """Process-wide OpenTelemetry state: whether the Azure monitor
    distro and/or the OTel libraries are available, plus the context
    API and trace-context propagator once imported."""

    def __init__(self):
        self._azure_monitor_available = False
        self._otel_libs_available = False
        self._context_api = None
        self._trace_context_propagator = None

    def set_azure_monitor_available(self, azure_monitor_available):
        self._azure_monitor_available = azure_monitor_available

    def get_azure_monitor_available(self):
        return self._azure_monitor_available

    def set_otel_libs_available(self, otel_libs_available):
        # Fix: previously assigned to a misspelled attribute
        # (_aotel_libs_available), so this setter had no effect and the
        # getter always returned the initial False.
        self._otel_libs_available = otel_libs_available

    def get_otel_libs_available(self):
        return self._otel_libs_available

    def set_context_api(self, context_api):
        self._context_api = context_api

    def get_context_api(self):
        return self._context_api

    def set_trace_context_propagator(self, trace_context_propagator):
        self._trace_context_propagator = trace_context_propagator

    def get_trace_context_propagator(self):
        return self._trace_context_propagator
+
+
def update_opentelemetry_status():
    """Import the OpenTelemetry context/propagation APIs and register
    them on otel_manager; failures are logged, never raised."""
    try:
        from opentelemetry import context as context_api
        from opentelemetry.trace.propagation.tracecontext import (
            TraceContextTextMapPropagator,
        )
    except ImportError as e:
        logger.exception(
            "Cannot import OpenTelemetry libraries. Exception: %s",
            serialize_exception_as_str(e)
        )
        return

    otel_manager.set_context_api(context_api)
    otel_manager.set_trace_context_propagator(TraceContextTextMapPropagator())
+
+
def initialize_azure_monitor():
    """Configure the Azure Monitor OpenTelemetry distro for this process.

    Best-effort: on success the availability flag on otel_manager is
    set True; import or configuration failures are logged and leave
    the flag False.
    """
    update_opentelemetry_status()
    try:
        from azure.monitor.opentelemetry import configure_azure_monitor

        # Set functions resource detector manually until officially
        # include in Azure monitor distro
        os.environ.setdefault(
            "OTEL_EXPERIMENTAL_RESOURCE_DETECTORS",
            "azure_functions",
        )

        configure_azure_monitor(
            # Connection string can be explicitly specified in Appsetting
            # If not set, defaults to env var
            # APPLICATIONINSIGHTS_CONNECTION_STRING
            connection_string=get_app_setting(
                setting=APPLICATIONINSIGHTS_CONNECTION_STRING
            ),
            logger_name=get_app_setting(
                setting=PYTHON_APPLICATIONINSIGHTS_LOGGER_NAME,
                default_value=PYTHON_APPLICATIONINSIGHTS_LOGGER_NAME_DEFAULT
            ),
        )
        otel_manager.set_azure_monitor_available(azure_monitor_available=True)

        logger.info("Successfully configured Azure monitor distro.")
    except ImportError as e:
        # The distro package is optional; absence is not fatal.
        logger.exception(
            "Cannot import Azure Monitor distro. Exception: %s",
            serialize_exception_as_str(e)
        )
        otel_manager.set_azure_monitor_available(False)
    except Exception as e:
        # Any other configuration failure also disables the feature.
        logger.exception(
            "Error initializing Azure monitor distro. Exception: %s",
            serialize_exception_as_str(e)
        )
        otel_manager.set_azure_monitor_available(False)
+
+
def configure_opentelemetry(invocation_context):
    """Attach the invocation's distributed-tracing headers (traceparent /
    tracestate) to the current OpenTelemetry context."""
    carrier = {
        TRACEPARENT: invocation_context.trace_context.trace_parent,
        TRACESTATE: invocation_context.trace_context.trace_state,
    }
    extracted = otel_manager.get_trace_context_propagator().extract(carrier)
    otel_manager.get_context_api().attach(extracted)
+
+
# Module-level singleton tracking OpenTelemetry availability and state.
otel_manager = OTelManager()
diff --git a/azure_functions_worker_v2/azure_functions_worker_v2/utils/__init__.py b/azure_functions_worker_v2/azure_functions_worker_v2/utils/__init__.py
new file mode 100644
index 000000000..5b7f7a925
--- /dev/null
+++ b/azure_functions_worker_v2/azure_functions_worker_v2/utils/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
diff --git a/azure_functions_worker_v2/azure_functions_worker_v2/utils/app_setting_manager.py b/azure_functions_worker_v2/azure_functions_worker_v2/utils/app_setting_manager.py
new file mode 100644
index 000000000..aa3ba0814
--- /dev/null
+++ b/azure_functions_worker_v2/azure_functions_worker_v2/utils/app_setting_manager.py
@@ -0,0 +1,102 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import os
+
+from typing import Callable, Optional
+
+from .constants import (
+ PYTHON_APPLICATIONINSIGHTS_ENABLE_TELEMETRY,
+ PYTHON_ENABLE_DEBUG_LOGGING,
+ PYTHON_ENABLE_OPENTELEMETRY,
+ PYTHON_SCRIPT_FILE_NAME,
+ PYTHON_THREADPOOL_THREAD_COUNT,
+)
+
+
def get_python_appsetting_state():
    """Return a ' | '-separated summary of the Python-specific app
    settings currently present in the environment."""
    current_vars = os.environ.copy()
    # Only these settings are surfaced in diagnostics output.
    python_specific_settings = [
        PYTHON_THREADPOOL_THREAD_COUNT,
        PYTHON_ENABLE_DEBUG_LOGGING,
        PYTHON_SCRIPT_FILE_NAME,
        PYTHON_ENABLE_OPENTELEMETRY,
        PYTHON_APPLICATIONINSIGHTS_ENABLE_TELEMETRY,
    ]

    parts = [
        f"{app_setting}: {current_vars[app_setting]} | "
        for app_setting in python_specific_settings
        if app_setting in current_vars
    ]
    return "".join(parts)
+
+
def is_true_like(setting: Optional[str]) -> bool:
    """Return True when *setting* is a truthy app-setting string.

    Accepts '1', 'true', 't', 'yes', 'y' (case-insensitive, surrounding
    whitespace ignored). None returns False.

    Fix: annotated the parameter Optional[str]; None is an explicitly
    handled (and common) input, but the hint previously claimed `str`.
    """
    if setting is None:
        return False

    return setting.lower().strip() in {'1', 'true', 't', 'yes', 'y'}
+
+
def is_false_like(setting: Optional[str]) -> bool:
    """Return True when *setting* is a falsy app-setting string.

    Accepts '0', 'false', 'f', 'no', 'n' (case-insensitive, surrounding
    whitespace ignored). None returns False.

    Fix: annotated the parameter Optional[str]; None is an explicitly
    handled input, but the hint previously claimed `str`.
    """
    if setting is None:
        return False

    return setting.lower().strip() in {'0', 'false', 'f', 'no', 'n'}
+
+
def is_envvar_true(env_key: str) -> bool:
    """Return True when the environment variable is set to a truthy value."""
    value = os.getenv(env_key)
    if value is None:
        return False
    return is_true_like(value)
+
+
def is_envvar_false(env_key: str) -> bool:
    """Return True when the environment variable is set to a falsy value."""
    value = os.getenv(env_key)
    if value is None:
        return False
    return is_false_like(value)
+
+
def get_app_setting(
    setting: str,
    default_value: Optional[str] = None,
    validator: Optional[Callable[[str], bool]] = None
) -> Optional[str]:
    """Returns the application setting from environment variable.

    Parameters
    ----------
    setting: str
        The name of the application setting (e.g. FUNCTIONS_RUNTIME_VERSION)

    default_value: Optional[str]
        The expected return value when the application setting is not found,
        or the app setting does not pass the validator.

    validator: Optional[Callable[[str], bool]]
        A function accepts the app setting value and should return True when
        the app setting value is acceptable.

    Returns
    -------
    Optional[str]
        A string value that is set in the application setting
    """
    value = os.getenv(setting)

    # Unset settings fall back to the caller-supplied default.
    if value is None:
        return default_value

    # A failing validator also falls back to the default; with no
    # validator the raw value is accepted as-is.
    if validator is not None and not validator(value):
        return default_value

    return value
diff --git a/azure_functions_worker_v2/azure_functions_worker_v2/utils/constants.py b/azure_functions_worker_v2/azure_functions_worker_v2/utils/constants.py
new file mode 100644
index 000000000..ffdaf37c8
--- /dev/null
+++ b/azure_functions_worker_v2/azure_functions_worker_v2/utils/constants.py
@@ -0,0 +1,63 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import sys
+
+# Constants for Azure Functions Python Worker
+CUSTOMER_PACKAGES_PATH = "/home/site/wwwroot/.python_packages/lib/site" \
+ "-packages"
+HTTP = "http"
+HTTP_TRIGGER = "httpTrigger"
+METADATA_PROPERTIES_WORKER_INDEXED = "worker_indexed"
+MODULE_NOT_FOUND_TS_URL = "https://aka.ms/functions-modulenotfound"
+PYTHON_LANGUAGE_RUNTIME = "python"
+RETRY_POLICY = "retry_policy"
+TRUE = "true"
+TRACEPARENT = "traceparent"
+TRACESTATE = "tracestate"
+X_MS_INVOCATION_ID = "x-ms-invocation-id"
+
+
+# Capabilities
+FUNCTION_DATA_CACHE = "FunctionDataCache"
+HTTP_URI = "HttpUri"
+RAW_HTTP_BODY_BYTES = "RawHttpBodyBytes"
+REQUIRES_ROUTE_PARAMETERS = "RequiresRouteParameters"
+RPC_HTTP_BODY_ONLY = "RpcHttpBodyOnly"
+RPC_HTTP_TRIGGER_METADATA_REMOVED = "RpcHttpTriggerMetadataRemoved"
+SHARED_MEMORY_DATA_TRANSFER = "SharedMemoryDataTransfer"
+TYPED_DATA_COLLECTION = "TypedDataCollection"
+# When this capability is enabled, logs are not piped back to the
+# host from the worker. Logs will directly go to where the user has
+# configured them to go. This is to ensure that the logs are not
+# duplicated.
+WORKER_OPEN_TELEMETRY_ENABLED = "WorkerOpenTelemetryEnabled"
+WORKER_STATUS = "WorkerStatus"
+
+
+# Platform Environment Variables
+AZURE_WEBJOBS_SCRIPT_ROOT = "AzureWebJobsScriptRoot"
+CONTAINER_NAME = "CONTAINER_NAME"
+
+
+# Python Specific Feature Flags and App Settings
+# Appsetting to specify AppInsights connection string
+APPLICATIONINSIGHTS_CONNECTION_STRING = "APPLICATIONINSIGHTS_CONNECTION_STRING"
+# Appsetting to turn on ApplicationInsights support/features
+# A value of "true" enables the setting
+PYTHON_APPLICATIONINSIGHTS_ENABLE_TELEMETRY = \
+ "PYTHON_APPLICATIONINSIGHTS_ENABLE_TELEMETRY"
+# Appsetting to specify root logger name of logger to collect telemetry for
+# Used by Azure monitor distro (Application Insights)
+PYTHON_APPLICATIONINSIGHTS_LOGGER_NAME = "PYTHON_APPLICATIONINSIGHTS_LOGGER_NAME"
+PYTHON_APPLICATIONINSIGHTS_LOGGER_NAME_DEFAULT = ""
+PYTHON_ENABLE_DEBUG_LOGGING = "PYTHON_ENABLE_DEBUG_LOGGING"
+# Appsetting to turn on OpenTelemetry support/features
+# A value of "true" enables the setting
+PYTHON_ENABLE_OPENTELEMETRY = "PYTHON_ENABLE_OPENTELEMETRY"
+# Allows for non-default script file name
+PYTHON_SCRIPT_FILE_NAME = "PYTHON_SCRIPT_FILE_NAME"
+PYTHON_SCRIPT_FILE_NAME_DEFAULT = "function_app.py"
+PYTHON_THREADPOOL_THREAD_COUNT = "PYTHON_THREADPOOL_THREAD_COUNT"
+PYTHON_THREADPOOL_THREAD_COUNT_DEFAULT = 1
+PYTHON_THREADPOOL_THREAD_COUNT_MAX = sys.maxsize
+PYTHON_THREADPOOL_THREAD_COUNT_MIN = 1
diff --git a/azure_functions_worker_v2/azure_functions_worker_v2/utils/executor.py b/azure_functions_worker_v2/azure_functions_worker_v2/utils/executor.py
new file mode 100644
index 000000000..52330dcc8
--- /dev/null
+++ b/azure_functions_worker_v2/azure_functions_worker_v2/utils/executor.py
@@ -0,0 +1,34 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import asyncio
+import functools
+
+from typing import Any
+
+from ..otel import otel_manager, configure_opentelemetry
+
+
def get_current_loop():
    """Return the event loop for the current context."""
    # asyncio.get_event_loop is the public alias of
    # asyncio.events.get_event_loop (the same function object).
    return asyncio.get_event_loop()
+
+
async def execute_async(function, args) -> Any:
    """Invoke the coroutine *function* with *args* as keyword arguments."""
    coro = function(**args)
    return await coro
+
+
def execute_sync(function, args) -> Any:
    """Invoke the synchronous *function* with *args* as keyword arguments."""
    return function(**args)
+
+
def run_sync_func(invocation_id, context, func, params):
    """Run *func* on a ThreadPoolExecutor thread for one invocation.

    Tags the thread-local storage with the invocation id so logging on
    worker threads can resolve the current invocation, and clears it
    again when the call finishes.
    """
    context.thread_local_storage.invocation_id = invocation_id
    try:
        if (otel_manager.get_azure_monitor_available()
                or otel_manager.get_otel_libs_available()):
            configure_opentelemetry(context)
        # Equivalent to functools.partial(execute_sync, func)(params).
        return execute_sync(func, params)
    finally:
        context.thread_local_storage.invocation_id = None
diff --git a/azure_functions_worker_v2/azure_functions_worker_v2/utils/helpers.py b/azure_functions_worker_v2/azure_functions_worker_v2/utils/helpers.py
new file mode 100644
index 000000000..5bb626eb8
--- /dev/null
+++ b/azure_functions_worker_v2/azure_functions_worker_v2/utils/helpers.py
@@ -0,0 +1,40 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import os
+import platform
+import sys
+
+from .constants import PYTHON_LANGUAGE_RUNTIME
+
+from ..logging import logger
+from ..version import VERSION
+
+sdk_version: str = "0.0.0"
+
+
def set_sdk_version(version: str):
    """Record the azure-functions SDK version reported by the app."""
    global sdk_version
    sdk_version = version
+
+
def get_sdk_version() -> str:
    """Return the recorded azure-functions SDK version ("0.0.0" if unset)."""
    return sdk_version
+
+
def change_cwd(new_cwd: str):
    """Change the process working directory, warning if the path is absent."""
    if not os.path.exists(new_cwd):
        logger.warning('Directory %s is not found when reloading', new_cwd)
        return
    os.chdir(new_cwd)
    logger.info('Changing current working directory to %s', new_cwd)
+
+
def get_worker_metadata(protos):
    """Build the WorkerMetadata proto describing this worker process."""
    runtime_version = (str(sys.version_info.major) + "."
                       + str(sys.version_info.minor))
    return protos.WorkerMetadata(
        runtime_name=PYTHON_LANGUAGE_RUNTIME,
        runtime_version=runtime_version,
        worker_version=VERSION,
        worker_bitness=platform.machine(),
        custom_properties={})
diff --git a/azure_functions_worker_v2/azure_functions_worker_v2/utils/tracing.py b/azure_functions_worker_v2/azure_functions_worker_v2/utils/tracing.py
new file mode 100644
index 000000000..f258ad3b6
--- /dev/null
+++ b/azure_functions_worker_v2/azure_functions_worker_v2/utils/tracing.py
@@ -0,0 +1,70 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import traceback
+
+from traceback import StackSummary, extract_tb
+from typing import List
+
+
def extend_exception_message(exc: Exception, msg: str) -> Exception:
    """Return a same-typed exception with *msg* appended to the message.

    e.g. "ImportModule: no module name" becomes
         "ImportModule: no module name. msg"
    The original traceback is preserved on the new exception.
    """
    base_msg = getattr(exc, 'msg', None) or str(exc) or ''
    combined = (base_msg.rstrip('.') + '. ' + msg).rstrip()
    return type(exc)(combined).with_traceback(exc.__traceback__)
+
+
def marshall_exception_trace(exc: Exception) -> str:
    """Format *exc*'s traceback as text; ModuleNotFoundError summaries
    are first passed through a frame-cleanup helper."""
    frames: StackSummary = extract_tb(exc.__traceback__)
    if isinstance(exc, ModuleNotFoundError):
        frames = _marshall_module_not_found_error(frames)
    return ''.join(frames.format())
+
+
+def _marshall_module_not_found_error(tbss: StackSummary) -> StackSummary:
+ tbss = _remove_frame_from_stack(tbss, '')
+ tbss = _remove_frame_from_stack(
+ tbss, '')
+ return tbss
+
+
+def _remove_frame_from_stack(tbss: StackSummary,
+ framename: str) -> StackSummary:
+ filtered_stack_list: List[traceback.FrameSummary] = \
+ list(filter(lambda frame: getattr(frame,
+ 'filename') != framename, tbss))
+ filtered_stack: StackSummary = StackSummary.from_list(filtered_stack_list)
+ return filtered_stack
+
+
def serialize_exception(exc: Exception, protos):
    """Convert *exc* into an RpcException proto (message + stack trace).

    Serialization failures degrade gracefully to placeholder text or an
    empty trace rather than raising.
    """
    try:
        message = str(type(exc).__name__) + ": " + str(exc)
    except Exception:
        message = ('Unhandled exception in function. '
                   'Could not serialize original exception message.')

    try:
        stack_trace = marshall_exception_trace(exc)
    except Exception:
        stack_trace = ''

    return protos.RpcException(message=message, stack_trace=stack_trace)
+
+
def serialize_exception_as_str(exc: Exception):
    """Render *exc* as "Message: ... | Stack Trace: ..." for logging.

    Like serialize_exception, but returns a plain string instead of a
    proto; failures degrade to placeholder text / an empty trace.
    """
    try:
        message = str(type(exc).__name__) + ": " + str(exc)
    except Exception:
        message = ('Unhandled exception in function. '
                   'Could not serialize original exception message.')

    try:
        stack_trace = marshall_exception_trace(exc)
    except Exception:
        stack_trace = ''

    return "Message: " + message + " | " + "Stack Trace: " + stack_trace
diff --git a/azure_functions_worker_v2/azure_functions_worker_v2/utils/typing_inspect.py b/azure_functions_worker_v2/azure_functions_worker_v2/utils/typing_inspect.py
new file mode 100644
index 000000000..67726ee3a
--- /dev/null
+++ b/azure_functions_worker_v2/azure_functions_worker_v2/utils/typing_inspect.py
@@ -0,0 +1,295 @@
+# type: ignore
+# Imported from https://github.com/ilevkivskyi/typing_inspect/blob/168fa6f7c5c55f720ce6282727211cf4cf6368f6/typing_inspect.py # NoQA E501
+# Author: Ivan Levkivskyi
+# License: MIT
+
+"""Defines experimental API for runtime inspection of types defined
+in the standard "typing" module.
+
+Example usage::
+ from typing_inspect import is_generic_type
+"""
+
+# NOTE: Unlike upstream typing_inspect, this vendored copy targets Python 3
+# only (it relies on typing._GenericAlias internals).
+
+import collections.abc
+from typing import (Callable, ClassVar, Generic, Tuple,
+ TypeVar, Union, _GenericAlias, _SpecialGenericAlias)
+
+
+# from mypy_extensions import _TypedDictMeta
+
+def is_generic_type(tp):
+ """Test if the given type is a generic type. This includes Generic itself,
+ but excludes special typing constructs such as Union, Tuple, Callable,
+ ClassVar.
+ Examples::
+
+ is_generic_type(int) == False
+ is_generic_type(Union[int, str]) == False
+ is_generic_type(Union[int, T]) == False
+ is_generic_type(ClassVar[List[int]]) == False
+ is_generic_type(Callable[..., T]) == False
+ is_generic_type(Generic) == True
+ is_generic_type(Generic[T]) == True
+ is_generic_type(Iterable[int]) == True
+ is_generic_type(Mapping) == True
+ is_generic_type(MutableMapping[T, List[int]]) == True
+ is_generic_type(Sequence[Union[str, bytes]]) == True
+ """
+ return (isinstance(tp, type) and issubclass(tp, Generic)
+ or ((isinstance(tp, _GenericAlias) or isinstance(tp, _SpecialGenericAlias)) # NoQA E501
+ and tp.__origin__ not in (Union, tuple, ClassVar, collections.abc.Callable))) # NoQA E501
+
+
+def is_callable_type(tp):
+ """Test if the type is a generic callable type, including subclasses
+ excluding non-generic types and callables.
+ Examples::
+
+ is_callable_type(int) == False
+ is_callable_type(type) == False
+ is_callable_type(Callable) == True
+ is_callable_type(Callable[..., int]) == True
+ is_callable_type(Callable[[int, int], Iterable[str]]) == True
+ class MyClass(Callable[[int], int]):
+ ...
+ is_callable_type(MyClass) == True
+
+ For more general tests use callable(), for more precise test
+ (excluding subclasses) use::
+
+ get_origin(tp) is collections.abc.Callable # Callable prior to Python 3.7 # NoQA E501
+ """
+ return (tp is Callable or isinstance(tp, _GenericAlias) and
+ tp.__origin__ is collections.abc.Callable or
+ isinstance(tp, type) and issubclass(tp, Generic) and
+ issubclass(tp, collections.abc.Callable))
+
+
+def is_tuple_type(tp):
+ """Test if the type is a generic tuple type, including subclasses excluding
+ non-generic classes.
+ Examples::
+
+ is_tuple_type(int) == False
+ is_tuple_type(tuple) == False
+ is_tuple_type(Tuple) == True
+ is_tuple_type(Tuple[str, int]) == True
+ class MyClass(Tuple[str, int]):
+ ...
+ is_tuple_type(MyClass) == True
+
+ For more general tests use issubclass(..., tuple), for more precise test
+ (excluding subclasses) use::
+
+ get_origin(tp) is tuple # Tuple prior to Python 3.7
+ """
+ return (tp is Tuple or isinstance(tp, _GenericAlias) and
+ tp.__origin__ is tuple or
+ isinstance(tp, type) and issubclass(tp, Generic) and
+ issubclass(tp, tuple))
+
+
+def is_union_type(tp):
+ """Test if the type is a union type. Examples::
+
+ is_union_type(int) == False
+ is_union_type(Union) == True
+ is_union_type(Union[int, int]) == False
+ is_union_type(Union[T, int]) == True
+ """
+ return (tp is Union or
+ isinstance(tp, _GenericAlias) and tp.__origin__ is Union)
+
+
+def is_typevar(tp):
+ """Test if the type represents a type variable. Examples::
+
+ is_typevar(int) == False
+ is_typevar(T) == True
+ is_typevar(Union[T, int]) == False
+ """
+
+ return type(tp) is TypeVar
+
+
+def is_classvar(tp):
+ """Test if the type represents a class variable. Examples::
+
+ is_classvar(int) == False
+ is_classvar(ClassVar) == True
+ is_classvar(ClassVar[int]) == True
+ is_classvar(ClassVar[List[T]]) == True
+ """
+ return (tp is ClassVar or
+ isinstance(tp, _GenericAlias) and tp.__origin__ is ClassVar)
+
+
+def get_last_origin(tp):
+    """Get the last base of (multiply) subscripted type. Supports generic types,
+    Union, Callable, and Tuple. Returns None for unsupported types.
+    Examples::
+
+        get_last_origin(int) == None
+        get_last_origin(ClassVar[int]) == None
+        get_last_origin(Generic[T]) == Generic
+        get_last_origin(Union[T, int][str]) == Union[T, int]
+        get_last_origin(List[Tuple[T, T]][int]) == List[Tuple[T, T]]
+        get_last_origin(List) == List
+
+    NOTE: legacy upstream typing_inspect API kept only for source
+    compatibility; on this Python 3.7+ vendored copy it always raises
+    ValueError.  Use get_origin instead.
+    """
+    raise ValueError('This function is only supported in Python 3.6,'
+                     ' use get_origin instead')
+
+
+def get_origin(tp):
+ """Get the unsubscripted version of a type. Supports generic types, Union,
+ Callable, and Tuple. Returns None for unsupported types. Examples::
+
+ get_origin(int) == None
+ get_origin(ClassVar[int]) == None
+ get_origin(Generic) == Generic
+ get_origin(Generic[T]) == Generic
+ get_origin(Union[T, int]) == Union
+ get_origin(List[Tuple[T, T]][int]) == list # List prior to Python 3.7
+ """
+ if isinstance(tp, _GenericAlias):
+ return tp.__origin__ if tp.__origin__ is not ClassVar else None
+ if tp is Generic:
+ return Generic
+ return None
+
+
+def get_parameters(tp):
+ """Return type parameters of a parameterizable type as a tuple
+ in lexicographic order. Parameterizable types are generic types,
+ unions, tuple types and callable types. Examples::
+
+ get_parameters(int) == ()
+ get_parameters(Generic) == ()
+ get_parameters(Union) == ()
+ get_parameters(List[int]) == ()
+
+ get_parameters(Generic[T]) == (T,)
+ get_parameters(Tuple[List[T], List[S_co]]) == (T, S_co)
+ get_parameters(Union[S_co, Tuple[T, T]][int, U]) == (U,)
+ get_parameters(Mapping[T, Tuple[S_co, T]]) == (T, S_co)
+ """
+ if (isinstance(tp, _GenericAlias) or isinstance(tp, type) and
+ issubclass(tp, Generic) and tp is not Generic): # NoQA E129
+ return tp.__parameters__
+ return ()
+
+
+def get_last_args(tp):
+    """Get last arguments of (multiply) subscripted type.
+    Parameters for Callable are flattened. Examples::
+
+        get_last_args(int) == ()
+        get_last_args(Union) == ()
+        get_last_args(ClassVar[int]) == (int,)
+        get_last_args(Union[T, int]) == (T, int)
+        get_last_args(Callable[[T], int]) == (T, int)
+        get_last_args(Callable[[], int]) == (int,)
+
+    NOTE: legacy upstream typing_inspect API kept only for source
+    compatibility; on this Python 3.7+ vendored copy it always raises
+    ValueError.  Use get_args instead.
+    """
+    raise ValueError('This function is only supported in Python 3.6,'
+                     ' use get_args instead')
+
+
+def _eval_args(args):
+    """Internal helper for get_args.
+
+    Recursively evaluates the nested-tuple encoding of type arguments
+    (see get_args: a tuple ``(base, a1, ..., an)`` stands for
+    ``base[a1, ..., an]``).  Callable entries are re-wrapped so that
+    ``Callable[...]`` subscription succeeds.
+    """
+    res = []
+    for arg in args:
+        if not isinstance(arg, tuple):
+            # Plain, already-evaluated type argument.
+            res.append(arg)
+        elif is_callable_type(arg[0]):
+            if len(arg) == 2:
+                # (Callable, ret): no parameters.
+                res.append(Callable[[], arg[1]])
+            elif arg[1] is Ellipsis:
+                # (Callable, Ellipsis, ret): unspecified parameters.
+                res.append(Callable[..., arg[2]])
+            else:
+                # (Callable, p1, ..., pn, ret): explicit parameter list.
+                res.append(Callable[list(arg[1:-1]), arg[-1]])
+        else:
+            # Re-subscript the base with the recursively evaluated tail.
+            res.append(type(arg[0]).__getitem__(arg[0], _eval_args(arg[1:])))
+    return tuple(res)
+
+
+def get_args(tp, evaluate=None):
+ """Get type arguments with all substitutions performed. For unions,
+ basic simplifications used by Union constructor are performed.
+ On versions prior to 3.7 if `evaluate` is False (default),
+ report result as nested tuple, this matches
+ the internal representation of types. If `evaluate` is True
+ (or if Python version is 3.7 or greater), then all
+ type parameters are applied (this could be time and memory expensive).
+ Examples::
+
+ get_args(int) == ()
+ get_args(Union[int, Union[T, int], str][int]) == (int, str)
+ get_args(Union[int, Tuple[T, int]][str]) == (int, (Tuple, str, int))
+
+ get_args(Union[int, Tuple[T, int]][str], evaluate=True) == \
+ (int, Tuple[str, int])
+ get_args(Dict[int, Tuple[T, T]][Optional[int]], evaluate=True) == \
+ (int, Tuple[Optional[int], Optional[int]])
+ get_args(Callable[[], T][int], evaluate=True) == ([], int,)
+ """
+ if evaluate is not None and not evaluate:
+ raise ValueError('evaluate can only be True in Python 3.7')
+ if isinstance(tp, _GenericAlias):
+ res = tp.__args__
+ if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis: # NoQA E501
+ res = (list(res[:-1]), res[-1])
+ return res
+ return ()
+
+
+def get_generic_type(obj):
+ """Get the generic type of an object if possible, or runtime class
+ otherwise.
+ Examples::
+
+ class Node(Generic[T]):
+ ...
+ type(Node[int]()) == Node
+ get_generic_type(Node[int]()) == Node[int]
+ get_generic_type(Node[T]()) == Node[T]
+ get_generic_type(1) == int
+ """
+
+ gen_type = getattr(obj, '__orig_class__', None)
+ return gen_type if gen_type is not None else type(obj)
+
+
+def get_generic_bases(tp):
+ """Get generic base types of a type or empty tuple if not possible.
+ Example::
+
+ class MyClass(List[int], Mapping[str, List[int]]):
+ ...
+ MyClass.__bases__ == (List, Mapping)
+ get_generic_bases(MyClass) == (List[int], Mapping[str, List[int]])
+ """
+
+ return getattr(tp, '__orig_bases__', ())
+
+
+def typed_dict_keys(td):
+ """If td is a TypedDict class, return a dictionary mapping the typed keys
+ to types. Otherwise, return None. Examples::
+
+ class TD(TypedDict):
+ x: int
+ y: int
+ class Other(dict):
+ x: int
+ y: int
+
+ typed_dict_keys(TD) == {'x': int, 'y': int}
+ typed_dict_keys(dict) == None
+ typed_dict_keys(Other) == None
+ """
+ if isinstance(td, _TypedDictMeta): # NoQA F821
+ return td.__annotations__.copy()
+ return None
diff --git a/azure_functions_worker_v2/azure_functions_worker_v2/utils/validators.py b/azure_functions_worker_v2/azure_functions_worker_v2/utils/validators.py
new file mode 100644
index 000000000..88f71a86c
--- /dev/null
+++ b/azure_functions_worker_v2/azure_functions_worker_v2/utils/validators.py
@@ -0,0 +1,19 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import re
+
+
+class InvalidFileNameError(Exception):
+
+ def __init__(self, file_name: str) -> None:
+ super().__init__(
+ 'Invalid file name: %s', file_name)
+
+
+def validate_script_file_name(file_name: str):
+ # First character can be a letter, number, or underscore
+ # Following characters can be a letter, number, underscore, hyphen, or dash
+ # Ending must be .py
+ pattern = re.compile(r'^[a-zA-Z0-9_][a-zA-Z0-9_\-]*\.py$')
+ if not pattern.match(file_name):
+ raise InvalidFileNameError(file_name)
diff --git a/azure_functions_worker_v2/azure_functions_worker_v2/utils/wrappers.py b/azure_functions_worker_v2/azure_functions_worker_v2/utils/wrappers.py
new file mode 100644
index 000000000..c6b7dc6fb
--- /dev/null
+++ b/azure_functions_worker_v2/azure_functions_worker_v2/utils/wrappers.py
@@ -0,0 +1,52 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+from typing import Any, Callable
+
+from .app_setting_manager import is_envvar_false, is_envvar_true
+from .tracing import extend_exception_message
+
+from ..logging import logger
+
+
+def enable_feature_by(flag: str,
+ default: Any = None,
+ flag_default: bool = False) -> Callable:
+ def decorate(func):
+ def call(*args, **kwargs):
+ if is_envvar_true(flag):
+ return func(*args, **kwargs)
+ if flag_default and not is_envvar_false(flag):
+ return func(*args, **kwargs)
+ return default
+ return call
+ return decorate
+
+
+def disable_feature_by(flag: str,
+ default: Any = None,
+ flag_default: bool = False) -> Callable:
+ def decorate(func):
+ def call(*args, **kwargs):
+ if is_envvar_true(flag):
+ return default
+ if flag_default and not is_envvar_false(flag):
+ return default
+ return func(*args, **kwargs)
+ return call
+ return decorate
+
+
+def attach_message_to_exception(
+ expt_type: type[Exception] | tuple[type[Exception], ...],
+ message: str,
+ debug_logs=None) -> Callable:
+ def decorate(func):
+ def call(*args, **kwargs):
+ try:
+ return func(*args, **kwargs)
+ except expt_type as e:
+ if debug_logs is not None:
+ logger.error(debug_logs)
+ raise extend_exception_message(e, message)
+ return call
+ return decorate
diff --git a/azure_functions_worker_v2/azure_functions_worker_v2/version.py b/azure_functions_worker_v2/azure_functions_worker_v2/version.py
new file mode 100644
index 000000000..51170743d
--- /dev/null
+++ b/azure_functions_worker_v2/azure_functions_worker_v2/version.py
@@ -0,0 +1,4 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+# Single source of truth for the package version; pyproject.toml reads it
+# via [tool.setuptools.dynamic] (version = {attr = ...VERSION}).
+VERSION = '1.0.0a3'
diff --git a/azure_functions_worker_v2/pyproject.toml b/azure_functions_worker_v2/pyproject.toml
new file mode 100644
index 000000000..0a2b4806b
--- /dev/null
+++ b/azure_functions_worker_v2/pyproject.toml
@@ -0,0 +1,102 @@
+[project]
+name = "azure-functions-runtime"
+dynamic = ["version"]
+requires-python = ">=3.13"
+description = "Python Language Worker for Azure Functions Runtime"
+authors = [
+ { name = "Azure Functions team at Microsoft Corp.", email = "azurefunctions@microsoft.com" }
+]
+keywords = ["azure", "functions", "azurefunctions",
+ "python", "serverless"]
+license = { file = "LICENSE" }
+readme = { file = "README.md", content-type = "text/markdown" }
+classifiers = [
+ "Development Status :: 5 - Production/Stable",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.13",
+ "Operating System :: Microsoft :: Windows",
+ "Operating System :: POSIX",
+ "Operating System :: MacOS :: MacOS X",
+ "Environment :: Web Environment",
+ "License :: OSI Approved :: MIT License",
+ "Intended Audience :: Developers"
+]
+dependencies = [
+ "azurefunctions-extensions-base",
+ "azure-functions"
+]
+
+[project.urls]
+Documentation = "https://github.com/Azure/azure-functions-python-worker/blob/dev/azure_functions_worker_v2/README.md"
+Repository = "https://github.com/Azure/azure-functions-python-worker"
+
+[project.optional-dependencies]
+dev = [
+ "azure-monitor-opentelemetry", # Used for Azure Monitor unit tests
+ "flake8==6.*",
+ "mypy",
+ "pytest",
+ "requests==2.*",
+ "coverage",
+ "grpcio~=1.70.0",
+ "grpcio-tools~=1.70.0",
+ "pytest-sugar",
+ "pytest-cov",
+ "pytest-xdist",
+ "pytest-randomly",
+ "pytest-instafail",
+ "pytest-rerunfailures",
+ "pytest-asyncio",
+ "pre-commit",
+ "invoke"
+]
+test-http-v2 = [
+ "azurefunctions-extensions-http-fastapi==1.0.0",
+ "ujson",
+ "orjson"
+]
+test-deferred-bindings = [
+ "azurefunctions-extensions-bindings-blob==1.0.0",
+ "azurefunctions-extensions-bindings-eventhub==1.0.0b1"
+]
+
+[build-system]
+requires = ["setuptools>=62", "wheel"]
+build-backend = "setuptools.build_meta"
+
+
+[tool.setuptools.packages.find]
+exclude = [
+ 'eng', 'tests*', 'pack', 'azure_functions_worker'
+ ]
+
+[tool.isort]
+profile = "black"
+line_length = 88
+multi_line_output = 3
+include_trailing_comma = true
+known_first_party = ["azure_functions_worker_v2"]
+default_section = "THIRDPARTY"
+src_paths = ["azure_functions_worker_v2"]
+skip_glob = [
+ "build",
+ "dist",
+ "env",
+ "venv",
+ "*venv*",
+ "*.egg-info",
+ ".tox",
+ ".nox",
+ ".git",
+ "__pycache__",
+ ".ci",
+ ".devcontainer",
+ ".github",
+ "eng",
+ "pack",
+ "*protos/*"
+]
+
+[tool.setuptools.dynamic]
+version = {attr = "azure_functions_worker_v2.version.VERSION"}
diff --git a/azure_functions_worker_v2/requirements.txt b/azure_functions_worker_v2/requirements.txt
new file mode 100644
index 000000000..3fdb69c81
--- /dev/null
+++ b/azure_functions_worker_v2/requirements.txt
@@ -0,0 +1,2 @@
+# Required dependencies listed in pyproject.toml
+.
diff --git a/azure_functions_worker_v2/tests/protos/FunctionRpc_pb2.py b/azure_functions_worker_v2/tests/protos/FunctionRpc_pb2.py
new file mode 100644
index 000000000..df996ff4f
--- /dev/null
+++ b/azure_functions_worker_v2/tests/protos/FunctionRpc_pb2.py
@@ -0,0 +1,215 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: FunctionRpc.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf.internal import builder as _builder
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2
+from .identity import ClaimsIdentityRpc_pb2 as identity_dot_ClaimsIdentityRpc__pb2
+from .shared import NullableTypes_pb2 as shared_dot_NullableTypes__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x11\x46unctionRpc.proto\x12\x19\x41zureFunctionsRpcMessages\x1a\x1egoogle/protobuf/duration.proto\x1a identity/ClaimsIdentityRpc.proto\x1a\x1ashared/NullableTypes.proto\"\x8c\x11\n\x10StreamingMessage\x12\x12\n\nrequest_id\x18\x01 \x01(\t\x12>\n\x0cstart_stream\x18\x14 \x01(\x0b\x32&.AzureFunctionsRpcMessages.StartStreamH\x00\x12K\n\x13worker_init_request\x18\x11 \x01(\x0b\x32,.AzureFunctionsRpcMessages.WorkerInitRequestH\x00\x12M\n\x14worker_init_response\x18\x10 \x01(\x0b\x32-.AzureFunctionsRpcMessages.WorkerInitResponseH\x00\x12\x46\n\x10worker_heartbeat\x18\x0f \x01(\x0b\x32*.AzureFunctionsRpcMessages.WorkerHeartbeatH\x00\x12\x46\n\x10worker_terminate\x18\x0e \x01(\x0b\x32*.AzureFunctionsRpcMessages.WorkerTerminateH\x00\x12O\n\x15worker_status_request\x18\x0c \x01(\x0b\x32..AzureFunctionsRpcMessages.WorkerStatusRequestH\x00\x12Q\n\x16worker_status_response\x18\r \x01(\x0b\x32/.AzureFunctionsRpcMessages.WorkerStatusResponseH\x00\x12V\n\x19\x66ile_change_event_request\x18\x06 \x01(\x0b\x32\x31.AzureFunctionsRpcMessages.FileChangeEventRequestH\x00\x12Q\n\x16worker_action_response\x18\x07 \x01(\x0b\x32/.AzureFunctionsRpcMessages.WorkerActionResponseH\x00\x12O\n\x15\x66unction_load_request\x18\x08 \x01(\x0b\x32..AzureFunctionsRpcMessages.FunctionLoadRequestH\x00\x12Q\n\x16\x66unction_load_response\x18\t \x01(\x0b\x32/.AzureFunctionsRpcMessages.FunctionLoadResponseH\x00\x12J\n\x12invocation_request\x18\x04 \x01(\x0b\x32,.AzureFunctionsRpcMessages.InvocationRequestH\x00\x12L\n\x13invocation_response\x18\x05 \x01(\x0b\x32-.AzureFunctionsRpcMessages.InvocationResponseH\x00\x12H\n\x11invocation_cancel\x18\x15 \x01(\x0b\x32+.AzureFunctionsRpcMessages.InvocationCancelH\x00\x12\x34\n\x07rpc_log\x18\x02 \x01(\x0b\x32!.AzureFunctionsRpcMessages.RpcLogH\x00\x12j\n#function_environment_reload_request\x18\x19 
\x01(\x0b\x32;.AzureFunctionsRpcMessages.FunctionEnvironmentReloadRequestH\x00\x12l\n$function_environment_reload_response\x18\x1a \x01(\x0b\x32<.AzureFunctionsRpcMessages.FunctionEnvironmentReloadResponseH\x00\x12m\n%close_shared_memory_resources_request\x18\x1b \x01(\x0b\x32<.AzureFunctionsRpcMessages.CloseSharedMemoryResourcesRequestH\x00\x12o\n&close_shared_memory_resources_response\x18\x1c \x01(\x0b\x32=.AzureFunctionsRpcMessages.CloseSharedMemoryResourcesResponseH\x00\x12Y\n\x1a\x66unctions_metadata_request\x18\x1d \x01(\x0b\x32\x33.AzureFunctionsRpcMessages.FunctionsMetadataRequestH\x00\x12Y\n\x1a\x66unction_metadata_response\x18\x1e \x01(\x0b\x32\x33.AzureFunctionsRpcMessages.FunctionMetadataResponseH\x00\x12\x64\n function_load_request_collection\x18\x1f \x01(\x0b\x32\x38.AzureFunctionsRpcMessages.FunctionLoadRequestCollectionH\x00\x12\x66\n!function_load_response_collection\x18 \x01(\x0b\x32\x39.AzureFunctionsRpcMessages.FunctionLoadResponseCollectionH\x00\x12O\n\x15worker_warmup_request\x18! 
\x01(\x0b\x32..AzureFunctionsRpcMessages.WorkerWarmupRequestH\x00\x12Q\n\x16worker_warmup_response\x18\" \x01(\x0b\x32/.AzureFunctionsRpcMessages.WorkerWarmupResponseH\x00\x42\t\n\x07\x63ontent\" \n\x0bStartStream\x12\x11\n\tworker_id\x18\x02 \x01(\t\"\xa6\x03\n\x11WorkerInitRequest\x12\x14\n\x0chost_version\x18\x01 \x01(\t\x12T\n\x0c\x63\x61pabilities\x18\x02 \x03(\x0b\x32>.AzureFunctionsRpcMessages.WorkerInitRequest.CapabilitiesEntry\x12W\n\x0elog_categories\x18\x03 \x03(\x0b\x32?.AzureFunctionsRpcMessages.WorkerInitRequest.LogCategoriesEntry\x12\x18\n\x10worker_directory\x18\x04 \x01(\t\x12\x1e\n\x16\x66unction_app_directory\x18\x05 \x01(\t\x1a\x33\n\x11\x43\x61pabilitiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a]\n\x12LogCategoriesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x36\n\x05value\x18\x02 \x01(\x0e\x32\'.AzureFunctionsRpcMessages.RpcLog.Level:\x02\x38\x01\"\xb5\x02\n\x12WorkerInitResponse\x12\x16\n\x0eworker_version\x18\x01 \x01(\t\x12U\n\x0c\x63\x61pabilities\x18\x02 \x03(\x0b\x32?.AzureFunctionsRpcMessages.WorkerInitResponse.CapabilitiesEntry\x12\x37\n\x06result\x18\x03 \x01(\x0b\x32\'.AzureFunctionsRpcMessages.StatusResult\x12\x42\n\x0fworker_metadata\x18\x04 \x01(\x0b\x32).AzureFunctionsRpcMessages.WorkerMetadata\x1a\x33\n\x11\x43\x61pabilitiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x84\x02\n\x0eWorkerMetadata\x12\x14\n\x0cruntime_name\x18\x01 \x01(\t\x12\x17\n\x0fruntime_version\x18\x02 \x01(\t\x12\x16\n\x0eworker_version\x18\x03 \x01(\t\x12\x16\n\x0eworker_bitness\x18\x04 \x01(\t\x12Z\n\x11\x63ustom_properties\x18\x05 \x03(\x0b\x32?.AzureFunctionsRpcMessages.WorkerMetadata.CustomPropertiesEntry\x1a\x37\n\x15\x43ustomPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xfe\x01\n\x0cStatusResult\x12>\n\x06status\x18\x04 \x01(\x0e\x32..AzureFunctionsRpcMessages.StatusResult.Status\x12\x0e\n\x06result\x18\x01 
\x01(\t\x12:\n\texception\x18\x02 \x01(\x0b\x32\'.AzureFunctionsRpcMessages.RpcException\x12/\n\x04logs\x18\x03 \x03(\x0b\x32!.AzureFunctionsRpcMessages.RpcLog\"1\n\x06Status\x12\x0b\n\x07\x46\x61ilure\x10\x00\x12\x0b\n\x07Success\x10\x01\x12\r\n\tCancelled\x10\x02\"\x11\n\x0fWorkerHeartbeat\"B\n\x0fWorkerTerminate\x12/\n\x0cgrace_period\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\"\xd1\x01\n\x16\x46ileChangeEventRequest\x12\x44\n\x04type\x18\x01 \x01(\x0e\x32\x36.AzureFunctionsRpcMessages.FileChangeEventRequest.Type\x12\x11\n\tfull_path\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\"P\n\x04Type\x12\x0b\n\x07Unknown\x10\x00\x12\x0b\n\x07\x43reated\x10\x01\x12\x0b\n\x07\x44\x65leted\x10\x02\x12\x0b\n\x07\x43hanged\x10\x04\x12\x0b\n\x07Renamed\x10\x08\x12\x07\n\x03\x41ll\x10\x0f\"\x91\x01\n\x14WorkerActionResponse\x12\x46\n\x06\x61\x63tion\x18\x01 \x01(\x0e\x32\x36.AzureFunctionsRpcMessages.WorkerActionResponse.Action\x12\x0e\n\x06reason\x18\x02 \x01(\t\"!\n\x06\x41\x63tion\x12\x0b\n\x07Restart\x10\x00\x12\n\n\x06Reload\x10\x01\"\x15\n\x13WorkerStatusRequest\"\x16\n\x14WorkerStatusResponse\"\xf5\x01\n FunctionEnvironmentReloadRequest\x12t\n\x15\x65nvironment_variables\x18\x01 \x03(\x0b\x32U.AzureFunctionsRpcMessages.FunctionEnvironmentReloadRequest.EnvironmentVariablesEntry\x12\x1e\n\x16\x66unction_app_directory\x18\x02 \x01(\t\x1a;\n\x19\x45nvironmentVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xbb\x02\n!FunctionEnvironmentReloadResponse\x12\x42\n\x0fworker_metadata\x18\x01 \x01(\x0b\x32).AzureFunctionsRpcMessages.WorkerMetadata\x12\x64\n\x0c\x63\x61pabilities\x18\x02 \x03(\x0b\x32N.AzureFunctionsRpcMessages.FunctionEnvironmentReloadResponse.CapabilitiesEntry\x12\x37\n\x06result\x18\x03 \x01(\x0b\x32\'.AzureFunctionsRpcMessages.StatusResult\x1a\x33\n\x11\x43\x61pabilitiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01\"6\n!CloseSharedMemoryResourcesRequest\x12\x11\n\tmap_names\x18\x01 \x03(\t\"\xcb\x01\n\"CloseSharedMemoryResourcesResponse\x12m\n\x11\x63lose_map_results\x18\x01 \x03(\x0b\x32R.AzureFunctionsRpcMessages.CloseSharedMemoryResourcesResponse.CloseMapResultsEntry\x1a\x36\n\x14\x43loseMapResultsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x08:\x02\x38\x01\"o\n\x1d\x46unctionLoadRequestCollection\x12N\n\x16\x66unction_load_requests\x18\x01 \x03(\x0b\x32..AzureFunctionsRpcMessages.FunctionLoadRequest\"r\n\x1e\x46unctionLoadResponseCollection\x12P\n\x17\x66unction_load_responses\x18\x01 \x03(\x0b\x32/.AzureFunctionsRpcMessages.FunctionLoadResponse\"\x90\x01\n\x13\x46unctionLoadRequest\x12\x13\n\x0b\x66unction_id\x18\x01 \x01(\t\x12@\n\x08metadata\x18\x02 \x01(\x0b\x32..AzureFunctionsRpcMessages.RpcFunctionMetadata\x12\"\n\x1amanaged_dependency_enabled\x18\x03 \x01(\x08\"\x86\x01\n\x14\x46unctionLoadResponse\x12\x13\n\x0b\x66unction_id\x18\x01 \x01(\t\x12\x37\n\x06result\x18\x02 \x01(\x0b\x32\'.AzureFunctionsRpcMessages.StatusResult\x12 \n\x18is_dependency_downloaded\x18\x03 \x01(\x08\"\xff\x04\n\x13RpcFunctionMetadata\x12\x0c\n\x04name\x18\x04 \x01(\t\x12\x11\n\tdirectory\x18\x01 \x01(\t\x12\x13\n\x0bscript_file\x18\x02 \x01(\t\x12\x13\n\x0b\x65ntry_point\x18\x03 \x01(\t\x12N\n\x08\x62indings\x18\x06 \x03(\x0b\x32<.AzureFunctionsRpcMessages.RpcFunctionMetadata.BindingsEntry\x12\x10\n\x08is_proxy\x18\x07 \x01(\x08\x12\x37\n\x06status\x18\x08 \x01(\x0b\x32\'.AzureFunctionsRpcMessages.StatusResult\x12\x10\n\x08language\x18\t \x01(\t\x12\x14\n\x0craw_bindings\x18\n \x03(\t\x12\x13\n\x0b\x66unction_id\x18\r \x01(\t\x12\"\n\x1amanaged_dependency_enabled\x18\x0e \x01(\x08\x12\x41\n\rretry_options\x18\x0f \x01(\x0b\x32*.AzureFunctionsRpcMessages.RpcRetryOptions\x12R\n\nproperties\x18\x10 \x03(\x0b\x32>.AzureFunctionsRpcMessages.RpcFunctionMetadata.PropertiesEntry\x1aW\n\rBindingsEntry\x12\x0b\n\x03key\x18\x01 
\x01(\t\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32&.AzureFunctionsRpcMessages.BindingInfo:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\":\n\x18\x46unctionsMetadataRequest\x12\x1e\n\x16\x66unction_app_directory\x18\x01 \x01(\t\"\xcd\x01\n\x18\x46unctionMetadataResponse\x12Q\n\x19\x66unction_metadata_results\x18\x01 \x03(\x0b\x32..AzureFunctionsRpcMessages.RpcFunctionMetadata\x12\x37\n\x06result\x18\x02 \x01(\x0b\x32\'.AzureFunctionsRpcMessages.StatusResult\x12%\n\x1duse_default_metadata_indexing\x18\x03 \x01(\x08\"\xbe\x03\n\x11InvocationRequest\x12\x15\n\rinvocation_id\x18\x01 \x01(\t\x12\x13\n\x0b\x66unction_id\x18\x02 \x01(\t\x12?\n\ninput_data\x18\x03 \x03(\x0b\x32+.AzureFunctionsRpcMessages.ParameterBinding\x12[\n\x10trigger_metadata\x18\x04 \x03(\x0b\x32\x41.AzureFunctionsRpcMessages.InvocationRequest.TriggerMetadataEntry\x12\x41\n\rtrace_context\x18\x05 \x01(\x0b\x32*.AzureFunctionsRpcMessages.RpcTraceContext\x12>\n\rretry_context\x18\x06 \x01(\x0b\x32\'.AzureFunctionsRpcMessages.RetryContext\x1a\\\n\x14TriggerMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x33\n\x05value\x18\x02 \x01(\x0b\x32$.AzureFunctionsRpcMessages.TypedData:\x02\x38\x01\"\xbf\x01\n\x0fRpcTraceContext\x12\x14\n\x0ctrace_parent\x18\x01 \x01(\t\x12\x13\n\x0btrace_state\x18\x02 \x01(\t\x12N\n\nattributes\x18\x03 \x03(\x0b\x32:.AzureFunctionsRpcMessages.RpcTraceContext.AttributesEntry\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"x\n\x0cRetryContext\x12\x13\n\x0bretry_count\x18\x01 \x01(\x05\x12\x17\n\x0fmax_retry_count\x18\x02 \x01(\x05\x12:\n\texception\x18\x03 \x01(\x0b\x32\'.AzureFunctionsRpcMessages.RpcException\"Z\n\x10InvocationCancel\x12\x15\n\rinvocation_id\x18\x02 \x01(\t\x12/\n\x0cgrace_period\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\"\xe2\x01\n\x12InvocationResponse\x12\x15\n\rinvocation_id\x18\x01 
\x01(\t\x12@\n\x0boutput_data\x18\x02 \x03(\x0b\x32+.AzureFunctionsRpcMessages.ParameterBinding\x12:\n\x0creturn_value\x18\x04 \x01(\x0b\x32$.AzureFunctionsRpcMessages.TypedData\x12\x37\n\x06result\x18\x03 \x01(\x0b\x32\'.AzureFunctionsRpcMessages.StatusResult\"/\n\x13WorkerWarmupRequest\x12\x18\n\x10worker_directory\x18\x01 \x01(\t\"O\n\x14WorkerWarmupResponse\x12\x37\n\x06result\x18\x01 \x01(\x0b\x32\'.AzureFunctionsRpcMessages.StatusResult\"\xfe\x04\n\tTypedData\x12\x10\n\x06string\x18\x01 \x01(\tH\x00\x12\x0e\n\x04json\x18\x02 \x01(\tH\x00\x12\x0f\n\x05\x62ytes\x18\x03 \x01(\x0cH\x00\x12\x10\n\x06stream\x18\x04 \x01(\x0cH\x00\x12\x32\n\x04http\x18\x05 \x01(\x0b\x32\".AzureFunctionsRpcMessages.RpcHttpH\x00\x12\r\n\x03int\x18\x06 \x01(\x12H\x00\x12\x10\n\x06\x64ouble\x18\x07 \x01(\x01H\x00\x12\x46\n\x10\x63ollection_bytes\x18\x08 \x01(\x0b\x32*.AzureFunctionsRpcMessages.CollectionBytesH\x00\x12H\n\x11\x63ollection_string\x18\t \x01(\x0b\x32+.AzureFunctionsRpcMessages.CollectionStringH\x00\x12H\n\x11\x63ollection_double\x18\n \x01(\x0b\x32+.AzureFunctionsRpcMessages.CollectionDoubleH\x00\x12H\n\x11\x63ollection_sint64\x18\x0b \x01(\x0b\x32+.AzureFunctionsRpcMessages.CollectionSInt64H\x00\x12I\n\x12model_binding_data\x18\x0c \x01(\x0b\x32+.AzureFunctionsRpcMessages.ModelBindingDataH\x00\x12^\n\x1d\x63ollection_model_binding_data\x18\r \x01(\x0b\x32\x35.AzureFunctionsRpcMessages.CollectionModelBindingDataH\x00\x42\x06\n\x04\x64\x61ta\"t\n\x0fRpcSharedMemory\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06offset\x18\x02 \x01(\x03\x12\r\n\x05\x63ount\x18\x03 \x01(\x03\x12\x34\n\x04type\x18\x04 \x01(\x0e\x32&.AzureFunctionsRpcMessages.RpcDataType\"\"\n\x10\x43ollectionString\x12\x0e\n\x06string\x18\x01 \x03(\t\" \n\x0f\x43ollectionBytes\x12\r\n\x05\x62ytes\x18\x01 \x03(\x0c\"\"\n\x10\x43ollectionDouble\x12\x0e\n\x06\x64ouble\x18\x01 \x03(\x01\"\"\n\x10\x43ollectionSInt64\x12\x0e\n\x06sint64\x18\x01 \x03(\x12\"\xab\x01\n\x10ParameterBinding\x12\x0c\n\x04name\x18\x01 
\x01(\t\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.AzureFunctionsRpcMessages.TypedDataH\x00\x12G\n\x11rpc_shared_memory\x18\x03 \x01(\x0b\x32*.AzureFunctionsRpcMessages.RpcSharedMemoryH\x00\x42\n\n\x08rpc_data\"\x8b\x03\n\x0b\x42indingInfo\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x43\n\tdirection\x18\x03 \x01(\x0e\x32\x30.AzureFunctionsRpcMessages.BindingInfo.Direction\x12\x42\n\tdata_type\x18\x04 \x01(\x0e\x32/.AzureFunctionsRpcMessages.BindingInfo.DataType\x12J\n\nproperties\x18\x05 \x03(\x0b\x32\x36.AzureFunctionsRpcMessages.BindingInfo.PropertiesEntry\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\'\n\tDirection\x12\x06\n\x02in\x10\x00\x12\x07\n\x03out\x10\x01\x12\t\n\x05inout\x10\x02\"=\n\x08\x44\x61taType\x12\r\n\tundefined\x10\x00\x12\n\n\x06string\x10\x01\x12\n\n\x06\x62inary\x10\x02\x12\n\n\x06stream\x10\x03\"\xe7\x04\n\x06RpcLog\x12\x15\n\rinvocation_id\x18\x01 \x01(\t\x12\x10\n\x08\x63\x61tegory\x18\x02 \x01(\t\x12\x36\n\x05level\x18\x03 \x01(\x0e\x32\'.AzureFunctionsRpcMessages.RpcLog.Level\x12\x0f\n\x07message\x18\x04 \x01(\t\x12\x10\n\x08\x65vent_id\x18\x05 \x01(\t\x12:\n\texception\x18\x06 \x01(\x0b\x32\'.AzureFunctionsRpcMessages.RpcException\x12\x12\n\nproperties\x18\x07 \x01(\t\x12\x46\n\x0clog_category\x18\x08 \x01(\x0e\x32\x30.AzureFunctionsRpcMessages.RpcLog.RpcLogCategory\x12K\n\rpropertiesMap\x18\t \x03(\x0b\x32\x34.AzureFunctionsRpcMessages.RpcLog.PropertiesMapEntry\x1aZ\n\x12PropertiesMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x33\n\x05value\x18\x02 
\x01(\x0b\x32$.AzureFunctionsRpcMessages.TypedData:\x02\x38\x01\"^\n\x05Level\x12\t\n\x05Trace\x10\x00\x12\t\n\x05\x44\x65\x62ug\x10\x01\x12\x0f\n\x0bInformation\x10\x02\x12\x0b\n\x07Warning\x10\x03\x12\t\n\x05\x45rror\x10\x04\x12\x0c\n\x08\x43ritical\x10\x05\x12\x08\n\x04None\x10\x06\"8\n\x0eRpcLogCategory\x12\x08\n\x04User\x10\x00\x12\n\n\x06System\x10\x01\x12\x10\n\x0c\x43ustomMetric\x10\x02\"m\n\x0cRpcException\x12\x0e\n\x06source\x18\x03 \x01(\t\x12\x13\n\x0bstack_trace\x18\x01 \x01(\t\x12\x0f\n\x07message\x18\x02 \x01(\t\x12\x19\n\x11is_user_exception\x18\x04 \x01(\x08\x12\x0c\n\x04type\x18\x05 \x01(\t\"\xf7\x02\n\rRpcHttpCookie\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\x12\x1f\n\x06\x64omain\x18\x03 \x01(\x0b\x32\x0f.NullableString\x12\x1d\n\x04path\x18\x04 \x01(\x0b\x32\x0f.NullableString\x12#\n\x07\x65xpires\x18\x05 \x01(\x0b\x32\x12.NullableTimestamp\x12\x1d\n\x06secure\x18\x06 \x01(\x0b\x32\r.NullableBool\x12 \n\thttp_only\x18\x07 \x01(\x0b\x32\r.NullableBool\x12\x44\n\tsame_site\x18\x08 \x01(\x0e\x32\x31.AzureFunctionsRpcMessages.RpcHttpCookie.SameSite\x12 \n\x07max_age\x18\t \x01(\x0b\x32\x0f.NullableDouble\";\n\x08SameSite\x12\x08\n\x04None\x10\x00\x12\x07\n\x03Lax\x10\x01\x12\n\n\x06Strict\x10\x02\x12\x10\n\x0c\x45xplicitNone\x10\x03\"\xc5\x08\n\x07RpcHttp\x12\x0e\n\x06method\x18\x01 \x01(\t\x12\x0b\n\x03url\x18\x02 \x01(\t\x12@\n\x07headers\x18\x03 \x03(\x0b\x32/.AzureFunctionsRpcMessages.RpcHttp.HeadersEntry\x12\x32\n\x04\x62ody\x18\x04 \x01(\x0b\x32$.AzureFunctionsRpcMessages.TypedData\x12>\n\x06params\x18\n \x03(\x0b\x32..AzureFunctionsRpcMessages.RpcHttp.ParamsEntry\x12\x13\n\x0bstatus_code\x18\x0c \x01(\t\x12<\n\x05query\x18\x0f \x03(\x0b\x32-.AzureFunctionsRpcMessages.RpcHttp.QueryEntry\x12\"\n\x1a\x65nable_content_negotiation\x18\x10 \x01(\x08\x12\x35\n\x07rawBody\x18\x11 \x01(\x0b\x32$.AzureFunctionsRpcMessages.TypedData\x12&\n\nidentities\x18\x12 \x03(\x0b\x32\x12.RpcClaimsIdentity\x12\x39\n\x07\x63ookies\x18\x13 
\x03(\x0b\x32(.AzureFunctionsRpcMessages.RpcHttpCookie\x12Q\n\x10nullable_headers\x18\x14 \x03(\x0b\x32\x37.AzureFunctionsRpcMessages.RpcHttp.NullableHeadersEntry\x12O\n\x0fnullable_params\x18\x15 \x03(\x0b\x32\x36.AzureFunctionsRpcMessages.RpcHttp.NullableParamsEntry\x12M\n\x0enullable_query\x18\x16 \x03(\x0b\x32\x35.AzureFunctionsRpcMessages.RpcHttp.NullableQueryEntry\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a-\n\x0bParamsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a,\n\nQueryEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1aG\n\x14NullableHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1e\n\x05value\x18\x02 \x01(\x0b\x32\x0f.NullableString:\x02\x38\x01\x1a\x46\n\x13NullableParamsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1e\n\x05value\x18\x02 \x01(\x0b\x32\x0f.NullableString:\x02\x38\x01\x1a\x45\n\x12NullableQueryEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1e\n\x05value\x18\x02 \x01(\x0b\x32\x0f.NullableString:\x02\x38\x01\"Z\n\x10ModelBindingData\x12\x0f\n\x07version\x18\x01 \x01(\t\x12\x0e\n\x06source\x18\x02 \x01(\t\x12\x14\n\x0c\x63ontent_type\x18\x03 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x04 \x01(\x0c\"e\n\x1a\x43ollectionModelBindingData\x12G\n\x12model_binding_data\x18\x01 \x03(\x0b\x32+.AzureFunctionsRpcMessages.ModelBindingData\"\xd4\x02\n\x0fRpcRetryOptions\x12\x17\n\x0fmax_retry_count\x18\x02 \x01(\x05\x12\x31\n\x0e\x64\x65lay_interval\x18\x03 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x33\n\x10minimum_interval\x18\x04 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x33\n\x10maximum_interval\x18\x05 \x01(\x0b\x32\x19.google.protobuf.Duration\x12P\n\x0eretry_strategy\x18\x06 
\x01(\x0e\x32\x38.AzureFunctionsRpcMessages.RpcRetryOptions.RetryStrategy\"9\n\rRetryStrategy\x12\x17\n\x13\x65xponential_backoff\x10\x00\x12\x0f\n\x0b\x66ixed_delay\x10\x01*\xc1\x01\n\x0bRpcDataType\x12\x0b\n\x07unknown\x10\x00\x12\n\n\x06string\x10\x01\x12\x08\n\x04json\x10\x02\x12\t\n\x05\x62ytes\x10\x03\x12\n\n\x06stream\x10\x04\x12\x08\n\x04http\x10\x05\x12\x07\n\x03int\x10\x06\x12\n\n\x06\x64ouble\x10\x07\x12\x14\n\x10\x63ollection_bytes\x10\x08\x12\x15\n\x11\x63ollection_string\x10\t\x12\x15\n\x11\x63ollection_double\x10\n\x12\x15\n\x11\x63ollection_sint64\x10\x0b\x32|\n\x0b\x46unctionRpc\x12m\n\x0b\x45ventStream\x12+.AzureFunctionsRpcMessages.StreamingMessage\x1a+.AzureFunctionsRpcMessages.StreamingMessage\"\x00(\x01\x30\x01\x42\xa5\x01\n*com.microsoft.azure.functions.rpc.messagesB\rFunctionProtoP\x01Z7github.com/Azure/azure-functions-go-worker/internal/rpc\xaa\x02,Microsoft.Azure.WebJobs.Script.Grpc.Messagesb\x06proto3')
+
+_globals = globals()
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'FunctionRpc_pb2', _globals)
+if _descriptor._USE_C_DESCRIPTORS == False:
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b'\n*com.microsoft.azure.functions.rpc.messagesB\rFunctionProtoP\001Z7github.com/Azure/azure-functions-go-worker/internal/rpc\252\002,Microsoft.Azure.WebJobs.Script.Grpc.Messages'
+ _WORKERINITREQUEST_CAPABILITIESENTRY._options = None
+ _WORKERINITREQUEST_CAPABILITIESENTRY._serialized_options = b'8\001'
+ _WORKERINITREQUEST_LOGCATEGORIESENTRY._options = None
+ _WORKERINITREQUEST_LOGCATEGORIESENTRY._serialized_options = b'8\001'
+ _WORKERINITRESPONSE_CAPABILITIESENTRY._options = None
+ _WORKERINITRESPONSE_CAPABILITIESENTRY._serialized_options = b'8\001'
+ _WORKERMETADATA_CUSTOMPROPERTIESENTRY._options = None
+ _WORKERMETADATA_CUSTOMPROPERTIESENTRY._serialized_options = b'8\001'
+ _FUNCTIONENVIRONMENTRELOADREQUEST_ENVIRONMENTVARIABLESENTRY._options = None
+ _FUNCTIONENVIRONMENTRELOADREQUEST_ENVIRONMENTVARIABLESENTRY._serialized_options = b'8\001'
+ _FUNCTIONENVIRONMENTRELOADRESPONSE_CAPABILITIESENTRY._options = None
+ _FUNCTIONENVIRONMENTRELOADRESPONSE_CAPABILITIESENTRY._serialized_options = b'8\001'
+ _CLOSESHAREDMEMORYRESOURCESRESPONSE_CLOSEMAPRESULTSENTRY._options = None
+ _CLOSESHAREDMEMORYRESOURCESRESPONSE_CLOSEMAPRESULTSENTRY._serialized_options = b'8\001'
+ _RPCFUNCTIONMETADATA_BINDINGSENTRY._options = None
+ _RPCFUNCTIONMETADATA_BINDINGSENTRY._serialized_options = b'8\001'
+ _RPCFUNCTIONMETADATA_PROPERTIESENTRY._options = None
+ _RPCFUNCTIONMETADATA_PROPERTIESENTRY._serialized_options = b'8\001'
+ _INVOCATIONREQUEST_TRIGGERMETADATAENTRY._options = None
+ _INVOCATIONREQUEST_TRIGGERMETADATAENTRY._serialized_options = b'8\001'
+ _RPCTRACECONTEXT_ATTRIBUTESENTRY._options = None
+ _RPCTRACECONTEXT_ATTRIBUTESENTRY._serialized_options = b'8\001'
+ _BINDINGINFO_PROPERTIESENTRY._options = None
+ _BINDINGINFO_PROPERTIESENTRY._serialized_options = b'8\001'
+ _RPCLOG_PROPERTIESMAPENTRY._options = None
+ _RPCLOG_PROPERTIESMAPENTRY._serialized_options = b'8\001'
+ _RPCHTTP_HEADERSENTRY._options = None
+ _RPCHTTP_HEADERSENTRY._serialized_options = b'8\001'
+ _RPCHTTP_PARAMSENTRY._options = None
+ _RPCHTTP_PARAMSENTRY._serialized_options = b'8\001'
+ _RPCHTTP_QUERYENTRY._options = None
+ _RPCHTTP_QUERYENTRY._serialized_options = b'8\001'
+ _RPCHTTP_NULLABLEHEADERSENTRY._options = None
+ _RPCHTTP_NULLABLEHEADERSENTRY._serialized_options = b'8\001'
+ _RPCHTTP_NULLABLEPARAMSENTRY._options = None
+ _RPCHTTP_NULLABLEPARAMSENTRY._serialized_options = b'8\001'
+ _RPCHTTP_NULLABLEQUERYENTRY._options = None
+ _RPCHTTP_NULLABLEQUERYENTRY._serialized_options = b'8\001'
+ _globals['_RPCDATATYPE']._serialized_start=11800
+ _globals['_RPCDATATYPE']._serialized_end=11993
+ _globals['_STREAMINGMESSAGE']._serialized_start=143
+ _globals['_STREAMINGMESSAGE']._serialized_end=2331
+ _globals['_STARTSTREAM']._serialized_start=2333
+ _globals['_STARTSTREAM']._serialized_end=2365
+ _globals['_WORKERINITREQUEST']._serialized_start=2368
+ _globals['_WORKERINITREQUEST']._serialized_end=2790
+ _globals['_WORKERINITREQUEST_CAPABILITIESENTRY']._serialized_start=2644
+ _globals['_WORKERINITREQUEST_CAPABILITIESENTRY']._serialized_end=2695
+ _globals['_WORKERINITREQUEST_LOGCATEGORIESENTRY']._serialized_start=2697
+ _globals['_WORKERINITREQUEST_LOGCATEGORIESENTRY']._serialized_end=2790
+ _globals['_WORKERINITRESPONSE']._serialized_start=2793
+ _globals['_WORKERINITRESPONSE']._serialized_end=3102
+ _globals['_WORKERINITRESPONSE_CAPABILITIESENTRY']._serialized_start=2644
+ _globals['_WORKERINITRESPONSE_CAPABILITIESENTRY']._serialized_end=2695
+ _globals['_WORKERMETADATA']._serialized_start=3105
+ _globals['_WORKERMETADATA']._serialized_end=3365
+ _globals['_WORKERMETADATA_CUSTOMPROPERTIESENTRY']._serialized_start=3310
+ _globals['_WORKERMETADATA_CUSTOMPROPERTIESENTRY']._serialized_end=3365
+ _globals['_STATUSRESULT']._serialized_start=3368
+ _globals['_STATUSRESULT']._serialized_end=3622
+ _globals['_STATUSRESULT_STATUS']._serialized_start=3573
+ _globals['_STATUSRESULT_STATUS']._serialized_end=3622
+ _globals['_WORKERHEARTBEAT']._serialized_start=3624
+ _globals['_WORKERHEARTBEAT']._serialized_end=3641
+ _globals['_WORKERTERMINATE']._serialized_start=3643
+ _globals['_WORKERTERMINATE']._serialized_end=3709
+ _globals['_FILECHANGEEVENTREQUEST']._serialized_start=3712
+ _globals['_FILECHANGEEVENTREQUEST']._serialized_end=3921
+ _globals['_FILECHANGEEVENTREQUEST_TYPE']._serialized_start=3841
+ _globals['_FILECHANGEEVENTREQUEST_TYPE']._serialized_end=3921
+ _globals['_WORKERACTIONRESPONSE']._serialized_start=3924
+ _globals['_WORKERACTIONRESPONSE']._serialized_end=4069
+ _globals['_WORKERACTIONRESPONSE_ACTION']._serialized_start=4036
+ _globals['_WORKERACTIONRESPONSE_ACTION']._serialized_end=4069
+ _globals['_WORKERSTATUSREQUEST']._serialized_start=4071
+ _globals['_WORKERSTATUSREQUEST']._serialized_end=4092
+ _globals['_WORKERSTATUSRESPONSE']._serialized_start=4094
+ _globals['_WORKERSTATUSRESPONSE']._serialized_end=4116
+ _globals['_FUNCTIONENVIRONMENTRELOADREQUEST']._serialized_start=4119
+ _globals['_FUNCTIONENVIRONMENTRELOADREQUEST']._serialized_end=4364
+ _globals['_FUNCTIONENVIRONMENTRELOADREQUEST_ENVIRONMENTVARIABLESENTRY']._serialized_start=4305
+ _globals['_FUNCTIONENVIRONMENTRELOADREQUEST_ENVIRONMENTVARIABLESENTRY']._serialized_end=4364
+ _globals['_FUNCTIONENVIRONMENTRELOADRESPONSE']._serialized_start=4367
+ _globals['_FUNCTIONENVIRONMENTRELOADRESPONSE']._serialized_end=4682
+ _globals['_FUNCTIONENVIRONMENTRELOADRESPONSE_CAPABILITIESENTRY']._serialized_start=2644
+ _globals['_FUNCTIONENVIRONMENTRELOADRESPONSE_CAPABILITIESENTRY']._serialized_end=2695
+ _globals['_CLOSESHAREDMEMORYRESOURCESREQUEST']._serialized_start=4684
+ _globals['_CLOSESHAREDMEMORYRESOURCESREQUEST']._serialized_end=4738
+ _globals['_CLOSESHAREDMEMORYRESOURCESRESPONSE']._serialized_start=4741
+ _globals['_CLOSESHAREDMEMORYRESOURCESRESPONSE']._serialized_end=4944
+ _globals['_CLOSESHAREDMEMORYRESOURCESRESPONSE_CLOSEMAPRESULTSENTRY']._serialized_start=4890
+ _globals['_CLOSESHAREDMEMORYRESOURCESRESPONSE_CLOSEMAPRESULTSENTRY']._serialized_end=4944
+ _globals['_FUNCTIONLOADREQUESTCOLLECTION']._serialized_start=4946
+ _globals['_FUNCTIONLOADREQUESTCOLLECTION']._serialized_end=5057
+ _globals['_FUNCTIONLOADRESPONSECOLLECTION']._serialized_start=5059
+ _globals['_FUNCTIONLOADRESPONSECOLLECTION']._serialized_end=5173
+ _globals['_FUNCTIONLOADREQUEST']._serialized_start=5176
+ _globals['_FUNCTIONLOADREQUEST']._serialized_end=5320
+ _globals['_FUNCTIONLOADRESPONSE']._serialized_start=5323
+ _globals['_FUNCTIONLOADRESPONSE']._serialized_end=5457
+ _globals['_RPCFUNCTIONMETADATA']._serialized_start=5460
+ _globals['_RPCFUNCTIONMETADATA']._serialized_end=6099
+ _globals['_RPCFUNCTIONMETADATA_BINDINGSENTRY']._serialized_start=5961
+ _globals['_RPCFUNCTIONMETADATA_BINDINGSENTRY']._serialized_end=6048
+ _globals['_RPCFUNCTIONMETADATA_PROPERTIESENTRY']._serialized_start=6050
+ _globals['_RPCFUNCTIONMETADATA_PROPERTIESENTRY']._serialized_end=6099
+ _globals['_FUNCTIONSMETADATAREQUEST']._serialized_start=6101
+ _globals['_FUNCTIONSMETADATAREQUEST']._serialized_end=6159
+ _globals['_FUNCTIONMETADATARESPONSE']._serialized_start=6162
+ _globals['_FUNCTIONMETADATARESPONSE']._serialized_end=6367
+ _globals['_INVOCATIONREQUEST']._serialized_start=6370
+ _globals['_INVOCATIONREQUEST']._serialized_end=6816
+ _globals['_INVOCATIONREQUEST_TRIGGERMETADATAENTRY']._serialized_start=6724
+ _globals['_INVOCATIONREQUEST_TRIGGERMETADATAENTRY']._serialized_end=6816
+ _globals['_RPCTRACECONTEXT']._serialized_start=6819
+ _globals['_RPCTRACECONTEXT']._serialized_end=7010
+ _globals['_RPCTRACECONTEXT_ATTRIBUTESENTRY']._serialized_start=6961
+ _globals['_RPCTRACECONTEXT_ATTRIBUTESENTRY']._serialized_end=7010
+ _globals['_RETRYCONTEXT']._serialized_start=7012
+ _globals['_RETRYCONTEXT']._serialized_end=7132
+ _globals['_INVOCATIONCANCEL']._serialized_start=7134
+ _globals['_INVOCATIONCANCEL']._serialized_end=7224
+ _globals['_INVOCATIONRESPONSE']._serialized_start=7227
+ _globals['_INVOCATIONRESPONSE']._serialized_end=7453
+ _globals['_WORKERWARMUPREQUEST']._serialized_start=7455
+ _globals['_WORKERWARMUPREQUEST']._serialized_end=7502
+ _globals['_WORKERWARMUPRESPONSE']._serialized_start=7504
+ _globals['_WORKERWARMUPRESPONSE']._serialized_end=7583
+ _globals['_TYPEDDATA']._serialized_start=7586
+ _globals['_TYPEDDATA']._serialized_end=8224
+ _globals['_RPCSHAREDMEMORY']._serialized_start=8226
+ _globals['_RPCSHAREDMEMORY']._serialized_end=8342
+ _globals['_COLLECTIONSTRING']._serialized_start=8344
+ _globals['_COLLECTIONSTRING']._serialized_end=8378
+ _globals['_COLLECTIONBYTES']._serialized_start=8380
+ _globals['_COLLECTIONBYTES']._serialized_end=8412
+ _globals['_COLLECTIONDOUBLE']._serialized_start=8414
+ _globals['_COLLECTIONDOUBLE']._serialized_end=8448
+ _globals['_COLLECTIONSINT64']._serialized_start=8450
+ _globals['_COLLECTIONSINT64']._serialized_end=8484
+ _globals['_PARAMETERBINDING']._serialized_start=8487
+ _globals['_PARAMETERBINDING']._serialized_end=8658
+ _globals['_BINDINGINFO']._serialized_start=8661
+ _globals['_BINDINGINFO']._serialized_end=9056
+ _globals['_BINDINGINFO_PROPERTIESENTRY']._serialized_start=6050
+ _globals['_BINDINGINFO_PROPERTIESENTRY']._serialized_end=6099
+ _globals['_BINDINGINFO_DIRECTION']._serialized_start=8954
+ _globals['_BINDINGINFO_DIRECTION']._serialized_end=8993
+ _globals['_BINDINGINFO_DATATYPE']._serialized_start=8995
+ _globals['_BINDINGINFO_DATATYPE']._serialized_end=9056
+ _globals['_RPCLOG']._serialized_start=9059
+ _globals['_RPCLOG']._serialized_end=9674
+ _globals['_RPCLOG_PROPERTIESMAPENTRY']._serialized_start=9430
+ _globals['_RPCLOG_PROPERTIESMAPENTRY']._serialized_end=9520
+ _globals['_RPCLOG_LEVEL']._serialized_start=9522
+ _globals['_RPCLOG_LEVEL']._serialized_end=9616
+ _globals['_RPCLOG_RPCLOGCATEGORY']._serialized_start=9618
+ _globals['_RPCLOG_RPCLOGCATEGORY']._serialized_end=9674
+ _globals['_RPCEXCEPTION']._serialized_start=9676
+ _globals['_RPCEXCEPTION']._serialized_end=9785
+ _globals['_RPCHTTPCOOKIE']._serialized_start=9788
+ _globals['_RPCHTTPCOOKIE']._serialized_end=10163
+ _globals['_RPCHTTPCOOKIE_SAMESITE']._serialized_start=10104
+ _globals['_RPCHTTPCOOKIE_SAMESITE']._serialized_end=10163
+ _globals['_RPCHTTP']._serialized_start=10166
+ _globals['_RPCHTTP']._serialized_end=11259
+ _globals['_RPCHTTP_HEADERSENTRY']._serialized_start=10904
+ _globals['_RPCHTTP_HEADERSENTRY']._serialized_end=10950
+ _globals['_RPCHTTP_PARAMSENTRY']._serialized_start=10952
+ _globals['_RPCHTTP_PARAMSENTRY']._serialized_end=10997
+ _globals['_RPCHTTP_QUERYENTRY']._serialized_start=10999
+ _globals['_RPCHTTP_QUERYENTRY']._serialized_end=11043
+ _globals['_RPCHTTP_NULLABLEHEADERSENTRY']._serialized_start=11045
+ _globals['_RPCHTTP_NULLABLEHEADERSENTRY']._serialized_end=11116
+ _globals['_RPCHTTP_NULLABLEPARAMSENTRY']._serialized_start=11118
+ _globals['_RPCHTTP_NULLABLEPARAMSENTRY']._serialized_end=11188
+ _globals['_RPCHTTP_NULLABLEQUERYENTRY']._serialized_start=11190
+ _globals['_RPCHTTP_NULLABLEQUERYENTRY']._serialized_end=11259
+ _globals['_MODELBINDINGDATA']._serialized_start=11261
+ _globals['_MODELBINDINGDATA']._serialized_end=11351
+ _globals['_COLLECTIONMODELBINDINGDATA']._serialized_start=11353
+ _globals['_COLLECTIONMODELBINDINGDATA']._serialized_end=11454
+ _globals['_RPCRETRYOPTIONS']._serialized_start=11457
+ _globals['_RPCRETRYOPTIONS']._serialized_end=11797
+ _globals['_RPCRETRYOPTIONS_RETRYSTRATEGY']._serialized_start=11740
+ _globals['_RPCRETRYOPTIONS_RETRYSTRATEGY']._serialized_end=11797
+ _globals['_FUNCTIONRPC']._serialized_start=11995
+ _globals['_FUNCTIONRPC']._serialized_end=12119
+# @@protoc_insertion_point(module_scope)
diff --git a/azure_functions_worker_v2/tests/protos/FunctionRpc_pb2_grpc.py b/azure_functions_worker_v2/tests/protos/FunctionRpc_pb2_grpc.py
new file mode 100644
index 000000000..364658aa9
--- /dev/null
+++ b/azure_functions_worker_v2/tests/protos/FunctionRpc_pb2_grpc.py
@@ -0,0 +1,69 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
+
+from . import FunctionRpc_pb2 as FunctionRpc__pb2
+
+
+class FunctionRpcStub(object):
+ """Interface exported by the server.
+ """
+
+ def __init__(self, channel):
+ """Constructor.
+
+ Args:
+ channel: A grpc.Channel.
+ """
+ self.EventStream = channel.stream_stream(
+ '/AzureFunctionsRpcMessages.FunctionRpc/EventStream',
+ request_serializer=FunctionRpc__pb2.StreamingMessage.SerializeToString,
+ response_deserializer=FunctionRpc__pb2.StreamingMessage.FromString,
+ )
+
+
+class FunctionRpcServicer(object):
+ """Interface exported by the server.
+ """
+
+ def EventStream(self, request_iterator, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+
+def add_FunctionRpcServicer_to_server(servicer, server):
+ rpc_method_handlers = {
+ 'EventStream': grpc.stream_stream_rpc_method_handler(
+ servicer.EventStream,
+ request_deserializer=FunctionRpc__pb2.StreamingMessage.FromString,
+ response_serializer=FunctionRpc__pb2.StreamingMessage.SerializeToString,
+ ),
+ }
+ generic_handler = grpc.method_handlers_generic_handler(
+ 'AzureFunctionsRpcMessages.FunctionRpc', rpc_method_handlers)
+ server.add_generic_rpc_handlers((generic_handler,))
+
+
+ # This class is part of an EXPERIMENTAL API.
+class FunctionRpc(object):
+ """Interface exported by the server.
+ """
+
+ @staticmethod
+ def EventStream(request_iterator,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ insecure=False,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.stream_stream(request_iterator, target, '/AzureFunctionsRpcMessages.FunctionRpc/EventStream',
+ FunctionRpc__pb2.StreamingMessage.SerializeToString,
+ FunctionRpc__pb2.StreamingMessage.FromString,
+ options, channel_credentials,
+ insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
diff --git a/azure_functions_worker_v2/tests/protos/__init__.py b/azure_functions_worker_v2/tests/protos/__init__.py
new file mode 100644
index 000000000..e9c4f2397
--- /dev/null
+++ b/azure_functions_worker_v2/tests/protos/__init__.py
@@ -0,0 +1,43 @@
+from .FunctionRpc_pb2_grpc import ( # NoQA
+ FunctionRpcStub,
+ FunctionRpcServicer,
+ add_FunctionRpcServicer_to_server)
+
+from .FunctionRpc_pb2 import ( # NoQA
+ StreamingMessage,
+ StartStream,
+ WorkerInitRequest,
+ WorkerInitResponse,
+ RpcFunctionMetadata,
+ FunctionLoadRequest,
+ FunctionLoadResponse,
+ FunctionEnvironmentReloadRequest,
+ FunctionEnvironmentReloadResponse,
+ InvocationRequest,
+ InvocationResponse,
+ WorkerHeartbeat,
+ WorkerStatusRequest,
+ WorkerStatusResponse,
+ BindingInfo,
+ StatusResult,
+ RpcException,
+ ParameterBinding,
+ TypedData,
+ RpcHttp,
+ RpcHttpCookie,
+ RpcLog,
+ RpcSharedMemory,
+ RpcDataType,
+ CloseSharedMemoryResourcesRequest,
+ CloseSharedMemoryResourcesResponse,
+ FunctionsMetadataRequest,
+ FunctionMetadataResponse,
+ WorkerMetadata,
+ RpcRetryOptions)
+
+from .shared.NullableTypes_pb2 import (
+ NullableString,
+ NullableBool,
+ NullableDouble,
+ NullableTimestamp
+)
diff --git a/azure_functions_worker_v2/tests/protos/identity/ClaimsIdentityRpc_pb2.py b/azure_functions_worker_v2/tests/protos/identity/ClaimsIdentityRpc_pb2.py
new file mode 100644
index 000000000..e4a2be477
--- /dev/null
+++ b/azure_functions_worker_v2/tests/protos/identity/ClaimsIdentityRpc_pb2.py
@@ -0,0 +1,29 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: identity/ClaimsIdentityRpc.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf.internal import builder as _builder
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from ..shared import NullableTypes_pb2 as shared_dot_NullableTypes__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n identity/ClaimsIdentityRpc.proto\x1a\x1ashared/NullableTypes.proto\"\xb0\x01\n\x11RpcClaimsIdentity\x12,\n\x13\x61uthentication_type\x18\x01 \x01(\x0b\x32\x0f.NullableString\x12(\n\x0fname_claim_type\x18\x02 \x01(\x0b\x32\x0f.NullableString\x12(\n\x0frole_claim_type\x18\x03 \x01(\x0b\x32\x0f.NullableString\x12\x19\n\x06\x63laims\x18\x04 \x03(\x0b\x32\t.RpcClaim\"\'\n\x08RpcClaim\x12\r\n\x05value\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\tB,\n*com.microsoft.azure.functions.rpc.messagesb\x06proto3')
+
+_globals = globals()
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'identity.ClaimsIdentityRpc_pb2', _globals)
+if _descriptor._USE_C_DESCRIPTORS == False:
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b'\n*com.microsoft.azure.functions.rpc.messages'
+ _globals['_RPCCLAIMSIDENTITY']._serialized_start=65
+ _globals['_RPCCLAIMSIDENTITY']._serialized_end=241
+ _globals['_RPCCLAIM']._serialized_start=243
+ _globals['_RPCCLAIM']._serialized_end=282
+# @@protoc_insertion_point(module_scope)
diff --git a/azure_functions_worker_v2/tests/protos/identity/ClaimsIdentityRpc_pb2_grpc.py b/azure_functions_worker_v2/tests/protos/identity/ClaimsIdentityRpc_pb2_grpc.py
new file mode 100644
index 000000000..2daafffeb
--- /dev/null
+++ b/azure_functions_worker_v2/tests/protos/identity/ClaimsIdentityRpc_pb2_grpc.py
@@ -0,0 +1,4 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
+
diff --git a/azure_functions_worker_v2/tests/protos/identity/__init__.py b/azure_functions_worker_v2/tests/protos/identity/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/azure_functions_worker_v2/tests/protos/shared/NullableTypes_pb2.py b/azure_functions_worker_v2/tests/protos/shared/NullableTypes_pb2.py
new file mode 100644
index 000000000..0b5b96bf1
--- /dev/null
+++ b/azure_functions_worker_v2/tests/protos/shared/NullableTypes_pb2.py
@@ -0,0 +1,33 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: shared/NullableTypes.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf.internal import builder as _builder
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1ashared/NullableTypes.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"+\n\x0eNullableString\x12\x0f\n\x05value\x18\x01 \x01(\tH\x00\x42\x08\n\x06string\"+\n\x0eNullableDouble\x12\x0f\n\x05value\x18\x01 \x01(\x01H\x00\x42\x08\n\x06\x64ouble\"\'\n\x0cNullableBool\x12\x0f\n\x05value\x18\x01 \x01(\x08H\x00\x42\x06\n\x04\x62ool\"M\n\x11NullableTimestamp\x12+\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x42\x0b\n\ttimestampB,\n*com.microsoft.azure.functions.rpc.messagesb\x06proto3')
+
+_globals = globals()
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'shared.NullableTypes_pb2', _globals)
+if _descriptor._USE_C_DESCRIPTORS == False:
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b'\n*com.microsoft.azure.functions.rpc.messages'
+ _globals['_NULLABLESTRING']._serialized_start=63
+ _globals['_NULLABLESTRING']._serialized_end=106
+ _globals['_NULLABLEDOUBLE']._serialized_start=108
+ _globals['_NULLABLEDOUBLE']._serialized_end=151
+ _globals['_NULLABLEBOOL']._serialized_start=153
+ _globals['_NULLABLEBOOL']._serialized_end=192
+ _globals['_NULLABLETIMESTAMP']._serialized_start=194
+ _globals['_NULLABLETIMESTAMP']._serialized_end=271
+# @@protoc_insertion_point(module_scope)
diff --git a/azure_functions_worker_v2/tests/protos/shared/NullableTypes_pb2_grpc.py b/azure_functions_worker_v2/tests/protos/shared/NullableTypes_pb2_grpc.py
new file mode 100644
index 000000000..2daafffeb
--- /dev/null
+++ b/azure_functions_worker_v2/tests/protos/shared/NullableTypes_pb2_grpc.py
@@ -0,0 +1,4 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
+
diff --git a/azure_functions_worker_v2/tests/protos/shared/__init__.py b/azure_functions_worker_v2/tests/protos/shared/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/azure_functions_worker_v2/tests/unittest_proxy/test_dependency.py b/azure_functions_worker_v2/tests/unittest_proxy/test_dependency.py
new file mode 100644
index 000000000..cea4c5af0
--- /dev/null
+++ b/azure_functions_worker_v2/tests/unittest_proxy/test_dependency.py
@@ -0,0 +1,65 @@
+import sys
+import os
+from unittest.mock import patch
+
+from proxy_worker.utils.dependency import DependencyManager
+
+
+@patch("proxy_worker.utils.dependency.DependencyManager._get_cx_deps_path",
+ return_value="/mock/cx/site-packages")
+@patch("proxy_worker.utils.dependency.DependencyManager._get_cx_working_dir",
+ return_value="/mock/cx")
+@patch("proxy_worker.utils.dependency.DependencyManager._get_worker_deps_path",
+ return_value="/mock/worker")
+@patch("proxy_worker.utils.dependency.logger")
+def test_use_worker_dependencies(mock_logger, mock_worker, mock_cx_dir, mock_cx_deps):
+ sys.path = ["/mock/cx/site-packages", "/mock/cx", "/original"]
+
+ DependencyManager.initialize()
+ DependencyManager.use_worker_dependencies()
+
+ assert sys.path[0] == "/mock/worker"
+ assert "/mock/cx/site-packages" not in sys.path
+ assert "/mock/cx" not in sys.path
+
+ mock_logger.info.assert_any_call(
+ 'Applying use_worker_dependencies:'
+ ' worker_dependencies: %s,'
+ ' customer_dependencies: %s,'
+ ' working_directory: %s',
+ "/mock/worker", "/mock/cx/site-packages", "/mock/cx"
+ )
+
+
+@patch("proxy_worker.utils.dependency.DependencyManager._get_cx_deps_path",
+ return_value="/mock/cx/site-packages")
+@patch("proxy_worker.utils.dependency.DependencyManager._get_worker_deps_path",
+ return_value="/mock/worker")
+@patch("proxy_worker.utils.dependency.DependencyManager._get_cx_working_dir",
+ return_value="/mock/cx")
+@patch("proxy_worker.utils.dependency.DependencyManager.is_in_linux_consumption",
+ return_value=False)
+@patch("proxy_worker.utils.dependency.is_envvar_true", return_value=False)
+@patch("proxy_worker.utils.dependency.logger")
+def test_prioritize_customer_dependencies(mock_logger, mock_env, mock_linux,
+ mock_cx_dir, mock_worker, mock_cx_deps):
+ sys.path = ["/mock/worker", "/some/old/path"]
+
+ DependencyManager.initialize()
+ DependencyManager.prioritize_customer_dependencies("/override/cx")
+
+ assert sys.path[0] == "/mock/cx/site-packages"
+ assert sys.path[1] == "/mock/worker"
+ expected_path = os.path.abspath("/override/cx")
+ assert expected_path in sys.path
+
+ # Relaxed log validation: look for matching prefix
+ assert any(
+ "Applying prioritize_customer_dependencies" in str(call[0][0])
+ for call in mock_logger.info.call_args_list
+ )
+
+ assert any(
+ "Finished prioritize_customer_dependencies" in str(call[0][0])
+ for call in mock_logger.info.call_args_list
+ )
diff --git a/azure_functions_worker_v2/tests/unittest_proxy/test_dispatcher.py b/azure_functions_worker_v2/tests/unittest_proxy/test_dispatcher.py
new file mode 100644
index 000000000..22c38fa0a
--- /dev/null
+++ b/azure_functions_worker_v2/tests/unittest_proxy/test_dispatcher.py
@@ -0,0 +1,254 @@
+import asyncio
+import builtins
+import logging
+import types
+import unittest
+from unittest.mock import Mock, patch, MagicMock, AsyncMock, ANY
+
+import pytest
+
+from proxy_worker.dispatcher import Dispatcher
+
+
+class TestDispatcher(unittest.TestCase):
+
+ @patch("proxy_worker.dispatcher.queue.Queue")
+ @patch("proxy_worker.dispatcher.threading.Thread")
+ def test_dispatcher_initialization(self, mock_thread, mock_queue):
+ # Arrange
+ mock_loop = Mock()
+ mock_future = Mock()
+ mock_loop.create_future.return_value = mock_future
+
+ # Act
+ dispatcher = Dispatcher(
+ loop=mock_loop,
+ host="127.0.0.1",
+ port=7070,
+ worker_id="worker123",
+ request_id="req456",
+ grpc_connect_timeout=5.0,
+ grpc_max_msg_len=1024
+ )
+
+ # Assert
+ self.assertEqual(dispatcher._host, "127.0.0.1")
+ self.assertEqual(dispatcher._port, 7070)
+ self.assertEqual(dispatcher._worker_id, "worker123")
+ self.assertEqual(dispatcher._request_id, "req456")
+ self.assertEqual(dispatcher._grpc_connect_timeout, 5.0)
+ self.assertEqual(dispatcher._grpc_max_msg_len, 1024)
+ self.assertEqual(dispatcher._grpc_connected_fut, mock_future)
+ mock_queue.assert_called_once()
+ mock_thread.assert_called_once()
+
+ @patch("proxy_worker.dispatcher.protos.StreamingMessage")
+ @patch("proxy_worker.dispatcher.protos.RpcLog")
+ @patch("proxy_worker.dispatcher.is_system_log_category")
+ def test_on_logging_levels_and_categories(self, mock_is_system, mock_rpc_log,
+ mock_streaming_message):
+ loop = Mock()
+ dispatcher = Dispatcher(loop, "localhost", 5000, "worker",
+ "req", 5.0)
+
+ mock_rpc_log.return_value = Mock()
+ mock_streaming_message.return_value = Mock()
+
+ levels = [
+ (logging.CRITICAL, mock_rpc_log.Critical),
+ (logging.ERROR, mock_rpc_log.Error),
+ (logging.WARNING, mock_rpc_log.Warning),
+ (logging.INFO, mock_rpc_log.Information),
+ (logging.DEBUG, mock_rpc_log.Debug),
+ (5, getattr(mock_rpc_log, 'None')),
+ ]
+
+ for level, expected in levels:
+ record = Mock(levelno=level, name="custom.logger")
+ mock_is_system.return_value = level % 2 == 0 # alternate True/False
+ dispatcher.on_logging(record, "Test message")
+
+ if mock_is_system.return_value:
+ mock_rpc_log.RpcLogCategory.Value.assert_called_with("System")
+ else:
+ mock_rpc_log.RpcLogCategory.Value.assert_called_with("User")
+
+
+def fake_import(name, globals=None, locals=None, fromlist=(), level=0):
+ mock_module = types.SimpleNamespace(__file__=f"{name}.py")
+ mock_module.worker_init_request = AsyncMock(return_value="fake_response")
+ mock_module.function_environment_reload_request = AsyncMock(
+ return_value="mocked_env_reload_response")
+ if name in ["azure_functions_worker_v2", "azure_functions_worker_v1"]:
+ return mock_module
+ return builtins.__import__(name, globals, locals, fromlist, level)
+
+
+@patch("proxy_worker.dispatcher.DependencyManager.should_load_cx_dependencies",
+ return_value=True)
+@patch("proxy_worker.dispatcher.DependencyManager.prioritize_customer_dependencies")
+@patch("proxy_worker.dispatcher.logger")
+@patch("proxy_worker.dispatcher.os.path.exists",
+ side_effect=lambda p: p.endswith("function_app.py"))
+@patch("builtins.__import__", side_effect=fake_import)
+@patch("proxy_worker.dispatcher.protos.StreamingMessage",
+ return_value="mocked_streaming_response")
+@pytest.mark.asyncio
+async def test_worker_init_v2_import(
+ mock_streaming, mock_import, mock_exists, mock_logger, mock_prioritize,
+ mock_should_load
+):
+ dispatcher = Dispatcher(asyncio.get_event_loop(), "localhost", 7071, "worker123",
+ "req789", 5.0)
+ request = MagicMock()
+ request.worker_init_request.function_app_directory = "/home/site/wwwroot"
+
+ result = await dispatcher._handle__worker_init_request(request)
+
+ assert result == "mocked_streaming_response"
+ mock_logger.debug.assert_any_call("azure_functions_worker_v2 import succeeded: %s",
+ ANY)
+
+
+@patch("proxy_worker.dispatcher.DependencyManager.should_load_cx_dependencies",
+ return_value=True)
+@patch("proxy_worker.dispatcher.DependencyManager.prioritize_customer_dependencies")
+@patch("proxy_worker.dispatcher.logger")
+@patch("proxy_worker.dispatcher.os.path.exists", side_effect=lambda p: False)
+@patch("builtins.__import__", side_effect=fake_import)
+@patch("proxy_worker.dispatcher.protos.StreamingMessage",
+ return_value="mocked_streaming_response")
+@pytest.mark.asyncio
+async def test_worker_init_fallback_to_v1(
+ mock_streaming, mock_import, mock_exists, mock_logger, mock_prioritize,
+ mock_should_load
+):
+ dispatcher = Dispatcher(asyncio.get_event_loop(), "localhost", 7071, "worker123",
+ "req789", 5.0)
+ request = MagicMock()
+ request.worker_init_request.function_app_directory = "/home/site/wwwroot"
+
+ result = await dispatcher._handle__worker_init_request(request)
+
+ assert result == "mocked_streaming_response"
+ mock_logger.debug.assert_any_call("azure_functions_worker_v1 import succeeded: %s",
+ ANY)
+
+
+@patch("proxy_worker.dispatcher.DependencyManager.prioritize_customer_dependencies")
+@patch("proxy_worker.dispatcher.logger")
+@patch("proxy_worker.dispatcher.os.path.exists",
+ side_effect=lambda p: p.endswith("function_app.py"))
+@patch("builtins.__import__", side_effect=fake_import)
+@patch("proxy_worker.dispatcher.protos.StreamingMessage",
+ return_value="mocked_reload_response")
+@pytest.mark.asyncio
+async def test_function_environment_reload_v2_import(
+ mock_streaming, mock_import, mock_exists, mock_logger, mock_prioritize
+):
+ dispatcher = Dispatcher(asyncio.get_event_loop(), "localhost", 7071,
+ "worker123", "req789", 5.0)
+ request = MagicMock()
+ request.function_environment_reload_request.function_app_directory = \
+ "/home/site/wwwroot"
+
+ result = await dispatcher._handle__function_environment_reload_request(request)
+
+ assert result == "mocked_reload_response"
+ mock_logger.debug.assert_any_call("azure_functions_worker_v2 import succeeded: %s",
+ ANY)
+
+
+@patch("proxy_worker.dispatcher.DependencyManager.prioritize_customer_dependencies")
+@patch("proxy_worker.dispatcher.logger")
+@patch("proxy_worker.dispatcher.os.path.exists", side_effect=lambda p: False)
+@patch("builtins.__import__", side_effect=fake_import)
+@patch("proxy_worker.dispatcher.protos.StreamingMessage",
+ return_value="mocked_reload_response")
+@pytest.mark.asyncio
+async def test_function_environment_reload_fallback_to_v1(
+ mock_streaming, mock_import, mock_exists, mock_logger, mock_prioritize
+):
+ dispatcher = Dispatcher(asyncio.get_event_loop(), "localhost", 7071, "worker123",
+ "req789", 5.0)
+ request = MagicMock()
+ request.function_environment_reload_request.function_app_directory = "/some/path"
+
+ result = await dispatcher._handle__function_environment_reload_request(request)
+
+ assert result == "mocked_reload_response"
+ mock_logger.debug.assert_any_call("azure_functions_worker_v1 import succeeded: %s",
+ ANY)
+
+
+@patch("proxy_worker.dispatcher._library_worker",
+ new=MagicMock(
+ functions_metadata_request=AsyncMock(return_value="mocked_meta_resp")))
+@patch("proxy_worker.dispatcher.protos.StreamingMessage",
+ return_value="mocked_response")
+@patch("proxy_worker.dispatcher.logger")
+@pytest.mark.asyncio
+async def test_handle_functions_metadata_request(mock_logger, mock_streaming):
+ dispatcher = Dispatcher(asyncio.get_event_loop(), "localhost", 7071, "worker123",
+ "req789", 5.0)
+ request = MagicMock()
+ request.request_id = "req789"
+
+ result = await dispatcher._handle__functions_metadata_request(request)
+
+ assert result == "mocked_response"
+ mock_logger.info.assert_called_with(
+ 'Received WorkerMetadataRequest, request ID %s, worker id: %s',
+ "req789", "worker123"
+ )
+
+
+@patch("proxy_worker.dispatcher._library_worker",
+ new=MagicMock(
+ function_load_request=AsyncMock(return_value="mocked_load_response")))
+@patch("proxy_worker.dispatcher.protos.StreamingMessage",
+ return_value="mocked_stream_response")
+@patch("proxy_worker.dispatcher.logger")
+@pytest.mark.asyncio
+async def test_handle_function_load_request(mock_logger, mock_streaming):
+ dispatcher = Dispatcher(asyncio.get_event_loop(), "localhost", 7071, "worker123",
+ "req789", 5.0)
+
+ request = MagicMock()
+ request.function_load_request.function_id = "func123"
+ request.function_load_request.metadata.name = "hello_function"
+ request.request_id = "req789"
+
+ result = await dispatcher._handle__function_load_request(request)
+
+ assert result == "mocked_stream_response"
+ mock_logger.info.assert_called_with(
+ 'Received WorkerLoadRequest, request ID %s, function_id: %s,function_name: %s, '
+ 'worker_id: %s', "req789", "func123", "hello_function", "worker123"
+ )
+
+
+@patch("proxy_worker.dispatcher._library_worker",
+ new=MagicMock(
+ invocation_request=AsyncMock(return_value="mocked_invoc_response")))
+@patch("proxy_worker.dispatcher.protos.StreamingMessage",
+ return_value="mocked_streaming_response")
+@patch("proxy_worker.dispatcher.logger")
+@pytest.mark.asyncio
+async def test_handle_invocation_request(mock_logger, mock_streaming):
+ dispatcher = Dispatcher(asyncio.get_event_loop(), "localhost", 7071, "worker123",
+ "req789", 5.0)
+
+ request = MagicMock()
+ request.invocation_request.invocation_id = "inv123"
+ request.invocation_request.function_id = "func123"
+ request.request_id = "req789"
+
+ result = await dispatcher._handle__invocation_request(request)
+
+ assert result == "mocked_streaming_response"
+ mock_logger.info.assert_called_with(
+ 'Received FunctionInvocationRequest, request ID %s, function_id: %s,'
+ 'invocation_id: %s, worker_id: %s',
+ "req789", "func123", "inv123", "worker123"
+ )
diff --git a/azure_functions_worker_v2/tests/unittests/basic_function/function_app.py b/azure_functions_worker_v2/tests/unittests/basic_function/function_app.py
new file mode 100644
index 000000000..67f0dadfd
--- /dev/null
+++ b/azure_functions_worker_v2/tests/unittests/basic_function/function_app.py
@@ -0,0 +1,34 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+import logging
+
+import azure.functions as func
+
+app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS)
+
+
+@app.route(route="default_template")
+def default_template(req: func.HttpRequest) -> func.HttpResponse:
+ logging.info('Python HTTP trigger function processed a request.')
+
+ name = req.params.get('name')
+ if not name:
+ try:
+ req_body = req.get_json()
+ except ValueError:
+ pass
+ else:
+ name = req_body.get('name')
+
+ if name:
+ return func.HttpResponse(
+ f"Hello, {name}. This HTTP triggered function "
+ f"executed successfully.")
+ else:
+ return func.HttpResponse(
+ "This HTTP triggered function executed successfully. "
+ "Pass a name in the query string or in the request body for a"
+ " personalized response.",
+ status_code=200
+ )
diff --git a/azure_functions_worker_v2/tests/unittests/indexing_exception_function/function_app.py b/azure_functions_worker_v2/tests/unittests/indexing_exception_function/function_app.py
new file mode 100644
index 000000000..c4f22a38b
--- /dev/null
+++ b/azure_functions_worker_v2/tests/unittests/indexing_exception_function/function_app.py
@@ -0,0 +1,35 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+import logging
+import nonexistent.package # noqa
+
+import azure.functions as func
+
+app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS)
+
+
+@app.route(route="default_template")
+def default_template(req: func.HttpRequest) -> func.HttpResponse:
+ logging.info('Python HTTP trigger function processed a request.')
+
+ name = req.params.get('name')
+ if not name:
+ try:
+ req_body = req.get_json()
+ except ValueError:
+ pass
+ else:
+ name = req_body.get('name')
+
+ if name:
+ return func.HttpResponse(
+ f"Hello, {name}. This HTTP triggered function "
+ f"executed successfully.")
+ else:
+ return func.HttpResponse(
+ "This HTTP triggered function executed successfully. "
+ "Pass a name in the query string or in the request body for a"
+ " personalized response.",
+ status_code=200
+ )
diff --git a/azure_functions_worker_v2/tests/unittests/streaming_function/function_app.py b/azure_functions_worker_v2/tests/unittests/streaming_function/function_app.py
new file mode 100644
index 000000000..b20f5440c
--- /dev/null
+++ b/azure_functions_worker_v2/tests/unittests/streaming_function/function_app.py
@@ -0,0 +1,100 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import logging
+import time
+from datetime import datetime
+
+import azure.functions as func
+from azurefunctions.extensions.http.fastapi import (
+ FileResponse,
+ HTMLResponse,
+ ORJSONResponse,
+ Request,
+ Response,
+ StreamingResponse,
+ UJSONResponse,
+)
+
+app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS)
+
+
+@app.route(route="default_template")
+async def default_template(req: Request) -> Response:
+ logging.info('Python HTTP trigger function processed a request.')
+
+ name = req.query_params.get('name')
+ if not name:
+ try:
+ req_body = await req.json()
+ except ValueError:
+ pass
+ else:
+ name = req_body.get('name')
+
+ if name:
+ return Response(
+ f"Hello, {name}. This HTTP triggered function "
+ f"executed successfully.")
+ else:
+ return Response(
+ "This HTTP triggered function executed successfully. "
+ "Pass a name in the query string or in the request body for a"
+ " personalized response.",
+ status_code=200
+ )
+
+
+@app.route(route="http_func")
+def http_func(req: Request) -> Response:
+ time.sleep(1)
+
+ current_time = datetime.now().strftime("%H:%M:%S")
+ return Response(f"{current_time}")
+
+
+@app.route(route="upload_data_stream")
+async def upload_data_stream(req: Request) -> Response:
+ # Define a list to accumulate the streaming data
+ data_chunks = []
+
+ async def process_stream():
+ async for chunk in req.stream():
+ # Append each chunk of streaming data to the list
+ data_chunks.append(chunk)
+
+ await process_stream()
+
+ # Concatenate the data chunks to form the complete data
+ complete_data = b"".join(data_chunks)
+
+ # Return the complete data as the response
+ return Response(content=complete_data, status_code=200)
+
+
+@app.route(route="return_streaming")
+async def return_streaming(req: Request) -> StreamingResponse:
+ async def content():
+ yield b"First chunk\n"
+ yield b"Second chunk\n"
+ return StreamingResponse(content())
+
+
+@app.route(route="return_html")
+def return_html(req: Request) -> HTMLResponse:
+    html_content = "<h1>Hello, World!</h1>"
+ return HTMLResponse(content=html_content, status_code=200)
+
+
+@app.route(route="return_ujson")
+def return_ujson(req: Request) -> UJSONResponse:
+ return UJSONResponse(content={"message": "Hello, World!"}, status_code=200)
+
+
+@app.route(route="return_orjson")
+def return_orjson(req: Request) -> ORJSONResponse:
+ return ORJSONResponse(content={"message": "Hello, World!"}, status_code=200)
+
+
+@app.route(route="return_file")
+def return_file(req: Request) -> FileResponse:
+ return FileResponse("function_app.py")
diff --git a/azure_functions_worker_v2/tests/unittests/test_app_setting_manager.py b/azure_functions_worker_v2/tests/unittests/test_app_setting_manager.py
new file mode 100644
index 000000000..10ddfb6cc
--- /dev/null
+++ b/azure_functions_worker_v2/tests/unittests/test_app_setting_manager.py
@@ -0,0 +1,45 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import os
+
+from azure_functions_worker_v2.utils.app_setting_manager import (
+ get_python_appsetting_state)
+from azure_functions_worker_v2.utils.constants import (
+ PYTHON_ENABLE_DEBUG_LOGGING,
+ PYTHON_THREADPOOL_THREAD_COUNT,
+)
+from tests.utils import testutils
+from unittest.mock import patch
+
+
+class TestDefaultAppSettingsLogs(testutils.AsyncTestCase):
+ """Tests for default app settings logs."""
+
+ def test_get_python_appsetting_state(self):
+ app_setting_state = get_python_appsetting_state()
+ expected_string = ""
+ self.assertEqual(expected_string, app_setting_state)
+
+
+class TestNonDefaultAppSettingsLogs(testutils.AsyncTestCase):
+ """Tests for non-default app settings logs."""
+
+ @classmethod
+ def setUpClass(cls):
+ os_environ = os.environ.copy()
+ os_environ[PYTHON_THREADPOOL_THREAD_COUNT] = '20'
+ os_environ[PYTHON_ENABLE_DEBUG_LOGGING] = '1'
+ cls._patch_environ = patch.dict('os.environ', os_environ)
+ cls._patch_environ.start()
+ super().setUpClass()
+
+ @classmethod
+ def tearDownClass(cls):
+ super().tearDownClass()
+ cls._patch_environ.stop()
+
+ def test_get_python_appsetting_state(self):
+ app_setting_state = get_python_appsetting_state()
+ self.assertIn("PYTHON_THREADPOOL_THREAD_COUNT: 20 | ",
+ app_setting_state)
+ self.assertIn("PYTHON_ENABLE_DEBUG_LOGGING: 1 | ", app_setting_state)
diff --git a/azure_functions_worker_v2/tests/unittests/test_code_quality.py b/azure_functions_worker_v2/tests/unittests/test_code_quality.py
new file mode 100644
index 000000000..3502a7550
--- /dev/null
+++ b/azure_functions_worker_v2/tests/unittests/test_code_quality.py
@@ -0,0 +1,34 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import pathlib
+import subprocess
+import sys
+import unittest
+
+ROOT_PATH = pathlib.Path(__file__).parent.parent.parent.parent
+
+
+class TestCodeQuality(unittest.TestCase):
+
+ def test_flake8(self):
+ try:
+ import flake8 # NoQA
+ except ImportError as e:
+ raise unittest.SkipTest('flake8 module is missing') from e
+
+ config_path = ROOT_PATH / '.flake8'
+ if not config_path.exists():
+ raise unittest.SkipTest('could not locate the .flake8 file')
+
+ try:
+ subprocess.run(
+ [sys.executable, '-m', 'flake8', '--config', str(config_path),
+ 'azure_functions_worker_v2',],
+ check=True,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ cwd=str(ROOT_PATH))
+ except subprocess.CalledProcessError as ex:
+ output = ex.output.decode()
+            raise AssertionError(
+                'flake8 validation failed:\n{}'.format(output)) from None
diff --git a/azure_functions_worker_v2/tests/unittests/test_datumdef.py b/azure_functions_worker_v2/tests/unittests/test_datumdef.py
new file mode 100644
index 000000000..4631cb1ac
--- /dev/null
+++ b/azure_functions_worker_v2/tests/unittests/test_datumdef.py
@@ -0,0 +1,161 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import unittest
+
+import tests.protos as protos
+
+from datetime import datetime
+from http.cookies import SimpleCookie
+
+from azure_functions_worker_v2.bindings.datumdef import (
+ Datum,
+ parse_cookie_attr_expires,
+ parse_cookie_attr_same_site,
+ parse_to_rpc_http_cookie_list,
+)
+from azure_functions_worker_v2.bindings.nullable_converters import (
+ to_nullable_bool,
+ to_nullable_double,
+ to_nullable_string,
+ to_nullable_timestamp,
+)
+
+
+class TestDatumDef(unittest.TestCase):
+ def test_parse_cookie_attr_expires_none(self):
+ self.assertEqual(parse_cookie_attr_expires({"expires": None}), None)
+
+ def test_parse_cookie_attr_expires_zero_length(self):
+ self.assertEqual(parse_cookie_attr_expires({"expires": ""}), None)
+
+ def test_parse_cookie_attr_expires_valid(self):
+ self.assertEqual(parse_cookie_attr_expires(
+ {"expires": "Thu, 12 Jan 2017 13:55:08 GMT"}),
+ datetime.strptime("Thu, 12 Jan 2017 13:55:08 GMT",
+ "%a, %d %b %Y %H:%M:%S GMT"))
+
+ def test_parse_cookie_attr_expires_value_error(self):
+ with self.assertRaises(ValueError):
+ parse_cookie_attr_expires(
+ {"expires": "Thu, 12 Jan 2017 13:550:08 GMT"})
+
+ def test_parse_cookie_attr_expires_overflow_error(self):
+ with self.assertRaises(ValueError):
+ parse_cookie_attr_expires(
+ {"expires": "Thu, 12 Jan 9999999999999999 13:55:08 GMT"})
+
+ def test_parse_cookie_attr_same_site_default(self):
+ self.assertEqual(parse_cookie_attr_same_site(
+ {}, protos),
+ getattr(protos.RpcHttpCookie.SameSite, "None"))
+
+ def test_parse_cookie_attr_same_site_lax(self):
+ self.assertEqual(parse_cookie_attr_same_site(
+ {'samesite': 'lax'}, protos),
+ getattr(protos.RpcHttpCookie.SameSite, "Lax"))
+
+ def test_parse_cookie_attr_same_site_strict(self):
+ self.assertEqual(parse_cookie_attr_same_site(
+ {'samesite': 'strict'}, protos),
+ getattr(protos.RpcHttpCookie.SameSite, "Strict"))
+
+ def test_parse_cookie_attr_same_site_explicit_none(self):
+ self.assertEqual(parse_cookie_attr_same_site(
+ {'samesite': 'none'}, protos),
+ getattr(protos.RpcHttpCookie.SameSite, "ExplicitNone"))
+
+ def test_parse_to_rpc_http_cookie_list_none(self):
+ self.assertEqual(parse_to_rpc_http_cookie_list(None, protos), None)
+
+ @unittest.skip("TODO: fix this test. Figure out what to do with Timestamp")
+ def test_parse_to_rpc_http_cookie_list_valid(self):
+ headers = [
+ 'foo=bar; Path=/some/path; Secure; HttpOnly; Domain=123; '
+ 'SameSite=Lax; Max-Age=12345; Expires=Thu, 12 Jan 2017 13:55:08 '
+ 'GMT;',
+ 'foo2=bar; Path=/some/path2; Secure; HttpOnly; Domain=123; '
+ 'SameSite=Lax; Max-Age=12345; Expires=Thu, 12 Jan 2017 13:55:08 '
+ 'GMT;']
+
+ cookies = SimpleCookie('\r\n'.join(headers))
+
+ cookie1 = protos.RpcHttpCookie(name="foo",
+ value="bar",
+ domain=to_nullable_string("123",
+ "cookie.domain",
+ protos),
+ path=to_nullable_string("/some/path",
+ "cookie.path",
+ protos),
+ expires=to_nullable_timestamp(
+ parse_cookie_attr_expires(
+ {
+ "expires": "Thu, "
+ "12 Jan 2017 13:55:08"
+ " GMT"}),
+ 'cookie.expires',
+ protos),
+ secure=to_nullable_bool(
+ bool("True"),
+ 'cookie.secure',
+ protos),
+ http_only=to_nullable_bool(
+ bool("True"),
+ 'cookie.httpOnly',
+ protos),
+ same_site=parse_cookie_attr_same_site(
+ {"samesite": "Lax"},
+ protos),
+ max_age=to_nullable_double(
+ 12345,
+ 'cookie.maxAge',
+ protos))
+
+ cookie2 = protos.RpcHttpCookie(name="foo2",
+ value="bar",
+ domain=to_nullable_string("123",
+ "cookie.domain",
+ protos),
+ path=to_nullable_string("/some/path2",
+ "cookie.path",
+ protos),
+ expires=to_nullable_timestamp(
+ parse_cookie_attr_expires(
+ {
+ "expires": "Thu, "
+ "12 Jan 2017 13:55:08"
+ " GMT"}),
+ 'cookie.expires',
+ protos),
+ secure=to_nullable_bool(
+ bool("True"),
+ 'cookie.secure',
+ protos),
+ http_only=to_nullable_bool(
+ bool("True"),
+ 'cookie.httpOnly',
+ protos),
+ same_site=parse_cookie_attr_same_site(
+ {"samesite": "Lax"},
+ protos),
+ max_age=to_nullable_double(
+ 12345,
+ 'cookie.maxAge',
+ protos))
+
+ rpc_cookies = parse_to_rpc_http_cookie_list([cookies], protos)
+ self.assertEqual(cookie1, rpc_cookies[0])
+ self.assertEqual(cookie2, rpc_cookies[1])
+
+ def test_parse_to_rpc_http_cookie_list_no_cookie(self):
+ datum = Datum(
+ type='http',
+ value=dict(
+ status_code=None,
+ headers=None,
+ body=None,
+ )
+ )
+
+ self.assertIsNone(
+ parse_to_rpc_http_cookie_list(datum.value.get('cookies'), protos))
diff --git a/azure_functions_worker_v2/tests/unittests/test_deferred_bindings.py b/azure_functions_worker_v2/tests/unittests/test_deferred_bindings.py
new file mode 100644
index 000000000..0bad522d6
--- /dev/null
+++ b/azure_functions_worker_v2/tests/unittests/test_deferred_bindings.py
@@ -0,0 +1,97 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import azure.functions as func
+import azurefunctions.extensions.base as clients
+import tests.protos as protos
+
+from azure_functions_worker_v2.bindings import datumdef, meta
+from tests.utils import testutils
+from tests.utils.mock_classes import MockMBD, MockCMBD
+
+
+from azurefunctions.extensions.bindings.blob import (BlobClient,
+ BlobClientConverter,
+ ContainerClient,
+ StorageStreamDownloader)
+from azurefunctions.extensions.bindings.eventhub import EventData, EventDataConverter
+
+EVENTHUB_SAMPLE_CONTENT = b"\x00Sr\xc1\x8e\x08\xa3\x1bx-opt-sequence-number-epochT\xff\xa3\x15x-opt-sequence-numberU\x04\xa3\x0cx-opt-offset\x81\x00\x00\x00\x01\x00\x00\x010\xa3\x13x-opt-enqueued-time\x00\xa3\x1dcom.microsoft:datetime-offset\x81\x08\xddW\x05\xc3Q\xcf\x10\x00St\xc1I\x02\xa1\rDiagnostic-Id\xa1700-bdc3fde4889b4e907e0c9dcb46ff8d92-21f637af293ef13b-00\x00Su\xa0\x08message1" # noqa: E501
+
+
+class TestDeferredBindingsEnabled(testutils.AsyncTestCase):
+
+ def test_mbd_deferred_bindings_enabled_decode(self):
+ binding = BlobClientConverter
+ pb = protos.ParameterBinding(name='test',
+ data=protos.TypedData(
+ string='test'))
+ sample_mbd = MockMBD(version="1.0",
+ source="AzureStorageBlobs",
+ content_type="application/json",
+ content="{\"Connection\":\"AzureWebJobsStorage\","
+ "\"ContainerName\":"
+ "\"python-worker-tests\","
+ "\"BlobName\":"
+ "\"test-blobclient-trigger.txt\"}")
+ datum = datumdef.Datum(value=sample_mbd, type='model_binding_data')
+
+ obj = meta.deferred_bindings_decode(binding=binding, pb=pb,
+ pytype=BlobClient, datum=datum, metadata={},
+ function_name="test_function")
+
+ self.assertIsNotNone(obj)
+
+ def test_cmbd_deferred_bindings_enabled_decode(self):
+ binding = EventDataConverter
+ pb = protos.ParameterBinding(name='test',
+ data=protos.TypedData(
+ string='test'))
+ sample_mbd = MockMBD(version="1.0",
+ source="AzureEventHubsEventData",
+ content_type="application/octet-stream",
+ content=EVENTHUB_SAMPLE_CONTENT)
+ sample_cmbd = MockCMBD(model_binding_data=[sample_mbd, sample_mbd])
+ datum = datumdef.Datum(value=sample_cmbd, type='collection_model_binding_data')
+
+ obj = meta.deferred_bindings_decode(binding=binding, pb=pb,
+ pytype=EventData, datum=datum, metadata={},
+ function_name="test_function")
+
+ self.assertIsNotNone(obj)
+
+ async def test_check_deferred_bindings_enabled(self):
+ """
+ check_deferred_bindings_enabled checks if deferred bindings is enabled at fx
+ and single binding level.
+
+ The first bool represents if deferred bindings is enabled at a fx level. This
+ means that at least one binding in the function is a deferred binding type.
+
+ The second represents if the current binding is deferred binding. If this is
+ True, then deferred bindings must also be enabled at the function level.
+ """
+ meta.DEFERRED_BINDING_REGISTRY = clients.get_binding_registry()
+
+ # Type is not supported, deferred_bindings_enabled is not yet set
+ self.assertEqual(meta.check_deferred_bindings_enabled(
+ func.InputStream, False), (False, False))
+
+ # Type is not supported, deferred_bindings_enabled already set
+ self.assertEqual(meta.check_deferred_bindings_enabled(
+ func.InputStream, True), (True, False))
+
+ # Type is supported, deferred_bindings_enabled is not yet set
+ self.assertEqual(meta.check_deferred_bindings_enabled(
+ BlobClient, False), (True, True))
+ self.assertEqual(meta.check_deferred_bindings_enabled(
+ ContainerClient, False), (True, True))
+ self.assertEqual(meta.check_deferred_bindings_enabled(
+ StorageStreamDownloader, False), (True, True))
+
+ # Type is supported, deferred_bindings_enabled is already set
+ self.assertEqual(meta.check_deferred_bindings_enabled(
+ BlobClient, True), (True, True))
+ self.assertEqual(meta.check_deferred_bindings_enabled(
+ ContainerClient, True), (True, True))
+ self.assertEqual(meta.check_deferred_bindings_enabled(
+ StorageStreamDownloader, True), (True, True))
diff --git a/azure_functions_worker_v2/tests/unittests/test_handle_event.py b/azure_functions_worker_v2/tests/unittests/test_handle_event.py
new file mode 100644
index 000000000..69406d107
--- /dev/null
+++ b/azure_functions_worker_v2/tests/unittests/test_handle_event.py
@@ -0,0 +1,213 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+from unittest.mock import patch
+
+import azure_functions_worker_v2.handle_event as handle_event
+import tests.protos as test_protos
+
+from azure_functions_worker_v2.handle_event import (worker_init_request,
+ functions_metadata_request,
+ function_environment_reload_request)
+from tests.utils import testutils
+from tests.utils.constants import UNIT_TESTS_FOLDER
+from tests.utils.mock_classes import FunctionRequest, Request, WorkerRequest
+
+
+BASIC_FUNCTION_DIRECTORY = UNIT_TESTS_FOLDER / "basic_function"
+STREAMING_FUNCTION_DIRECTORY = UNIT_TESTS_FOLDER / "streaming_function"
+INDEXING_EXCEPTION_FUNCTION_DIRECTORY = (UNIT_TESTS_FOLDER
+ / "indexing_exception_function")
+
+
+class TestHandleEvent(testutils.AsyncTestCase):
+ @patch("azure_functions_worker_v2.handle_event"
+ ".otel_manager.get_azure_monitor_available",
+ return_value=False)
+ @patch("azure_functions_worker_v2.handle_event.load_function_metadata")
+ async def test_worker_init_request(self, mock_load_function_metadata,
+ mock_get_azure_monitor_available):
+ worker_request = WorkerRequest(name='worker_init_request',
+ request=Request(FunctionRequest(
+ 'hello',
+ BASIC_FUNCTION_DIRECTORY)),
+ properties={'host': '123',
+ 'protos': test_protos})
+ result = await worker_init_request(worker_request)
+ mock_load_function_metadata.assert_called_once()
+ self.assertEqual(result.capabilities, {'WorkerStatus': 'true',
+ 'RpcHttpBodyOnly': 'true',
+ 'SharedMemoryDataTransfer': 'true',
+ 'RpcHttpTriggerMetadataRemoved': 'true',
+ 'RawHttpBodyBytes': 'true',
+ 'TypedDataCollection': 'true'})
+ self.assertEqual(result.worker_metadata.runtime_name, "python")
+ self.assertIsNotNone(result.worker_metadata.runtime_version)
+ self.assertIsNotNone(result.worker_metadata.worker_version)
+ self.assertIsNotNone(result.worker_metadata.worker_bitness)
+ self.assertEqual(result.result.status, 1)
+
+ @patch("azure_functions_worker_v2.handle_event.load_function_metadata")
+ @patch("azure_functions_worker_v2.handle_event.HttpV2Registry.http_v2_enabled",
+ return_value=True)
+ @patch("azure_functions_worker_v2.handle_event.initialize_http_server",
+ return_value="http://mock_address")
+ async def test_worker_init_request_with_streaming(self,
+                                                      mock_initialize_http_server,
+                                                      mock_http_v2_enabled,
+ mock_load_function_metadata):
+ worker_request = WorkerRequest(name='worker_init_request',
+ request=Request(FunctionRequest(
+ 'hello',
+ STREAMING_FUNCTION_DIRECTORY)),
+ properties={'host': '123',
+ 'protos': test_protos})
+ result = await worker_init_request(worker_request)
+ mock_load_function_metadata.assert_called_once()
+ self.assertEqual('http://mock_address', result.capabilities["HttpUri"])
+ self.assertEqual('true', result.capabilities["RequiresRouteParameters"])
+ self.assertEqual(result.worker_metadata.runtime_name, "python")
+ self.assertIsNotNone(result.worker_metadata.runtime_version)
+ self.assertIsNotNone(result.worker_metadata.worker_version)
+ self.assertIsNotNone(result.worker_metadata.worker_bitness)
+ self.assertEqual(result.result.status, 1)
+
+ @patch("azure_functions_worker_v2.handle_event.load_function_metadata")
+ @patch("azure_functions_worker_v2.handle_event"
+ ".otel_manager.get_azure_monitor_available",
+ return_value=True)
+ async def test_worker_init_request_with_otel(self,
+ mock_otel_enabled,
+ mock_load_function_metadata):
+ worker_request = WorkerRequest(name='worker_init_request',
+ request=Request(FunctionRequest(
+ 'hello',
+ BASIC_FUNCTION_DIRECTORY)),
+ properties={'host': '123',
+ 'protos': test_protos})
+ result = await worker_init_request(worker_request)
+ mock_load_function_metadata.assert_called_once()
+ self.assertEqual('true', result.capabilities["WorkerOpenTelemetryEnabled"])
+ self.assertEqual(result.worker_metadata.runtime_name, "python")
+ self.assertIsNotNone(result.worker_metadata.runtime_version)
+ self.assertIsNotNone(result.worker_metadata.worker_version)
+ self.assertIsNotNone(result.worker_metadata.worker_bitness)
+ self.assertEqual(result.result.status, 1)
+
+ @patch("azure_functions_worker_v2.handle_event"
+ ".otel_manager.get_azure_monitor_available",
+ return_value=False)
+ async def test_worker_init_request_with_exception(self,
+ mock_otel_enabled):
+ # If an exception happens during indexing, we return failure
+ worker_request = WorkerRequest(name='worker_init_request',
+ request=Request(FunctionRequest(
+ 'hello',
+ INDEXING_EXCEPTION_FUNCTION_DIRECTORY)),
+ properties={'host': '123',
+ 'protos': test_protos})
+ result = await worker_init_request(worker_request)
+ self.assertEqual(result.capabilities, {'WorkerStatus': 'true',
+ 'RpcHttpBodyOnly': 'true',
+ 'SharedMemoryDataTransfer': 'true',
+ 'RpcHttpTriggerMetadataRemoved': 'true',
+ 'RawHttpBodyBytes': 'true',
+ 'TypedDataCollection': 'true'})
+ self.assertEqual(result.worker_metadata.runtime_name, "python")
+ self.assertIsNotNone(result.worker_metadata.runtime_version)
+ self.assertIsNotNone(result.worker_metadata.worker_version)
+ self.assertIsNotNone(result.worker_metadata.worker_bitness)
+ self.assertEqual(result.result.status, 0)
+
+ async def test_functions_metadata_request(self):
+ # We always succeed in metadata request - exceptions are raised
+ # in init
+ handle_event.protos = test_protos
+ metadata_result = await functions_metadata_request(None)
+ self.assertEqual(metadata_result.result.status, 1)
+
+ @patch("azure_functions_worker_v2.handle_event"
+ ".otel_manager.get_azure_monitor_available",
+ return_value=False)
+ @patch("azure_functions_worker_v2.handle_event.load_function_metadata")
+ async def test_function_environment_reload_request(
+ self,
+ mock_load_function_metadata,
+ mock_get_azure_monitor_available):
+ worker_request = WorkerRequest(name='function_environment_reload_request',
+ request=Request(FunctionRequest(
+ 'hello',
+ BASIC_FUNCTION_DIRECTORY)),
+ properties={'host': '123',
+ 'protos': test_protos})
+ handle_event.protos = test_protos
+ result = await function_environment_reload_request(worker_request)
+ mock_load_function_metadata.assert_called_once()
+ self.assertEqual(result.capabilities, {})
+ self.assertEqual(result.worker_metadata.runtime_name, "python")
+ self.assertIsNotNone(result.worker_metadata.runtime_version)
+ self.assertIsNotNone(result.worker_metadata.worker_version)
+ self.assertIsNotNone(result.worker_metadata.worker_bitness)
+ self.assertEqual(result.result.status, 1)
+
+ @patch("azure_functions_worker_v2.handle_event.load_function_metadata")
+ @patch("azure_functions_worker_v2.handle_event.HttpV2Registry.http_v2_enabled",
+ return_value=True)
+ @patch("azure_functions_worker_v2.handle_event.initialize_http_server",
+ return_value="http://mock_address")
+ async def test_function_environment_reload_request_with_streaming(
+ self,
+            mock_initialize_http_server,
+            mock_http_v2_enabled,
+ mock_load_function_metadata):
+ handle_event.protos = test_protos
+ worker_request = WorkerRequest(name='function_environment_reload_request',
+ request=Request(FunctionRequest(
+ 'hello',
+ STREAMING_FUNCTION_DIRECTORY)),
+ properties={'host': '123',
+ 'protos': test_protos})
+ result = await function_environment_reload_request(worker_request)
+ mock_load_function_metadata.assert_called_once()
+ self.assertEqual('http://mock_address', result.capabilities["HttpUri"])
+ self.assertEqual('true', result.capabilities["RequiresRouteParameters"])
+ self.assertEqual(result.worker_metadata.runtime_name, "python")
+ self.assertIsNotNone(result.worker_metadata.runtime_version)
+ self.assertIsNotNone(result.worker_metadata.worker_version)
+ self.assertIsNotNone(result.worker_metadata.worker_bitness)
+ self.assertEqual(result.result.status, 1)
+
+ @patch("azure_functions_worker_v2.handle_event.load_function_metadata")
+ @patch("azure_functions_worker_v2.handle_event"
+ ".otel_manager.get_azure_monitor_available",
+ return_value=True)
+ async def test_function_environment_reload_request_with_otel(
+ self,
+ mock_otel_enabled,
+ mock_load_function_metadata):
+ handle_event.protos = test_protos
+ worker_request = WorkerRequest(name='function_environment_reload_request',
+ request=Request(FunctionRequest(
+ 'hello',
+ BASIC_FUNCTION_DIRECTORY)),
+ properties={'host': '123',
+ 'protos': test_protos})
+ result = await function_environment_reload_request(worker_request)
+ mock_load_function_metadata.assert_called_once()
+ self.assertEqual('true', result.capabilities["WorkerOpenTelemetryEnabled"])
+ self.assertEqual(result.worker_metadata.runtime_name, "python")
+ self.assertIsNotNone(result.worker_metadata.runtime_version)
+ self.assertIsNotNone(result.worker_metadata.worker_version)
+ self.assertIsNotNone(result.worker_metadata.worker_bitness)
+ self.assertEqual(result.result.status, 1)
+
+ async def test_function_environment_reload_request_with_exception(self):
+ # If an exception happens during indexing, the worker reports failure
+ handle_event.protos = test_protos
+ worker_request = WorkerRequest(name='function_environment_reload_request',
+ request=Request(FunctionRequest(
+ 'hello',
+ INDEXING_EXCEPTION_FUNCTION_DIRECTORY)),
+ properties={'host': '123',
+ 'protos': test_protos})
+ result = await function_environment_reload_request(worker_request)
+ self.assertEqual(result.result.status, 0)
diff --git a/azure_functions_worker_v2/tests/unittests/test_http_v2.py b/azure_functions_worker_v2/tests/unittests/test_http_v2.py
new file mode 100644
index 000000000..b21557394
--- /dev/null
+++ b/azure_functions_worker_v2/tests/unittests/test_http_v2.py
@@ -0,0 +1,245 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import asyncio
+import socket
+import unittest
+
+from azure_functions_worker_v2.http_v2 import (
+ AsyncContextReference,
+ SingletonMeta,
+ get_unused_tcp_port,
+ http_coordinator,
+)
+from tests.utils.mock_classes import MockHttpResponse, MockHttpRequest
+from unittest.mock import MagicMock, patch
+
+
+class TestHttpCoordinator(unittest.TestCase):
+ def setUp(self):
+ self.invoc_id = "test_invocation"
+ self.http_request = MockHttpRequest()
+ self.http_response = MockHttpResponse()
+ self.loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(self.loop)
+
+ def tearDown(self) -> None:
+ http_coordinator._context_references.clear()
+ self.loop.close()
+
+ def test_set_http_request_new_invocation(self):
+ # Test setting a new HTTP request
+ http_coordinator.set_http_request(self.invoc_id, self.http_request)
+ context_ref = http_coordinator._context_references.get(self.invoc_id)
+ self.assertIsNotNone(context_ref)
+ self.assertEqual(context_ref.http_request, self.http_request)
+
+ def test_set_http_request_existing_invocation(self):
+ # Test updating an existing HTTP request
+ new_http_request = MagicMock()
+ http_coordinator.set_http_request(self.invoc_id, new_http_request)
+ context_ref = http_coordinator._context_references.get(self.invoc_id)
+ self.assertIsNotNone(context_ref)
+ self.assertEqual(context_ref.http_request, new_http_request)
+
+ def test_set_http_response_context_ref_null(self):
+ with self.assertRaises(Exception) as cm:
+ http_coordinator.set_http_response(self.invoc_id,
+ self.http_response)
+ self.assertEqual(cm.exception.args[0],
+ "No context reference found for invocation "
+ f"{self.invoc_id}")
+
+ def test_set_http_response(self):
+ http_coordinator.set_http_request(self.invoc_id, self.http_request)
+ http_coordinator.set_http_response(self.invoc_id, self.http_response)
+ context_ref = http_coordinator._context_references[self.invoc_id]
+ self.assertEqual(context_ref.http_response, self.http_response)
+
+ def test_get_http_request_async_existing_invocation(self):
+ # Test retrieving an existing HTTP request
+ http_coordinator.set_http_request(self.invoc_id,
+ self.http_request)
+ retrieved_request = self.loop.run_until_complete(
+ http_coordinator.get_http_request_async(self.invoc_id))
+ self.assertEqual(retrieved_request, self.http_request)
+
+ def test_get_http_request_async_wait_forever(self):
+ # Test handling error when invoc_id is not found
+ invalid_invoc_id = "invalid_invocation"
+
+ with self.assertRaises(asyncio.TimeoutError):
+ self.loop.run_until_complete(
+ asyncio.wait_for(
+ http_coordinator.get_http_request_async(
+ invalid_invoc_id),
+ timeout=1
+ )
+ )
+
+ def test_await_http_response_async_valid_invocation(self):
+ invoc_id = "valid_invocation"
+ expected_response = self.http_response
+
+ context_ref = AsyncContextReference(http_response=expected_response)
+
+ # Add the mock context reference to the coordinator
+ http_coordinator._context_references[invoc_id] = context_ref
+
+ http_coordinator.set_http_response(invoc_id, expected_response)
+
+ # Call the method and verify the returned response
+ response = self.loop.run_until_complete(
+ http_coordinator.await_http_response_async(invoc_id))
+ self.assertEqual(response, expected_response)
+ self.assertTrue(
+ http_coordinator._context_references.get(
+ invoc_id).http_response is None)
+
+ def test_await_http_response_async_invalid_invocation(self):
+ # Test handling error when invoc_id is not found
+ invalid_invoc_id = "invalid_invocation"
+ with self.assertRaises(Exception) as context:
+ self.loop.run_until_complete(
+ http_coordinator.await_http_response_async(invalid_invoc_id))
+ self.assertEqual(str(context.exception),
+ f"'No context reference found for invocation "
+ f"{invalid_invoc_id}'")
+
+ def test_await_http_response_async_response_not_set(self):
+ invoc_id = "invocation_with_no_response"
+ # Set up a mock context reference without setting the response
+ context_ref = AsyncContextReference()
+
+ # Add the mock context reference to the coordinator
+ http_coordinator._context_references[invoc_id] = context_ref
+
+ http_coordinator.set_http_response(invoc_id, None)
+ # Call the method and verify that it raises an exception
+ with self.assertRaises(Exception) as context:
+ self.loop.run_until_complete(
+ http_coordinator.await_http_response_async(invoc_id))
+ self.assertEqual(str(context.exception),
+ f"No http response found for invocation {invoc_id}")
+
+
+class TestAsyncContextReference(unittest.TestCase):
+
+ def setUp(self):
+ self.loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(self.loop)
+
+ def tearDown(self) -> None:
+ self.loop.close()
+
+ def test_init(self):
+ ref = AsyncContextReference()
+ self.assertIsInstance(ref, AsyncContextReference)
+ self.assertTrue(ref.is_async)
+
+ def test_http_request_property(self):
+ ref = AsyncContextReference()
+ ref.http_request = object()
+ self.assertIsNotNone(ref.http_request)
+
+ def test_http_response_property(self):
+ ref = AsyncContextReference()
+ ref.http_response = object()
+ self.assertIsNotNone(ref.http_response)
+
+ def test_function_property(self):
+ ref = AsyncContextReference()
+ ref.function = object()
+ self.assertIsNotNone(ref.function)
+
+ def test_fi_context_property(self):
+ ref = AsyncContextReference()
+ ref.fi_context = object()
+ self.assertIsNotNone(ref.fi_context)
+
+ def test_http_trigger_param_name_property(self):
+ ref = AsyncContextReference()
+ ref.http_trigger_param_name = object()
+ self.assertIsNotNone(ref.http_trigger_param_name)
+
+ def test_args_property(self):
+ ref = AsyncContextReference()
+ ref.args = object()
+ self.assertIsNotNone(ref.args)
+
+ def test_http_request_available_event_property(self):
+ ref = AsyncContextReference()
+ self.assertIsNotNone(ref.http_request_available_event)
+
+ def test_http_response_available_event_property(self):
+ ref = AsyncContextReference()
+ self.assertIsNotNone(ref.http_response_available_event)
+
+ def test_full_args(self):
+ ref = AsyncContextReference(http_request=object(),
+ http_response=object(),
+ function=object(),
+ fi_context=object(),
+ args=object())
+ self.assertIsNotNone(ref.http_request)
+ self.assertIsNotNone(ref.http_response)
+ self.assertIsNotNone(ref.function)
+ self.assertIsNotNone(ref.fi_context)
+ self.assertIsNotNone(ref.args)
+
+
+class TestSingletonMeta(unittest.TestCase):
+
+ def test_singleton_instance(self):
+ class TestClass(metaclass=SingletonMeta):
+ pass
+
+ obj1 = TestClass()
+ obj2 = TestClass()
+
+ self.assertIs(obj1, obj2)
+
+ def test_singleton_with_arguments(self):
+ class TestClass(metaclass=SingletonMeta):
+ def __init__(self, arg):
+ self.arg = arg
+
+ obj1 = TestClass(1)
+ obj2 = TestClass(2)
+
+ self.assertEqual(obj1.arg, 1)
+ self.assertEqual(obj2.arg,
+ 1) # Should still refer to the same instance
+
+ def test_singleton_with_kwargs(self):
+ class TestClass(metaclass=SingletonMeta):
+ def __init__(self, **kwargs):
+ self.kwargs = kwargs
+
+ obj1 = TestClass(a=1)
+ obj2 = TestClass(b=2)
+
+ self.assertEqual(obj1.kwargs, {'a': 1})
+ self.assertEqual(obj2.kwargs,
+ {'a': 1}) # Should still refer to the same instance
+
+
+class TestGetUnusedTCPPort(unittest.TestCase):
+
+ @patch('socket.socket')
+ def test_get_unused_tcp_port(self, mock_socket):
+ # Mock the socket object and its methods
+ mock_socket_instance = mock_socket.return_value
+ mock_socket_instance.getsockname.return_value = ('localhost', 12345)
+
+ # Call the function
+ port = get_unused_tcp_port()
+
+ # Assert that socket.socket was called with the correct arguments
+ mock_socket.assert_called_once_with(socket.AF_INET, socket.SOCK_STREAM)
+
+ # Assert that bind and close methods were called on the socket instance
+ mock_socket_instance.bind.assert_called_once_with(('', 0))
+ mock_socket_instance.close.assert_called_once()
+
+ # Assert that the returned port matches the expected value
+ self.assertEqual(port, 12345)
diff --git a/azure_functions_worker_v2/tests/unittests/test_logging.py b/azure_functions_worker_v2/tests/unittests/test_logging.py
new file mode 100644
index 000000000..85dc5ca69
--- /dev/null
+++ b/azure_functions_worker_v2/tests/unittests/test_logging.py
@@ -0,0 +1,32 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import unittest
+
+from azure_functions_worker_v2.logging import format_exception
+
+
+class TestLogging(unittest.TestCase):
+
+ def test_format_exception(self):
+ def call0(fn):
+ call1(fn)
+
+ def call1(fn):
+ call2(fn)
+
+ def call2(fn):
+ fn()
+
+ def raising_function():
+ raise ValueError("Value error being raised.", )
+
+ try:
+ call0(raising_function)
+ except ValueError as e:
+ processed_exception = format_exception(e)
+ self.assertIn("call0", processed_exception)
+ self.assertIn("call1", processed_exception)
+ self.assertIn("call2", processed_exception)
+            self.assertIn("raising_function", processed_exception)
+ self.assertRegex(processed_exception,
+                         r".*tests[\\/]unittests[\\/]test_logging\.py.*")
diff --git a/azure_functions_worker_v2/tests/unittests/test_nullable_converters.py b/azure_functions_worker_v2/tests/unittests/test_nullable_converters.py
new file mode 100644
index 000000000..770355401
--- /dev/null
+++ b/azure_functions_worker_v2/tests/unittests/test_nullable_converters.py
@@ -0,0 +1,112 @@
+import datetime
+import pytest
+import unittest
+
+from google.protobuf.timestamp_pb2 import Timestamp
+
+import tests.protos as protos
+from azure_functions_worker_v2.bindings.nullable_converters import (
+ to_nullable_bool,
+ to_nullable_double,
+ to_nullable_string,
+ to_nullable_timestamp,
+)
+
+try:
+ from http.cookies import SimpleCookie
+except ImportError:
+ from Cookie import SimpleCookie
+
+headers = ['foo=bar; Path=/some/path; Secure',
+ 'foo2=42; Domain=123; Expires=Thu, 12-Jan-2017 13:55:08 GMT; '
+ 'Path=/; Max-Age=dd;']
+
+cookies = SimpleCookie('\r\n'.join(headers))
+
+
+class TestNullableConverters(unittest.TestCase):
+ def test_to_nullable_string_none(self):
+ self.assertEqual(to_nullable_string(None, "name", protos), None)
+
+ def test_to_nullable_string_valid(self):
+ self.assertEqual(to_nullable_string("dummy", "name", protos),
+ protos.NullableString(value="dummy"))
+
+ def test_to_nullable_string_wrong_type(self):
+ with pytest.raises(Exception) as e:
+ self.assertEqual(to_nullable_string(123, "name", protos),
+ protos.NullableString(value="dummy"))
+ self.assertEqual(type(e), TypeError)
+
+ def test_to_nullable_bool_none(self):
+ self.assertEqual(to_nullable_bool(None, "name", protos), None)
+
+ def test_to_nullable_bool_valid(self):
+ self.assertEqual(to_nullable_bool(True, "name", protos),
+ protos.NullableBool(value=True))
+
+ def test_to_nullable_bool_wrong_type(self):
+ with pytest.raises(Exception) as e:
+ to_nullable_bool("True", "name", protos)
+
+ self.assertEqual(e.type, TypeError)
+ self.assertEqual(e.value.args[0],
+ "A 'bool' type was expected instead of a '' type. "
+ "Cannot parse value True of 'name'.")
+
+ def test_to_nullable_double_str(self):
+ self.assertEqual(to_nullable_double("12", "name", protos),
+ protos.NullableDouble(value=12))
+
+ def test_to_nullable_double_empty_str(self):
+ self.assertEqual(to_nullable_double("", "name", protos), None)
+
+ def test_to_nullable_double_invalid_str(self):
+ with pytest.raises(TypeError) as e:
+ to_nullable_double("222d", "name", protos)
+
+ self.assertEqual(e.type, TypeError)
+ self.assertEqual(e.value.args[0],
+ "Cannot parse value 222d of 'name' to float.")
+
+ def test_to_nullable_double_int(self):
+ self.assertEqual(to_nullable_double(12, "name", protos),
+ protos.NullableDouble(value=12))
+
+ def test_to_nullable_double_float(self):
+ self.assertEqual(to_nullable_double(12.0, "name", protos),
+ protos.NullableDouble(value=12))
+
+ def test_to_nullable_double_none(self):
+ self.assertEqual(to_nullable_double(None, "name", protos), None)
+
+ def test_to_nullable_double_wrong_type(self):
+ with pytest.raises(Exception) as e:
+ to_nullable_double(object(), "name", protos)
+
+ self.assertIn(
+ "A 'int' or 'float' type was expected instead of a '' type",
+ e.value.args[0])
+ self.assertEqual(e.type, TypeError)
+
+ @unittest.skip("TODO: fix this test. Figure out what to do with Timestamp")
+ def test_to_nullable_timestamp_int(self):
+ self.assertEqual(to_nullable_timestamp(1000, "datetime", protos),
+ protos.NullableTimestamp(
+ value=Timestamp(seconds=int(1000))))
+
+ @unittest.skip("TODO: fix this test. Figure out what to do with Timestamp")
+ def test_to_nullable_timestamp_datetime(self):
+ now = datetime.datetime.now()
+ self.assertEqual(to_nullable_timestamp(now, "datetime", protos),
+ protos.NullableTimestamp(
+ value=Timestamp(seconds=int(now.timestamp()))))
+
+ def test_to_nullable_timestamp_wrong_type(self):
+ with self.assertRaises(TypeError):
+ to_nullable_timestamp("now", "datetime", protos)
+
+ def test_to_nullable_timestamp_none(self):
+ self.assertEqual(to_nullable_timestamp(None, "timestamp", protos), None)
diff --git a/azure_functions_worker_v2/tests/unittests/test_opentelemetry.py b/azure_functions_worker_v2/tests/unittests/test_opentelemetry.py
new file mode 100644
index 000000000..ffc06fe0c
--- /dev/null
+++ b/azure_functions_worker_v2/tests/unittests/test_opentelemetry.py
@@ -0,0 +1,209 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+import os
+import unittest
+
+import tests.protos as protos
+
+from azure_functions_worker_v2.handle_event import otel_manager, worker_init_request
+from azure_functions_worker_v2.otel import (initialize_azure_monitor,
+ update_opentelemetry_status)
+from azure_functions_worker_v2.logging import logger
+from tests.utils.constants import UNIT_TESTS_FOLDER
+from tests.utils.mock_classes import FunctionRequest, Request, WorkerRequest
+from unittest.mock import MagicMock, patch
+
+
+FUNCTION_APP_DIRECTORY = UNIT_TESTS_FOLDER / 'basic_functions'
+
+
+class TestOpenTelemetry(unittest.TestCase):
+
+ def test_update_opentelemetry_status_import_error(self):
+ with patch.dict('sys.modules', {
+ 'opentelemetry': None,
+ 'opentelemetry.context': None,
+ 'opentelemetry.trace': None,
+ 'opentelemetry.trace.propagation': None,
+ 'opentelemetry.trace.propagation.tracecontext': None,
+ }):
+ # Verify that context variables are None due to ImportError
+ with self.assertLogs(logger.name, 'ERROR') as cm:
+ update_opentelemetry_status()
+ self.assertTrue(
+ any("Cannot import OpenTelemetry libraries."
+ in message for message in cm.output)
+ )
+
+ @patch('builtins.__import__')
+ def test_update_opentelemetry_status_success(
+ self, mock_imports):
+ mock_imports.return_value = MagicMock()
+ update_opentelemetry_status()
+ self.assertIsNotNone(otel_manager.get_context_api())
+ self.assertIsNotNone(otel_manager.get_trace_context_propagator())
+
+ @patch('builtins.__import__')
+ @patch("azure_functions_worker_v2.otel.update_opentelemetry_status")
+ def test_initialize_azure_monitor_success(
+ self,
+ mock_update_ot,
+ mock_imports,
+ ):
+ mock_imports.return_value = MagicMock()
+ initialize_azure_monitor()
+ mock_update_ot.assert_called_once()
+ self.assertTrue(otel_manager.get_azure_monitor_available())
+
+ @patch("azure_functions_worker_v2.otel.update_opentelemetry_status")
+ def test_initialize_azure_monitor_import_error(
+ self,
+ mock_update_ot,
+ ):
+ with patch('builtins.__import__', side_effect=ImportError):
+ initialize_azure_monitor()
+ mock_update_ot.assert_called_once()
+ # Verify that azure_monitor_available is set to False due to ImportError
+ self.assertFalse(otel_manager.get_azure_monitor_available())
+
+ @patch.dict(os.environ, {'PYTHON_APPLICATIONINSIGHTS_ENABLE_TELEMETRY': 'true'})
+ @patch('builtins.__import__')
+ async def test_init_request_initialize_azure_monitor_enabled_app_setting(
+ self,
+ mock_imports,
+ ):
+ mock_imports.return_value = MagicMock()
+
+ worker_request = WorkerRequest(name='worker_init_request',
+ request=Request(FunctionRequest(
+ 'hello',
+ FUNCTION_APP_DIRECTORY)),
+ properties={'host': '123',
+ 'protos': protos})
+ init_response = await worker_init_request(worker_request)
+
+ self.assertEqual(init_response.result.status,
+ protos.StatusResult.Success)
+
+ # Verify azure_monitor_available is set to True
+ self.assertTrue(otel_manager.get_azure_monitor_available())
+ # Verify that WorkerOpenTelemetryEnabled capability is set to _TRUE
+ capabilities = init_response.capabilities
+ self.assertIn("WorkerOpenTelemetryEnabled", capabilities)
+ self.assertEqual(capabilities["WorkerOpenTelemetryEnabled"], "true")
+
+ @patch("azure_functions_worker_v2.handle_event."
+ "otel_manager.initialize_azure_monitor")
+ async def test_init_request_initialize_azure_monitor_default_app_setting(
+ self,
+ mock_initialize_azmon,
+ ):
+ worker_request = WorkerRequest(name='worker_init_request',
+ request=Request(FunctionRequest(
+ 'hello',
+ FUNCTION_APP_DIRECTORY)),
+ properties={'host': '123',
+ 'protos': protos})
+ init_response = await worker_init_request(worker_request)
+
+ self.assertEqual(init_response.result.status,
+ protos.StatusResult.Success)
+
+        # Azure monitor initialization is not called
+ # Since default behavior is not enabled
+ mock_initialize_azmon.assert_not_called()
+
+ # Verify azure_monitor_available is set to False
+ self.assertFalse(otel_manager.get_azure_monitor_available())
+ # Verify that WorkerOpenTelemetryEnabled capability is not set
+ capabilities = init_response.capabilities
+ self.assertNotIn("WorkerOpenTelemetryEnabled", capabilities)
+
+ @patch.dict(os.environ, {'PYTHON_APPLICATIONINSIGHTS_ENABLE_TELEMETRY': 'false'})
+ @patch("azure_functions_worker_v2.otel_manager.initialize_azure_monitor")
+ async def test_init_request_initialize_azure_monitor_disabled_app_setting(
+ self,
+ mock_initialize_azmon,
+ ):
+ worker_request = WorkerRequest(name='worker_init_request',
+ request=Request(FunctionRequest(
+ 'hello',
+ FUNCTION_APP_DIRECTORY)),
+ properties={'host': '123',
+ 'protos': protos})
+ init_response = await worker_init_request(worker_request)
+
+ self.assertEqual(init_response.result.status,
+ protos.StatusResult.Success)
+
+        # Azure monitor initialization is not called
+ mock_initialize_azmon.assert_not_called()
+
+ # Verify azure_monitor_available is set to False
+ self.assertFalse(otel_manager.get_azure_monitor_available())
+ # Verify that WorkerOpenTelemetryEnabled capability is not set
+ capabilities = init_response.capabilities
+ self.assertNotIn("WorkerOpenTelemetryEnabled", capabilities)
+
+ @patch.dict(os.environ, {'PYTHON_ENABLE_OPENTELEMETRY': 'true'})
+ async def test_init_request_enable_opentelemetry_enabled_app_setting(
+ self,
+ ):
+ worker_request = WorkerRequest(name='worker_init_request',
+ request=Request(FunctionRequest(
+ 'hello',
+ FUNCTION_APP_DIRECTORY)),
+ properties={'host': '123',
+ 'protos': protos})
+ init_response = await worker_init_request(worker_request)
+
+ self.assertEqual(init_response.result.status,
+ protos.StatusResult.Success)
+
+        # Verify azure_monitor_available is set to True
+ self.assertTrue(otel_manager.get_azure_monitor_available())
+ # Verify that WorkerOpenTelemetryEnabled capability is set to _TRUE
+ capabilities = init_response.capabilities
+ self.assertIn("WorkerOpenTelemetryEnabled", capabilities)
+ self.assertEqual(capabilities["WorkerOpenTelemetryEnabled"], "true")
+
+ @patch.dict(os.environ, {'PYTHON_ENABLE_OPENTELEMETRY': 'false'})
+ async def test_init_request_enable_opentelemetry_default_app_setting(
+ self,
+ ):
+ worker_request = WorkerRequest(name='worker_init_request',
+ request=Request(FunctionRequest(
+ 'hello',
+ FUNCTION_APP_DIRECTORY)),
+ properties={'host': '123',
+ 'protos': protos})
+ init_response = await worker_init_request(worker_request)
+
+ self.assertEqual(init_response.result.status,
+ protos.StatusResult.Success)
+
+ # Verify otel_libs_available is set to False by default
+ self.assertFalse(otel_manager.get_otel_libs_available())
+ # Verify that WorkerOpenTelemetryEnabled capability is not set
+ capabilities = init_response.capabilities
+ self.assertNotIn("WorkerOpenTelemetryEnabled", capabilities)
+
+ @patch.dict(os.environ, {'PYTHON_APPLICATIONINSIGHTS_ENABLE_TELEMETRY': 'false'})
+ async def test_init_request_enable_azure_monitor_disabled_app_setting(
+ self,
+ ):
+ worker_request = WorkerRequest(name='worker_init_request',
+ request=Request(FunctionRequest(
+ 'hello',
+ FUNCTION_APP_DIRECTORY)),
+ properties={'host': '123',
+ 'protos': protos})
+ init_response = await worker_init_request(worker_request)
+
+ self.assertEqual(init_response.result.status,
+ protos.StatusResult.Success)
+
+        # Verify azure_monitor_available is set to False by default
+ self.assertFalse(otel_manager.get_azure_monitor_available())
+ # Verify that WorkerOpenTelemetryEnabled capability is not set
+ capabilities = init_response.capabilities
+ self.assertNotIn("WorkerOpenTelemetryEnabled", capabilities)
diff --git a/azure_functions_worker_v2/tests/unittests/test_rpc_messages.py b/azure_functions_worker_v2/tests/unittests/test_rpc_messages.py
new file mode 100644
index 000000000..9cae4ecce
--- /dev/null
+++ b/azure_functions_worker_v2/tests/unittests/test_rpc_messages.py
@@ -0,0 +1,124 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import os
+import subprocess
+import sys
+import tempfile
+import typing
+import unittest
+
+import tests.protos as protos
+
+from azure_functions_worker_v2.handle_event import function_environment_reload_request
+from tests.utils import testutils
+from tests.utils.constants import UNIT_TESTS_FOLDER
+from tests.utils.mock_classes import FunctionRequest, Request, WorkerRequest
+
+BASIC_FUNCTION_DIRECTORY = UNIT_TESTS_FOLDER / 'basic_function'
+
+
+class TestGRPC(testutils.AsyncTestCase):
+ pre_test_env = os.environ.copy()
+ pre_test_cwd = os.getcwd()
+
+ def _reset_environ(self):
+ for key, value in self.pre_test_env.items():
+ os.environ[key] = value
+ os.chdir(self.pre_test_cwd)
+
+ async def _verify_environment_reloaded(
+ self,
+ test_env: typing.Dict[str, str] = {},
+ test_cwd: str = os.getcwd()):
+ worker_request = WorkerRequest(name='function_environment_reload_request',
+ request=Request(FunctionRequest(
+ 'hello',
+ test_cwd,
+ test_env)),
+ properties={'host': '123',
+ 'protos': protos})
+ result = await function_environment_reload_request(worker_request)
+
+ status = result.result.status
+ exp = result.result.exception
+ self.assertEqual(status, protos.StatusResult.Success,
+ f"Exception in Reload request: {exp}")
+
+ environ_dict = os.environ.copy()
+ self.assertTrue(test_env.items() <= environ_dict.items())
+ self.assertEqual(os.getcwd(), test_cwd)
+
+ self._reset_environ()
+
+ async def test_multiple_env_vars_load(self):
+ test_env = {'TEST_KEY': 'foo', 'HELLO': 'world'}
+ await self._verify_environment_reloaded(test_env=test_env)
+
+ async def test_empty_env_vars_load(self):
+ test_env = {}
+ await self._verify_environment_reloaded(test_env=test_env)
+
+ @unittest.skipIf(sys.platform == 'darwin',
+ 'MacOS creates the processes specific var folder in '
+ '/private filesystem and not in /var like in linux '
+ 'systems.')
+ async def test_changing_current_working_directory(self):
+ test_cwd = tempfile.gettempdir()
+ await self._verify_environment_reloaded(test_cwd=test_cwd)
+
+ @unittest.skipIf(sys.platform == 'darwin',
+ 'MacOS creates the processes specific var folder in '
+ '/private filesystem and not in /var like in linux '
+ 'systems.')
+ async def test_reload_env_message(self):
+ test_env = {'TEST_KEY': 'foo', 'HELLO': 'world'}
+ test_cwd = tempfile.gettempdir()
+ await self._verify_environment_reloaded(test_env, test_cwd)
+
+ def _verify_sys_path_import(self, result, expected_output):
+ path_import_script = os.path.join(UNIT_TESTS_FOLDER,
+ 'path_import', 'test_path_import.sh')
+ try:
+ subprocess.run(['chmod +x ' + path_import_script], shell=True)
+
+ exported_path = ":".join(sys.path)
+ output = subprocess.check_output(
+ [path_import_script, result, exported_path],
+ stderr=subprocess.STDOUT)
+ decoded_output = output.decode(sys.stdout.encoding).strip()
+ self.assertTrue(expected_output in decoded_output)
+ finally:
+ subprocess.run(['chmod -x ' + path_import_script], shell=True)
+ self._reset_environ()
+
+ @unittest.skipIf(sys.platform == 'win32',
+ 'Linux .sh script only works on Linux')
+ def test_failed_sys_path_import(self):
+ self._verify_sys_path_import(
+ 'fail',
+ "No module named 'test_module'")
+
+ @unittest.skipIf(sys.platform == 'win32',
+ 'Linux .sh script only works on Linux')
+ def test_successful_sys_path_import(self):
+ self._verify_sys_path_import(
+ 'success',
+ 'This module was imported!')
+
+ def _verify_azure_namespace_import(self, result, expected_output):
+ print(os.getcwd())
+ path_import_script = os.path.join(UNIT_TESTS_FOLDER,
+ 'azure_namespace_import',
+ 'test_azure_namespace_import.sh')
+ try:
+ subprocess.run(['chmod +x ' + path_import_script], shell=True)
+
+ output = subprocess.check_output(
+ [path_import_script, result],
+ stderr=subprocess.STDOUT)
+ decoded_output = output.decode(sys.stdout.encoding).strip()
+ self.assertTrue(expected_output in decoded_output,
+                            f"Decoded Output: {decoded_output}")
+ finally:
+ subprocess.run(['chmod -x ' + path_import_script], shell=True)
+ self._reset_environ()
diff --git a/azure_functions_worker_v2/tests/unittests/test_types.py b/azure_functions_worker_v2/tests/unittests/test_types.py
new file mode 100644
index 000000000..6438a0ee6
--- /dev/null
+++ b/azure_functions_worker_v2/tests/unittests/test_types.py
@@ -0,0 +1,211 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import unittest
+
+import tests.protos as protos
+
+from azure import functions as azf
+from azure.functions import http as bind_http
+from azure.functions import meta as bind_meta
+from azure_functions_worker_v2.bindings import datumdef
+from tests.utils.mock_classes import MockMBD, MockCMBD
+
+
+class TestFunctions(unittest.TestCase):
+
+ def test_http_request_bytes(self):
+ r = bind_http.HttpRequest(
+ 'get',
+ 'http://example.com/abc?a=1',
+ headers=dict(aaa='zzz', bAb='xYz'),
+ params=dict(a='b'),
+ route_params={'route': 'param'},
+ body_type='bytes',
+ body=b'abc')
+
+ self.assertEqual(r.method, 'GET')
+ self.assertEqual(r.url, 'http://example.com/abc?a=1')
+ self.assertEqual(r.params, {'a': 'b'})
+ self.assertEqual(r.route_params, {'route': 'param'})
+
+ with self.assertRaises(TypeError):
+ r.params['a'] = 'z'
+
+ self.assertEqual(r.get_body(), b'abc')
+
+ with self.assertRaisesRegex(ValueError, 'does not contain valid JSON'):
+ r.get_json()
+
+ h = r.headers
+ with self.assertRaises(AttributeError):
+ r.headers = dict()
+
+ self.assertEqual(h['aaa'], 'zzz')
+ self.assertEqual(h['aaA'], 'zzz')
+ self.assertEqual(h['bab'], 'xYz')
+ self.assertEqual(h['BaB'], 'xYz')
+
+ # test that request headers are read-only
+ with self.assertRaises(TypeError):
+ h['zzz'] = '123'
+
+ def test_http_request_json(self):
+ r = bind_http.HttpRequest(
+ 'POST',
+ 'http://example.com/abc?a=1',
+ headers={},
+ params={},
+ route_params={},
+ body_type='json',
+ body='{"a":1}')
+
+ self.assertEqual(r.method, 'POST')
+ self.assertEqual(r.url, 'http://example.com/abc?a=1')
+ self.assertEqual(r.params, {})
+ self.assertEqual(r.route_params, {})
+
+ self.assertEqual(r.get_body(), b'{"a":1}')
+ self.assertEqual(r.get_json(), {'a': 1})
+
+ def test_http_response(self):
+ r = azf.HttpResponse(
+ 'body™',
+ status_code=201,
+ headers=dict(aaa='zzz', bAb='xYz'))
+
+ self.assertEqual(r.status_code, 201)
+ self.assertEqual(r.get_body(), b'body\xe2\x84\xa2')
+
+ self.assertEqual(r.mimetype, 'text/plain')
+ self.assertEqual(r.charset, 'utf-8')
+
+ h = r.headers
+ with self.assertRaises(AttributeError):
+ r.headers = dict()
+
+ self.assertEqual(h['aaa'], 'zzz')
+ self.assertEqual(h['aaA'], 'zzz')
+ self.assertEqual(h['bab'], 'xYz')
+ self.assertEqual(h['BaB'], 'xYz')
+
+ # test that response headers are mutable
+ h['zZz'] = '123'
+ self.assertEqual(h['zzz'], '123')
+
+
+class Converter(bind_meta.InConverter, binding='foo'):
+ pass
+
+
+class TestTriggerMetadataDecoder(unittest.TestCase):
+
+ def test_scalar_typed_data_decoder_ok(self):
+ metadata = {
+ 'int_as_json': bind_meta.Datum(type='json', value='1'),
+ 'int_as_string': bind_meta.Datum(type='string', value='1'),
+ 'int_as_int': bind_meta.Datum(type='int', value=1),
+ 'string_as_json': bind_meta.Datum(type='json', value='"aaa"'),
+ 'string_as_string': bind_meta.Datum(type='string', value='aaa'),
+ 'dict_as_json': bind_meta.Datum(type='json', value='{"foo":"bar"}')
+ }
+
+ cases = [
+ ('int_as_json', int, 1),
+ ('int_as_string', int, 1),
+ ('int_as_int', int, 1),
+ ('string_as_json', str, 'aaa'),
+ ('string_as_string', str, 'aaa'),
+ ('dict_as_json', dict, {'foo': 'bar'}),
+ ]
+
+ for field, pytype, expected in cases:
+ with self.subTest(field=field):
+ value = Converter._decode_trigger_metadata_field(
+ metadata, field, python_type=pytype)
+
+ self.assertIsInstance(value, pytype)
+ self.assertEqual(value, expected)
+
+ def test_scalar_typed_data_decoder_not_ok(self):
+ metadata = {
+ 'unsupported_type':
+ bind_meta.Datum(type='bytes', value=b'aaa'),
+ 'unexpected_json':
+ bind_meta.Datum(type='json', value='[1, 2, 3]'),
+ 'unexpected_data':
+ bind_meta.Datum(type='json', value='"foo"'),
+ }
+
+ cases = [
+ (
+ 'unsupported_type', int, ValueError,
+ "unsupported type of field 'unsupported_type' in "
+ "trigger metadata: bytes"
+ ),
+ (
+ 'unexpected_json', int, ValueError,
+ "cannot convert value of field 'unexpected_json' in "
+ "trigger metadata into int"
+ ),
+ (
+ 'unexpected_data', int, ValueError,
+ "cannot convert value of field "
+ "'unexpected_data' in trigger metadata into int: "
+ "invalid literal for int"
+ ),
+ (
+ 'unexpected_data', (int, float), ValueError,
+ "unexpected value type in field "
+ "'unexpected_data' in trigger metadata: str, "
+ "expected one of: int, float"
+ ),
+ ]
+
+ for field, pytype, exc, msg in cases:
+ with self.subTest(field=field):
+ with self.assertRaisesRegex(exc, msg):
+ Converter._decode_trigger_metadata_field(
+ metadata, field, python_type=pytype)
+
+ def test_model_binding_data_datum_ok(self):
+ sample_mbd = MockMBD(version="1.0",
+ source="AzureStorageBlobs",
+ content_type="application/json",
+ content="{\"Connection\":\"python-worker-tests\","
+ "\"ContainerName\":\"test-blob\","
+ "\"BlobName\":\"test.txt\"}")
+
+ datum: bind_meta.Datum = bind_meta.Datum(value=sample_mbd,
+ type='model_binding_data')
+
+ self.assertEqual(datum.value, sample_mbd)
+ self.assertEqual(datum.type, "model_binding_data")
+
+ def test_model_binding_data_td_ok(self):
+ mock_mbd = protos.TypedData(model_binding_data={'version': '1.0'})
+ mbd_datum = datumdef.Datum.from_typed_data(mock_mbd)
+
+ self.assertEqual(mbd_datum.type, 'model_binding_data')
+
+ def test_collection_model_binding_data_datum_ok(self):
+ sample_mbd = MockMBD(version="1.0",
+ source="AzureStorageBlobs",
+ content_type="application/json",
+ content="{\"Connection\":\"python-worker-tests\","
+ "\"ContainerName\":\"test-blob\","
+ "\"BlobName\":\"test.txt\"}")
+ sample_cmbd = MockCMBD(model_binding_data=[sample_mbd, sample_mbd])
+
+ datum: bind_meta.Datum = bind_meta.Datum(value=sample_cmbd,
+ type='collection_model_binding_data')
+
+ self.assertEqual(datum.value, sample_cmbd)
+ self.assertEqual(datum.type, "collection_model_binding_data")
+
+ def test_collection_model_binding_data_td_ok(self):
+ mock_cmbd = protos.TypedData(
+ collection_model_binding_data={'model_binding_data': [{'version': '1.0'}]}
+ )
+ cmbd_datum = datumdef.Datum.from_typed_data(mock_cmbd)
+
+ self.assertEqual(cmbd_datum.type, 'collection_model_binding_data')
diff --git a/azure_functions_worker_v2/tests/unittests/test_typing_inspect.py b/azure_functions_worker_v2/tests/unittests/test_typing_inspect.py
new file mode 100644
index 000000000..5561d2831
--- /dev/null
+++ b/azure_functions_worker_v2/tests/unittests/test_typing_inspect.py
@@ -0,0 +1,142 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import unittest
+
+from typing import (
+ Any,
+ Callable,
+ ClassVar,
+ Dict,
+ Generic,
+ Iterable,
+ List,
+ Mapping,
+ MutableMapping,
+ NamedTuple,
+ Optional,
+ Sequence,
+ Tuple,
+ TypeVar,
+ Union,
+)
+from azure_functions_worker_v2.utils.typing_inspect import (
+ get_args,
+ get_generic_bases,
+ get_generic_type,
+ get_origin,
+ get_parameters,
+ is_callable_type,
+ is_classvar,
+ is_generic_type,
+ is_tuple_type,
+ is_typevar,
+ is_union_type,
+)
+
+
+class IsUtilityTestCase(unittest.TestCase):
+ def sample_test(self, fun, samples, nonsamples):
+ for s in samples:
+ self.assertTrue(fun(s), f"{s} type expected in {samples}")
+ for s in nonsamples:
+ self.assertFalse(fun(s), f"{s} type expected in {nonsamples}")
+
+ def test_generic(self):
+ T = TypeVar('T')
+ samples = [Generic, Generic[T], Iterable[int], Mapping,
+ MutableMapping[T, List[int]], Sequence[Union[str, bytes]]]
+ nonsamples = [int, Union[int, str], Union[int, T], ClassVar[List[int]],
+ Callable[..., T], ClassVar, Optional, bytes, list]
+ self.sample_test(is_generic_type, samples, nonsamples)
+
+ def test_callable(self):
+ class MyClass(Callable[[int], int]):
+ pass
+ samples = [Callable, Callable[..., int],
+ Callable[[int, int], Iterable[str]]]
+ nonsamples = [int, type, 42, [], List[int],
+ Union[callable, Callable[..., int]]]
+ self.sample_test(is_callable_type, samples, nonsamples)
+ self.assertTrue(is_callable_type(MyClass))
+
+ def test_tuple(self):
+ class MyClass(Tuple[str, int]):
+ pass
+ samples = [Tuple, Tuple[str, int], Tuple[Iterable, ...]]
+ nonsamples = [int, tuple, 42, List[int], NamedTuple('N', [('x', int)])]
+ self.sample_test(is_tuple_type, samples, nonsamples)
+ self.assertTrue(is_tuple_type(MyClass))
+
+ def test_union(self):
+ T = TypeVar('T')
+ S = TypeVar('S')
+ samples = [Union, Union[T, int], Union[int, Union[T, S]]]
+ nonsamples = [int, Union[int, int], [], Iterable[Any]]
+ self.sample_test(is_union_type, samples, nonsamples)
+
+ def test_typevar(self):
+ T = TypeVar('T')
+ S_co = TypeVar('S_co', covariant=True)
+ samples = [T, S_co]
+ nonsamples = [int, Union[T, int], Union[T, S_co], type, ClassVar[int]]
+ self.sample_test(is_typevar, samples, nonsamples)
+
+ def test_classvar(self):
+ T = TypeVar('T')
+ samples = [ClassVar, ClassVar[int], ClassVar[List[T]]]
+ nonsamples = [int, 42, Iterable, List[int], type, T]
+ self.sample_test(is_classvar, samples, nonsamples)
+
+
+class GetUtilityTestCase(unittest.TestCase):
+
+ def test_origin(self):
+ T = TypeVar('T')
+
+ class MyClass(Generic[T]):
+ pass
+
+ self.assertEqual(get_origin(int), None)
+ self.assertEqual(get_origin(ClassVar[int]), None)
+ self.assertEqual(get_origin(Generic), Generic)
+ self.assertEqual(get_origin(Generic[T]), Generic)
+ self.assertEqual(get_origin(List[Tuple[T, T]][int]), list)
+ self.assertEqual(get_origin(MyClass), None)
+
+ def test_parameters(self):
+ T = TypeVar('T')
+ S_co = TypeVar('S_co', covariant=True)
+ U = TypeVar('U')
+ self.assertEqual(get_parameters(int), ())
+ self.assertEqual(get_parameters(Generic), ())
+ self.assertEqual(get_parameters(Union), ())
+ self.assertEqual(get_parameters(List[int]), ())
+ self.assertEqual(get_parameters(Generic[T]), (T,))
+ self.assertEqual(get_parameters(Tuple[List[T], List[S_co]]), (T, S_co))
+ self.assertEqual(get_parameters(Union[S_co, Tuple[T, T]][int, U]), (U,))
+ self.assertEqual(get_parameters(Mapping[T, Tuple[S_co, T]]), (T, S_co))
+
+ def test_args_evaluated(self):
+ T = TypeVar('T')
+ self.assertEqual(get_args(Union[int, Tuple[T, int]][str], evaluate=True),
+ (int, Tuple[str, int]))
+ self.assertEqual(get_args(Dict[int, Tuple[T, T]][Optional[int]], evaluate=True),
+ (int, Tuple[Optional[int], Optional[int]]))
+ self.assertEqual(get_args(Callable[[], T][int], evaluate=True), ([], int,))
+
+ def test_generic_type(self):
+ T = TypeVar('T')
+
+ class Node(Generic[T]):
+ pass
+ self.assertIs(get_generic_type(Node()), Node)
+ self.assertIs(get_generic_type(Node[int]()), Node[int])
+ self.assertIs(get_generic_type(Node[T]()), Node[T],)
+ self.assertIs(get_generic_type(1), int)
+
+ def test_generic_bases(self):
+ class MyClass(List[int], Mapping[str, List[int]]):
+ pass
+ self.assertEqual(get_generic_bases(MyClass),
+ (List[int], Mapping[str, List[int]]))
+ self.assertEqual(get_generic_bases(int), ())
diff --git a/azure_functions_worker_v2/tests/unittests/test_utilities.py b/azure_functions_worker_v2/tests/unittests/test_utilities.py
new file mode 100644
index 000000000..ac17c08fe
--- /dev/null
+++ b/azure_functions_worker_v2/tests/unittests/test_utilities.py
@@ -0,0 +1,338 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import os
+import sys
+import typing
+import unittest
+from unittest.mock import patch
+
+from azure_functions_worker_v2.utils import (app_setting_manager,
+ helpers,
+ validators,
+ wrappers)
+
+TEST_APP_SETTING_NAME = "TEST_APP_SETTING_NAME"
+TEST_FEATURE_FLAG = "APP_SETTING_FEATURE_FLAG"
+FEATURE_DEFAULT = 42
+
+
+class MockFeature:
+ @wrappers.enable_feature_by(TEST_FEATURE_FLAG)
+ def mock_feature_enabled(self, output: typing.List[str]) -> str:
+ result = 'mock_feature_enabled'
+ output.append(result)
+ return result
+
+ @wrappers.enable_feature_by(TEST_FEATURE_FLAG, flag_default=True)
+ def mock_enabled_default_true(self, output: typing.List[str]) -> str:
+ result = 'mock_enabled_default_true'
+ output.append(result)
+ return result
+
+ @wrappers.disable_feature_by(TEST_FEATURE_FLAG)
+ def mock_feature_disabled(self, output: typing.List[str]) -> str:
+ result = 'mock_feature_disabled'
+ output.append(result)
+ return result
+
+ @wrappers.disable_feature_by(TEST_FEATURE_FLAG, flag_default=True)
+ def mock_disabled_default_true(self, output: typing.List[str]) -> str:
+ result = 'mock_disabled_default_true'
+ output.append(result)
+ return result
+
+ @wrappers.enable_feature_by(TEST_FEATURE_FLAG, FEATURE_DEFAULT)
+ def mock_feature_default(self, output: typing.List[str]) -> str:
+ result = 'mock_feature_default'
+ output.append(result)
+ return result
+
+
+class MockMethod:
+ @wrappers.attach_message_to_exception(ImportError, 'success')
+ def mock_load_function_success(self):
+ return True
+
+ @wrappers.attach_message_to_exception(ImportError, 'module_not_found')
+ def mock_load_function_module_not_found(self):
+ raise ModuleNotFoundError('MODULE_NOT_FOUND')
+
+ @wrappers.attach_message_to_exception(ImportError, 'import_error')
+ def mock_load_function_import_error(self):
+        # ImportError is the exact type (ModuleNotFoundError is its subclass)
+ raise ImportError('IMPORT_ERROR')
+
+ @wrappers.attach_message_to_exception(ImportError, 'value_error')
+ def mock_load_function_value_error(self):
+ # ValueError is not a subclass of ImportError
+ raise ValueError('VALUE_ERROR')
+
+
+class TestUtilities(unittest.TestCase):
+
+ def setUp(self):
+ self._dummy_sdk_sys_path = os.path.join(
+ os.path.dirname(__file__),
+ 'resources',
+ 'mock_azure_functions'
+ )
+
+ self.mock_environ = patch.dict('os.environ', os.environ.copy())
+ self.mock_sys_module = patch.dict('sys.modules', sys.modules.copy())
+ self.mock_sys_path = patch('sys.path', sys.path.copy())
+ self.mock_environ.start()
+ self.mock_sys_module.start()
+ self.mock_sys_path.start()
+
+ def tearDown(self):
+ self.mock_sys_path.stop()
+ self.mock_sys_module.stop()
+ self.mock_environ.stop()
+
+ def test_is_true_like_accepted(self):
+ self.assertTrue(app_setting_manager.is_true_like('1'))
+ self.assertTrue(app_setting_manager.is_true_like('true'))
+ self.assertTrue(app_setting_manager.is_true_like('T'))
+ self.assertTrue(app_setting_manager.is_true_like('YES'))
+ self.assertTrue(app_setting_manager.is_true_like('y'))
+
+ def test_is_true_like_rejected(self):
+ self.assertFalse(app_setting_manager.is_true_like(None))
+ self.assertFalse(app_setting_manager.is_true_like(''))
+ self.assertFalse(app_setting_manager.is_true_like('secret'))
+
+ def test_is_false_like_accepted(self):
+ self.assertTrue(app_setting_manager.is_false_like('0'))
+ self.assertTrue(app_setting_manager.is_false_like('false'))
+ self.assertTrue(app_setting_manager.is_false_like('F'))
+ self.assertTrue(app_setting_manager.is_false_like('NO'))
+ self.assertTrue(app_setting_manager.is_false_like('n'))
+
+ def test_is_false_like_rejected(self):
+ self.assertFalse(app_setting_manager.is_false_like(None))
+ self.assertFalse(app_setting_manager.is_false_like(''))
+ self.assertFalse(app_setting_manager.is_false_like('secret'))
+
+ def test_is_envvar_true(self):
+ os.environ[TEST_FEATURE_FLAG] = 'true'
+ self.assertTrue(app_setting_manager.is_envvar_true(TEST_FEATURE_FLAG))
+
+ def test_is_envvar_not_true_on_unset(self):
+ self._unset_feature_flag()
+ self.assertFalse(app_setting_manager.is_envvar_true(TEST_FEATURE_FLAG))
+
+ def test_is_envvar_false(self):
+ os.environ[TEST_FEATURE_FLAG] = 'false'
+ self.assertTrue(app_setting_manager.is_envvar_false(TEST_FEATURE_FLAG))
+
+ def test_is_envvar_not_false_on_unset(self):
+ self._unset_feature_flag()
+ self.assertFalse(app_setting_manager.is_envvar_true(TEST_FEATURE_FLAG))
+
+ def test_disable_feature_with_no_feature_flag(self):
+ mock_feature = MockFeature()
+ output = []
+ result = mock_feature.mock_feature_enabled(output)
+ self.assertIsNone(result)
+ self.assertListEqual(output, [])
+
+ def test_disable_feature_with_default_value(self):
+ mock_feature = MockFeature()
+ output = []
+ result = mock_feature.mock_disabled_default_true(output)
+ self.assertIsNone(result)
+ self.assertListEqual(output, [])
+
+ def test_enable_feature_with_feature_flag(self):
+ feature_flag = TEST_FEATURE_FLAG
+ os.environ[feature_flag] = '1'
+ mock_feature = MockFeature()
+ output = []
+ result = mock_feature.mock_feature_enabled(output)
+ self.assertEqual(result, 'mock_feature_enabled')
+ self.assertListEqual(output, ['mock_feature_enabled'])
+
+ def test_enable_feature_with_default_value(self):
+ mock_feature = MockFeature()
+ output = []
+ result = mock_feature.mock_enabled_default_true(output)
+ self.assertEqual(result, 'mock_enabled_default_true')
+ self.assertListEqual(output, ['mock_enabled_default_true'])
+
+ def test_enable_feature_with_no_rollback_flag(self):
+ mock_feature = MockFeature()
+ output = []
+ result = mock_feature.mock_feature_disabled(output)
+ self.assertEqual(result, 'mock_feature_disabled')
+ self.assertListEqual(output, ['mock_feature_disabled'])
+
+ def test_ignore_disable_default_value_when_set_explicitly(self):
+ feature_flag = TEST_FEATURE_FLAG
+ os.environ[feature_flag] = '0'
+ mock_feature = MockFeature()
+ output = []
+ result = mock_feature.mock_disabled_default_true(output)
+ self.assertEqual(result, 'mock_disabled_default_true')
+ self.assertListEqual(output, ['mock_disabled_default_true'])
+
+ def test_disable_feature_with_rollback_flag(self):
+ rollback_flag = TEST_FEATURE_FLAG
+ os.environ[rollback_flag] = '1'
+ mock_feature = MockFeature()
+ output = []
+ result = mock_feature.mock_feature_disabled(output)
+ self.assertIsNone(result)
+ self.assertListEqual(output, [])
+
+ def test_enable_feature_with_rollback_flag_is_false(self):
+ rollback_flag = TEST_FEATURE_FLAG
+ os.environ[rollback_flag] = 'false'
+ mock_feature = MockFeature()
+ output = []
+ result = mock_feature.mock_feature_disabled(output)
+ self.assertEqual(result, 'mock_feature_disabled')
+ self.assertListEqual(output, ['mock_feature_disabled'])
+
+ def test_ignore_enable_default_value_when_set_explicitly(self):
+ feature_flag = TEST_FEATURE_FLAG
+ os.environ[feature_flag] = '0'
+ mock_feature = MockFeature()
+ output = []
+ result = mock_feature.mock_enabled_default_true(output)
+ self.assertIsNone(result)
+ self.assertListEqual(output, [])
+
+ def test_fail_to_enable_feature_return_default_value(self):
+ mock_feature = MockFeature()
+ output = []
+ result = mock_feature.mock_feature_default(output)
+ self.assertEqual(result, FEATURE_DEFAULT)
+ self.assertListEqual(output, [])
+
+ def test_disable_feature_with_false_flag_return_default_value(self):
+ feature_flag = TEST_FEATURE_FLAG
+ os.environ[feature_flag] = 'false'
+ mock_feature = MockFeature()
+ output = []
+ result = mock_feature.mock_feature_default(output)
+ self.assertEqual(result, FEATURE_DEFAULT)
+ self.assertListEqual(output, [])
+
+ def test_exception_message_should_not_be_extended_on_success(self):
+ mock_method = MockMethod()
+ result = mock_method.mock_load_function_success()
+ self.assertTrue(result)
+
+ def test_exception_message_should_be_extended_on_subexception(self):
+ mock_method = MockMethod()
+ with self.assertRaises(Exception) as e:
+ mock_method.mock_load_function_module_not_found()
+ self.assertIn('module_not_found', e.msg)
+ self.assertEqual(type(e), ModuleNotFoundError)
+
+ def test_exception_message_should_be_extended_on_exact_exception(self):
+ mock_method = MockMethod()
+ with self.assertRaises(Exception) as e:
+ mock_method.mock_load_function_module_not_found()
+ self.assertIn('import_error', e.msg)
+ self.assertEqual(type(e), ImportError)
+
+ def test_exception_message_should_not_be_extended_on_other_exception(self):
+ mock_method = MockMethod()
+ with self.assertRaises(Exception) as e:
+ mock_method.mock_load_function_value_error()
+ self.assertNotIn('import_error', e.msg)
+ self.assertEqual(type(e), ValueError)
+
+ def test_app_settings_not_set_should_return_none(self):
+ app_setting = app_setting_manager.get_app_setting(TEST_APP_SETTING_NAME)
+ self.assertIsNone(app_setting)
+
+ def test_app_settings_should_return_value(self):
+ # Set application setting by os.setenv
+ os.environ.update({TEST_APP_SETTING_NAME: '42'})
+
+ # Try using utility to acquire application setting
+ app_setting = app_setting_manager.get_app_setting(TEST_APP_SETTING_NAME)
+ self.assertEqual(app_setting, '42')
+
+ def test_app_settings_not_set_should_return_default_value(self):
+ app_setting = app_setting_manager.get_app_setting(TEST_APP_SETTING_NAME,
+ 'default')
+ self.assertEqual(app_setting, 'default')
+
+ def test_app_settings_should_ignore_default_value(self):
+ # Set application setting by os.setenv
+ os.environ.update({TEST_APP_SETTING_NAME: '42'})
+
+ # Try using utility to acquire application setting
+ app_setting = app_setting_manager.get_app_setting(TEST_APP_SETTING_NAME,
+ 'default')
+ self.assertEqual(app_setting, '42')
+
+ def test_app_settings_should_not_trigger_validator_when_not_set(self):
+ def raise_excpt(value: str):
+ raise Exception('Should not raise on app setting not found')
+
+ app_setting_manager.get_app_setting(TEST_APP_SETTING_NAME,
+ validator=raise_excpt)
+
+ def test_app_settings_return_default_value_when_validation_fail(self):
+ def parse_int_no_raise(value: str):
+ try:
+ int(value)
+ return True
+ except ValueError:
+ return False
+
+ # Set application setting to an invalid value
+ os.environ.update({TEST_APP_SETTING_NAME: 'invalid'})
+
+ app_setting = app_setting_manager.get_app_setting(
+ TEST_APP_SETTING_NAME,
+ default_value='1',
+ validator=parse_int_no_raise
+ )
+
+        # Because 'invalid' is not an integer, falls back to default value
+ self.assertEqual(app_setting, '1')
+
+ def test_app_settings_return_setting_value_when_validation_succeed(self):
+ def parse_int_no_raise(value: str):
+ try:
+ int(value)
+ return True
+ except ValueError:
+ return False
+
+ # Set application setting to an invalid value
+ os.environ.update({TEST_APP_SETTING_NAME: '42'})
+
+ app_setting = app_setting_manager.get_app_setting(
+ TEST_APP_SETTING_NAME,
+ default_value='1',
+ validator=parse_int_no_raise
+ )
+
+        # Because '42' is a valid integer, the actual setting value is returned
+ self.assertEqual(app_setting, '42')
+
+ def test_valid_script_file_name(self):
+ file_name = 'test.py'
+ validators.validate_script_file_name(file_name)
+
+ def test_invalid_script_file_name(self):
+ file_name = 'test'
+ with self.assertRaises(validators.InvalidFileNameError):
+ validators.validate_script_file_name(file_name)
+
+ def test_set_get_sdk_version(self):
+ test_version = '1.2.3'
+ helpers.set_sdk_version(test_version)
+ self.assertEqual(helpers.get_sdk_version(), test_version)
+
+ def _unset_feature_flag(self):
+ try:
+ os.environ.pop(TEST_FEATURE_FLAG)
+ except KeyError:
+ pass
diff --git a/azure_functions_worker_v2/tests/utils/__init__.py b/azure_functions_worker_v2/tests/utils/__init__.py
new file mode 100644
index 000000000..5b7f7a925
--- /dev/null
+++ b/azure_functions_worker_v2/tests/utils/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
diff --git a/azure_functions_worker_v2/tests/utils/constants.py b/azure_functions_worker_v2/tests/utils/constants.py
new file mode 100644
index 000000000..e339bedaf
--- /dev/null
+++ b/azure_functions_worker_v2/tests/utils/constants.py
@@ -0,0 +1,7 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import pathlib
+
+PROJECT_ROOT = pathlib.Path(__file__).parent.parent.parent
+TESTS_ROOT = PROJECT_ROOT / 'tests'
+UNIT_TESTS_FOLDER = TESTS_ROOT / pathlib.Path('unittests')
diff --git a/azure_functions_worker_v2/tests/utils/mock_classes.py b/azure_functions_worker_v2/tests/utils/mock_classes.py
new file mode 100644
index 000000000..0cc89b530
--- /dev/null
+++ b/azure_functions_worker_v2/tests/utils/mock_classes.py
@@ -0,0 +1,50 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+from typing import Any, List, Optional
+
+
+# This represents the top level protos request sent from the host
+class WorkerRequest:
+ def __init__(self, name: str, request: Any, properties: dict):
+ self.name = name
+ self.request = request
+ self.properties = properties
+
+
+# This represents the inner request
+class Request:
+    def __init__(self, name: Any):
+        self.worker_init_request = name  # both attributes alias the same object,
+        self.function_environment_reload_request = name  # so either request path resolves to it
+
+
+# This represents the Function Init/Metadata/Load/Invocation request
+class FunctionRequest:
+    def __init__(self, capabilities: Any,
+                 function_app_directory: Any,
+                 environment_variables: Optional[Any] = None):  # None sentinel avoids a shared mutable default dict
+        self.capabilities = capabilities
+        self.function_app_directory = function_app_directory
+        self.environment_variables = {} if environment_variables is None else environment_variables
+
+
+class MockMBD:
+ def __init__(self, version: str, source: str,
+ content_type: str, content: str):
+ self.version = version
+ self.source = source
+ self.content_type = content_type
+ self.content = content
+
+
+class MockCMBD:
+ def __init__(self, model_binding_data: List[MockMBD]):
+ self.model_binding_data = model_binding_data
+
+
+class MockHttpRequest:
+ pass
+
+
+class MockHttpResponse:
+ pass
diff --git a/azure_functions_worker_v2/tests/utils/testutils.py b/azure_functions_worker_v2/tests/utils/testutils.py
new file mode 100644
index 000000000..de229fc3e
--- /dev/null
+++ b/azure_functions_worker_v2/tests/utils/testutils.py
@@ -0,0 +1,34 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+"""Unittest helpers.
+
+All functions in this file should be considered private APIs,
+and can be changed without a notice.
+"""
+
+import asyncio
+import functools
+import inspect
+import unittest
+
+
+class AsyncTestCaseMeta(type(unittest.TestCase)):  # metaclass: lets unittest run coroutine test methods
+    def __new__(mcls, name, bases, ns):
+        for attrname, attr in ns.items():
+            if (attrname.startswith('test_')
+                    and inspect.iscoroutinefunction(attr)):  # only async test_* methods are wrapped
+                ns[attrname] = mcls._sync_wrap(attr)
+
+        return super().__new__(mcls, name, bases, ns)
+
+    @staticmethod
+    def _sync_wrap(func):  # wrap a coroutine function in a sync callable driven by asyncio.run
+        @functools.wraps(func)
+        def wrapper(*args, **kwargs):
+            return asyncio.run(func(*args, **kwargs))
+
+        return wrapper
+
+
+class AsyncTestCase(unittest.TestCase, metaclass=AsyncTestCaseMeta):
+ pass
diff --git a/eng/ci/official-build.yml b/eng/ci/official-build.yml
index 7a555e8a4..ca1300b1c 100644
--- a/eng/ci/official-build.yml
+++ b/eng/ci/official-build.yml
@@ -46,34 +46,49 @@ extends:
break: true
stages:
- - stage: Build
+ # Python Worker Build and Test Stages
+ - stage: BuildPythonWorker
jobs:
- template: /eng/templates/official/jobs/build-artifacts.yml@self
-
- - stage: RunE2ETests
- dependsOn: Build
+ - stage: RunWorkerE2ETests
+ dependsOn: BuildPythonWorker
jobs:
- template: /eng/templates/official/jobs/ci-e2e-tests.yml@self
- - stage: RunEmulatorTests
- dependsOn: Build
+ - stage: RunWorkerEmulatorTests
+ dependsOn: BuildPythonWorker
jobs:
- template: /eng/templates/jobs/ci-emulator-tests.yml@self
parameters:
PoolName: 1es-pool-azfunc
- - stage: RunUnitTests
- dependsOn: Build
+ - stage: RunWorkerUnitTests
+ dependsOn: BuildPythonWorker
jobs:
- template: /eng/templates/jobs/ci-unit-tests.yml@self
- - stage: RunDockerConsumptionTests
- dependsOn: Build
+ - stage: RunWorkerDockerConsumptionTests
+ dependsOn: BuildPythonWorker
jobs:
- template: /eng/templates/official/jobs/ci-docker-consumption-tests.yml@self
- - stage: RunDockerDedicatedTests
- dependsOn: Build
+ - stage: RunWorkerDockerDedicatedTests
+ dependsOn: BuildPythonWorker
jobs:
- template: /eng/templates/official/jobs/ci-docker-dedicated-tests.yml@self
# Skipping consumption tests till pipeline is fixed
-# - stage: RunLinuxConsumptionTests
-# dependsOn: Build
+# - stage: RunWorkerLinuxConsumptionTests
+# dependsOn: BuildPythonWorker
# jobs:
# - template: /eng/templates/official/jobs/ci-lc-tests.yml@self
+
+ # Python V2 Library Build and Test Stages
+ - stage: BuildV2Library
+ dependsOn: []
+ jobs:
+ - template: /eng/templates/official/jobs/build-library.yml@self
+ parameters:
+ PROJECT_NAME: 'Python V2 Library'
+ - stage: RunV2LibraryUnitTests
+ dependsOn: BuildV2Library
+ jobs:
+ - template: /eng/templates/jobs/ci-library-unit-tests.yml@self
+ parameters:
+ PROJECT_NAME: 'Python V2 Library'
+ PROJECT_DIRECTORY: 'azure_functions_worker_v2'
diff --git a/eng/ci/public-build.yml b/eng/ci/public-build.yml
index 470a94f9c..fc1367fcc 100644
--- a/eng/ci/public-build.yml
+++ b/eng/ci/public-build.yml
@@ -49,18 +49,42 @@ extends:
skipBuildTagsForGitHubPullRequests: ${{ variables['System.PullRequest.IsFork'] }}
stages:
- - stage: Build
+ # Python Worker Build and Test Stages
+ - stage: BuildPythonWorker
jobs:
- template: /eng/templates/jobs/build.yml@self
+ parameters:
+ PYTHON_VERSION: '3.11'
+ PROJECT_NAME: 'Azure Functions Python Worker'
+ PROJECT_DIRECTORY: 'workers'
# Skip the build stage for SDK and Extensions release branches. This stage will fail because pyproject.toml contains the updated (and unreleased) library version
condition: and(eq(variables.isSdkRelease, false), eq(variables.isExtensionsRelease, false), eq(variables['USETESTPYTHONSDK'], false), eq(variables['USETESTPYTHONEXTENSIONS'], false))
- - stage: RunUnitTests
- dependsOn: Build
+ - stage: RunWorkerUnitTests
+ dependsOn: BuildPythonWorker
jobs:
- template: /eng/templates/jobs/ci-unit-tests.yml@self
- - stage: RunEmulatorTests
- dependsOn: Build
+ parameters:
+ PROJECT_DIRECTORY: 'workers'
+ - stage: RunWorkerEmulatorTests
+ dependsOn: BuildPythonWorker
jobs:
- template: /eng/templates/jobs/ci-emulator-tests.yml@self
parameters:
- PoolName: 1es-pool-azfunc-public
\ No newline at end of file
+ PoolName: 1es-pool-azfunc-public
+
+ # Python V2 Library Build and Test Stages
+ - stage: BuildV2Library
+ dependsOn: []
+ jobs:
+ - template: /eng/templates/jobs/build.yml@self
+ parameters:
+ PYTHON_VERSION: '3.13'
+ PROJECT_NAME: 'V2 Library'
+ PROJECT_DIRECTORY: 'azure_functions_worker_v2'
+ - stage: RunV2LibraryUnitTests
+ dependsOn: BuildV2Library
+ jobs:
+ - template: /eng/templates/jobs/ci-library-unit-tests.yml@self
+ parameters:
+ PROJECT_NAME: 'V2 Library'
+ PROJECT_DIRECTORY: 'azure_functions_worker_v2'
\ No newline at end of file
diff --git a/eng/ci/worker-release.yml b/eng/ci/worker-release.yml
index d189be866..f81834c6f 100644
--- a/eng/ci/worker-release.yml
+++ b/eng/ci/worker-release.yml
@@ -1,5 +1,15 @@
pr: none
+parameters:
+ - name: WorkerRelease
+    displayName: 'Worker Release (NuGet)'
+ type: boolean
+ default: false
+ - name: LibraryRelease
+ displayName: 'Library Release (PyPI)'
+ type: boolean
+ default: false
+
resources:
repositories:
- repository: 1es
@@ -24,6 +34,14 @@ extends:
os: windows
stages:
- - stage: Release
+ - stage: ReleasePythonWorker
+    displayName: 'Release Python Worker (NuGet)'
jobs:
- template: /eng/templates/official/jobs/publish-release.yml@self
+ condition: eq(${{ parameters.WorkerRelease }}, True)
+ - stage: ReleasePythonV2Library
+ dependsOn: []
+ displayName: 'Release V2 Library (PyPI)'
+ jobs:
+ - template: /eng/templates/official/jobs/publish-library-release.yml@self
+ condition: eq(${{ parameters.LibraryRelease }}, True)
diff --git a/eng/scripts/install-dependencies.sh b/eng/scripts/install-dependencies.sh
index 5a9d1ca40..cc5e4ecaf 100644
--- a/eng/scripts/install-dependencies.sh
+++ b/eng/scripts/install-dependencies.sh
@@ -2,11 +2,11 @@
python -m pip install --upgrade pip
python -m pip install -U azure-functions --pre
-python -m pip install -U -e workers/[dev]
+python -m pip install -U -e $2/[dev]
if [[ $1 != "3.7" ]]; then
- python -m pip install --pre -U -e workers/[test-http-v2]
+ python -m pip install --pre -U -e $2/[test-http-v2]
fi
if [[ $1 != "3.7" && $1 != "3.8" ]]; then
- python -m pip install --pre -U -e workers/[test-deferred-bindings]
+ python -m pip install --pre -U -e $2/[test-deferred-bindings]
fi
diff --git a/eng/scripts/test-extensions.sh b/eng/scripts/test-extensions.sh
index ba92c8aa3..c885a1381 100644
--- a/eng/scripts/test-extensions.sh
+++ b/eng/scripts/test-extensions.sh
@@ -4,11 +4,11 @@ cd workers
python -m pip install --upgrade pip
if [[ $2 != "3.7" ]]; then
python -m pip install -e $1/PythonExtensionArtifact/$3
- python -m pip install --pre -e .[test-http-v2]
+ python -m pip install --pre -e workers/[test-http-v2]
fi
if [[ $2 != "3.7" && $2 != "3.8" ]]; then
python -m pip install -e $1/PythonExtensionArtifact/$3
- python -m pip install --pre -U -e .[test-deferred-bindings]
+ python -m pip install --pre -U -e workers/[test-deferred-bindings]
fi
-python -m pip install -U -e .[dev]
\ No newline at end of file
+python -m pip install -U -e workers/[dev]
\ No newline at end of file
diff --git a/eng/scripts/test-sdk.sh b/eng/scripts/test-sdk.sh
index df6185f4a..1fcc58528 100644
--- a/eng/scripts/test-sdk.sh
+++ b/eng/scripts/test-sdk.sh
@@ -3,11 +3,11 @@
cd workers
python -m pip install --upgrade pip
python -m pip install -e $1/PythonSdkArtifact
-python -m pip install -e .[dev]
+python -m pip install -e workers/[dev]
if [[ $2 != "3.7" ]]; then
- python -m pip install --pre -U -e .[test-http-v2]
+ python -m pip install --pre -U -e workers/[test-http-v2]
fi
if [[ $2 != "3.7" && $2 != "3.8" ]]; then
- python -m pip install --pre -U -e .[test-deferred-bindings]
+ python -m pip install --pre -U -e workers/[test-deferred-bindings]
fi
\ No newline at end of file
diff --git a/eng/templates/jobs/build.yml b/eng/templates/jobs/build.yml
index b50f81d2a..87cd2c18d 100644
--- a/eng/templates/jobs/build.yml
+++ b/eng/templates/jobs/build.yml
@@ -1,6 +1,11 @@
+parameters:
+ PYTHON_VERSION: ''
+ PROJECT_NAME: ''
+ PROJECT_DIRECTORY: ''
+
jobs:
- job: "Build"
- displayName: 'Build python worker'
+ displayName: 'Build'
pool:
name: 1es-pool-azfunc-public
@@ -8,21 +13,8 @@ jobs:
os: linux
steps:
- - task: UsePythonVersion@0
- inputs:
- versionSpec: "3.11"
- - bash: |
- python --version
- displayName: 'Check python version'
- - bash: |
- python -m venv .env
- .env\Scripts\Activate.ps1
- python -m pip install --upgrade pip
- cd workers
- python -m pip install .
- displayName: 'Build python worker'
- - bash: |
- pip install pip-audit
- cd workers
- pip-audit -r requirements.txt
- displayName: 'Run vulnerability scan'
+ - template: /eng/templates/shared/build-steps.yml@self
+ parameters:
+ PYTHON_VERSION: ${{ parameters.PYTHON_VERSION }}
+ PROJECT_NAME: ${{ parameters.PROJECT_NAME }}
+ PROJECT_DIRECTORY: ${{ parameters.PROJECT_DIRECTORY }}
diff --git a/eng/templates/jobs/ci-emulator-tests.yml b/eng/templates/jobs/ci-emulator-tests.yml
index 99f788740..0684def45 100644
--- a/eng/templates/jobs/ci-emulator-tests.yml
+++ b/eng/templates/jobs/ci-emulator-tests.yml
@@ -1,3 +1,6 @@
+parameters:
+ PROJECT_DIRECTORY: 'workers'
+
jobs:
- job: "TestPython"
displayName: "Run Python Emulator Tests"
@@ -35,7 +38,7 @@ jobs:
chmod +x eng/scripts/install-dependencies.sh
chmod +x eng/scripts/test-setup.sh
- eng/scripts/install-dependencies.sh $(PYTHON_VERSION)
+ eng/scripts/install-dependencies.sh $(PYTHON_VERSION) ${{ parameters.PROJECT_DIRECTORY }}
eng/scripts/test-setup.sh
displayName: 'Install dependencies and the worker'
condition: and(eq(variables.isSdkRelease, false), eq(variables.isExtensionsRelease, false), eq(variables['USETESTPYTHONSDK'], false), eq(variables['USETESTPYTHONEXTENSIONS'], false))
@@ -81,8 +84,8 @@ jobs:
docker ps
displayName: "Start CosmosDB Emulator"
- bash: |
- docker compose -f workers/tests/emulator_tests/utils/eventhub/docker-compose.yml pull
- docker compose -f workers/tests/emulator_tests/utils/eventhub/docker-compose.yml up -d
+ docker compose -f ${{ parameters.PROJECT_DIRECTORY }}/tests/emulator_tests/utils/eventhub/docker-compose.yml pull
+ docker compose -f ${{ parameters.PROJECT_DIRECTORY }}/tests/emulator_tests/utils/eventhub/docker-compose.yml up -d
displayName: 'Install Azurite and Start EventHub Emulator'
- bash: |
python -m pytest -q -n auto --dist loadfile --reruns 4 --ignore=tests/emulator_tests/test_servicebus_functions.py tests/emulator_tests
@@ -92,14 +95,14 @@ jobs:
AzureWebJobsCosmosDBConnectionString: $(EmulatorCosmosDBConnectionString)
CosmosDBEmulatorUrl: $(CosmosDBEmulatorUrl)
CosmosDBEmulatorKey: $(CosmosDBEmulatorKey)
- workingDirectory: $(Build.SourcesDirectory)/workers
+ workingDirectory: $(Build.SourcesDirectory)/${{ parameters.PROJECT_DIRECTORY }}
displayName: "Running $(PYTHON_VERSION) Python Linux Emulator Tests"
- bash: |
# Stop and remove EventHub Emulator container to free up the port
docker stop eventhubs-emulator
docker container rm --force eventhubs-emulator
- docker compose -f workers/tests/emulator_tests/utils/servicebus/docker-compose.yml pull
- docker compose -f workers/tests/emulator_tests/utils/servicebus/docker-compose.yml up -d
+ docker compose -f ${{ parameters.PROJECT_DIRECTORY }}/tests/emulator_tests/utils/servicebus/docker-compose.yml pull
+ docker compose -f ${{ parameters.PROJECT_DIRECTORY }}/tests/emulator_tests/utils/servicebus/docker-compose.yml up -d
env:
AzureWebJobsSQLPassword: $(AzureWebJobsSQLPassword)
displayName: 'Install Azurite and Start ServiceBus Emulator'
@@ -108,5 +111,5 @@ jobs:
env:
AzureWebJobsStorage: "UseDevelopmentStorage=true"
AzureWebJobsServiceBusConnectionString: $(EmulatorServiceBusConnectionString)
- workingDirectory: $(Build.SourcesDirectory)/workers
+ workingDirectory: $(Build.SourcesDirectory)/${{ parameters.PROJECT_DIRECTORY }}
displayName: "Running $(PYTHON_VERSION) Python ServiceBus Linux Emulator Tests"
diff --git a/eng/templates/jobs/ci-library-unit-tests.yml b/eng/templates/jobs/ci-library-unit-tests.yml
new file mode 100644
index 000000000..3b54761e8
--- /dev/null
+++ b/eng/templates/jobs/ci-library-unit-tests.yml
@@ -0,0 +1,31 @@
+parameters:
+ PROJECT_NAME: ''
+ PROJECT_DIRECTORY: ''
+
+jobs:
+ - job: "TestPython"
+ displayName: "Run ${{ parameters.PROJECT_NAME }} Unit Tests"
+
+ strategy:
+ matrix:
+ Python313:
+ PYTHON_VERSION: '3.13'
+
+ steps:
+ - task: UsePythonVersion@0
+ inputs:
+ versionSpec: $(PYTHON_VERSION)
+ - task: UseDotNet@2
+ displayName: 'Install .NET 8'
+ inputs:
+ version: 8.0.x
+ - bash: |
+ chmod +x eng/scripts/install-dependencies.sh
+ eng/scripts/install-dependencies.sh $(PYTHON_VERSION) ${{ parameters.PROJECT_DIRECTORY }}
+ displayName: 'Install dependencies'
+ - bash: |
+ python -m pytest -q -n auto --dist loadfile --reruns 4 --instafail --cov=./azure_functions_worker_v2 --cov-report xml --cov-branch tests/unittests
+ displayName: "Running $(PYTHON_VERSION) Unit Tests"
+ env:
+ AzureWebJobsStorage: $(LinuxStorageConnectionString312)
+ workingDirectory: $(Build.SourcesDirectory)/${{ parameters.PROJECT_DIRECTORY }}
diff --git a/eng/templates/jobs/ci-unit-tests.yml b/eng/templates/jobs/ci-unit-tests.yml
index b8f27248a..1d9f18a01 100644
--- a/eng/templates/jobs/ci-unit-tests.yml
+++ b/eng/templates/jobs/ci-unit-tests.yml
@@ -1,3 +1,6 @@
+parameters:
+ PROJECT_DIRECTORY: 'workers'
+
jobs:
- job: "TestPython"
displayName: "Run Python Unit Tests"
@@ -30,7 +33,7 @@ jobs:
chmod +x eng/scripts/install-dependencies.sh
chmod +x eng/scripts/test-setup.sh
- eng/scripts/install-dependencies.sh $(PYTHON_VERSION)
+ eng/scripts/install-dependencies.sh $(PYTHON_VERSION) ${{ parameters.PROJECT_DIRECTORY }}
eng/scripts/test-setup.sh
displayName: 'Install dependencies'
condition: and(eq(variables.isSdkRelease, false), eq(variables.isExtensionsRelease, false), eq(variables['USETESTPYTHONSDK'], false), eq(variables['USETESTPYTHONEXTENSIONS'], false))
@@ -55,5 +58,5 @@ jobs:
condition: and(eq(variables.isSdkRelease, false), eq(variables.isExtensionsRelease, false), eq(variables['USETESTPYTHONSDK'], false), eq(variables['USETESTPYTHONEXTENSIONS'], false))
env:
PYTHON_VERSION: $(PYTHON_VERSION)
- workingDirectory: $(Build.SourcesDirectory)/workers
+ workingDirectory: $(Build.SourcesDirectory)/${{ parameters.PROJECT_DIRECTORY }}
\ No newline at end of file
diff --git a/eng/templates/official/jobs/build-library.yml b/eng/templates/official/jobs/build-library.yml
new file mode 100644
index 000000000..5439b7800
--- /dev/null
+++ b/eng/templates/official/jobs/build-library.yml
@@ -0,0 +1,42 @@
+parameters:
+ PROJECT_NAME: ''
+
+jobs:
+ - job: "Build"
+ displayName: 'Build Python V2 Library'
+
+ pool:
+ name: 1es-pool-azfunc-public
+ image: 1es-ubuntu-22.04
+ os: linux
+
+ strategy:
+ matrix:
+ directory:
+ PROJECT_DIRECTORY: 'azure_functions_worker_v2'
+
+ templateContext:
+ outputParentDirectory: $(Build.ArtifactStagingDirectory)
+ outputs:
+ - output: pipelineArtifact
+ targetPath: $(Build.SourcesDirectory)
+ artifactName: "azure-functions-runtime"
+
+ steps:
+ - task: UsePythonVersion@0
+ inputs:
+ versionSpec: "3.13"
+ - bash: |
+ python --version
+ displayName: 'Check python version'
+ - bash: |
+ python -m pip install -U pip
+ python -m pip install build
+ cd $(PROJECT_DIRECTORY)
+ python -m build
+ displayName: 'Build ${{ parameters.PROJECT_NAME }}'
+ - bash: |
+ pip install pip-audit
+ cd $(PROJECT_DIRECTORY)
+ pip-audit .
+ displayName: 'Run vulnerability scan'
\ No newline at end of file
diff --git a/eng/templates/official/jobs/ci-custom-image-tests.yml b/eng/templates/official/jobs/ci-custom-image-tests.yml
index 449fc074d..75095e0f0 100644
--- a/eng/templates/official/jobs/ci-custom-image-tests.yml
+++ b/eng/templates/official/jobs/ci-custom-image-tests.yml
@@ -1,3 +1,6 @@
+parameters:
+ PROJECT_DIRECTORY: 'workers'
+
jobs:
- job: "TestPython"
displayName: "Run Python Docker Custom Tests"
@@ -14,8 +17,8 @@ jobs:
- bash: |
chmod +x eng/scripts/install-dependencies.sh
- eng/scripts/install-dependencies.sh $(PYTHON_VERSION)
- cd workers/tests
+ eng/scripts/install-dependencies.sh $(PYTHON_VERSION) ${{ parameters.PROJECT_DIRECTORY }}
+ cd ${{ parameters.PROJECT_DIRECTORY }}/tests
python -m invoke -c test_setup build-protos
displayName: 'Install dependencies'
- bash: |
@@ -31,5 +34,5 @@ jobs:
AzureWebJobsSqlConnectionString: $(LinuxSqlConnectionString311)
AzureWebJobsEventGridTopicUri: $(LinuxEventGridTopicUriString311)
AzureWebJobsEventGridConnectionKey: $(LinuxEventGridConnectionKeyString311)
- workingDirectory: $(Build.SourcesDirectory)/workers
+ workingDirectory: $(Build.SourcesDirectory)/${{ parameters.PROJECT_DIRECTORY }}
displayName: "Running Python DockerCustom tests"
\ No newline at end of file
diff --git a/eng/templates/official/jobs/ci-docker-consumption-tests.yml b/eng/templates/official/jobs/ci-docker-consumption-tests.yml
index 94a74ed65..6f8a74c98 100644
--- a/eng/templates/official/jobs/ci-docker-consumption-tests.yml
+++ b/eng/templates/official/jobs/ci-docker-consumption-tests.yml
@@ -1,3 +1,6 @@
+parameters:
+ PROJECT_DIRECTORY: 'workers'
+
jobs:
- job: "TestPython"
displayName: "Run Python Docker Consumption Tests"
@@ -53,8 +56,8 @@ jobs:
- bash: |
chmod +x eng/scripts/install-dependencies.sh
- eng/scripts/install-dependencies.sh $(PYTHON_VERSION)
- cd workers/tests
+ eng/scripts/install-dependencies.sh $(PYTHON_VERSION) ${{ parameters.PROJECT_DIRECTORY }}
+ cd ${{ parameters.PROJECT_DIRECTORY }}/tests
python -m invoke -c test_setup build-protos
displayName: 'Install dependencies'
- bash: |
@@ -68,5 +71,5 @@ jobs:
AzureWebJobsSqlConnectionString: $(SQL_CONNECTION)
AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI)
AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION)
- workingDirectory: $(Build.SourcesDirectory)/workers
+ workingDirectory: $(Build.SourcesDirectory)/${{ parameters.PROJECT_DIRECTORY }}
displayName: "Running $(PYTHON_VERSION) Docker Consumption tests"
\ No newline at end of file
diff --git a/eng/templates/official/jobs/ci-docker-dedicated-tests.yml b/eng/templates/official/jobs/ci-docker-dedicated-tests.yml
index cf93b2815..9c8ec4087 100644
--- a/eng/templates/official/jobs/ci-docker-dedicated-tests.yml
+++ b/eng/templates/official/jobs/ci-docker-dedicated-tests.yml
@@ -1,3 +1,6 @@
+parameters:
+ PROJECT_DIRECTORY: 'workers'
+
jobs:
- job: "TestPython"
displayName: "Run Python Docker Dedicated Tests"
@@ -53,8 +56,8 @@ jobs:
- bash: |
chmod +x eng/scripts/install-dependencies.sh
- eng/scripts/install-dependencies.sh $(PYTHON_VERSION)
- cd workers/tests
+ eng/scripts/install-dependencies.sh $(PYTHON_VERSION) ${{ parameters.PROJECT_DIRECTORY }}
+ cd ${{ parameters.PROJECT_DIRECTORY }}/tests
python -m invoke -c test_setup build-protos
displayName: 'Install dependencies'
- bash: |
@@ -68,5 +71,5 @@ jobs:
AzureWebJobsSqlConnectionString: $(SQL_CONNECTION)
AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI)
AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION)
- workingDirectory: $(Build.SourcesDirectory)/workers
+ workingDirectory: $(Build.SourcesDirectory)/${{ parameters.PROJECT_DIRECTORY }}
displayName: "Running $(PYTHON_VERSION) Docker Dedicated tests"
\ No newline at end of file
diff --git a/eng/templates/official/jobs/ci-e2e-tests.yml b/eng/templates/official/jobs/ci-e2e-tests.yml
index d825d20f4..79ec2fd7d 100644
--- a/eng/templates/official/jobs/ci-e2e-tests.yml
+++ b/eng/templates/official/jobs/ci-e2e-tests.yml
@@ -1,3 +1,6 @@
+parameters:
+ PROJECT_DIRECTORY: 'workers'
+
jobs:
- job: "TestPython"
displayName: "Run Python E2E Tests"
@@ -84,7 +87,7 @@ jobs:
chmod +x eng/scripts/install-dependencies.sh
chmod +x eng/scripts/test-setup.sh
- eng/scripts/install-dependencies.sh $(PYTHON_VERSION)
+ eng/scripts/install-dependencies.sh $(PYTHON_VERSION) ${{ parameters.PROJECT_DIRECTORY }}
eng/scripts/test-setup.sh
displayName: 'Install dependencies and the worker'
condition: and(eq(variables.isSdkRelease, false), eq(variables.isExtensionsRelease, false), eq(variables['USETESTPYTHONSDK'], false), eq(variables['USETESTPYTHONEXTENSIONS'], false))
@@ -162,4 +165,4 @@ jobs:
skipTest: $(skipTest)
PYAZURE_WEBHOST_DEBUG: true
displayName: "Running $(PYTHON_VERSION) Python E2E Tests"
- workingDirectory: $(Build.SourcesDirectory)/workers
+ workingDirectory: $(Build.SourcesDirectory)/${{ parameters.PROJECT_DIRECTORY }}
diff --git a/eng/templates/official/jobs/ci-lc-tests.yml b/eng/templates/official/jobs/ci-lc-tests.yml
index 6ea0839b2..14605f51f 100644
--- a/eng/templates/official/jobs/ci-lc-tests.yml
+++ b/eng/templates/official/jobs/ci-lc-tests.yml
@@ -1,3 +1,6 @@
+parameters:
+ PROJECT_DIRECTORY: 'workers'
+
jobs:
- job: "TestPython"
displayName: "Run Python Linux Consumption Tests"
@@ -26,9 +29,9 @@ jobs:
versionSpec: $(PYTHON_VERSION)
- bash: |
python -m pip install --upgrade pip
- python -m pip install -U -e .[dev]
+ python -m pip install -U -e ${{ parameters.PROJECT_DIRECTORY }}/[dev]
- cd workers/tests
+ cd ${{ parameters.PROJECT_DIRECTORY }}/tests
python -m invoke -c test_setup build-protos
displayName: 'Install dependencies and the worker'
# Skip the installation stage for SDK and Extensions release branches. This stage will fail because pyproject.toml contains the updated (and unreleased) library version
@@ -39,5 +42,5 @@ jobs:
AzureWebJobsStorage: $(LinuxStorageConnectionString312)
_DUMMY_CONT_KEY: $(_DUMMY_CONT_KEY)
displayName: "Running $(PYTHON_VERSION) Linux Consumption tests"
- workingDirectory: $(Build.SourcesDirectory)/workers
+ workingDirectory: $(Build.SourcesDirectory)/${{ parameters.PROJECT_DIRECTORY }}
condition: and(eq(variables.isSdkRelease, false), eq(variables.isExtensionsRelease, false), eq(variables['USETESTPYTHONSDK'], false), eq(variables['USETESTPYTHONEXTENSIONS'], false))
\ No newline at end of file
diff --git a/eng/templates/official/jobs/publish-library-release.yml b/eng/templates/official/jobs/publish-library-release.yml
new file mode 100644
index 000000000..c802c40df
--- /dev/null
+++ b/eng/templates/official/jobs/publish-library-release.yml
@@ -0,0 +1,155 @@
+jobs:
+
+- job: "CreateReleaseBranch"
+ displayName: 'Create Release Branch'
+ pool:
+ name: 1es-pool-azfunc
+ image: 1es-ubuntu-22.04
+ os: linux
+ steps:
+ - template: /eng/templates/shared/github-release-branch.yml@self
+ parameters:
+ PROJECT_DIRECTORY: 'azure_functions_worker_v2'
+ PROJECT_NAME: 'azure_functions_worker_v2'
+ BRANCH_NAME: 'release-v2'
+
+- job: "CheckReleaseBranch"
+ dependsOn: ['CreateReleaseBranch']
+ displayName: '(Manual) Check Release Branch'
+ pool: server
+ steps:
+ - task: ManualValidation@1
+ displayName: '(Optional) Modify release-v2/x.y.z branch'
+ inputs:
+ notifyUsers: '' # No email notifications sent
+ instructions: |
+ 1. Check if the https://github.com/Azure/azure-functions-python-worker/tree/release-v2/$(NewLibraryVersion) build succeeds and passes all unit tests.
+ 2. If not, modify the release-v2/$(NewLibraryVersion) branch.
+ 3. Ensure release-v2/$(NewLibraryVersion) branch contains all necessary changes.
+
+- job: "CreateReleaseTag"
+ dependsOn: ['CheckReleaseBranch']
+ steps:
+ - template: /eng/templates/shared/github-release-note.yml@self
+ parameters:
+ BRANCH_NAME: 'release-v2'
+ PROJECT_NAME: 'azure_functions_worker_v2'
+
+- job: "CheckGitHubRelease"
+ dependsOn: ['CreateReleaseTag']
+ displayName: '(Manual) Check GitHub release note'
+ pool: server
+ steps:
+ - task: ManualValidation@1
+ displayName: 'Write GitHub release note'
+ inputs:
+ notifyUsers: ''
+ instructions: 'Please head to https://github.com/Azure/azure-functions-python-worker/releases to finish the release note'
+
+- job: "TestWithWorker"
+ dependsOn: ['CheckGitHubRelease']
+ displayName: 'Test with Worker'
+ steps:
+  - powershell: |
+      $githubUser = "$(GithubUser)"
+      $githubToken = "$(GithubPat)"
+      $newLibraryVersion = "$(NewLibraryVersion)"
+      $newBranch = "runtime-v2/$newLibraryVersion"
+
+      if($newLibraryVersion -match '(\d)+.(\d)+.(\d)+') {
+        # Configure the git identity used for the release commit
+        git config --global user.name "AzureFunctionsPython"
+        git config --global user.email "azfunc@microsoft.com"
+
+        # Create GitHub credential
+        $credential = [System.Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes("${githubUser}:${githubToken}"))
+
+        # Clone Repository
+        git clone https://$githubToken@github.com/Azure/azure-functions-python-worker
+        Write-Host "Cloned azure-functions-python-worker into local and checkout $newBranch branch"
+        Set-Location "azure-functions-python-worker"
+        git checkout -b $newBranch "origin/dev"
+
+        # Modify Runtime Version in pyproject.toml
+        Write-Host "Replacing Runtime version in worker's pyproject.toml"
+        ((Get-Content workers/pyproject.toml) -replace '"azure-functions-runtime==[^";]+', "`"azure-functions-runtime==$newLibraryVersion") -join "`n" | Set-Content -NoNewline workers/pyproject.toml
+
+        # Commit Python Version
+        Write-Host "Pushing $newBranch to azure-functions-python-worker repo"
+        git add workers/pyproject.toml
+        git commit -m "Update Python Runtime Version to $newLibraryVersion"
+        git push origin $newBranch
+
+        # Create PR
+        Write-Host "Creating PR draft in GitHub"
+        $body = (@{head="$newBranch";base="dev";body="Python Runtime Version [$newLibraryVersion](https://github.com/Azure/azure-functions-python-worker/releases/tag/$newLibraryVersion)";draft=$true;maintainer_can_modify=$true;title="build: update Python Runtime Version to $newLibraryVersion"} | ConvertTo-Json -Compress)
+        $response = Invoke-WebRequest -Headers @{"Cache-Control"="no-cache";"Content-Type"="application/json";"Authorization"="Basic $credential";"Accept"="application/vnd.github.v3+json"} -Method Post -Body "$body" -Uri "https://api.github.com/repos/Azure/azure-functions-python-worker/pulls"
+
+        # Return Value
+        if ($response.StatusCode -ne 201) {
+          Write-Host "Failed to create PR in Azure Functions Python Worker"
+          exit -1
+        }
+
+        $draftUrl = $response | ConvertFrom-Json | Select -expand url
+        Write-Host "PR draft created in $draftUrl"
+      } else {
+        Write-Host "NewLibraryVersion $newLibraryVersion is malformed (example: 1.1.8)"
+        exit -1
+      }
+    displayName: 'Create PR in Worker Repo'
+
+- job: "WaitForPythonWorkerPR"
+ dependsOn: ['TestWithWorker']
+ displayName: '(Manual) Check Python Worker PR'
+ pool: server
+ steps:
+ - task: ManualValidation@1
+ displayName: 'Check Python Worker PR'
+ inputs:
+ notifyUsers: ''
+ instructions: |
+ 1. Please wait and check if all goes green in the https://github.com/Azure/azure-functions-python-worker/pulls
+ 2. Merge the PR into worker dev branch
+
+- job: "PyPIPackage"
+ dependsOn: ['WaitForPythonWorkerPR']
+ displayName: 'PyPI Package'
+ steps:
+ - task: DownloadPipelineArtifact@2
+ displayName: 'Download Python V2 Library release-v2/x.y.z Artifact'
+ inputs:
+ buildType: specific
+ project: '3f99e810-c336-441f-8892-84983093ad7f'
+ definition: 652
+ specificBuildWithTriggering: true
+ buildVersionToDownload: latestFromBranch
+ branchName: refs/heads/release-v2
+ targetPath: PythonRuntimeArtifact
+ - task: UsePythonVersion@0
+ displayName: 'Use Python 3.13'
+ inputs:
+ versionSpec: 3.13
+ - powershell: |
+ $newLibraryVersion = "$(NewLibraryVersion)"
+ $pypiToken = "$(PypiToken)"
+
+ # Setup local Python environment
+ Write-Host "Setup local Python environment"
+ python -m pip install -U pip
+ pip install twine
+
+ # Publish artifacts to PyPi
+ twine upload --repository-url https://upload.pypi.org/legacy/ --username "__token__" --password "$pypiToken" PythonRuntimeArtifact/azure_functions_worker_v2/dist/*
+ Start-Sleep -Seconds 3
+
+ # Checking if the new version is uploaded
+ Write-Host "Check if new version is uploaded"
+ $response = Invoke-WebRequest -Headers @{"Cache-Control"="no-cache"} -Method Get -Uri "https://pypi.org/project/azure-functions-runtime/$newLibraryVersion/"
+
+ # Return Value
+ if ($response.StatusCode -ne 200) {
+ Write-Host "Failed to verify https://pypi.org/project/azure-functions-runtime/$newLibraryVersion/"
+ exit -1
+ }
+ displayName: 'Publish package to pypi.org'
diff --git a/eng/templates/official/jobs/publish-release.yml b/eng/templates/official/jobs/publish-release.yml
index ab89b2992..46c704fb9 100644
--- a/eng/templates/official/jobs/publish-release.yml
+++ b/eng/templates/official/jobs/publish-release.yml
@@ -7,36 +7,12 @@ jobs:
image: 1es-ubuntu-22.04
os: linux
steps:
- - powershell: |
- $githubToken = "$(GithubPat)"
- $newWorkerVersion = "$(NewWorkerVersion)"
- $versionFile = "azure_functions_worker/version.py"
-
- if($newWorkerVersion -match '(\d)+.(\d)+.(\d)+') {
- # Create GitHub credential
- git config --global user.name "AzureFunctionsPython"
- git config --global user.email "azfunc@microsoft.com"
-
- # Heading to Artifact Repository
- Write-Host "Operating based on $stagingDirectory/azure-functions-python-worker"
- git checkout -b "release/$newWorkerVersion"
- cd workers
-
- # Change azure_functions_worker/version.py version
- Write-Host "Change version number in version.py to $newWorkerVersion"
- ((Get-Content $versionFile) -replace "VERSION = '(\d+).(\d+).*'", "VERSION = '$newWorkerVersion'" -join "`n") + "`n" | Set-Content -NoNewline $versionFile
- git add $versionFile
- git commit -m "build: update Python Worker Version to $newWorkerVersion"
-
- # Create release branch release/X.Y.Z
- Write-Host "Creating release branch release/$newWorkerVersion"
- git push --repo="https://$githubToken@github.com/Azure/azure-functions-python-worker.git"
- } else {
- Write-Host "NewWorkerVersion $newWorkerVersion is malformed (example: 1.1.8)"
- exit -1
- }
- displayName: 'Push release/x.y.z'
-
+ - template: /eng/templates/shared/github-release-branch.yml@self
+ parameters:
+ PROJECT_DIRECTORY: 'workers'
+ PROJECT_NAME: 'azure_functions_worker'
+ BRANCH_NAME: 'release'
+
- job: "CheckReleaseBranch"
dependsOn: ['CreateReleaseBranch']
displayName: '(Manual) Check Release Branch'
@@ -54,60 +30,11 @@ jobs:
- job: "CreateReleaseTag"
dependsOn: ['CheckReleaseBranch']
steps:
- - powershell: |
- $githubToken = "$(GithubPat)"
- $newWorkerVersion = "$(NewWorkerVersion)"
-
- if($newWorkerVersion -match '(\d)+.(\d)+.(\d)+') {
- # Create GitHub credential
- git config --global user.name "AzureFunctionsPython"
- git config --global user.email "azfunc@microsoft.com"
-
- # Clone Repository
- git clone https://$githubToken@github.com/Azure/azure-functions-python-worker
- Write-Host "Cloned azure-functions-python-worker into local"
- Set-Location "azure-functions-python-worker"
- git checkout "origin/release/$newWorkerVersion"
-
- # Create release tag X.Y.Z
- Write-Host "Creating release tag $newWorkerVersion"
- git tag -a "$newWorkerVersion" -m "$newWorkerVersion"
-
- # Push tag to remote
- git push origin $newWorkerVersion
- } else {
- Write-Host "NewWorkerVersion $newWorkerVersion is malformed (example: 1.1.8)"
- exit -1
- }
- displayName: 'Create and push release tag x.y.z'
- - powershell: |
- $githubUser = "$(GithubUser)"
- $githubToken = "$(GithubPat)"
- $newWorkerVersion = "$(NewWorkerVersion)"
-
- if($newWorkerVersion -match '(\d)+.(\d)+.(\d)+') {
- # Create GitHub credential
- $credential = [System.Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes("${githubUser}:${githubToken}"))
-
- # Create Release Note
- Write-Host "Creating release note in GitHub"
- $body = (@{tag_name="$newWorkerVersion";name="Release $newWorkerVersion";body="- Fill in Release Note Here";draft=$true} | ConvertTo-Json -Compress)
- $response = Invoke-WebRequest -Headers @{"Cache-Control"="no-cache";"Content-Type"="application/json";"Authorization"="Basic $credential"} -Method Post -Body "$body" -Uri "https://api.github.com/repos/Azure/azure-functions-python-worker/releases"
-
- # Return Value
- if ($response.StatusCode -ne 201) {
- Write-Host "Failed to create release note in GitHub"
- exit -1
- }
-
- $draftUrl = $response | ConvertFrom-Json | Select -expand url
- Write-Host "Release draft created in $draftUrl"
- } else {
- Write-Host "NewWorkerVersion $newWorkerVersion is malformed (example: 1.1.8)"
- exit -1
- }
- displayName: 'Create GitHub release draft'
-
+ - template: /eng/templates/shared/github-release-note.yml@self
+ parameters:
+ BRANCH_NAME: 'release'
+ PROJECT_NAME: 'azure_functions_worker'
+
- job: "CheckGitHubRelease"
dependsOn: ['CreateReleaseTag']
displayName: '(Manual) Check GitHub release note'
@@ -130,7 +57,6 @@ jobs:
notifyUsers: ''
instructions: 'Ensure the build of release/4.x.y.z finishes in https://dev.azure.com/azfunc/internal/_build?definitionId=652 and verify if PackageWorkers task is completed.'
-
- job: "PublishNuget"
dependsOn: ['WaitForPythonWorkerBuild']
displayName: 'Publish Nuget'
@@ -252,7 +178,7 @@ jobs:
displayName: 'Create Host PR for dev'
- job: "CheckHostPRs"
- dependsOn: ['HostRepoPRs']
+ dependsOn: ['HostRepoPR']
displayName: '(Manual) Check Host PRs'
pool: server
steps:
@@ -262,4 +188,4 @@ jobs:
notifyUsers: ''
instructions: |
Go to https://github.com/Azure/azure-functions-host/pulls and finish the host v4 PR.
- If the content misses something, checkout "python/x.y.z" from remote and make new commits to it.
\ No newline at end of file
+ If the content misses something, checkout "python/x.y.z" from remote and make new commits to it.
diff --git a/eng/templates/shared/build-steps.yml b/eng/templates/shared/build-steps.yml
new file mode 100644
index 000000000..4ecd148c3
--- /dev/null
+++ b/eng/templates/shared/build-steps.yml
@@ -0,0 +1,23 @@
+parameters:
+ PYTHON_VERSION: ''
+ PROJECT_NAME: ''
+ PROJECT_DIRECTORY: ''
+
+steps:
+ - task: UsePythonVersion@0
+ inputs:
+ versionSpec: ${{ parameters.PYTHON_VERSION }}
+ - bash: |
+ python --version
+ displayName: 'Check python version'
+ - bash: |
+ python -m pip install --upgrade pip
+ python -m pip install build
+ cd ${{ parameters.PROJECT_DIRECTORY }}
+ python -m build
+ displayName: 'Build Python ${{ parameters.PROJECT_NAME }}'
+ - bash: |
+ pip install pip-audit
+ cd ${{ parameters.PROJECT_DIRECTORY }}
+ pip-audit -r requirements.txt
+ displayName: 'Run vulnerability scan'
\ No newline at end of file
diff --git a/eng/templates/shared/github-release-branch.yml b/eng/templates/shared/github-release-branch.yml
new file mode 100644
index 000000000..d77b6dc50
--- /dev/null
+++ b/eng/templates/shared/github-release-branch.yml
@@ -0,0 +1,60 @@
+parameters:
+ PROJECT_DIRECTORY: ''
+ PROJECT_NAME: ''
+ BRANCH_NAME: ''
+
+steps:
+ - powershell: |
+ $githubToken = "$(GithubPat)"
+ $newWorkerVersion = "$(NewWorkerVersion)"
+ $versionFile = "${{ parameters.PROJECT_NAME}}/version.py"
+ $newBranch = "${{ parameters.BRANCH_NAME}}/$newWorkerVersion"
+
+ if ($newWorkerVersion -match '^(\d+)\.(\d+)\.(\d+)([a-zA-Z0-9\-\.]*)?$') {
+ # Create GitHub credential
+ git config --global user.name "AzureFunctionsPython"
+ git config --global user.email "azfunc@microsoft.com"
+        $credential = [System.Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes("$(GithubUser):${githubToken}"))
+
+ # Heading to Artifact Repository
+ Write-Host "Operating based on $stagingDirectory/azure-functions-python-worker"
+ git checkout -b "$newBranch"
+ cd ${{ parameters.PROJECT_DIRECTORY}}
+
+ # Change ${{ parameters.PROJECT_NAME}}/version.py version
+ Write-Host "Change version number in version.py to $newWorkerVersion"
+ ((Get-Content $versionFile) -replace "VERSION = '(\d+).(\d+).*'", "VERSION = '$newWorkerVersion'" -join "`n") + "`n" | Set-Content -NoNewline $versionFile
+ git add $versionFile
+ git commit -m "build: update ${{ parameters.PROJECT_NAME}} version to $newWorkerVersion"
+
+ # Create release branch ${{ parameters.BRANCH_NAME}}/X.Y.Z
+ Write-Host "Creating release branch $newBranch"
+ git push --repo="https://$githubToken@github.com/Azure/azure-functions-python-worker.git"
+
+ # Create PR
+ Write-Host "Creating PR draft in GitHub"
+ $body = @{
+ head = "$newBranch"
+ base = "dev"
+            title = "build: update ${{ parameters.PROJECT_NAME }} version to $newWorkerVersion"
+ body = "Version $newWorkerVersion"
+ draft = $true
+ maintainer_can_modify = $true
+ } | ConvertTo-Json -Compress
+
+ $headers = @{
+ "Authorization" = "Basic $credential"
+ "Content-Type" = "application/json"
+ "Accept" = "application/vnd.github.v3+json"
+ "User-Agent" = "AzureDevOpsPipeline"
+ }
+
+ $response = Invoke-WebRequest -Headers $headers -Method Post -Body $body -Uri "https://api.github.com/repos/Azure/azure-functions-python-worker/pulls"
+
+ $draftUrl = $response | ConvertFrom-Json | Select -expand url
+ Write-Host "PR draft created in $draftUrl"
+ } else {
+ Write-Host "NewWorkerVersion $newWorkerVersion is malformed (example: 1.1.8)"
+ exit -1
+ }
+ displayName: 'Push ${{ parameters.BRANCH_NAME}}/x.y.z'
diff --git a/eng/templates/shared/github-release-note.yml b/eng/templates/shared/github-release-note.yml
new file mode 100644
index 000000000..f15acd8d2
--- /dev/null
+++ b/eng/templates/shared/github-release-note.yml
@@ -0,0 +1,49 @@
+parameters:
+ BRANCH_NAME: ''
+ PROJECT_NAME: ''
+
+steps:
+ - powershell: |
+ $githubToken = "$(GithubPat)"
+ $newWorkerVersion = "$(NewWorkerVersion)"
+
+ # Create GitHub credential
+ git config --global user.name "AzureFunctionsPython"
+ git config --global user.email "azfunc@microsoft.com"
+
+ # Clone Repository
+ git clone https://$githubToken@github.com/Azure/azure-functions-python-worker
+ Write-Host "Cloned azure-functions-python-worker into local"
+ Set-Location "azure-functions-python-worker"
+ git checkout "origin/${{ parameters.BRANCH_NAME}}/$newWorkerVersion"
+
+ # Create release tag X.Y.Z
+ Write-Host "Creating release tag ${{ parameters.PROJECT_NAME}}-$newWorkerVersion"
+ git tag -a "${{ parameters.PROJECT_NAME}}-$newWorkerVersion" -m "$newWorkerVersion"
+
+      # Push tag to remote
+      git push origin "${{ parameters.PROJECT_NAME}}-$newWorkerVersion"
+ displayName: 'Create and push release tag x.y.z'
+ - powershell: |
+ $githubUser = "$(GithubUser)"
+ $githubToken = "$(GithubPat)"
+ $newWorkerVersion = "$(NewWorkerVersion)"
+
+ # Create GitHub credential
+ $credential = [System.Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes("${githubUser}:${githubToken}"))
+
+ # Create Release Note
+ Write-Host "Creating release note in GitHub"
+      $body = (@{tag_name="${{ parameters.PROJECT_NAME}}-$newWorkerVersion";name="Release $newWorkerVersion";body="- Fill in Release Note Here";draft=$true} | ConvertTo-Json -Compress)
+ $response = Invoke-WebRequest -Headers @{"Cache-Control"="no-cache";"Content-Type"="application/json";"Authorization"="Basic $credential"} -Method Post -Body "$body" -Uri "https://api.github.com/repos/Azure/azure-functions-python-worker/releases"
+
+ # Return Value
+ if ($response.StatusCode -ne 201) {
+ Write-Host "Failed to create release note in GitHub"
+ exit -1
+ }
+
+ $draftUrl = $response | ConvertFrom-Json | Select -expand url
+ Write-Host "Release draft created in $draftUrl"
+
+ displayName: 'Create GitHub release draft'
diff --git a/workers/proxy_worker/dispatcher.py b/workers/proxy_worker/dispatcher.py
index 257b68b14..d92a396f8 100644
--- a/workers/proxy_worker/dispatcher.py
+++ b/workers/proxy_worker/dispatcher.py
@@ -243,7 +243,7 @@ def gen(resp_queue):
async def _dispatch_grpc_request(self, request):
content_type = request.WhichOneof("content")
- match content_type:
+ match content_type: # noqa
case "worker_init_request":
request_handler = self._handle__worker_init_request
case "function_environment_reload_request":
@@ -409,13 +409,17 @@ async def _handle__worker_init_request(self, request):
self.request_id)
if DependencyManager.is_in_linux_consumption():
- import azure_functions_worker_v2
+ import azure_functions_worker_v2 # NoQA
if DependencyManager.should_load_cx_dependencies():
DependencyManager.prioritize_customer_dependencies()
directory = request.worker_init_request.function_app_directory
self.reload_library_worker(directory)
+ logger.info('Using library: %s, '
+ 'library version: %s',
+ _library_worker,
+ _library_worker.version.VERSION) # type: ignore[union-attr]
init_request = WorkerRequest(name="WorkerInitRequest",
request=request,
@@ -442,6 +446,10 @@ async def _handle__function_environment_reload_request(self, request):
DependencyManager.prioritize_customer_dependencies(directory)
self.reload_library_worker(directory)
+ logger.info('Using library: %s, '
+ 'library version: %s',
+ _library_worker,
+ _library_worker.version.VERSION) # type: ignore[union-attr]
env_reload_request = WorkerRequest(name="FunctionEnvironmentReloadRequest",
request=request,
@@ -485,7 +493,7 @@ async def _handle__function_load_request(self, request):
function_name = function_metadata.name
logger.info(
- 'Received WorkerLoadRequest, request ID %s, function_id: %s,'
+ 'Received WorkerLoadRequest, request ID %s, function_id: %s, '
'function_name: %s, worker_id: %s',
self.request_id, function_id, function_name, self.worker_id)
@@ -504,7 +512,7 @@ async def _handle__invocation_request(self, request):
function_id = invoc_request.function_id
logger.info(
- 'Received FunctionInvocationRequest, request ID %s, function_id: %s,'
+ 'Received FunctionInvocationRequest, request ID %s, function_id: %s, '
'invocation_id: %s, worker_id: %s',
self.request_id, function_id, invocation_id, self.worker_id)
diff --git a/workers/proxy_worker/start_worker.py b/workers/proxy_worker/start_worker.py
index d468cef69..001e54f95 100644
--- a/workers/proxy_worker/start_worker.py
+++ b/workers/proxy_worker/start_worker.py
@@ -7,6 +7,7 @@
_GRPC_CONNECTION_TIMEOUT = 5.0
+
def parse_args():
parser = argparse.ArgumentParser(
description='Python Azure Functions Worker')
diff --git a/workers/proxy_worker/utils/constants.py b/workers/proxy_worker/utils/constants.py
index c5e0dd2ab..53c91062a 100644
--- a/workers/proxy_worker/utils/constants.py
+++ b/workers/proxy_worker/utils/constants.py
@@ -12,4 +12,3 @@
# new programming model default script file name
PYTHON_SCRIPT_FILE_NAME = "PYTHON_SCRIPT_FILE_NAME"
PYTHON_SCRIPT_FILE_NAME_DEFAULT = "function_app.py"
-
diff --git a/workers/proxy_worker/utils/dependency.py b/workers/proxy_worker/utils/dependency.py
index b5b07dbd5..afa4e1296 100644
--- a/workers/proxy_worker/utils/dependency.py
+++ b/workers/proxy_worker/utils/dependency.py
@@ -134,21 +134,18 @@ def prioritize_customer_dependencies(cls, cx_working_dir=None):
if not cx_deps_path:
cx_deps_path = cls.cx_deps_path
- logger.info(
- 'Applying prioritize_customer_dependencies: '
- 'worker_dependencies_path: %s, customer_dependencies_path: %s, '
- 'working_directory: %s, Linux Consumption: %s, Placeholder: %s, '
- 'sys.path: %s',
- cls.worker_deps_path, cx_deps_path, working_directory,
- DependencyManager.is_in_linux_consumption(),
- is_envvar_true("WEBSITE_PLACEHOLDER_MODE"), sys.path)
-
cls._remove_from_sys_path(cls.worker_deps_path)
cls._add_to_sys_path(cls.worker_deps_path, True)
cls._add_to_sys_path(cls.cx_deps_path, True)
cls._add_to_sys_path(working_directory, False)
- logger.info(f'Finished prioritize_customer_dependencies: {sys.path}')
+ logger.info(
+ 'Finished prioritize_customer_dependencies: '
+ 'worker_dependencies_path: %s, customer_dependencies_path: %s, '
+ 'working_directory: %s, Placeholder: %s, '
+ 'sys.path: %s',
+ cls.worker_deps_path, cx_deps_path, working_directory,
+ is_envvar_true("WEBSITE_PLACEHOLDER_MODE"), sys.path)
@classmethod
def _add_to_sys_path(cls, path: str, add_to_first: bool):
diff --git a/workers/tests/endtoend/dependency_isolation_functions/report_dependencies/__init__.py b/workers/tests/endtoend/dependency_isolation_functions/report_dependencies/__init__.py
index 7e9b97e0d..709fd6ca1 100644
--- a/workers/tests/endtoend/dependency_isolation_functions/report_dependencies/__init__.py
+++ b/workers/tests/endtoend/dependency_isolation_functions/report_dependencies/__init__.py
@@ -1,7 +1,6 @@
import json
import os
import sys
-import logging
import azure.functions as func
import google.protobuf as proto
diff --git a/workers/tests/unittest_proxy/test_dependency.py b/workers/tests/unittest_proxy/test_dependency.py
index cea4c5af0..27d4227bf 100644
--- a/workers/tests/unittest_proxy/test_dependency.py
+++ b/workers/tests/unittest_proxy/test_dependency.py
@@ -53,12 +53,6 @@ def test_prioritize_customer_dependencies(mock_logger, mock_env, mock_linux,
expected_path = os.path.abspath("/override/cx")
assert expected_path in sys.path
- # Relaxed log validation: look for matching prefix
- assert any(
- "Applying prioritize_customer_dependencies" in str(call[0][0])
- for call in mock_logger.info.call_args_list
- )
-
assert any(
"Finished prioritize_customer_dependencies" in str(call[0][0])
for call in mock_logger.info.call_args_list
diff --git a/workers/tests/unittest_proxy/test_dispatcher.py b/workers/tests/unittest_proxy/test_dispatcher.py
index 22c38fa0a..e8b6cad9c 100644
--- a/workers/tests/unittest_proxy/test_dispatcher.py
+++ b/workers/tests/unittest_proxy/test_dispatcher.py
@@ -79,6 +79,8 @@ def fake_import(name, globals=None, locals=None, fromlist=(), level=0):
mock_module.worker_init_request = AsyncMock(return_value="fake_response")
mock_module.function_environment_reload_request = AsyncMock(
return_value="mocked_env_reload_response")
+ mock_module.version = AsyncMock(return_value="fake_response")
+ mock_module.version.VERSION = AsyncMock(return_value="1.0.0")
if name in ["azure_functions_worker_v2", "azure_functions_worker_v1"]:
return mock_module
return builtins.__import__(name, globals, locals, fromlist, level)
@@ -223,8 +225,8 @@ async def test_handle_function_load_request(mock_logger, mock_streaming):
assert result == "mocked_stream_response"
mock_logger.info.assert_called_with(
- 'Received WorkerLoadRequest, request ID %s, function_id: %s,function_name: %s, '
- 'worker_id: %s', "req789", "func123", "hello_function", "worker123"
+ 'Received WorkerLoadRequest, request ID %s, function_id: %s, function_name: %s,'
+ ' worker_id: %s', "req789", "func123", "hello_function", "worker123"
)
@@ -248,7 +250,7 @@ async def test_handle_invocation_request(mock_logger, mock_streaming):
assert result == "mocked_streaming_response"
mock_logger.info.assert_called_with(
- 'Received FunctionInvocationRequest, request ID %s, function_id: %s,'
+ 'Received FunctionInvocationRequest, request ID %s, function_id: %s, '
'invocation_id: %s, worker_id: %s',
"req789", "func123", "inv123", "worker123"
)
diff --git a/workers/tests/unittests/test_code_quality.py b/workers/tests/unittests/test_code_quality.py
index 499ed577d..d3ab25fa1 100644
--- a/workers/tests/unittests/test_code_quality.py
+++ b/workers/tests/unittests/test_code_quality.py
@@ -5,7 +5,7 @@
import sys
import unittest
-ROOT_PATH = pathlib.Path(__file__).parent.parent.parent
+ROOT_PATH = pathlib.Path(__file__).parent.parent.parent.parent
class TestCodeQuality(unittest.TestCase):
@@ -17,7 +17,7 @@ def test_mypy(self):
try:
subprocess.run(
- [sys.executable, '-m', 'mypy', '-m', 'azure_functions_worker'],
+ [sys.executable, '-m', 'mypy', '-m', 'workers'],
check=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
@@ -44,7 +44,7 @@ def test_flake8(self):
try:
subprocess.run(
[sys.executable, '-m', 'flake8', '--config', str(config_path),
- 'azure_functions_worker',],
+ 'workers'],
check=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,