From 1ae733c9b9e34f8571b2fa3d812511e7af0d9c62 Mon Sep 17 00:00:00 2001 From: Evan <66287338+EvanR-Dev@users.noreply.github.com> Date: Mon, 31 Mar 2025 10:02:19 -0700 Subject: [PATCH 01/11] build: recognize collection_model_binding_data for batch inputs (#1655) * add cmbd * Add * Add * Rm newline * Add tests * Fix cmbd * Fix test * Lint * Rm * Rm * Add back newline * rm ws * Rm list * Rm cmbd from cache * Avoid caching * Keep cmbd check * Add comment * Lint --------- Co-authored-by: Evan Roman Co-authored-by: hallvictoria <59299039+hallvictoria@users.noreply.github.com> --- azure_functions_worker/bindings/datumdef.py | 2 ++ azure_functions_worker/bindings/meta.py | 9 ++++++++ .../test_deferred_bindings.py | 2 +- tests/unittests/test_types.py | 23 +++++++++++++++++++ 4 files changed, 35 insertions(+), 1 deletion(-) diff --git a/azure_functions_worker/bindings/datumdef.py b/azure_functions_worker/bindings/datumdef.py index 34fb9b0af..0e2a9c538 100644 --- a/azure_functions_worker/bindings/datumdef.py +++ b/azure_functions_worker/bindings/datumdef.py @@ -102,6 +102,8 @@ def from_typed_data(cls, td: protos.TypedData): val = td.collection_sint64 elif tt == 'model_binding_data': val = td.model_binding_data + elif tt == 'collection_model_binding_data': + val = td.collection_model_binding_data elif tt is None: return None else: diff --git a/azure_functions_worker/bindings/meta.py b/azure_functions_worker/bindings/meta.py index ae40ce398..5d84a06f5 100644 --- a/azure_functions_worker/bindings/meta.py +++ b/azure_functions_worker/bindings/meta.py @@ -300,6 +300,15 @@ def deferred_bindings_decode(binding: typing.Any, """ global deferred_bindings_cache + # Only applies to Event Hub and Service Bus - cannot cache + # These types will always produce different content and are not clients + if (datum.type == "collection_model_binding_data" + or datum.value.source == "AzureEventHubsEventData" + or datum.value.source == "AzureServiceBusReceivedMessage"): + return binding.decode(datum, 
+ trigger_metadata=metadata, + pytype=pytype) + if deferred_bindings_cache.get((pb.name, pytype, datum.value.content, diff --git a/tests/extension_tests/deferred_bindings_tests/test_deferred_bindings.py b/tests/extension_tests/deferred_bindings_tests/test_deferred_bindings.py index 1899f9e75..c527cb680 100644 --- a/tests/extension_tests/deferred_bindings_tests/test_deferred_bindings.py +++ b/tests/extension_tests/deferred_bindings_tests/test_deferred_bindings.py @@ -137,7 +137,7 @@ async def test_deferred_bindings_dual_enabled_log(self): "is only supported for 3.9+.") class TestDeferredBindingsHelpers(testutils.AsyncTestCase): - def test_deferred_bindings_enabled_decode(self): + def test_mbd_deferred_bindings_enabled_decode(self): binding = BlobClientConverter pb = protos.ParameterBinding(name='test', data=protos.TypedData( diff --git a/tests/unittests/test_types.py b/tests/unittests/test_types.py index 963f26914..406510c22 100644 --- a/tests/unittests/test_types.py +++ b/tests/unittests/test_types.py @@ -194,3 +194,26 @@ def test_model_binding_data_td_ok(self): mbd_datum = datumdef.Datum.from_typed_data(mock_mbd) self.assertEqual(mbd_datum.type, 'model_binding_data') + + def test_collection_model_binding_data_datum_ok(self): + sample_mbd = MockMBD(version="1.0", + source="AzureStorageBlobs", + content_type="application/json", + content="{\"Connection\":\"python-worker-tests\"," + "\"ContainerName\":\"test-blob\"," + "\"BlobName\":\"test.txt\"}") + sample_cmbd = [sample_mbd, sample_mbd] + + datum: bind_meta.Datum = bind_meta.Datum(value=sample_cmbd, + type='collection_model_binding_data') + + self.assertEqual(datum.value, sample_cmbd) + self.assertEqual(datum.type, "collection_model_binding_data") + + def test_collection_model_binding_data_td_ok(self): + mock_cmbd = protos.TypedData( + collection_model_binding_data={'model_binding_data': [{'version': '1.0'}]} + ) + cmbd_datum = datumdef.Datum.from_typed_data(mock_cmbd) + + self.assertEqual(cmbd_datum.type, 
'collection_model_binding_data') From 9c34b223bc7fc3fba04480b84089a4fb42ae2454 Mon Sep 17 00:00:00 2001 From: hallvictoria <59299039+hallvictoria@users.noreply.github.com> Date: Mon, 31 Mar 2025 13:00:08 -0500 Subject: [PATCH 02/11] build: update Python Worker Version to 4.36.1 (#1660) Co-authored-by: AzureFunctionsPython --- azure_functions_worker/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/azure_functions_worker/version.py b/azure_functions_worker/version.py index d91e24c1f..d60343328 100644 --- a/azure_functions_worker/version.py +++ b/azure_functions_worker/version.py @@ -1,4 +1,4 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. -VERSION = '4.35.0' +VERSION = '4.36.1' From 004e12525bc2a831228f30845bb601b7b42936f9 Mon Sep 17 00:00:00 2001 From: hallvictoria Date: Wed, 9 Apr 2025 13:32:16 -0500 Subject: [PATCH 03/11] initial changes --- azure_functions_worker/bindings/datumdef.py | 2 - azure_functions_worker/bindings/meta.py | 9 - azure_functions_worker/constants.py | 14 +- azure_functions_worker/dispatcher.py | 41 +- azure_functions_worker/protos/.gitignore | 1 - .../utils/app_setting_manager.py | 4 +- azure_functions_worker/version.py | 2 +- eng/ci/emulator-tests.yml | 1 + eng/ci/integration-tests.yml | 9 +- eng/ci/official-build.yml | 53 +- eng/ci/public-build.yml | 2 - eng/scripts/test-setup.sh | 2 +- eng/templates/jobs/build.yml | 4 +- eng/templates/jobs/ci-emulator-tests.yml | 290 +++++-- .../official/jobs/build-artifacts.yml | 169 +--- eng/templates/official/jobs/ci-e2e-tests.yml | 51 +- .../official/jobs/publish-release.yml | 10 +- ...oft.Azure.Functions.V4.PythonWorker.nuspec | 31 +- pack/scripts/mac_arm64_deps.sh | 8 +- pack/scripts/nix_deps.sh | 8 +- pack/scripts/win_deps.ps1 | 16 +- pack/templates/macos_64_env_gen.yml | 28 + pack/templates/nix_env_gen.yml | 28 + pack/templates/win_env_gen.yml | 26 + proxy_worker/__init__.py | 0 proxy_worker/__main__.py | 6 + 
proxy_worker/dispatcher.py | 536 +++++++++++++ proxy_worker/logging.py | 92 +++ proxy_worker/protos/.gitignore | 2 + proxy_worker/protos/__init__.py | 43 ++ proxy_worker/protos/_src/.gitignore | 288 +++++++ proxy_worker/protos/_src/LICENSE | 21 + proxy_worker/protos/_src/README.md | 98 +++ .../protos/_src/src/proto/FunctionRpc.proto | 730 ++++++++++++++++++ .../proto/identity/ClaimsIdentityRpc.proto | 26 + .../_src/src/proto/shared/NullableTypes.proto | 30 + proxy_worker/protos/identity/__init__.py | 0 proxy_worker/protos/shared/__init__.py | 0 proxy_worker/start_worker.py | 84 ++ proxy_worker/utils/__init__.py | 0 proxy_worker/utils/app_settings.py | 72 ++ proxy_worker/utils/common.py | 29 + proxy_worker/utils/constants.py | 11 + proxy_worker/utils/dependency.py | 333 ++++++++ proxy_worker/version.py | 1 + pyproject.toml | 23 +- python/prodV4/worker.config.json | 7 +- python/proxyV4/worker.py | 67 ++ python/test/worker.config.json | 7 +- python/test/worker.py | 58 +- tests/endtoend/test_blueprint_functions.py | 6 + .../test_deferred_bindings.py | 2 +- .../http_v2_tests/test_http_v2.py | 6 +- tests/test_setup.py | 15 +- tests/unittests/test_opentelemetry.py | 125 +-- tests/unittests/test_types.py | 23 - tests/utils/testutils.py | 288 +++---- 57 files changed, 3129 insertions(+), 709 deletions(-) create mode 100644 proxy_worker/__init__.py create mode 100644 proxy_worker/__main__.py create mode 100644 proxy_worker/dispatcher.py create mode 100644 proxy_worker/logging.py create mode 100644 proxy_worker/protos/.gitignore create mode 100644 proxy_worker/protos/__init__.py create mode 100644 proxy_worker/protos/_src/.gitignore create mode 100644 proxy_worker/protos/_src/LICENSE create mode 100644 proxy_worker/protos/_src/README.md create mode 100644 proxy_worker/protos/_src/src/proto/FunctionRpc.proto create mode 100644 proxy_worker/protos/_src/src/proto/identity/ClaimsIdentityRpc.proto create mode 100644 proxy_worker/protos/_src/src/proto/shared/NullableTypes.proto 
create mode 100644 proxy_worker/protos/identity/__init__.py create mode 100644 proxy_worker/protos/shared/__init__.py create mode 100644 proxy_worker/start_worker.py create mode 100644 proxy_worker/utils/__init__.py create mode 100644 proxy_worker/utils/app_settings.py create mode 100644 proxy_worker/utils/common.py create mode 100644 proxy_worker/utils/constants.py create mode 100644 proxy_worker/utils/dependency.py create mode 100644 proxy_worker/version.py create mode 100644 python/proxyV4/worker.py diff --git a/azure_functions_worker/bindings/datumdef.py b/azure_functions_worker/bindings/datumdef.py index 0e2a9c538..34fb9b0af 100644 --- a/azure_functions_worker/bindings/datumdef.py +++ b/azure_functions_worker/bindings/datumdef.py @@ -102,8 +102,6 @@ def from_typed_data(cls, td: protos.TypedData): val = td.collection_sint64 elif tt == 'model_binding_data': val = td.model_binding_data - elif tt == 'collection_model_binding_data': - val = td.collection_model_binding_data elif tt is None: return None else: diff --git a/azure_functions_worker/bindings/meta.py b/azure_functions_worker/bindings/meta.py index 5d84a06f5..ae40ce398 100644 --- a/azure_functions_worker/bindings/meta.py +++ b/azure_functions_worker/bindings/meta.py @@ -300,15 +300,6 @@ def deferred_bindings_decode(binding: typing.Any, """ global deferred_bindings_cache - # Only applies to Event Hub and Service Bus - cannot cache - # These types will always produce different content and are not clients - if (datum.type == "collection_model_binding_data" - or datum.value.source == "AzureEventHubsEventData" - or datum.value.source == "AzureServiceBusReceivedMessage"): - return binding.decode(datum, - trigger_metadata=metadata, - pytype=pytype) - if deferred_bindings_cache.get((pb.name, pytype, datum.value.content, diff --git a/azure_functions_worker/constants.py b/azure_functions_worker/constants.py index 6110752e2..b916252cf 100644 --- a/azure_functions_worker/constants.py +++ 
b/azure_functions_worker/constants.py @@ -82,18 +82,14 @@ BASE_EXT_SUPPORTED_PY_MINOR_VERSION = 8 # Appsetting to turn on OpenTelemetry support/features -# A value of "true" enables the setting +# Includes turning on Azure monitor distro to send telemetry to AppInsights PYTHON_ENABLE_OPENTELEMETRY = "PYTHON_ENABLE_OPENTELEMETRY" - -# Appsetting to turn on ApplicationInsights support/features -# A value of "true" enables the setting -PYTHON_APPLICATIONINSIGHTS_ENABLE_TELEMETRY = \ - "PYTHON_APPLICATIONINSIGHTS_ENABLE_TELEMETRY" +PYTHON_ENABLE_OPENTELEMETRY_DEFAULT = False # Appsetting to specify root logger name of logger to collect telemetry for -# Used by Azure monitor distro (Application Insights) -PYTHON_APPLICATIONINSIGHTS_LOGGER_NAME = "PYTHON_APPLICATIONINSIGHTS_LOGGER_NAME" -PYTHON_APPLICATIONINSIGHTS_LOGGER_NAME_DEFAULT = "" +# Used by Azure monitor distro +PYTHON_AZURE_MONITOR_LOGGER_NAME = "PYTHON_AZURE_MONITOR_LOGGER_NAME" +PYTHON_AZURE_MONITOR_LOGGER_NAME_DEFAULT = "" # Appsetting to specify AppInsights connection string APPLICATIONINSIGHTS_CONNECTION_STRING = "APPLICATIONINSIGHTS_CONNECTION_STRING" diff --git a/azure_functions_worker/dispatcher.py b/azure_functions_worker/dispatcher.py index b815bef1c..897a3499a 100644 --- a/azure_functions_worker/dispatcher.py +++ b/azure_functions_worker/dispatcher.py @@ -26,12 +26,12 @@ APPLICATIONINSIGHTS_CONNECTION_STRING, HTTP_URI, METADATA_PROPERTIES_WORKER_INDEXED, - PYTHON_APPLICATIONINSIGHTS_LOGGER_NAME, - PYTHON_APPLICATIONINSIGHTS_LOGGER_NAME_DEFAULT, + PYTHON_AZURE_MONITOR_LOGGER_NAME, + PYTHON_AZURE_MONITOR_LOGGER_NAME_DEFAULT, PYTHON_ENABLE_DEBUG_LOGGING, PYTHON_ENABLE_INIT_INDEXING, - PYTHON_APPLICATIONINSIGHTS_ENABLE_TELEMETRY, PYTHON_ENABLE_OPENTELEMETRY, + PYTHON_ENABLE_OPENTELEMETRY_DEFAULT, PYTHON_LANGUAGE_RUNTIME, PYTHON_ROLLBACK_CWD_PATH, PYTHON_SCRIPT_FILE_NAME, @@ -103,10 +103,8 @@ def __init__(self, loop: BaseEventLoop, host: str, port: int, self._function_metadata_result = None 
self._function_metadata_exception = None - # Used for checking if appinsights is enabled - self._azure_monitor_available = False # Used for checking if open telemetry is enabled - self._otel_libs_available = False + self._azure_monitor_available = False self._context_api = None self._trace_context_propagator = None @@ -320,8 +318,8 @@ def initialize_azure_monitor(self): setting=APPLICATIONINSIGHTS_CONNECTION_STRING ), logger_name=get_app_setting( - setting=PYTHON_APPLICATIONINSIGHTS_LOGGER_NAME, - default_value=PYTHON_APPLICATIONINSIGHTS_LOGGER_NAME_DEFAULT + setting=PYTHON_AZURE_MONITOR_LOGGER_NAME, + default_value=PYTHON_AZURE_MONITOR_LOGGER_NAME_DEFAULT ), ) self._azure_monitor_available = True @@ -383,15 +381,12 @@ async def _handle__worker_init_request(self, request): constants.RPC_HTTP_TRIGGER_METADATA_REMOVED: _TRUE, constants.SHARED_MEMORY_DATA_TRANSFER: _TRUE, } - - if is_envvar_true(PYTHON_APPLICATIONINSIGHTS_ENABLE_TELEMETRY): + if get_app_setting(setting=PYTHON_ENABLE_OPENTELEMETRY, + default_value=PYTHON_ENABLE_OPENTELEMETRY_DEFAULT): self.initialize_azure_monitor() - if is_envvar_true(PYTHON_ENABLE_OPENTELEMETRY): - self._otel_libs_available = True - - if self._azure_monitor_available or self._otel_libs_available: - capabilities[constants.WORKER_OPEN_TELEMETRY_ENABLED] = _TRUE + if self._azure_monitor_available: + capabilities[constants.WORKER_OPEN_TELEMETRY_ENABLED] = _TRUE if DependencyManager.should_load_cx_dependencies(): DependencyManager.prioritize_customer_dependencies() @@ -667,7 +662,7 @@ async def _handle__invocation_request(self, request): args[name] = bindings.Out() if fi.is_async: - if self._azure_monitor_available or self._otel_libs_available: + if self._azure_monitor_available: self.configure_opentelemetry(fi_context) call_result = \ @@ -784,14 +779,14 @@ async def _handle__function_environment_reload_request(self, request): bindings.load_binding_registry() capabilities = {} - if is_envvar_true(PYTHON_ENABLE_OPENTELEMETRY): - 
self._otel_libs_available = True - if is_envvar_true(PYTHON_APPLICATIONINSIGHTS_ENABLE_TELEMETRY): + if get_app_setting( + setting=PYTHON_ENABLE_OPENTELEMETRY, + default_value=PYTHON_ENABLE_OPENTELEMETRY_DEFAULT): self.initialize_azure_monitor() - if self._azure_monitor_available or self._otel_libs_available: - capabilities[constants.WORKER_OPEN_TELEMETRY_ENABLED] = ( - _TRUE) + if self._azure_monitor_available: + capabilities[constants.WORKER_OPEN_TELEMETRY_ENABLED] = ( + _TRUE) if is_envvar_true(PYTHON_ENABLE_INIT_INDEXING): try: @@ -1001,7 +996,7 @@ def _run_sync_func(self, invocation_id, context, func, params): # invocation_id from ThreadPoolExecutor's threads. context.thread_local_storage.invocation_id = invocation_id try: - if self._azure_monitor_available or self._otel_libs_available: + if self._azure_monitor_available: self.configure_opentelemetry(context) return ExtensionManager.get_sync_invocation_wrapper(context, func)(params) diff --git a/azure_functions_worker/protos/.gitignore b/azure_functions_worker/protos/.gitignore index f43e6c214..49d7060ef 100644 --- a/azure_functions_worker/protos/.gitignore +++ b/azure_functions_worker/protos/.gitignore @@ -1,3 +1,2 @@ -/_src *_pb2.py *_pb2_grpc.py diff --git a/azure_functions_worker/utils/app_setting_manager.py b/azure_functions_worker/utils/app_setting_manager.py index ee43ccd62..3d8ccbb45 100644 --- a/azure_functions_worker/utils/app_setting_manager.py +++ b/azure_functions_worker/utils/app_setting_manager.py @@ -7,7 +7,6 @@ FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED, PYTHON_ENABLE_DEBUG_LOGGING, PYTHON_ENABLE_INIT_INDEXING, - PYTHON_APPLICATIONINSIGHTS_ENABLE_TELEMETRY, PYTHON_ENABLE_OPENTELEMETRY, PYTHON_ENABLE_WORKER_EXTENSIONS, PYTHON_ENABLE_WORKER_EXTENSIONS_DEFAULT, @@ -30,8 +29,7 @@ def get_python_appsetting_state(): FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED, PYTHON_SCRIPT_FILE_NAME, PYTHON_ENABLE_INIT_INDEXING, - PYTHON_ENABLE_OPENTELEMETRY, - 
PYTHON_APPLICATIONINSIGHTS_ENABLE_TELEMETRY] + PYTHON_ENABLE_OPENTELEMETRY] app_setting_states = "".join( f"{app_setting}: {current_vars[app_setting]} | " diff --git a/azure_functions_worker/version.py b/azure_functions_worker/version.py index d60343328..adb421530 100644 --- a/azure_functions_worker/version.py +++ b/azure_functions_worker/version.py @@ -1,4 +1,4 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. -VERSION = '4.36.1' +VERSION = '4.34.0' diff --git a/eng/ci/emulator-tests.yml b/eng/ci/emulator-tests.yml index b2e789c16..c6ee3a318 100644 --- a/eng/ci/emulator-tests.yml +++ b/eng/ci/emulator-tests.yml @@ -34,6 +34,7 @@ variables: - template: /ci/variables/build.yml@eng - template: /ci/variables/cfs.yml@eng - template: /eng/templates/utils/variables.yml@self + - template: /eng/templates/utils/official-variables.yml@self extends: template: v1/1ES.Unofficial.PipelineTemplate.yml@1es diff --git a/eng/ci/integration-tests.yml b/eng/ci/integration-tests.yml index 40594ab27..6f8f69d9e 100644 --- a/eng/ci/integration-tests.yml +++ b/eng/ci/integration-tests.yml @@ -47,8 +47,7 @@ extends: dependsOn: [] jobs: - template: /eng/templates/official/jobs/ci-e2e-tests.yml@self -# Skipping consumption tests till pipeline is fixed -# - stage: RunLCTests -# dependsOn: [] -# jobs: -# - template: /eng/templates/official/jobs/ci-lc-tests.yml@self + - stage: RunLCTests + dependsOn: [] + jobs: + - template: /eng/templates/official/jobs/ci-lc-tests.yml@self diff --git a/eng/ci/official-build.yml b/eng/ci/official-build.yml index 7a555e8a4..34d8b6c07 100644 --- a/eng/ci/official-build.yml +++ b/eng/ci/official-build.yml @@ -50,30 +50,29 @@ extends: jobs: - template: /eng/templates/official/jobs/build-artifacts.yml@self - - stage: RunE2ETests - dependsOn: Build - jobs: - - template: /eng/templates/official/jobs/ci-e2e-tests.yml@self - - stage: RunEmulatorTests - dependsOn: Build - jobs: - - template: 
/eng/templates/jobs/ci-emulator-tests.yml@self - parameters: - PoolName: 1es-pool-azfunc - - stage: RunUnitTests - dependsOn: Build - jobs: - - template: /eng/templates/jobs/ci-unit-tests.yml@self - - stage: RunDockerConsumptionTests - dependsOn: Build - jobs: - - template: /eng/templates/official/jobs/ci-docker-consumption-tests.yml@self - - stage: RunDockerDedicatedTests - dependsOn: Build - jobs: - - template: /eng/templates/official/jobs/ci-docker-dedicated-tests.yml@self -# Skipping consumption tests till pipeline is fixed -# - stage: RunLinuxConsumptionTests -# dependsOn: Build -# jobs: -# - template: /eng/templates/official/jobs/ci-lc-tests.yml@self + # - stage: RunE2ETests + # dependsOn: Build + # jobs: + # - template: /eng/templates/official/jobs/ci-e2e-tests.yml@self + # - stage: RunEmulatorTests + # dependsOn: Build + # jobs: + # - template: /eng/templates/jobs/ci-emulator-tests.yml@self + # parameters: + # PoolName: 1es-pool-azfunc + # - stage: RunUnitTests + # dependsOn: Build + # jobs: + # - template: /eng/templates/jobs/ci-unit-tests.yml@self + # - stage: RunDockerConsumptionTests + # dependsOn: Build + # jobs: + # - template: /eng/templates/official/jobs/ci-docker-consumption-tests.yml@self + # - stage: RunDockerDedicatedTests + # dependsOn: Build + # jobs: + # - template: /eng/templates/official/jobs/ci-docker-dedicated-tests.yml@self + # - stage: RunLinuxConsumptionTests + # dependsOn: Build + # jobs: + # - template: /eng/templates/official/jobs/ci-lc-tests.yml@self diff --git a/eng/ci/public-build.yml b/eng/ci/public-build.yml index 470a94f9c..a8456b721 100644 --- a/eng/ci/public-build.yml +++ b/eng/ci/public-build.yml @@ -52,8 +52,6 @@ extends: - stage: Build jobs: - template: /eng/templates/jobs/build.yml@self - # Skip the build stage for SDK and Extensions release branches. 
This stage will fail because pyproject.toml contains the updated (and unreleased) library version - condition: and(eq(variables.isSdkRelease, false), eq(variables.isExtensionsRelease, false), eq(variables['USETESTPYTHONSDK'], false), eq(variables['USETESTPYTHONEXTENSIONS'], false)) - stage: RunUnitTests dependsOn: Build jobs: diff --git a/eng/scripts/test-setup.sh b/eng/scripts/test-setup.sh index d062021dc..cac37c96a 100644 --- a/eng/scripts/test-setup.sh +++ b/eng/scripts/test-setup.sh @@ -2,5 +2,5 @@ cd tests python -m invoke -c test_setup build-protos -python -m invoke -c test_setup webhost --branch-name=dev +python -m invoke -c test_setup webhost --branch-name=gaaguiar/test_py_worker python -m invoke -c test_setup extensions \ No newline at end of file diff --git a/eng/templates/jobs/build.yml b/eng/templates/jobs/build.yml index c5b989c7c..dd422f4fa 100644 --- a/eng/templates/jobs/build.yml +++ b/eng/templates/jobs/build.yml @@ -20,7 +20,9 @@ jobs: python -m pip install --upgrade pip python -m pip install . displayName: 'Build python worker' + # Skip the build stage for SDK and Extensions release branches. 
This stage will fail because pyproject.toml contains the updated (and unreleased) library version + condition: and(eq(variables.isSdkRelease, false), eq(variables.isExtensionsRelease, false), eq(variables['USETESTPYTHONSDK'], false), eq(variables['USETESTPYTHONEXTENSIONS'], false)) - bash: | pip install pip-audit pip-audit -r requirements.txt - displayName: 'Run vulnerability scan' + displayName: 'Run vulnerability scan' \ No newline at end of file diff --git a/eng/templates/jobs/ci-emulator-tests.yml b/eng/templates/jobs/ci-emulator-tests.yml index abc84f394..83a9a58db 100644 --- a/eng/templates/jobs/ci-emulator-tests.yml +++ b/eng/templates/jobs/ci-emulator-tests.yml @@ -1,26 +1,23 @@ jobs: - job: "TestPython" - displayName: "Run Python Emulator Tests" + displayName: "Run Python E2E Tests" pool: - name: ${{ parameters.PoolName }} + name: 1es-pool-azfunc image: 1es-ubuntu-22.04 os: linux strategy: matrix: - Python37: - PYTHON_VERSION: '3.7' - Python38: - PYTHON_VERSION: '3.8' - Python39: - PYTHON_VERSION: '3.9' - Python310: - PYTHON_VERSION: '3.10' - Python311: - PYTHON_VERSION: '3.11' Python312: - PYTHON_VERSION: '3.12' + PYTHON_VERSION: '3.13' + STORAGE_CONNECTION: $(LinuxStorageConnectionString312) + COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString312) + EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString312) + SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString312) + SQL_CONNECTION: $(LinuxSqlConnectionString312) + EVENTGRID_URI: $(LinuxEventGridTopicUriString312) + EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString312) steps: - task: UsePythonVersion@0 inputs: @@ -32,69 +29,228 @@ jobs: - bash: | chmod +x eng/scripts/install-dependencies.sh chmod +x eng/scripts/test-setup.sh - + eng/scripts/install-dependencies.sh $(PYTHON_VERSION) eng/scripts/test-setup.sh displayName: 'Install dependencies and the worker' condition: and(eq(variables.isSdkRelease, false), eq(variables.isExtensionsRelease, false), eq(variables['USETESTPYTHONSDK'], false), 
eq(variables['USETESTPYTHONEXTENSIONS'], false)) - - task: DownloadPipelineArtifact@2 - displayName: 'Download Python SDK Artifact' - inputs: - buildType: specific - artifactName: 'azure-functions' - project: 'internal' - definition: 679 - buildVersionToDownload: latest - targetPath: '$(Pipeline.Workspace)/PythonSdkArtifact' - condition: or(eq(variables.isSdkRelease, true), eq(variables['USETESTPYTHONSDK'], true)) - - bash: | - chmod +x eng/scripts/test-sdk.sh - chmod +x eng/scripts/test-setup.sh - - eng/scripts/test-sdk.sh $(Pipeline.Workspace) $(PYTHON_VERSION) - eng/scripts/test-setup.sh - displayName: 'Install test python sdk, dependencies and the worker' - condition: or(eq(variables.isSdkRelease, true), eq(variables['USETESTPYTHONSDK'], true)) - - task: DownloadPipelineArtifact@2 - displayName: 'Download Python Extension Artifact' - inputs: - buildType: specific - artifactName: $(PYTHONEXTENSIONNAME) - project: 'internal' - definition: 798 - buildVersionToDownload: latest - targetPath: '$(Pipeline.Workspace)/PythonExtensionArtifact' - condition: or(eq(variables.isExtensionsRelease, true), eq(variables['USETESTPYTHONEXTENSIONS'], true)) - bash: | - chmod +x eng/scripts/test-setup.sh - chmod +x eng/scripts/test-extensions.sh - - eng/scripts/test-extensions.sh $(Pipeline.Workspace) $(PYTHON_VERSION) $(PYTHONEXTENSIONNAME) - eng/scripts/test-setup.sh - displayName: 'Install test python extension, dependencies and the worker' - condition: or(eq(variables.isExtensionsRelease, true), eq(variables['USETESTPYTHONEXTENSIONS'], true)) + python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend/test_blueprint_functions.py + env: + AzureWebJobsStorage: $(STORAGE_CONNECTION) + AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) + AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) + AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) + 
AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) + AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) + AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) + skipTest: $(skipTest) + displayName: "Running $(PYTHON_VERSION) Python Blueprint E2E Tests" + - bash: | + python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend/test_cosmosdb_functions.py + env: + AzureWebJobsStorage: $(STORAGE_CONNECTION) + AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) + AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) + AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) + AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) + AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) + AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) + skipTest: $(skipTest) + displayName: "Running $(PYTHON_VERSION) Python CosmosDB E2E Tests" + condition: always() + - bash: | + python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend/test_dependency_isolation_functions.py + env: + AzureWebJobsStorage: $(STORAGE_CONNECTION) + AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) + AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) + AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) + AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) + AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) + AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) + skipTest: $(skipTest) + displayName: "Running $(PYTHON_VERSION) Python Dependency Isolation E2E Tests" + condition: always() + - bash: | + python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend/test_durable_functions.py + env: + AzureWebJobsStorage: $(STORAGE_CONNECTION) + AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) 
+ AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) + AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) + AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) + AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) + AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) + skipTest: $(skipTest) + displayName: "Running $(PYTHON_VERSION) Python Durable E2E Tests" + condition: always() + - bash: | + python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend/test_eventgrid_functions.py + env: + AzureWebJobsStorage: $(STORAGE_CONNECTION) + AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) + AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) + AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) + AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) + AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) + AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) + skipTest: $(skipTest) + displayName: "Running $(PYTHON_VERSION) Python EventGrid E2E Tests" + condition: always() - bash: | - docker compose -f tests/emulator_tests/utils/eventhub/docker-compose.yml pull - docker compose -f tests/emulator_tests/utils/eventhub/docker-compose.yml up -d - displayName: 'Install Azurite and Start EventHub Emulator' + python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend/test_file_name_functions.py + env: + AzureWebJobsStorage: $(STORAGE_CONNECTION) + AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) + AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) + AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) + AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) + AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) + AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) + skipTest: $(skipTest) + displayName: "Running 
$(PYTHON_VERSION) Python FileName E2E Tests" + condition: always() + - bash: | + python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend/test_http_functions.py + env: + AzureWebJobsStorage: $(STORAGE_CONNECTION) + AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) + AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) + AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) + AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) + AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) + AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) + skipTest: $(skipTest) + displayName: "Running $(PYTHON_VERSION) Python HTTP E2E Tests" + condition: always() + - bash: | + python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend/test_retry_policy_functions.py + env: + AzureWebJobsStorage: $(STORAGE_CONNECTION) + AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) + AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) + AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) + AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) + AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) + AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) + skipTest: $(skipTest) + displayName: "Running $(PYTHON_VERSION) Python Retry Policy E2E Tests" + condition: always() + - bash: | + python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend/test_sql_functions.py + env: + AzureWebJobsStorage: $(STORAGE_CONNECTION) + AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) + AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) + AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) + AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) + AzureWebJobsEventGridTopicUri: 
$(EVENTGRID_URI) + AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) + skipTest: $(skipTest) + displayName: "Running $(PYTHON_VERSION) Python SQL E2E Tests" + condition: always() + - bash: | + python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend/test_third_party_http_functions.py + env: + AzureWebJobsStorage: $(STORAGE_CONNECTION) + AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) + AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) + AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) + AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) + AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) + AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) + skipTest: $(skipTest) + displayName: "Running $(PYTHON_VERSION) Python Third Party HTTP E2E Tests" + condition: always() + - bash: | + python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend/test_threadpool_thread_count_functions.py + env: + AzureWebJobsStorage: $(STORAGE_CONNECTION) + AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) + AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) + AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) + AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) + AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) + AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) + skipTest: $(skipTest) + displayName: "Running $(PYTHON_VERSION) Python ThreadPool Thread Count E2E Tests" + condition: always() + - bash: | + python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend/test_timer_functions.py + env: + AzureWebJobsStorage: $(STORAGE_CONNECTION) + AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) + AzureWebJobsEventHubConnectionString: 
$(EVENTHUB_CONNECTION) + AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) + AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) + AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) + AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) + skipTest: $(skipTest) + displayName: "Running $(PYTHON_VERSION) Python Timer E2E Tests" + condition: always() + - bash: | + python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend/test_warmup_functions.py + env: + AzureWebJobsStorage: $(STORAGE_CONNECTION) + AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) + AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) + AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) + AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) + AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) + AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) + skipTest: $(skipTest) + displayName: "Running $(PYTHON_VERSION) Python Warmup E2E Tests" + condition: always() + - bash: | + python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend/test_worker_process_count_functions.py + env: + AzureWebJobsStorage: $(STORAGE_CONNECTION) + AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) + AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) + AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) + AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) + AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) + AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) + skipTest: $(skipTest) + displayName: "Running $(PYTHON_VERSION) Python Worker Process Count E2E Tests" + condition: always() - bash: | - python -m pytest -q -n auto --dist loadfile --reruns 4 --ignore=tests/emulator_tests/test_servicebus_functions.py tests/emulator_tests + python -m pytest -q -n auto --dist loadfile 
--reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend/test_worker_process_count_functions.py env: - AzureWebJobsStorage: "UseDevelopmentStorage=true" - AzureWebJobsEventHubConnectionString: "Endpoint=sb://localhost;SharedAccessKeyName=RootManageSharedAccessKey;SharedAccessKey=SAS_KEY_VALUE;UseDevelopmentEmulator=true;" - displayName: "Running $(PYTHON_VERSION) Python Linux Emulator Tests" + AzureWebJobsStorage: $(STORAGE_CONNECTION) + AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) + AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) + AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) + AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) + AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) + AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) + skipTest: $(skipTest) + displayName: "Running $(PYTHON_VERSION) Python Worker Process Count E2E Tests" + condition: always() - bash: | - # Stop and remove EventHub Emulator container to free up the port - docker stop eventhubs-emulator - docker container rm --force eventhubs-emulator - docker compose -f tests/emulator_tests/utils/servicebus/docker-compose.yml pull - docker compose -f tests/emulator_tests/utils/servicebus/docker-compose.yml up -d + python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/extension_tests/http_v2_tests env: - AzureWebJobsSQLPassword: $(AzureWebJobsSQLPassword) - displayName: 'Install Azurite and Start ServiceBus Emulator' + AzureWebJobsStorage: $(STORAGE_CONNECTION) + AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) + AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) + AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) + AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) + AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) + AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) + skipTest: 
$(skipTest) + displayName: "Running $(PYTHON_VERSION) Python Worker Http v2 Tests" + condition: always() - bash: | - python -m pytest -q -n auto --dist loadfile --reruns 4 tests/emulator_tests/test_servicebus_functions.py + python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/extension_tests/deferred_bindings_tests env: - AzureWebJobsStorage: "UseDevelopmentStorage=true" - AzureWebJobsServiceBusConnectionString: "Endpoint=sb://localhost;SharedAccessKeyName=RootManageSharedAccessKey;SharedAccessKey=SAS_KEY_VALUE;UseDevelopmentEmulator=true;" - displayName: "Running $(PYTHON_VERSION) Python ServiceBus Linux Emulator Tests" + AzureWebJobsStorage: $(STORAGE_CONNECTION) + AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) + AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) + AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) + AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) + AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) + AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) + skipTest: $(skipTest) + displayName: "Running $(PYTHON_VERSION) Python Worker DB Tests" + condition: always() diff --git a/eng/templates/official/jobs/build-artifacts.yml b/eng/templates/official/jobs/build-artifacts.yml index 631115b4c..1eabc4c1c 100644 --- a/eng/templates/official/jobs/build-artifacts.yml +++ b/eng/templates/official/jobs/build-artifacts.yml @@ -1,80 +1,4 @@ jobs: -- job: Build_WINDOWS_X64 - pool: - name: 1es-pool-azfunc - image: 1es-windows-2022 - os: windows - strategy: - matrix: - Python37V4: - pythonVersion: '3.7' - workerPath: 'python/prodV4/worker.py' - Python38V4: - pythonVersion: '3.8' - workerPath: 'python/prodV4/worker.py' - Python39V4: - pythonVersion: '3.9' - workerPath: 'python/prodV4/worker.py' - Python310V4: - pythonVersion: '3.10' - workerPath: 'python/prodV4/worker.py' - Python311V4: - pythonVersion: '3.11' - workerPath: 
'python/prodV4/worker.py' - Python312V4: - pythonVersion: '3.12' - workerPath: 'python/prodV4/worker.py' - templateContext: - outputParentDirectory: $(Build.ArtifactStagingDirectory) - outputs: - - output: pipelineArtifact - targetPath: $(Build.ArtifactStagingDirectory) - artifactName: "$(pythonVersion)_WINDOWS_X64" - steps: - - template: ../../../../pack/templates/win_env_gen.yml - parameters: - pythonVersion: '$(pythonVersion)' - workerPath: '$(workerPath)' - architecture: 'x64' - artifactName: '$(pythonVersion)_WINDOWS_X64' -- job: Build_WINDOWS_X86 - pool: - name: 1es-pool-azfunc - image: 1es-windows-2022 - os: windows - strategy: - matrix: - Python37V4: - pythonVersion: '3.7' - workerPath: 'python/prodV4/worker.py' - Python38V4: - pythonVersion: '3.8' - workerPath: 'python/prodV4/worker.py' - Python39V4: - pythonVersion: '3.9' - workerPath: 'python/prodV4/worker.py' - Python310V4: - pythonVersion: '3.10' - workerPath: 'python/prodV4/worker.py' - Python311V4: - pythonVersion: '3.11' - workerPath: 'python/prodV4/worker.py' - Python312V4: - pythonVersion: '3.12' - workerPath: 'python/prodV4/worker.py' - templateContext: - outputParentDirectory: $(Build.ArtifactStagingDirectory) - outputs: - - output: pipelineArtifact - targetPath: $(Build.ArtifactStagingDirectory) - artifactName: "$(pythonVersion)_WINDOWS_X86" - steps: - - template: ../../../../pack/templates/win_env_gen.yml - parameters: - pythonVersion: '$(pythonVersion)' - workerPath: '$(workerPath)' - architecture: 'x86' - artifactName: '$(pythonVersion)_WINDOWS_x86' - job: Build_LINUX_X64 pool: name: 1es-pool-azfunc @@ -82,24 +6,9 @@ jobs: os: linux strategy: matrix: - Python37V4: - pythonVersion: '3.7' - workerPath: 'python/prodV4/worker.py' - Python38V4: - pythonVersion: '3.8' - workerPath: 'python/prodV4/worker.py' - Python39V4: - pythonVersion: '3.9' - workerPath: 'python/prodV4/worker.py' - Python310V4: - pythonVersion: '3.10' - workerPath: 'python/prodV4/worker.py' - Python311V4: - pythonVersion: 
'3.11' - workerPath: 'python/prodV4/worker.py' - Python312V4: - pythonVersion: '3.12' - workerPath: 'python/prodV4/worker.py' + Python313V4: + pythonVersion: '3.13' + workerPath: 'python/proxyV4/worker.py' templateContext: outputParentDirectory: $(Build.ArtifactStagingDirectory) outputs: @@ -112,77 +21,9 @@ jobs: pythonVersion: '$(pythonVersion)' workerPath: '$(workerPath)' artifactName: '$(pythonVersion)_LINUX_X64' -- job: Build_OSX_X64 - pool: - name: Azure Pipelines - image: macOS-latest - os: macOS - strategy: - matrix: - Python37V4: - pythonVersion: '3.7' - workerPath: 'python/prodV4/worker.py' - Python38V4: - pythonVersion: '3.8' - workerPath: 'python/prodV4/worker.py' - Python39V4: - pythonVersion: '3.9' - workerPath: 'python/prodV4/worker.py' - Python310V4: - pythonVersion: '3.10' - workerPath: 'python/prodV4/worker.py' - Python311V4: - pythonVersion: '3.11' - workerPath: 'python/prodV4/worker.py' - Python312V4: - pythonVersion: '3.12' - workerPath: 'python/prodV4/worker.py' - templateContext: - outputParentDirectory: $(Build.ArtifactStagingDirectory) - outputs: - - output: pipelineArtifact - targetPath: $(Build.ArtifactStagingDirectory) - artifactName: "$(pythonVersion)_OSX_X64" - steps: - - template: ../../../../pack/templates/nix_env_gen.yml - parameters: - pythonVersion: '$(pythonVersion)' - workerPath: '$(workerPath)' - artifactName: '$(pythonVersion)_OSX_X64' -- job: Build_OSX_ARM64 - pool: - name: Azure Pipelines - image: macOS-latest - os: macOS - strategy: - matrix: - Python39V4: - pythonVersion: '3.9' - workerPath: 'python/prodV4/worker.py' - Python310V4: - pythonVersion: '3.10' - workerPath: 'python/prodV4/worker.py' - Python311V4: - pythonVersion: '3.11' - workerPath: 'python/prodV4/worker.py' - Python312V4: - pythonVersion: '3.12' - workerPath: 'python/prodV4/worker.py' - templateContext: - outputParentDirectory: $(Build.ArtifactStagingDirectory) - outputs: - - output: pipelineArtifact - targetPath: $(Build.ArtifactStagingDirectory) - 
artifactName: "$(pythonVersion)_OSX_ARM4" - steps: - - template: ../../../../pack/templates/macos_64_env_gen.yml - parameters: - pythonVersion: '$(pythonVersion)' - workerPath: '$(workerPath)' - artifactName: '$(pythonVersion)_OSX_ARM64' - job: PackageWorkers - dependsOn: ['Build_WINDOWS_X64', 'Build_WINDOWS_X86', 'Build_LINUX_X64', 'Build_OSX_X64', 'Build_OSX_ARM64'] + dependsOn: ['Build_LINUX_X64'] templateContext: outputParentDirectory: $(Build.ArtifactStagingDirectory) outputs: @@ -211,7 +52,7 @@ jobs: LATEST_TAG=$(curl https://api.github.com/repos/Azure/azure-functions-python-worker/tags -s | jq '.[0].name' | sed 's/\"//g' | cut -d'.' -f-2) NUSPEC="pack\Microsoft.Azure.Functions.V4.PythonWorker.nuspec" # Only required for Integration Test. Version number contains date (e.g. 3.1.2.20211028-dev) - WKVERSION="3.$LATEST_TAG-$(BUILD_BUILDID)-TEST" + WKVERSION="3.$LATEST_TAG-$(BUILD_BUILDID)-TEST25" echo "No Matching Release Tag For $BUILD_SOURCEBRANCH" fi diff --git a/eng/templates/official/jobs/ci-e2e-tests.yml b/eng/templates/official/jobs/ci-e2e-tests.yml index edd898f65..b7050196a 100644 --- a/eng/templates/official/jobs/ci-e2e-tests.yml +++ b/eng/templates/official/jobs/ci-e2e-tests.yml @@ -9,53 +9,8 @@ jobs: strategy: matrix: - Python37: - PYTHON_VERSION: '3.7' - STORAGE_CONNECTION: $(LinuxStorageConnectionString37) - COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString37) - EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString37) - SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString37) - SQL_CONNECTION: $(LinuxSqlConnectionString37) - EVENTGRID_URI: $(LinuxEventGridTopicUriString37) - EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString37) - Python38: - PYTHON_VERSION: '3.8' - STORAGE_CONNECTION: $(LinuxStorageConnectionString38) - COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString38) - EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString38) - SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString38) - SQL_CONNECTION: 
$(LinuxSqlConnectionString38) - EVENTGRID_URI: $(LinuxEventGridTopicUriString38) - EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString38) - Python39: - PYTHON_VERSION: '3.9' - STORAGE_CONNECTION: $(LinuxStorageConnectionString39) - COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString39) - EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString39) - SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString39) - SQL_CONNECTION: $(LinuxSqlConnectionString39) - EVENTGRID_URI: $(LinuxEventGridTopicUriString39) - EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString39) - Python310: - PYTHON_VERSION: '3.10' - STORAGE_CONNECTION: $(LinuxStorageConnectionString310) - COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString310) - EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString310) - SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString310) - SQL_CONNECTION: $(LinuxSqlConnectionString310) - EVENTGRID_URI: $(LinuxEventGridTopicUriString310) - EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString310) - Python311: - PYTHON_VERSION: '3.11' - STORAGE_CONNECTION: $(LinuxStorageConnectionString311) - COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString311) - EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString311) - SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString311) - SQL_CONNECTION: $(LinuxSqlConnectionString311) - EVENTGRID_URI: $(LinuxEventGridTopicUriString311) - EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString311) - Python312: - PYTHON_VERSION: '3.12' + Python313: + PYTHON_VERSION: '3.13' STORAGE_CONNECTION: $(LinuxStorageConnectionString312) COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString312) EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString312) @@ -135,7 +90,7 @@ jobs: displayName: 'Display skipTest variable' condition: or(eq(variables.isSdkRelease, true), eq(variables['USETESTPYTHONSDK'], true)) - bash: | - python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append 
tests/endtoend tests/extension_tests/deferred_bindings_tests tests/extension_tests/http_v2_tests + python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend/test_blueprint_functions.py env: AzureWebJobsStorage: $(STORAGE_CONNECTION) AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) diff --git a/eng/templates/official/jobs/publish-release.yml b/eng/templates/official/jobs/publish-release.yml index 4c04d5779..0f1004898 100644 --- a/eng/templates/official/jobs/publish-release.yml +++ b/eng/templates/official/jobs/publish-release.yml @@ -15,7 +15,7 @@ jobs: if($newWorkerVersion -match '(\d)+.(\d)+.(\d)+') { # Create GitHub credential git config --global user.name "AzureFunctionsPython" - git config --global user.email "azfunc@microsoft.com" + git config --global user.email "funcdisc@microsoft.com" # Heading to Artifact Repository Write-Host "Operating based on $stagingDirectory/azure-functions-python-worker" @@ -60,7 +60,7 @@ jobs: if($newWorkerVersion -match '(\d)+.(\d)+.(\d)+') { # Create GitHub credential git config --global user.name "AzureFunctionsPython" - git config --global user.email "azfunc@microsoft.com" + git config --global user.email "funcdisc@microsoft.com" # Clone Repository git clone https://$githubToken@github.com/Azure/azure-functions-python-worker @@ -181,7 +181,7 @@ jobs: if($newWorkerVersion -match '(\d)+.(\d)+.(\d)+') { # Create GitHub credential git config --global user.name "AzureFunctionsPython" - git config --global user.email "azfunc@microsoft.com" + git config --global user.email "funcdisc@microsoft.com" # Create GitHub credential $credential = [System.Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes("${githubUser}:${githubToken}")) @@ -263,7 +263,7 @@ jobs: if($newWorkerVersion -match '(\d)+.(\d)+.(\d)+') { # Create GitHub credential git config --global user.name "AzureFunctionsPython" - git config --global user.email 
"azfunc@microsoft.com" + git config --global user.email "funcdisc@microsoft.com" # Create GitHub credential $credential = [System.Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes("${githubUser}:${githubToken}")) @@ -361,7 +361,7 @@ jobs: if($newWorkerVersion -match '(\d)+.(\d)+.(\d)+') { # Create GitHub credential git config --global user.name "AzureFunctionsPython" - git config --global user.email "azfunc@microsoft.com" + git config --global user.email "funcdisc@microsoft.com" # Clone Repository git clone https://$githubToken@github.com/Azure/azure-functions-python-worker diff --git a/pack/Microsoft.Azure.Functions.V4.PythonWorker.nuspec b/pack/Microsoft.Azure.Functions.V4.PythonWorker.nuspec index b3ce47d0c..466397e49 100644 --- a/pack/Microsoft.Azure.Functions.V4.PythonWorker.nuspec +++ b/pack/Microsoft.Azure.Functions.V4.PythonWorker.nuspec @@ -9,35 +9,8 @@ Microsoft Azure Functions Python Worker © .NET Foundation. All rights reserved. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + diff --git a/pack/scripts/mac_arm64_deps.sh b/pack/scripts/mac_arm64_deps.sh index 2d70bafad..9c08cce46 100644 --- a/pack/scripts/mac_arm64_deps.sh +++ b/pack/scripts/mac_arm64_deps.sh @@ -13,4 +13,10 @@ python -m invoke -c test_setup build-protos cd .. cp .artifactignore "$BUILD_SOURCESDIRECTORY/deps" -cp -r azure_functions_worker/protos "$BUILD_SOURCESDIRECTORY/deps/azure_functions_worker" \ No newline at end of file + +version_minor=$(echo $1 | cut -d '.' -f 2) +if [[ $version_minor -lt 13 ]]; then + cp -r azure_functions_worker/protos "$BUILD_SOURCESDIRECTORY/deps/azure_functions_worker" +else + cp -r proxy_worker/protos "$BUILD_SOURCESDIRECTORY/deps/proxy_worker" +fi \ No newline at end of file diff --git a/pack/scripts/nix_deps.sh b/pack/scripts/nix_deps.sh index 2d70bafad..9c08cce46 100644 --- a/pack/scripts/nix_deps.sh +++ b/pack/scripts/nix_deps.sh @@ -13,4 +13,10 @@ python -m invoke -c test_setup build-protos cd .. 
cp .artifactignore "$BUILD_SOURCESDIRECTORY/deps" -cp -r azure_functions_worker/protos "$BUILD_SOURCESDIRECTORY/deps/azure_functions_worker" \ No newline at end of file + +version_minor=$(echo $1 | cut -d '.' -f 2) +if [[ $version_minor -lt 13 ]]; then + cp -r azure_functions_worker/protos "$BUILD_SOURCESDIRECTORY/deps/azure_functions_worker" +else + cp -r proxy_worker/protos "$BUILD_SOURCESDIRECTORY/deps/proxy_worker" +fi \ No newline at end of file diff --git a/pack/scripts/win_deps.ps1 b/pack/scripts/win_deps.ps1 index a7be372e7..b4c95203d 100644 --- a/pack/scripts/win_deps.ps1 +++ b/pack/scripts/win_deps.ps1 @@ -1,3 +1,9 @@ +param ( + [string]$pythonVersion +) +$versionParts = $pythonVersion -split '\.' # Splitting by dot +$versionMinor = [int]$versionParts[1] + python -m venv .env .env\Scripts\Activate.ps1 python -m pip install --upgrade pip @@ -5,7 +11,6 @@ python -m pip install --upgrade pip python -m pip install . $depsPath = Join-Path -Path $env:BUILD_SOURCESDIRECTORY -ChildPath "deps" -$protosPath = Join-Path -Path $depsPath -ChildPath "azure_functions_worker/protos" python -m pip install . azure-functions --no-compile --target $depsPath.ToString() @@ -15,4 +20,11 @@ python -m invoke -c test_setup build-protos cd .. 
Copy-Item -Path ".artifactignore" -Destination $depsPath.ToString() -Copy-Item -Path "azure_functions_worker/protos/*" -Destination $protosPath.ToString() -Recurse -Force + +if ($versionMinor -lt 13) { + $protosPath = Join-Path -Path $depsPath -ChildPath "azure_functions_worker/protos" + Copy-Item -Path "azure_functions_worker/protos/*" -Destination $protosPath.ToString() -Recurse -Force +} else { + $protosPath = Join-Path -Path $depsPath -ChildPath "proxy_worker/protos" + Copy-Item -Path "proxy_worker/protos/*" -Destination $protosPath.ToString() -Recurse -Force +} diff --git a/pack/templates/macos_64_env_gen.yml b/pack/templates/macos_64_env_gen.yml index 90a3578d7..3e54ab812 100644 --- a/pack/templates/macos_64_env_gen.yml +++ b/pack/templates/macos_64_env_gen.yml @@ -12,6 +12,8 @@ steps: inputs: disableAutoCwd: true scriptPath: 'pack/scripts/mac_arm64_deps.sh' + args: '${{ parameters.pythonVersion }}' + displayName: 'Install Dependencies' - bash: | pip install pip-audit pip-audit -r requirements.txt @@ -41,4 +43,30 @@ steps: !pkg_resources/** !*.dist-info/** !werkzeug/debug/shared/debugger.js + !proxy_worker/** targetFolder: '$(Build.ArtifactStagingDirectory)' + condition: lt(variables['pythonVersion'], '3.13') + displayName: 'Copy azure_functions_worker files' +- task: CopyFiles@2 + inputs: + sourceFolder: '$(Build.SourcesDirectory)/deps' + contents: | + ** + !grpc_tools/**/* + !grpcio_tools*/* + !build/** + !docs/** + !pack/** + !python/** + !tests/** + !setuptools*/** + !_distutils_hack/** + !distutils-precedence.pth + !pkg_resources/** + !*.dist-info/** + !werkzeug/debug/shared/debugger.js + !azure_functions_worker/** + !dateutil/** + targetFolder: '$(Build.ArtifactStagingDirectory)' + condition: ge(variables['pythonVersion'], '3.13') + displayName: 'Copy proxy_worker files' diff --git a/pack/templates/nix_env_gen.yml b/pack/templates/nix_env_gen.yml index ae3cf4330..5de754ecd 100644 --- a/pack/templates/nix_env_gen.yml +++ b/pack/templates/nix_env_gen.yml 
@@ -12,6 +12,8 @@ steps: inputs: disableAutoCwd: true scriptPath: 'pack/scripts/nix_deps.sh' + args: '${{ parameters.pythonVersion }}' + displayName: 'Install Dependencies' - bash: | pip install pip-audit pip-audit -r requirements.txt @@ -41,4 +43,30 @@ steps: !pkg_resources/** !*.dist-info/** !werkzeug/debug/shared/debugger.js + !proxy_worker/** targetFolder: '$(Build.ArtifactStagingDirectory)' + condition: lt(variables['pythonVersion'], '3.13') + displayName: 'Copy azure_functions_worker files' +- task: CopyFiles@2 + inputs: + sourceFolder: '$(Build.SourcesDirectory)/deps' + contents: | + ** + !grpc_tools/**/* + !grpcio_tools*/* + !build/** + !docs/** + !pack/** + !python/** + !tests/** + !setuptools*/** + !_distutils_hack/** + !distutils-precedence.pth + !pkg_resources/** + !*.dist-info/** + !werkzeug/debug/shared/debugger.js + !dateutil/** + !azure_functions_worker/** + targetFolder: '$(Build.ArtifactStagingDirectory)' + condition: ge(variables['pythonVersion'], '3.13') + displayName: 'Copy proxy_worker files' diff --git a/pack/templates/win_env_gen.yml b/pack/templates/win_env_gen.yml index 2eee3411a..167d56e10 100644 --- a/pack/templates/win_env_gen.yml +++ b/pack/templates/win_env_gen.yml @@ -12,6 +12,7 @@ steps: - task: PowerShell@2 inputs: filePath: 'pack\scripts\win_deps.ps1' + arguments: '${{ parameters.pythonVersion }}' - bash: | pip install pip-audit pip-audit -r requirements.txt @@ -41,4 +42,29 @@ steps: !pkg_resources\** !*.dist-info\** !werkzeug\debug\shared\debugger.js + !proxy_worker\** targetFolder: '$(Build.ArtifactStagingDirectory)' + condition: lt(variables['pythonVersion'], '3.13') + displayName: 'Copy azure_functions_worker files' +- task: CopyFiles@2 + inputs: + sourceFolder: '$(Build.SourcesDirectory)\deps' + contents: | + ** + !grpc_tools\**\* + !grpcio_tools*\* + !build\** + !docs\** + !pack\** + !python\** + !tests\** + !setuptools*\** + !_distutils_hack\** + !distutils-precedence.pth + !pkg_resources\** + !*.dist-info\** + 
!werkzeug\debug\shared\debugger.js + !dateutil\** + targetFolder: '$(Build.ArtifactStagingDirectory)' + condition: ge(variables['pythonVersion'], '3.13') + displayName: 'Copy proxy_worker files' diff --git a/proxy_worker/__init__.py b/proxy_worker/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/proxy_worker/__main__.py b/proxy_worker/__main__.py new file mode 100644 index 000000000..5141dd60a --- /dev/null +++ b/proxy_worker/__main__.py @@ -0,0 +1,6 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +from proxy_worker import start_worker + +if __name__ == '__main__': + start_worker.start() diff --git a/proxy_worker/dispatcher.py b/proxy_worker/dispatcher.py new file mode 100644 index 000000000..46664f55e --- /dev/null +++ b/proxy_worker/dispatcher.py @@ -0,0 +1,536 @@ +import asyncio +import concurrent.futures +import logging +import os +import queue +import sys +import threading +import traceback +import typing +from asyncio import AbstractEventLoop +from dataclasses import dataclass +from typing import Optional + +import grpc + +from proxy_worker import protos +from proxy_worker.logging import ( + CONSOLE_LOG_PREFIX, + disable_console_logging, + enable_console_logging, + error_logger, + is_system_log_category, + logger, +) +from proxy_worker.utils.app_settings import get_app_setting, python_appsetting_state +from proxy_worker.utils.common import is_envvar_true +from proxy_worker.utils.constants import PYTHON_ENABLE_DEBUG_LOGGING, PYTHON_THREADPOOL_THREAD_COUNT +from proxy_worker.version import VERSION +from .utils.dependency import DependencyManager + +# Library worker import reloaded in init and reload request +library_worker = None + + +class ContextEnabledTask(asyncio.Task): + AZURE_INVOCATION_ID = '__azure_function_invocation_id__' + + def __init__(self, coro, loop, context=None): + super().__init__(coro, loop=loop, context=context) + + current_task = asyncio.current_task(loop) + if 
current_task is not None: + invocation_id = getattr( + current_task, self.AZURE_INVOCATION_ID, None) + if invocation_id is not None: + self.set_azure_invocation_id(invocation_id) + + def set_azure_invocation_id(self, invocation_id: str) -> None: + setattr(self, self.AZURE_INVOCATION_ID, invocation_id) + + +_invocation_id_local = threading.local() + + +def get_current_invocation_id() -> Optional[str]: + loop = asyncio._get_running_loop() + if loop is not None: + current_task = asyncio.current_task(loop) + if current_task is not None: + task_invocation_id = getattr(current_task, + ContextEnabledTask.AZURE_INVOCATION_ID, + None) + if task_invocation_id is not None: + return task_invocation_id + + return getattr(_invocation_id_local, 'invocation_id', None) + + +class AsyncLoggingHandler(logging.Handler): + def emit(self, record: logging.LogRecord) -> None: + # Since we disable console log after gRPC channel is initiated, + # we should redirect all the messages into dispatcher. + + # When dispatcher receives an exception, it should switch back + # to console logging. However, it is possible that + # __current_dispatcher__ is set to None as there are still messages + # buffered in this handler, not calling the emit yet. + msg = self.format(record) + try: + Dispatcher.current.on_logging(record, msg) + except RuntimeError as runtime_error: + # This will cause 'Dispatcher not found' failure. + # Logging such of an issue will cause infinite loop of gRPC logging + # To mitigate, we should suppress the 2nd level error logging here + # and use print function to report exception instead. 
+ print(f'{CONSOLE_LOG_PREFIX} ERROR: {str(runtime_error)}', + file=sys.stderr, flush=True) + + +@dataclass +class WorkerRequest: + name: str + request: str + properties: Optional[dict[str, typing.Any]] = None + + +class DispatcherMeta(type): + __current_dispatcher__ = None + + @property + def current(cls): + disp = cls.__current_dispatcher__ + if disp is None: + raise RuntimeError('no currently running Dispatcher is found') + return disp + + +class Dispatcher(metaclass=DispatcherMeta): + _GRPC_STOP_RESPONSE = object() + + def __init__(self, loop: AbstractEventLoop, host: str, port: int, + worker_id: str, request_id: str, + grpc_connect_timeout: float, + grpc_max_msg_len: int = -1) -> None: + self._loop = loop + self._host = host + self._port = port + self._request_id = request_id + self._worker_id = worker_id + self._grpc_connect_timeout: float = grpc_connect_timeout + self._grpc_max_msg_len: int = grpc_max_msg_len + self._old_task_factory = None + + self._grpc_resp_queue: queue.Queue = queue.Queue() + self._grpc_connected_fut = loop.create_future() + self._grpc_thread: threading.Thread = threading.Thread( + name='grpc_local-thread', target=self.__poll_grpc) + + # TODO: Need to find a better place for these + self._function_data_cache_enabled = False + self._sync_call_tp: concurrent.futures.Executor = ( + self._create_sync_call_tp(self._get_sync_tp_max_workers())) + + def on_logging(self, record: logging.LogRecord, + formatted_msg: str) -> None: + if record.levelno >= logging.CRITICAL: + log_level = protos.RpcLog.Critical + elif record.levelno >= logging.ERROR: + log_level = protos.RpcLog.Error + elif record.levelno >= logging.WARNING: + log_level = protos.RpcLog.Warning + elif record.levelno >= logging.INFO: + log_level = protos.RpcLog.Information + elif record.levelno >= logging.DEBUG: + log_level = protos.RpcLog.Debug + else: + log_level = getattr(protos.RpcLog, 'None') + + if is_system_log_category(record.name): + log_category = 
protos.RpcLog.RpcLogCategory.Value('System') + else: # customers using logging will yield 'root' in record.name + log_category = protos.RpcLog.RpcLogCategory.Value('User') + + log = dict( + level=log_level, + message=formatted_msg, + category=record.name, + log_category=log_category + ) + + invocation_id = get_current_invocation_id() + if invocation_id is not None: + log['invocation_id'] = invocation_id + + self._grpc_resp_queue.put_nowait( + protos.StreamingMessage( + request_id=self.request_id, + rpc_log=protos.RpcLog(**log))) + + @property + def request_id(self) -> str: + return self._request_id + + @property + def worker_id(self) -> str: + return self._worker_id + + @classmethod + async def connect(cls, host: str, port: int, worker_id: str, + request_id: str, connect_timeout: float): + loop = asyncio.events.get_event_loop() + disp = cls(loop, host, port, worker_id, request_id, connect_timeout) + disp._grpc_thread.start() + await disp._grpc_connected_fut + logger.info('Successfully opened gRPC channel to %s:%s ', host, port) + return disp + + async def _initialize_grpc(self): + # Initialize gRPC-related attributes + self._grpc_resp_queue = queue.Queue() + self._grpc_connected_fut = self._loop.create_future() + self._grpc_thread = threading.Thread( + name='grpc_local-thread', target=self.__poll_grpc) + + # Start gRPC thread + self._grpc_thread.start() + + # Wait for gRPC connection to complete + await self._grpc_connected_fut + logger.info('Successfully opened gRPC channel to %s:%s', self._host, self._port) + + def __poll_grpc(self): + options = [] + if self._grpc_max_msg_len: + options.append(('grpc_local.max_receive_message_length', + self._grpc_max_msg_len)) + options.append(('grpc_local.max_send_message_length', + self._grpc_max_msg_len)) + + channel = grpc.insecure_channel( + f'{self._host}:{self._port}', options) + + try: + grpc.channel_ready_future(channel).result( + timeout=self._grpc_connect_timeout) + except Exception as ex: + 
self._loop.call_soon_threadsafe( + self._grpc_connected_fut.set_exception, ex) + return + else: + self._loop.call_soon_threadsafe( + self._grpc_connected_fut.set_result, True) + + stub = protos.FunctionRpcStub(channel) + + def gen(resp_queue): + while True: + msg = resp_queue.get() + if msg is self._GRPC_STOP_RESPONSE: + grpc_req_stream.cancel() + return + yield msg + + grpc_req_stream = stub.EventStream(gen(self._grpc_resp_queue)) + try: + for req in grpc_req_stream: + self._loop.call_soon_threadsafe( + self._loop.create_task, self._dispatch_grpc_request(req)) + except Exception as ex: + if ex is grpc_req_stream: + # Yes, this is how grpc_req_stream iterator exits. + return + error_logger.exception( + 'unhandled error in gRPC thread. Exception: {0}'.format( + ''.join(traceback.format_exception(ex)))) + raise + + async def _dispatch_grpc_request(self, request): + content_type = request.WhichOneof("content") + + match content_type: + case "worker_init_request": + request_handler = self._handle__worker_init_request + case "function_environment_reload_request": + request_handler = self._handle__function_environment_reload_request + case "functions_metadata_request": + request_handler = self._handle__functions_metadata_request + case "function_load_request": + request_handler = self._handle__function_load_request + case "worker_status_request": + request_handler = self._handle__worker_status_request + case "invocation_request": + request_handler = self._handle__invocation_request + case _: + # Don't crash on unknown messages. Log the error and return. 
+ logger.error("Unknown StreamingMessage content type: %s", content_type) + return + + resp = await request_handler(request) + self._grpc_resp_queue.put_nowait(resp) + + async def dispatch_forever(self): # sourcery skip: swap-if-expression + if DispatcherMeta.__current_dispatcher__ is not None: + raise RuntimeError('there can be only one running dispatcher per ' + 'process') + + self._old_task_factory = self._loop.get_task_factory() + + DispatcherMeta.__current_dispatcher__ = self + try: + forever = self._loop.create_future() + + self._grpc_resp_queue.put_nowait( + protos.StreamingMessage( + request_id=self.request_id, + start_stream=protos.StartStream( + worker_id=self.worker_id))) + + # In Python 3.11+, constructing a task has an optional context + # parameter. Allow for this param to be passed to ContextEnabledTask + self._loop.set_task_factory( + lambda loop, coro, context=None: ContextEnabledTask( + coro, loop=loop, context=context)) + + # Detach console logging before enabling GRPC channel logging + logger.info('Detaching console logging.') + disable_console_logging() + + # Attach gRPC logging to the root logger. 
Since gRPC channel is + # established, should use it for system and user logs + logging_handler = AsyncLoggingHandler() + root_logger = logging.getLogger() + + log_level = logging.INFO if not is_envvar_true( + PYTHON_ENABLE_DEBUG_LOGGING) else logging.DEBUG + + root_logger.setLevel(log_level) + root_logger.addHandler(logging_handler) + logger.info('Switched to gRPC logging.') + logging_handler.flush() + + try: + await forever + finally: + logger.warning('Detaching gRPC logging due to exception.') + logging_handler.flush() + root_logger.removeHandler(logging_handler) + + # Reenable console logging when there's an exception + enable_console_logging() + logger.warning('Switched to console logging due to exception.') + finally: + DispatcherMeta.__current_dispatcher__ = None + + self._loop.set_task_factory(self._old_task_factory) + self.stop() + + def stop(self) -> None: + if self._grpc_thread is not None: + self._grpc_resp_queue.put_nowait(self._GRPC_STOP_RESPONSE) + self._grpc_thread.join() + self._grpc_thread = None + + self._stop_sync_call_tp() + + def _stop_sync_call_tp(self): + """Deallocate the current synchronous thread pool and assign + self._sync_call_tp to None. If the thread pool does not exist, + this will be a no op. + """ + if getattr(self, '_sync_call_tp', None): + self._sync_call_tp.shutdown() + self._sync_call_tp = None + + @staticmethod + def _create_sync_call_tp(max_worker: Optional[int]) -> concurrent.futures.Executor: + """Create a thread pool executor with max_worker. This is a wrapper + over ThreadPoolExecutor constructor. Consider calling this method after + _stop_sync_call_tp() to ensure only 1 synchronous thread pool is + running. 
+ """ + return concurrent.futures.ThreadPoolExecutor( + max_workers=max_worker + ) + + @staticmethod + def _get_sync_tp_max_workers() -> typing.Optional[int]: + def tp_max_workers_validator(value: str) -> bool: + try: + int_value = int(value) + except ValueError: + logger.warning('%s must be an integer', + PYTHON_THREADPOOL_THREAD_COUNT) + return False + + if int_value < 1: + logger.warning( + '%s must be set to a value between 1 and sys.maxint. ' + 'Reverting to default value for max_workers', + PYTHON_THREADPOOL_THREAD_COUNT, + 1) + return False + return True + + max_workers = get_app_setting(setting=PYTHON_THREADPOOL_THREAD_COUNT, + validator=tp_max_workers_validator) + + # We can box the app setting as int for earlier python versions. + return int(max_workers) if max_workers else None + + async def _handle__worker_init_request(self, request): + logger.info('Received WorkerInitRequest, ' + 'python version %s, ' + 'worker version %s, ' + 'request ID %s. ' + 'App Settings state: %s. ' + 'To enable debug level logging, please refer to ' + 'https://aka.ms/python-enable-debug-logging', + sys.version, + VERSION, + self.request_id, + python_appsetting_state()) + + if DependencyManager.should_load_cx_dependencies(): + DependencyManager.prioritize_customer_dependencies() + + global library_worker + directory = request.worker_init_request.function_app_directory + v2_directory = os.path.join(directory, 'function_app.py') + logger.info(f"V2 Directory: {v2_directory}. 
Path exists: {os.path.exists(v2_directory)}") + if os.path.exists(v2_directory): + try: + logger.info("Trying to import v2 worker") + import azure_functions_worker_v2 + library_worker = azure_functions_worker_v2 + logger.info(f"V2 worker Import succeeded: {library_worker.__file__}") + except Exception as e: + logger.info(f"Error when importing V2 library: {traceback.format_exc()}") + else: + try: + logger.info("Trying to import v1 worker") + import azure_functions_worker_v1 + library_worker = azure_functions_worker_v1 + logger.info(f"V1 worker Import succeeded: {library_worker.__file__}") + except Exception as e: + logger.info(f"Error when importing V1 library: {traceback.format_exc()}") + + logger.info(f"Done Updating globals: {library_worker.__file__}") + + init_request = WorkerRequest(name="WorkerInitRequest", + request=request, + properties={"protos": protos, + "host": self._host}) + try: + init_response = await library_worker.worker_init_request(init_request) + except Exception as e: + logger.info(f"Exception from init: {e}") + logger.info("Finished WorkerInitRequest, request ID %s, worker id %s, ", + self.request_id, self.worker_id) + + return protos.StreamingMessage( + request_id=self.request_id, + worker_init_response=init_response) + + async def _handle__function_environment_reload_request(self, request): + logger.info('Received FunctionEnvironmentReloadRequest, ' + 'request ID: %s, ' + 'App Settings state: %s. ' + 'To enable debug level logging, please refer to ' + 'https://aka.ms/python-enable-debug-logging', + self.request_id, + python_appsetting_state()) + + func_env_reload_request = \ + request.function_environment_reload_request + directory = func_env_reload_request.function_app_directory + DependencyManager.reload_customer_libraries(directory) + + global library_worker + directory = request.worker_init_request.function_app_directory + v2_directory = os.path.join(directory, 'function_app.py') + logger.info(f"V2 Directory: {v2_directory}. 
Path exists: {os.path.exists(v2_directory)}") + if os.path.exists(v2_directory): + try: + logger.info("Trying to import v2 worker") + import azure_functions_worker_v2 + library_worker = azure_functions_worker_v2 + logger.info(f"V2 worker Import succeeded: {library_worker.__file__}") + except Exception as e: + logger.info( + f"Error when importing V2 library: {traceback.format_exc()}") + else: + try: + logger.info("Trying to import v1 worker") + import azure_functions_worker_v1 + library_worker = azure_functions_worker_v1 + logger.info(f"V1 worker Import succeeded: {library_worker.__file__}") + except Exception as e: + logger.info( + f"Error when importing V1 library: {traceback.format_exc()}") + + logger.info(f"Done Updating globals: {library_worker.__file__}") + + env_reload_request = WorkerRequest(name="FunctionEnvironmentReloadRequest", request=request, + properties={"protos": protos, + "host": self._host}) + env_reload_response = await library_worker.function_environment_reload_request(env_reload_request) + return protos.StreamingMessage( + request_id=self.request_id, + function_environment_reload_response=env_reload_response) + + async def _handle__worker_status_request(self, request): + # Logging is not necessary in this request since the response is used + # for host to judge scale decisions of out-of-proc languages. + # Having log here will reduce the responsiveness of the worker. 
+ return protos.StreamingMessage( + request_id=request.request_id, + worker_status_response=protos.WorkerStatusResponse()) + + async def _handle__functions_metadata_request(self, request): + logger.info( + 'Received WorkerMetadataRequest, request ID %s, ' + 'worker id: %s', + self.request_id, self.worker_id) + + metadata_request = WorkerRequest(name="WorkerMetadataRequest", request=request) + metadata_response = await library_worker.functions_metadata_request(metadata_request) + + return protos.StreamingMessage( + request_id=request.request_id, + function_metadata_response=metadata_response) + + async def _handle__function_load_request(self, request): + func_request = request.function_load_request + function_id = func_request.function_id + function_metadata = func_request.metadata + function_name = function_metadata.name + + logger.info( + 'Received WorkerLoadRequest, request ID %s, function_id: %s,' + 'function_name: %s, worker_id: %s', + self.request_id, function_id, function_name, self.worker_id) + + load_request = WorkerRequest(name="FunctionsLoadRequest", request=request) + load_response = await library_worker.function_load_request(load_request) + + return protos.StreamingMessage( + request_id=self.request_id, + function_load_response=load_response) + + async def _handle__invocation_request(self, request): + invoc_request = request.invocation_request + invocation_id = invoc_request.invocation_id + function_id = invoc_request.function_id + + logger.info( + 'Received FunctionInvocationRequest, request ID %s, function_id: %s,' + 'invocation_id: %s, worker_id: %s', + self.request_id, function_id, invocation_id, self.worker_id) + + invocation_request = WorkerRequest(name="WorkerInvRequest", request=request, + properties={"threadpool": self._sync_call_tp}) + invocation_response = await library_worker.invocation_request(invocation_request) + return protos.StreamingMessage( + request_id=self.request_id, + invocation_response=invocation_response) diff --git 
a/proxy_worker/logging.py b/proxy_worker/logging.py new file mode 100644 index 000000000..7c723426b --- /dev/null +++ b/proxy_worker/logging.py @@ -0,0 +1,92 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. + +import logging +import logging.handlers +import sys +from typing import Optional + +# Logging Prefixes +SYSTEM_LOG_PREFIX = "proxy_worker" +SDK_LOG_PREFIX = "azure.functions" +SYSTEM_ERROR_LOG_PREFIX = "azure_functions_worker_errors" +CONSOLE_LOG_PREFIX = "LanguageWorkerConsoleLog" + + +logger: logging.Logger = logging.getLogger(SYSTEM_LOG_PREFIX) +error_logger: logging.Logger = ( + logging.getLogger(SYSTEM_ERROR_LOG_PREFIX)) + +handler: Optional[logging.Handler] = None +error_handler: Optional[logging.Handler] = None + + +def setup(log_level, log_destination): + # Since handler and error_handler are moved to the global scope, + # before assigning to these handlers, we should define 'global' keyword + global handler + global error_handler + + if log_level == 'TRACE': + log_level = 'DEBUG' + + formatter = logging.Formatter(f'{CONSOLE_LOG_PREFIX}' + ' %(levelname)s: %(message)s') + + if log_destination is None: + # With no explicit log destination we do split logging, + # errors go into stderr, everything else -- to stdout. 
+ error_handler = logging.StreamHandler(sys.stderr) + error_handler.setFormatter(formatter) + error_handler.setLevel(getattr(logging, log_level)) + + handler = logging.StreamHandler(sys.stdout) + + elif log_destination in ('stdout', 'stderr'): + handler = logging.StreamHandler(getattr(sys, log_destination)) + + elif log_destination == 'syslog': + handler = logging.handlers.SysLogHandler() + + else: + handler = logging.FileHandler(log_destination) + + if error_handler is None: + error_handler = handler + + handler.setFormatter(formatter) + handler.setLevel(getattr(logging, log_level)) + + logger.addHandler(handler) + logger.setLevel(getattr(logging, log_level)) + + error_logger.addHandler(error_handler) + error_logger.setLevel(getattr(logging, log_level)) + + +def disable_console_logging() -> None: + # We should only remove the sys.stdout stream, as error_logger is used for + # unexpected critical error logs handling. + if logger and handler: + handler.flush() + logger.removeHandler(handler) + + +def enable_console_logging() -> None: + if logger and handler: + logger.addHandler(handler) + + +def is_system_log_category(ctg: str) -> bool: + """Check if the logging namespace belongs to system logs. Category starts + with the following name will be treated as system logs. + 1. 'proxy_worker' (Worker Info) + 2. 'azure_functions_worker_errors' (Worker Error) + 3. 
'azure.functions' (SDK) + + Expected behaviors for sytem logs and customer logs are listed below: + local_console customer_app_insight functions_kusto_table + system_log false false true + customer_log true true false + """ + return ctg.startswith(SYSTEM_LOG_PREFIX) or ctg.startswith(SDK_LOG_PREFIX) diff --git a/proxy_worker/protos/.gitignore b/proxy_worker/protos/.gitignore new file mode 100644 index 000000000..49d7060ef --- /dev/null +++ b/proxy_worker/protos/.gitignore @@ -0,0 +1,2 @@ +*_pb2.py +*_pb2_grpc.py diff --git a/proxy_worker/protos/__init__.py b/proxy_worker/protos/__init__.py new file mode 100644 index 000000000..e9c4f2397 --- /dev/null +++ b/proxy_worker/protos/__init__.py @@ -0,0 +1,43 @@ +from .FunctionRpc_pb2_grpc import ( # NoQA + FunctionRpcStub, + FunctionRpcServicer, + add_FunctionRpcServicer_to_server) + +from .FunctionRpc_pb2 import ( # NoQA + StreamingMessage, + StartStream, + WorkerInitRequest, + WorkerInitResponse, + RpcFunctionMetadata, + FunctionLoadRequest, + FunctionLoadResponse, + FunctionEnvironmentReloadRequest, + FunctionEnvironmentReloadResponse, + InvocationRequest, + InvocationResponse, + WorkerHeartbeat, + WorkerStatusRequest, + WorkerStatusResponse, + BindingInfo, + StatusResult, + RpcException, + ParameterBinding, + TypedData, + RpcHttp, + RpcHttpCookie, + RpcLog, + RpcSharedMemory, + RpcDataType, + CloseSharedMemoryResourcesRequest, + CloseSharedMemoryResourcesResponse, + FunctionsMetadataRequest, + FunctionMetadataResponse, + WorkerMetadata, + RpcRetryOptions) + +from .shared.NullableTypes_pb2 import ( + NullableString, + NullableBool, + NullableDouble, + NullableTimestamp +) diff --git a/proxy_worker/protos/_src/.gitignore b/proxy_worker/protos/_src/.gitignore new file mode 100644 index 000000000..940794e60 --- /dev/null +++ b/proxy_worker/protos/_src/.gitignore @@ -0,0 +1,288 @@ +## Ignore Visual Studio temporary files, build results, and +## files generated by popular Visual Studio add-ons. 
+## +## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore + +# User-specific files +*.suo +*.user +*.userosscache +*.sln.docstates + +# User-specific files (MonoDevelop/Xamarin Studio) +*.userprefs + +# Build results +[Dd]ebug/ +[Dd]ebugPublic/ +[Rr]elease/ +[Rr]eleases/ +x64/ +x86/ +bld/ +[Bb]in/ +[Oo]bj/ +[Ll]og/ + +# Visual Studio 2015 cache/options directory +.vs/ +# Uncomment if you have tasks that create the project's static files in wwwroot +#wwwroot/ + +# MSTest test Results +[Tt]est[Rr]esult*/ +[Bb]uild[Ll]og.* + +# NUNIT +*.VisualState.xml +TestResult.xml + +# Build Results of an ATL Project +[Dd]ebugPS/ +[Rr]eleasePS/ +dlldata.c + +# .NET Core +project.lock.json +project.fragment.lock.json +artifacts/ +**/Properties/launchSettings.json + +*_i.c +*_p.c +*_i.h +*.ilk +*.meta +*.obj +*.pch +*.pdb +*.pgc +*.pgd +*.rsp +*.sbr +*.tlb +*.tli +*.tlh +*.tmp +*.tmp_proj +*.log +*.vspscc +*.vssscc +.builds +*.pidb +*.svclog +*.scc + +# Chutzpah Test files +_Chutzpah* + +# Visual C++ cache files +ipch/ +*.aps +*.ncb +*.opendb +*.opensdf +*.sdf +*.cachefile +*.VC.db +*.VC.VC.opendb + +# Visual Studio profiler +*.psess +*.vsp +*.vspx +*.sap + +# TFS 2012 Local Workspace +$tf/ + +# Guidance Automation Toolkit +*.gpState + +# ReSharper is a .NET coding add-in +_ReSharper*/ +*.[Rr]e[Ss]harper +*.DotSettings.user + +# JustCode is a .NET coding add-in +.JustCode + +# TeamCity is a build add-in +_TeamCity* + +# DotCover is a Code Coverage Tool +*.dotCover + +# Visual Studio code coverage results +*.coverage +*.coveragexml + +# NCrunch +_NCrunch_* +.*crunch*.local.xml +nCrunchTemp_* + +# MightyMoose +*.mm.* +AutoTest.Net/ + +# Web workbench (sass) +.sass-cache/ + +# Installshield output folder +[Ee]xpress/ + +# DocProject is a documentation generator add-in +DocProject/buildhelp/ +DocProject/Help/*.HxT +DocProject/Help/*.HxC +DocProject/Help/*.hhc +DocProject/Help/*.hhk +DocProject/Help/*.hhp +DocProject/Help/Html2 
+DocProject/Help/html + +# Click-Once directory +publish/ + +# Publish Web Output +*.[Pp]ublish.xml +*.azurePubxml +# TODO: Comment the next line if you want to checkin your web deploy settings +# but database connection strings (with potential passwords) will be unencrypted +*.pubxml +*.publishproj + +# Microsoft Azure Web App publish settings. Comment the next line if you want to +# checkin your Azure Web App publish settings, but sensitive information contained +# in these scripts will be unencrypted +PublishScripts/ + +# NuGet Packages +*.nupkg +# The packages folder can be ignored because of Package Restore +**/packages/* +# except build/, which is used as an MSBuild target. +!**/packages/build/ +# Uncomment if necessary however generally it will be regenerated when needed +#!**/packages/repositories.config +# NuGet v3's project.json files produces more ignorable files +*.nuget.props +*.nuget.targets + +# Microsoft Azure Build Output +csx/ +*.build.csdef + +# Microsoft Azure Emulator +ecf/ +rcf/ + +# Windows Store app package directories and files +AppPackages/ +BundleArtifacts/ +Package.StoreAssociation.xml +_pkginfo.txt + +# Visual Studio cache files +# files ending in .cache can be ignored +*.[Cc]ache +# but keep track of directories ending in .cache +!*.[Cc]ache/ + +# Others +ClientBin/ +~$* +*~ +*.dbmdl +*.dbproj.schemaview +*.jfm +*.pfx +*.publishsettings +orleans.codegen.cs + +# Since there are multiple workflows, uncomment next line to ignore bower_components +# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) +#bower_components/ + +# RIA/Silverlight projects +Generated_Code/ + +# Backup & report files from converting an old project file +# to a newer Visual Studio version. 
Backup files are not needed, +# because we have git ;-) +_UpgradeReport_Files/ +Backup*/ +UpgradeLog*.XML +UpgradeLog*.htm + +# SQL Server files +*.mdf +*.ldf +*.ndf + +# Business Intelligence projects +*.rdl.data +*.bim.layout +*.bim_*.settings + +# Microsoft Fakes +FakesAssemblies/ + +# GhostDoc plugin setting file +*.GhostDoc.xml + +# Node.js Tools for Visual Studio +.ntvs_analysis.dat +node_modules/ + +# Typescript v1 declaration files +typings/ + +# Visual Studio 6 build log +*.plg + +# Visual Studio 6 workspace options file +*.opt + +# Visual Studio 6 auto-generated workspace file (contains which files were open etc.) +*.vbw + +# Visual Studio LightSwitch build output +**/*.HTMLClient/GeneratedArtifacts +**/*.DesktopClient/GeneratedArtifacts +**/*.DesktopClient/ModelManifest.xml +**/*.Server/GeneratedArtifacts +**/*.Server/ModelManifest.xml +_Pvt_Extensions + +# Paket dependency manager +.paket/paket.exe +paket-files/ + +# FAKE - F# Make +.fake/ + +# JetBrains Rider +.idea/ +*.sln.iml + +# CodeRush +.cr/ + +# Python Tools for Visual Studio (PTVS) +__pycache__/ +*.pyc + +# Cake - Uncomment if you are using it +# tools/** +# !tools/packages.config + +# Telerik's JustMock configuration file +*.jmconfig + +# BizTalk build output +*.btp.cs +*.btm.cs +*.odx.cs +*.xsd.cs diff --git a/proxy_worker/protos/_src/LICENSE b/proxy_worker/protos/_src/LICENSE new file mode 100644 index 000000000..21071075c --- /dev/null +++ b/proxy_worker/protos/_src/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. All rights reserved. 
+ + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/proxy_worker/protos/_src/README.md b/proxy_worker/protos/_src/README.md new file mode 100644 index 000000000..b22f0bb4b --- /dev/null +++ b/proxy_worker/protos/_src/README.md @@ -0,0 +1,98 @@ +# Azure Functions Languge Worker Protobuf + +This repository contains the protobuf definition file which defines the gRPC service which is used between the [Azure Functions Host](https://github.com/Azure/azure-functions-host) and the Azure Functions language workers. This repo is shared across many repos in many languages (for each worker) by using git commands. + +To use this repo in Azure Functions language workers, follow steps below to add this repo as a subtree (*Adding This Repo*). If this repo is already embedded in a language worker repo, follow the steps to update the consumed file (*Pulling Updates*). + +Learn more about Azure Function's projects on the [meta](https://github.com/azure/azure-functions) repo. 
+ +## Adding This Repo + +From within the Azure Functions language worker repo: +1. Define remote branch for cleaner git commands + - `git remote add proto-file https://github.com/azure/azure-functions-language-worker-protobuf.git` + - `git fetch proto-file` +2. Index contents of azure-functions-worker-protobuf to language worker repo + - `git read-tree --prefix= -u proto-file/` +3. Add new path in language worker repo to .gitignore file + - In .gitignore, add path in language worker repo +4. Finalize with commit + - `git commit -m "Added subtree from https://github.com/azure/azure-functions-language-worker-protobuf. Branch: . Commit: "` + - `git push` + +## Pulling Updates + +From within the Azure Functions language worker repo: +1. Define remote branch for cleaner git commands + - `git remote add proto-file https://github.com/azure/azure-functions-language-worker-protobuf.git` + - `git fetch proto-file` +2. Pull a specific release tag + - `git fetch proto-file refs/tags/` + - Example: `git fetch proto-file refs/tags/v1.1.0-protofile` +3. Merge updates + - Merge with an explicit path to subtree: `git merge -X subtree= --squash --allow-unrelated-histories --strategy-option theirs` + - Example: `git merge -X subtree=src/WebJobs.Script.Grpc/azure-functions-language-worker-protobuf --squash v1.1.0-protofile --allow-unrelated-histories --strategy-option theirs` +4. Finalize with commit + - `git commit -m "Updated subtree from https://github.com/azure/azure-functions-language-worker-protobuf. Tag: . Commit: "` + - `git push` + +## Releasing a Language Worker Protobuf version + +1. Draft a release in the GitHub UI + - Be sure to inculde details of the release +2. Create a release version, following semantic versioning guidelines ([semver.org](https://semver.org/)) +3. Tag the version with the pattern: `v..

-protofile` (example: `v1.1.0-protofile`) +3. Merge `dev` to `master` + +## Consuming FunctionRPC.proto +*Note: Update versionNumber before running following commands* + +## CSharp +``` +set NUGET_PATH="%UserProfile%\.nuget\packages" +set GRPC_TOOLS_PATH=%NUGET_PATH%\grpc.tools\\tools\windows_x86 +set PROTO_PATH=.\azure-functions-language-worker-protobuf\src\proto +set PROTO=.\azure-functions-language-worker-protobuf\src\proto\FunctionRpc.proto +set PROTOBUF_TOOLS=%NUGET_PATH%\google.protobuf.tools\\tools +set MSGDIR=.\Messages + +if exist %MSGDIR% rmdir /s /q %MSGDIR% +mkdir %MSGDIR% + +set OUTDIR=%MSGDIR%\DotNet +mkdir %OUTDIR% +%GRPC_TOOLS_PATH%\protoc.exe %PROTO% --csharp_out %OUTDIR% --grpc_out=%OUTDIR% --plugin=protoc-gen-grpc=%GRPC_TOOLS_PATH%\grpc_csharp_plugin.exe --proto_path=%PROTO_PATH% --proto_path=%PROTOBUF_TOOLS% +``` +## JavaScript +In package.json, add to the build script the following commands to build .js files and to build .ts files. Use and install npm package `protobufjs`. + +Generate JavaScript files: +``` +pbjs -t json-module -w commonjs -o azure-functions-language-worker-protobuf/src/rpc.js azure-functions-language-worker-protobuf/src/proto/FunctionRpc.proto +``` +Generate TypeScript files: +``` +pbjs -t static-module azure-functions-language-worker-protobuf/src/proto/FunctionRpc.proto -o azure-functions-language-worker-protobuf/src/rpc_static.js && pbts -o azure-functions-language-worker-protobuf/src/rpc.d.ts azure-functions-language-worker-protobuf/src/rpc_static.js +``` + +## Java +Maven plugin : [protobuf-maven-plugin](https://www.xolstice.org/protobuf-maven-plugin/) +In pom.xml add following under configuration for this plugin +${basedir}//azure-functions-language-worker-protobuf/src/proto + +## Python +--TODO + +## Contributing + +This project welcomes contributions and suggestions. 
Most contributions require you to agree to a +Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us +the rights to use your contribution. For details, visit https://cla.microsoft.com. + +When you submit a pull request, a CLA-bot will automatically determine whether you need to provide +a CLA and decorate the PR appropriately (e.g., label, comment). Simply follow the instructions +provided by the bot. You will only need to do this once across all repos using our CLA. + +This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). +For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or +contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments. diff --git a/proxy_worker/protos/_src/src/proto/FunctionRpc.proto b/proxy_worker/protos/_src/src/proto/FunctionRpc.proto new file mode 100644 index 000000000..f48bc7bbe --- /dev/null +++ b/proxy_worker/protos/_src/src/proto/FunctionRpc.proto @@ -0,0 +1,730 @@ +syntax = "proto3"; +// protobuf vscode extension: https://marketplace.visualstudio.com/items?itemName=zxh404.vscode-proto3 + +option java_multiple_files = true; +option java_package = "com.microsoft.azure.functions.rpc.messages"; +option java_outer_classname = "FunctionProto"; +option csharp_namespace = "Microsoft.Azure.WebJobs.Script.Grpc.Messages"; +option go_package ="github.com/Azure/azure-functions-go-worker/internal/rpc"; + +package AzureFunctionsRpcMessages; + +import "google/protobuf/duration.proto"; +import "identity/ClaimsIdentityRpc.proto"; +import "shared/NullableTypes.proto"; + +// Interface exported by the server. 
+service FunctionRpc { + rpc EventStream (stream StreamingMessage) returns (stream StreamingMessage) {} +} + +message StreamingMessage { + // Used to identify message between host and worker + string request_id = 1; + + // Payload of the message + oneof content { + + // Worker initiates stream + StartStream start_stream = 20; + + // Host sends capabilities/init data to worker + WorkerInitRequest worker_init_request = 17; + // Worker responds after initializing with its capabilities & status + WorkerInitResponse worker_init_response = 16; + + // MESSAGE NOT USED + // Worker periodically sends empty heartbeat message to host + WorkerHeartbeat worker_heartbeat = 15; + + // Host sends terminate message to worker. + // Worker terminates if it can, otherwise host terminates after a grace period + WorkerTerminate worker_terminate = 14; + + // Host periodically sends status request to the worker + WorkerStatusRequest worker_status_request = 12; + WorkerStatusResponse worker_status_response = 13; + + // On file change event, host sends notification to worker + FileChangeEventRequest file_change_event_request = 6; + + // Worker requests a desired action (restart worker, reload function) + WorkerActionResponse worker_action_response = 7; + + // Host sends required metadata to worker to load function + FunctionLoadRequest function_load_request = 8; + // Worker responds after loading with the load result + FunctionLoadResponse function_load_response = 9; + + // Host requests a given invocation + InvocationRequest invocation_request = 4; + + // Worker responds to a given invocation + InvocationResponse invocation_response = 5; + + // Host sends cancel message to attempt to cancel an invocation. + // If an invocation is cancelled, host will receive an invocation response with status cancelled. 
+ InvocationCancel invocation_cancel = 21; + + // Worker logs a message back to the host + RpcLog rpc_log = 2; + + FunctionEnvironmentReloadRequest function_environment_reload_request = 25; + + FunctionEnvironmentReloadResponse function_environment_reload_response = 26; + + // Ask the worker to close any open shared memory resources for a given invocation + CloseSharedMemoryResourcesRequest close_shared_memory_resources_request = 27; + CloseSharedMemoryResourcesResponse close_shared_memory_resources_response = 28; + + // Worker indexing message types + FunctionsMetadataRequest functions_metadata_request = 29; + FunctionMetadataResponse function_metadata_response = 30; + + // Host sends required metadata to worker to load functions + FunctionLoadRequestCollection function_load_request_collection = 31; + + // Host gets the list of function load responses + FunctionLoadResponseCollection function_load_response_collection = 32; + + // Host sends required metadata to worker to warmup the worker + WorkerWarmupRequest worker_warmup_request = 33; + + // Worker responds after warming up with the warmup result + WorkerWarmupResponse worker_warmup_response = 34; + + } +} + +// Process.Start required info +// connection details +// protocol type +// protocol version + +// Worker sends the host information identifying itself +message StartStream { + // id of the worker + string worker_id = 2; +} + +// Host requests the worker to initialize itself +message WorkerInitRequest { + // version of the host sending init request + string host_version = 1; + + // A map of host supported features/capabilities + map capabilities = 2; + + // inform worker of supported categories and their levels + // i.e. 
Worker = Verbose, Function.MyFunc = None + map log_categories = 3; + + // Full path of worker.config.json location + string worker_directory = 4; + + // base directory for function app + string function_app_directory = 5; +} + +// Worker responds with the result of initializing itself +message WorkerInitResponse { + // PROPERTY NOT USED + // TODO: Remove from protobuf during next breaking change release + string worker_version = 1; + + // A map of worker supported features/capabilities + map capabilities = 2; + + // Status of the response + StatusResult result = 3; + + // Worker metadata captured for telemetry purposes + WorkerMetadata worker_metadata = 4; +} + +message WorkerMetadata { + // The runtime/stack name + string runtime_name = 1; + + // The version of the runtime/stack + string runtime_version = 2; + + // The version of the worker + string worker_version = 3; + + // The worker bitness/architecture + string worker_bitness = 4; + + // Optional additional custom properties + map custom_properties = 5; +} + +// Used by the host to determine success/failure/cancellation +message StatusResult { + // Indicates Failure/Success/Cancelled + enum Status { + Failure = 0; + Success = 1; + Cancelled = 2; + } + + // Status for the given result + Status status = 4; + + // Specific message about the result + string result = 1; + + // Exception message (if exists) for the status + RpcException exception = 2; + + // Captured logs or relevant details can use the logs property + repeated RpcLog logs = 3; +} + +// MESSAGE NOT USED +// TODO: Remove from protobuf during next breaking change release +message WorkerHeartbeat {} + +// Warning before killing the process after grace_period +// Worker self terminates ..no response on this +message WorkerTerminate { + google.protobuf.Duration grace_period = 1; +} + +// Host notifies worker of file content change +message FileChangeEventRequest { + // Types of File change operations (See link for more info: 
https://msdn.microsoft.com/en-us/library/t6xf43e0(v=vs.110).aspx) + enum Type { + Unknown = 0; + Created = 1; + Deleted = 2; + Changed = 4; + Renamed = 8; + All = 15; + } + + // type for this event + Type type = 1; + + // full file path for the file change notification + string full_path = 2; + + // Name of the function affected + string name = 3; +} + +// Indicates whether worker reloaded successfully or needs a restart +message WorkerActionResponse { + // indicates whether a restart is needed, or reload successfully + enum Action { + Restart = 0; + Reload = 1; + } + + // action for this response + Action action = 1; + + // text reason for the response + string reason = 2; +} + +// Used by the host to determine worker health +message WorkerStatusRequest { +} + +// Worker responds with status message +// TODO: Add any worker relevant status to response +message WorkerStatusResponse { +} + +message FunctionEnvironmentReloadRequest { + // Environment variables from the current process + map environment_variables = 1; + // Current directory of function app + string function_app_directory = 2; +} + +message FunctionEnvironmentReloadResponse { + // After specialization, worker sends capabilities & metadata. 
+ // Worker metadata captured for telemetry purposes + WorkerMetadata worker_metadata = 1; + + // A map of worker supported features/capabilities + map capabilities = 2; + + // Status of the response + StatusResult result = 3; +} + +// Tell the out-of-proc worker to close any shared memory maps it allocated for given invocation +message CloseSharedMemoryResourcesRequest { + repeated string map_names = 1; +} + +// Response from the worker indicating which of the shared memory maps have been successfully closed and which have not been closed +// The key (string) is the map name and the value (bool) is true if it was closed, false if not +message CloseSharedMemoryResourcesResponse { + map close_map_results = 1; +} + +// Host tells the worker to load a list of Functions +message FunctionLoadRequestCollection { + repeated FunctionLoadRequest function_load_requests = 1; +} + +// Host gets the list of function load responses +message FunctionLoadResponseCollection { + repeated FunctionLoadResponse function_load_responses = 1; +} + +// Load request of a single Function +message FunctionLoadRequest { + // unique function identifier (avoid name collisions, facilitate reload case) + string function_id = 1; + + // Metadata for the request + RpcFunctionMetadata metadata = 2; + + // A flag indicating if managed dependency is enabled or not + bool managed_dependency_enabled = 3; +} + +// Worker tells host result of reload +message FunctionLoadResponse { + // unique function identifier + string function_id = 1; + + // Result of load operation + StatusResult result = 2; + // TODO: return type expected? 
+ + // Result of load operation + bool is_dependency_downloaded = 3; +} + +// Information on how a Function should be loaded and its bindings +message RpcFunctionMetadata { + // TODO: do we want the host's name - the language worker might do a better job of assignment than the host + string name = 4; + + // base directory for the Function + string directory = 1; + + // Script file specified + string script_file = 2; + + // Entry point specified + string entry_point = 3; + + // Bindings info + map bindings = 6; + + // Is set to true for proxy + bool is_proxy = 7; + + // Function indexing status + StatusResult status = 8; + + // Function language + string language = 9; + + // Raw binding info + repeated string raw_bindings = 10; + + // unique function identifier (avoid name collisions, facilitate reload case) + string function_id = 13; + + // A flag indicating if managed dependency is enabled or not + bool managed_dependency_enabled = 14; + + // The optional function execution retry strategy to use on invocation failures. 
+ RpcRetryOptions retry_options = 15; + + // Properties for function metadata + // They're usually specific to a worker and largely passed along to the controller API for use + // outside the host + map properties = 16; +} + +// Host tells worker it is ready to receive metadata +message FunctionsMetadataRequest { + // base directory for function app + string function_app_directory = 1; +} + +// Worker sends function metadata back to host +message FunctionMetadataResponse { + // list of function indexing responses + repeated RpcFunctionMetadata function_metadata_results = 1; + + // status of overall metadata request + StatusResult result = 2; + + // if set to true then host will perform indexing + bool use_default_metadata_indexing = 3; +} + +// Host requests worker to invoke a Function +message InvocationRequest { + // Unique id for each invocation + string invocation_id = 1; + + // Unique id for each Function + string function_id = 2; + + // Input bindings (include trigger) + repeated ParameterBinding input_data = 3; + + // binding metadata from trigger + map trigger_metadata = 4; + + // Populates activityId, tracestate and tags from host + RpcTraceContext trace_context = 5; + + // Current retry context + RetryContext retry_context = 6; +} + +// Host sends ActivityId, traceStateString and Tags from host +message RpcTraceContext { + // This corresponds to Activity.Current?.Id + string trace_parent = 1; + + // This corresponds to Activity.Current?.TraceStateString + string trace_state = 2; + + // This corresponds to Activity.Current?.Tags + map attributes = 3; +} + +// Host sends retry context for a function invocation +message RetryContext { + // Current retry count + int32 retry_count = 1; + + // Max retry count + int32 max_retry_count = 2; + + // Exception that caused the retry + RpcException exception = 3; +} + +// Host requests worker to cancel invocation +message InvocationCancel { + // Unique id for invocation + string invocation_id = 2; + + // PROPERTY NOT 
USED + google.protobuf.Duration grace_period = 1; +} + +// Worker responds with status of Invocation +message InvocationResponse { + // Unique id for invocation + string invocation_id = 1; + + // Output binding data + repeated ParameterBinding output_data = 2; + + // data returned from Function (for $return and triggers with return support) + TypedData return_value = 4; + + // Status of the invocation (success/failure/canceled) + StatusResult result = 3; +} + +message WorkerWarmupRequest { + // Full path of worker.config.json location + string worker_directory = 1; +} + +message WorkerWarmupResponse { + StatusResult result = 1; +} + +// Used to encapsulate data which could be a variety of types +message TypedData { + oneof data { + string string = 1; + string json = 2; + bytes bytes = 3; + bytes stream = 4; + RpcHttp http = 5; + sint64 int = 6; + double double = 7; + CollectionBytes collection_bytes = 8; + CollectionString collection_string = 9; + CollectionDouble collection_double = 10; + CollectionSInt64 collection_sint64 = 11; + ModelBindingData model_binding_data = 12; + CollectionModelBindingData collection_model_binding_data = 13; + } +} + +// Specify which type of data is contained in the shared memory region being read +enum RpcDataType { + unknown = 0; + string = 1; + json = 2; + bytes = 3; + stream = 4; + http = 5; + int = 6; + double = 7; + collection_bytes = 8; + collection_string = 9; + collection_double = 10; + collection_sint64 = 11; +} + +// Used to provide metadata about shared memory region to read data from +message RpcSharedMemory { + // Name of the shared memory map containing data + string name = 1; + // Offset in the shared memory map to start reading data from + int64 offset = 2; + // Number of bytes to read (starting from the offset) + int64 count = 3; + // Final type to which the read data (in bytes) is to be interpreted as + RpcDataType type = 4; +} + +// Used to encapsulate collection string +message CollectionString { + repeated string 
string = 1; +} + +// Used to encapsulate collection bytes +message CollectionBytes { + repeated bytes bytes = 1; +} + +// Used to encapsulate collection double +message CollectionDouble { + repeated double double = 1; +} + +// Used to encapsulate collection sint64 +message CollectionSInt64 { + repeated sint64 sint64 = 1; +} + +// Used to describe a given binding on invocation +message ParameterBinding { + // Name for the binding + string name = 1; + + oneof rpc_data { + // Data for the binding + TypedData data = 2; + + // Metadata about the shared memory region to read data from + RpcSharedMemory rpc_shared_memory = 3; + } +} + +// Used to describe a given binding on load +message BindingInfo { + // Indicates whether it is an input or output binding (or a fancy inout binding) + enum Direction { + in = 0; + out = 1; + inout = 2; + } + + // Indicates the type of the data for the binding + enum DataType { + undefined = 0; + string = 1; + binary = 2; + stream = 3; + } + + // Type of binding (e.g. HttpTrigger) + string type = 2; + + // Direction of the given binding + Direction direction = 3; + + DataType data_type = 4; + + // Properties for binding metadata + map properties = 5; +} + +// Used to send logs back to the Host +message RpcLog { + // Matching ILogger semantics + // https://github.com/aspnet/Logging/blob/9506ccc3f3491488fe88010ef8b9eb64594abf95/src/Microsoft.Extensions.Logging/Logger.cs + // Level for the Log + enum Level { + Trace = 0; + Debug = 1; + Information = 2; + Warning = 3; + Error = 4; + Critical = 5; + None = 6; + } + + // Category of the log. Defaults to User if not specified. + enum RpcLogCategory { + User = 0; + System = 1; + CustomMetric = 2; + } + + // Unique id for invocation (if exists) + string invocation_id = 1; + + // TOD: This should be an enum + // Category for the log (startup, load, invocation, etc.) 
+ string category = 2; + + // Level for the given log message + Level level = 3; + + // Message for the given log + string message = 4; + + // Id for the even associated with this log (if exists) + string event_id = 5; + + // Exception (if exists) + RpcException exception = 6; + + // json serialized property bag + string properties = 7; + + // Category of the log. Either user(default), system, or custom metric. + RpcLogCategory log_category = 8; + + // strongly-typed (ish) property bag + map propertiesMap = 9; +} + +// Encapsulates an Exception +message RpcException { + // Source of the exception + string source = 3; + + // Stack trace for the exception + string stack_trace = 1; + + // Textual message describing the exception + string message = 2; + + // Worker specifies whether exception is a user exception, + // for purpose of application insights logging. Defaults to false. + bool is_user_exception = 4; + + // Type of exception. If it's a user exception, the type is passed along to app insights. + // Otherwise, it's ignored for now. + string type = 5; +} + +// Http cookie type. Note that only name and value are used for Http requests +message RpcHttpCookie { + // Enum that lets servers require that a cookie shouldn't be sent with cross-site requests + enum SameSite { + None = 0; + Lax = 1; + Strict = 2; + ExplicitNone = 3; + } + + // Cookie name + string name = 1; + + // Cookie value + string value = 2; + + // Specifies allowed hosts to receive the cookie + NullableString domain = 3; + + // Specifies URL path that must exist in the requested URL + NullableString path = 4; + + // Sets the cookie to expire at a specific date instead of when the client closes. + // It is generally recommended that you use "Max-Age" over "Expires". 
+ NullableTimestamp expires = 5; + + // Sets the cookie to only be sent with an encrypted request + NullableBool secure = 6; + + // Sets the cookie to be inaccessible to JavaScript's Document.cookie API + NullableBool http_only = 7; + + // Allows servers to assert that a cookie ought not to be sent along with cross-site requests + SameSite same_site = 8; + + // Number of seconds until the cookie expires. A zero or negative number will expire the cookie immediately. + NullableDouble max_age = 9; +} + +// TODO - solidify this or remove it +message RpcHttp { + string method = 1; + string url = 2; + map headers = 3; + TypedData body = 4; + map params = 10; + string status_code = 12; + map query = 15; + bool enable_content_negotiation= 16; + TypedData rawBody = 17; + repeated RpcClaimsIdentity identities = 18; + repeated RpcHttpCookie cookies = 19; + map nullable_headers = 20; + map nullable_params = 21; + map nullable_query = 22; +} + +// Message representing Microsoft.Azure.WebJobs.ParameterBindingData +// Used for hydrating SDK-type bindings in out-of-proc workers +message ModelBindingData +{ + // The version of the binding data content + string version = 1; + + // The extension source of the binding data + string source = 2; + + // The content type of the binding data content + string content_type = 3; + + // The binding data content + bytes content = 4; +} + +// Used to encapsulate collection model_binding_data +message CollectionModelBindingData { + repeated ModelBindingData model_binding_data = 1; +} + +// Retry policy which the worker sends the host when the worker indexes +// a function. +message RpcRetryOptions +{ + // The retry strategy to use. Valid values are fixed delay or exponential backoff. + enum RetryStrategy + { + exponential_backoff = 0; + fixed_delay = 1; + } + + // The maximum number of retries allowed per function execution. + // -1 means to retry indefinitely. 
+ int32 max_retry_count = 2; + + // The delay that's used between retries when you're using a fixed delay strategy. + google.protobuf.Duration delay_interval = 3; + + // The minimum retry delay when you're using an exponential backoff strategy + google.protobuf.Duration minimum_interval = 4; + + // The maximum retry delay when you're using an exponential backoff strategy + google.protobuf.Duration maximum_interval = 5; + + RetryStrategy retry_strategy = 6; +} \ No newline at end of file diff --git a/proxy_worker/protos/_src/src/proto/identity/ClaimsIdentityRpc.proto b/proxy_worker/protos/_src/src/proto/identity/ClaimsIdentityRpc.proto new file mode 100644 index 000000000..c3945bb8a --- /dev/null +++ b/proxy_worker/protos/_src/src/proto/identity/ClaimsIdentityRpc.proto @@ -0,0 +1,26 @@ +syntax = "proto3"; +// protobuf vscode extension: https://marketplace.visualstudio.com/items?itemName=zxh404.vscode-proto3 + +option java_package = "com.microsoft.azure.functions.rpc.messages"; + +import "shared/NullableTypes.proto"; + +// Light-weight representation of a .NET System.Security.Claims.ClaimsIdentity object. +// This is the same serialization as found in EasyAuth, and needs to be kept in sync with +// its ClaimsIdentitySlim definition, as seen in the WebJobs extension: +// https://github.com/Azure/azure-webjobs-sdk-extensions/blob/dev/src/WebJobs.Extensions.Http/ClaimsIdentitySlim.cs +message RpcClaimsIdentity { + NullableString authentication_type = 1; + NullableString name_claim_type = 2; + NullableString role_claim_type = 3; + repeated RpcClaim claims = 4; +} + +// Light-weight representation of a .NET System.Security.Claims.Claim object. 
+// This is the same serialization as found in EasyAuth, and needs to be kept in sync with +// its ClaimSlim definition, as seen in the WebJobs extension: +// https://github.com/Azure/azure-webjobs-sdk-extensions/blob/dev/src/WebJobs.Extensions.Http/ClaimSlim.cs +message RpcClaim { + string value = 1; + string type = 2; +} diff --git a/proxy_worker/protos/_src/src/proto/shared/NullableTypes.proto b/proxy_worker/protos/_src/src/proto/shared/NullableTypes.proto new file mode 100644 index 000000000..4fb476502 --- /dev/null +++ b/proxy_worker/protos/_src/src/proto/shared/NullableTypes.proto @@ -0,0 +1,30 @@ +syntax = "proto3"; +// protobuf vscode extension: https://marketplace.visualstudio.com/items?itemName=zxh404.vscode-proto3 + +option java_package = "com.microsoft.azure.functions.rpc.messages"; + +import "google/protobuf/timestamp.proto"; + +message NullableString { + oneof string { + string value = 1; + } +} + +message NullableDouble { + oneof double { + double value = 1; + } +} + +message NullableBool { + oneof bool { + bool value = 1; + } +} + +message NullableTimestamp { + oneof timestamp { + google.protobuf.Timestamp value = 1; + } +} diff --git a/proxy_worker/protos/identity/__init__.py b/proxy_worker/protos/identity/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/proxy_worker/protos/shared/__init__.py b/proxy_worker/protos/shared/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/proxy_worker/start_worker.py b/proxy_worker/start_worker.py new file mode 100644 index 000000000..6a0aaca12 --- /dev/null +++ b/proxy_worker/start_worker.py @@ -0,0 +1,84 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
+"""Main entrypoint.""" + +import argparse +import traceback + + +def parse_args(): + parser = argparse.ArgumentParser( + description='Python Azure Functions Worker') + parser.add_argument('--host', + help="host address") + parser.add_argument('--port', type=int, + help='port number') + parser.add_argument('--workerId', dest='worker_id', + help='id for the worker') + parser.add_argument('--requestId', dest='request_id', + help='id of the request') + parser.add_argument('--log-level', type=str, default='INFO', + choices=['TRACE', 'INFO', 'WARNING', 'ERROR'], + help="log level: 'TRACE', 'INFO', 'WARNING', " + "or 'ERROR'") + parser.add_argument('--log-to', type=str, default=None, + help='log destination: stdout, stderr, ' + 'syslog, or a file path') + parser.add_argument('--grpcMaxMessageLength', type=int, + dest='grpc_max_msg_len') + parser.add_argument('--functions-uri', dest='functions_uri', type=str, + help='URI with IP Address and Port used to' + ' connect to the Host via gRPC.') + parser.add_argument('--functions-request-id', dest='functions_request_id', + type=str, help='Request ID used for gRPC communication ' + 'with the Host.') + parser.add_argument('--functions-worker-id', + dest='functions_worker_id', type=str, + help='Worker ID assigned to this language worker.') + parser.add_argument('--functions-grpc-max-message-length', type=int, + dest='functions_grpc_max_msg_len', + help='Max grpc_local message length for Functions') + return parser.parse_args() + + +def start(): + from .utils.dependency import DependencyManager + DependencyManager.initialize() + DependencyManager.use_worker_dependencies() + + import asyncio + + from . 
import logging + from .logging import error_logger, logger + + args = parse_args() + logging.setup(log_level=args.log_level, log_destination=args.log_to) + + logger.info("Args: %s" , args) + logger.info('Starting Azure Functions Python Worker.') + logger.info('Worker ID: %s, Request ID: %s, Host Address: %s:%s', + args.worker_id, args.request_id, args.host, args.port) + + try: + return asyncio.run(start_async( + args.host, args.port, args.worker_id, args.request_id)) + except Exception as ex: + error_logger.exception( + 'unhandled error in functions worker: {0}'.format( + ''.join(traceback.format_exception(ex)))) + raise + + +async def start_async(host, port, worker_id, request_id): + from . import dispatcher + + # ToDo: Fix functions_grpc_max_msg_len. Needs to be parsed from args + disp = await dispatcher.Dispatcher.connect(host=host, port=port, + worker_id=worker_id, + request_id=request_id, + connect_timeout=5.0) + await disp.dispatch_forever() + + +if __name__ == '__main__': + start() diff --git a/proxy_worker/utils/__init__.py b/proxy_worker/utils/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/proxy_worker/utils/app_settings.py b/proxy_worker/utils/app_settings.py new file mode 100644 index 000000000..ed8487ec3 --- /dev/null +++ b/proxy_worker/utils/app_settings.py @@ -0,0 +1,72 @@ +import os +from typing import Callable, Optional + +from .constants import ( + PYTHON_ENABLE_DEBUG_LOGGING, + PYTHON_ENABLE_INIT_INDEXING, + PYTHON_ENABLE_OPENTELEMETRY, + PYTHON_SCRIPT_FILE_NAME, + PYTHON_THREADPOOL_THREAD_COUNT, +) + + +def get_app_setting( + setting: str, + default_value: Optional[str] = None, + validator: Optional[Callable[[str], bool]] = None +) -> Optional[str]: + """Returns the application setting from environment variable. + + Parameters + ---------- + setting: str + The name of the application setting (e.g. 
FUNCTIONS_RUNTIME_VERSION) + + default_value: Optional[str] + The expected return value when the application setting is not found, + or the app setting does not pass the validator. + + validator: Optional[Callable[[str], bool]] + A function accepts the app setting value and should return True when + the app setting value is acceptable. + + Returns + ------- + Optional[str] + A string value that is set in the application setting + """ + app_setting_value = os.getenv(setting) + + # If an app setting is not configured, we return the default value + if app_setting_value is None: + return default_value + + # If there's no validator, we should return the app setting value directly + if validator is None: + return app_setting_value + + # If the app setting is set with a validator, + # On True, should return the app setting value + # On False, should return the default value + if validator(app_setting_value): + return app_setting_value + return default_value + + +def python_appsetting_state(): + current_vars = os.environ.copy() + python_specific_settings = \ + [ + PYTHON_THREADPOOL_THREAD_COUNT, + PYTHON_ENABLE_DEBUG_LOGGING, + PYTHON_SCRIPT_FILE_NAME, + PYTHON_ENABLE_INIT_INDEXING, + PYTHON_ENABLE_OPENTELEMETRY] + + app_setting_states = "".join( + f"{app_setting}: {current_vars[app_setting]} | " + for app_setting in python_specific_settings + if app_setting in current_vars + ) + + return app_setting_states diff --git a/proxy_worker/utils/common.py b/proxy_worker/utils/common.py new file mode 100644 index 000000000..2c212286d --- /dev/null +++ b/proxy_worker/utils/common.py @@ -0,0 +1,29 @@ +import os + + +def is_true_like(setting: str) -> bool: + if setting is None: + return False + + return setting.lower().strip() in {'1', 'true', 't', 'yes', 'y'} + + +def is_false_like(setting: str) -> bool: + if setting is None: + return False + + return setting.lower().strip() in {'0', 'false', 'f', 'no', 'n'} + + +def is_envvar_true(env_key: str) -> bool: + if os.getenv(env_key) is 
None: + return False + + return is_true_like(os.environ[env_key]) + + +def is_envvar_false(env_key: str) -> bool: + if os.getenv(env_key) is None: + return False + + return is_false_like(os.environ[env_key]) diff --git a/proxy_worker/utils/constants.py b/proxy_worker/utils/constants.py new file mode 100644 index 000000000..899b0433f --- /dev/null +++ b/proxy_worker/utils/constants.py @@ -0,0 +1,11 @@ +# App Setting constants +PYTHON_ENABLE_DEBUG_LOGGING = "PYTHON_ENABLE_DEBUG_LOGGING" +PYTHON_THREADPOOL_THREAD_COUNT = "PYTHON_THREADPOOL_THREAD_COUNT" +PYTHON_SCRIPT_FILE_NAME = "PYTHON_SCRIPT_FILE_NAME" +PYTHON_ENABLE_INIT_INDEXING = "PYTHON_ENABLE_INIT_INDEXING" +PYTHON_ENABLE_OPENTELEMETRY= "PYTHON_ENABLE_OPENTELEMETRY" +PYTHON_ISOLATE_WORKER_DEPENDENCIES = "PYTHON_ISOLATE_WORKER_DEPENDENCIES" + + +CONTAINER_NAME = "CONTAINER_NAME" +AZURE_WEBJOBS_SCRIPT_ROOT = "AzureWebJobsScriptRoot" diff --git a/proxy_worker/utils/dependency.py b/proxy_worker/utils/dependency.py new file mode 100644 index 000000000..7fdaafb9e --- /dev/null +++ b/proxy_worker/utils/dependency.py @@ -0,0 +1,333 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +import importlib.util +import inspect +import os +import re +import sys +from types import ModuleType +from typing import List, Optional + +from .common import is_envvar_true, is_true_like +from .constants import AZURE_WEBJOBS_SCRIPT_ROOT, CONTAINER_NAME, PYTHON_ISOLATE_WORKER_DEPENDENCIES +from ..logging import logger + + +class DependencyManager: + """The dependency manager controls the Python packages source, preventing + worker packages interfer customer's code. + + It has two mode, in worker mode, the Python packages are loaded from worker + path, (e.g. workers/python///). In customer mode, + the packages are loaded from customer's .python_packages/ folder or from + their virtual environment. 
+ + Azure Functions has three different set of sys.path ordering, + + Linux Consumption sys.path: [ + "/tmp/functions\\standby\\wwwroot", # Placeholder folder + "/home/site/wwwroot/.python_packages/lib/site-packages", # CX's deps + "/azure-functions-host/workers/python/3.13/LINUX/X64", # Worker's deps + "/home/site/wwwroot" # CX's Working Directory + ] + + Linux Dedicated/Premium sys.path: [ + "/home/site/wwwroot", # CX's Working Directory + "/home/site/wwwroot/.python_packages/lib/site-packages", # CX's deps + "/azure-functions-host/workers/python/3.13/LINUX/X64", # Worker's deps + ] + + Core Tools sys.path: [ + "%appdata%\\azure-functions-core-tools\\bin\\workers\\" + "python\\3.13\\WINDOWS\\X64", # Worker's deps + "C:\\Users\\user\\Project\\.venv311\\lib\\site-packages", # CX's deps + "C:\\Users\\user\\Project", # CX's Working Directory + ] + + When we first start up the Python worker, we should only loaded from + worker's deps and create module namespace (e.g. google.protobuf variable). + + Once the worker receives worker init request, we clear out the sys.path, + worker sys.modules cache and sys.path_import_cache so the libraries + will only get loaded from CX's deps path. + """ + + cx_deps_path: str = '' + cx_working_dir: str = '' + worker_deps_path: str = '' + + @classmethod + def initialize(cls): + cls.cx_deps_path = cls._get_cx_deps_path() + cls.cx_working_dir = cls._get_cx_working_dir() + cls.worker_deps_path = cls._get_worker_deps_path() + + @classmethod + def is_in_linux_consumption(cls): + return CONTAINER_NAME in os.environ + + @classmethod + def should_load_cx_dependencies(cls): + """ + Customer dependencies should be loaded when + 1) App is a dedicated app + 2) App is linux consumption but not in placeholder mode. + This can happen when the worker restarts for any reason + (OOM, timeouts etc) and env reload request is not called. 
+
+        """
+        return not (DependencyManager.is_in_linux_consumption()
+                    and is_envvar_true("WEBSITE_PLACEHOLDER_MODE"))
+
+    @classmethod
+    def use_worker_dependencies(cls):
+        """Switch the sys.path and ensure the worker imports are loaded from
+        Worker's dependencies.
+
+        This will not affect already imported namespaces, but will clear out
+        the module cache and ensure the upcoming modules are loaded from
+        worker's dependency path.
+        """
+
+        # The following log line will not show up in core tools but should
+        # work in kusto since core tools only collects gRPC logs. This function
+        # is executed even before the gRPC logging channel is ready.
+        logger.info('Applying use_worker_dependencies:'
+                    ' worker_dependencies: %s,'
+                    ' customer_dependencies: %s,'
+                    ' working_directory: %s', cls.worker_deps_path,
+                    cls.cx_deps_path, cls.cx_working_dir)
+
+        cls._remove_from_sys_path(cls.cx_deps_path)
+        cls._remove_from_sys_path(cls.cx_working_dir)
+        cls._add_to_sys_path(cls.worker_deps_path, True)
+        logger.info('Start using worker dependencies %s. Sys.path: %s', cls.worker_deps_path, sys.path)
+
+    @classmethod
+    def reload_customer_libraries(cls, cx_working_dir: str = None):
+        """Reload the azure and google namespaces, including any modules in
+        those namespaces, such as azure-functions, grpcio, grpcio-tools etc.
+
+        Depending on PYTHON_ISOLATE_WORKER_DEPENDENCIES, the actual behavior
+        differs.
+
+        This is called only when placeholder mode is true. In the case of a
+        worker restart, this will not be called.
+
+        Parameters
+        ----------
+        cx_working_dir: str
+            The path which contains customer's project file (e.g. wwwroot).
+ """ + isolate_dependencies_setting = os.getenv(PYTHON_ISOLATE_WORKER_DEPENDENCIES) + if isolate_dependencies_setting is None: + isolate_dependencies = True + else: + isolate_dependencies = is_true_like(isolate_dependencies_setting) + + if isolate_dependencies: + cls.prioritize_customer_dependencies(cx_working_dir) + + @classmethod + def prioritize_customer_dependencies(cls, cx_working_dir=None): + """Switch the sys.path and ensure the customer's code import are loaded + from CX's deppendencies. + + This will not affect already imported namespaces, but will clear out + the module cache and ensure the upcoming modules are loaded from + customer's dependency path. + + As for Linux Consumption, this will only remove worker_deps_path, + but the customer's path will be loaded in function_environment_reload. + + The search order of a module name in customer's paths is: + 1. cx_deps_path + 2. worker_deps_path + 3. cx_working_dir + """ + # Try to get the latest customer's working directory + # cx_working_dir => cls.cx_working_dir => AzureWebJobsScriptRoot + working_directory: str = '' + if cx_working_dir: + working_directory: str = os.path.abspath(cx_working_dir) + if not working_directory: + working_directory = cls.cx_working_dir + if not working_directory: + working_directory = os.getenv(AZURE_WEBJOBS_SCRIPT_ROOT, '') + + # Try to get the latest customer's dependency path + cx_deps_path: str = cls._get_cx_deps_path() + + if not cx_deps_path: + cx_deps_path = cls.cx_deps_path + + logger.info( + 'Applying prioritize_customer_dependencies: ' + 'worker_dependencies_path: %s, customer_dependencies_path: %s, ' + 'working_directory: %s, Linux Consumption: %s, Placeholder: %s, sys.path: %s', + cls.worker_deps_path, cx_deps_path, working_directory, + DependencyManager.is_in_linux_consumption(), + is_envvar_true("WEBSITE_PLACEHOLDER_MODE"), sys.path) + + cls._remove_from_sys_path(cls.worker_deps_path) + cls._add_to_sys_path(cls.worker_deps_path, True) + 
cls._add_to_sys_path(cls.cx_deps_path, True)
+        cls._add_to_sys_path(working_directory, False)
+
+        logger.info(f'Finished prioritize_customer_dependencies: {sys.path}')
+
+    @classmethod
+    def _add_to_sys_path(cls, path: str, add_to_first: bool):
+        """This will ensure no duplicated paths are added into sys.path and
+        clear the importer cache. No action if path already exists in sys.path.
+
+        Parameters
+        ----------
+        path: str
+            The path that needs to be added into sys.path.
+            If the path is an empty string, no action will be taken.
+        add_to_first: bool
+            Should the path be added as the first entry (highest priority)
+        """
+        if path and path not in sys.path:
+            if add_to_first:
+                sys.path.insert(0, path)
+            else:
+                sys.path.append(path)
+
+            # Only clear path importer and sys.modules cache if path is not
+            # defined in sys.path
+            cls._clear_path_importer_cache_and_modules(path)
+
+    @classmethod
+    def _remove_from_sys_path(cls, path: str):
+        """This will remove path from sys.path and clear the importer cache.
+        No action if the path does not exist in sys.path.
+
+        Parameters
+        ----------
+        path: str
+            The path to be removed from sys.path.
+            If the path is an empty string, no action will be taken.
+        """
+        if path and path in sys.path:
+            # Remove all occurrences in sys.path
+            sys.path = list(filter(lambda p: p != path, sys.path))
+
+        # In case any part of worker initialization does sys.path.pop(),
+        # always do a cache clear in the path importer and sys.modules
+        cls._clear_path_importer_cache_and_modules(path)
+
+    @classmethod
+    def _clear_path_importer_cache_and_modules(cls, path: str):
+        """Removes path from sys.path_importer_cache and clears related
+        sys.modules cache. No action if the path is empty or there are no
+        entries in sys.path_importer_cache or sys.modules.
+
+        Parameters
+        ----------
+        path: str
+            The path to be removed from sys.path_importer_cache. All related
+            modules will be cleared out from sys.modules cache.
+            If the path is an empty string, no action will be taken.
+ """ + if path and path in sys.path_importer_cache: + sys.path_importer_cache.pop(path) + + if path: + cls._remove_module_cache(path) + + @staticmethod + def _get_cx_deps_path() -> str: + """Get the directory storing the customer's third-party libraries. + + Returns + ------- + str + Core Tools: path to customer's site packages + Linux Dedicated/Premium: path to customer's site packages + Linux Consumption: empty string + """ + prefix: Optional[str] = os.getenv(AZURE_WEBJOBS_SCRIPT_ROOT) + cx_paths: List[str] = [ + p for p in sys.path + if prefix and p.startswith(prefix) and ('site-packages' in p) + ] + # Return first or default of customer path + return (cx_paths or [''])[0] + + @staticmethod + def _get_cx_working_dir() -> str: + """Get the customer's working directory. + + Returns + ------- + str + Core Tools: AzureWebJobsScriptRoot env variable + Linux Dedicated/Premium: AzureWebJobsScriptRoot env variable + Linux Consumption: empty string + """ + return os.getenv(AZURE_WEBJOBS_SCRIPT_ROOT, '') + + @staticmethod + def _get_worker_deps_path() -> str: + """Get the worker dependency sys.path. This will always available + even in all skus. + + Returns + ------- + str + The worker packages path + """ + # 1. Try to parse the absolute path python/3.13/LINUX/X64 in sys.path + r = re.compile(r'.*python(\/|\\)\d+\.\d+(\/|\\)(WINDOWS|LINUX|OSX).*') + worker_deps_paths: List[str] = [p for p in sys.path if r.match(p)] + if worker_deps_paths: + return worker_deps_paths[0] + + # 2. If it fails to find one, try to find one from the parent path + # This is used for handling the CI/localdev environment + return os.path.abspath( + os.path.join(os.path.dirname(__file__), '..', '..') + ) + + @staticmethod + def _remove_module_cache(path: str): + """Remove module cache if the module is imported from specific path. + This will not impact builtin modules + + Parameters + ---------- + path: str + The module cache to be removed if it is imported from this path. 
+ """ + if not path: + return + + not_builtin = set(sys.modules.keys()) - set(sys.builtin_module_names) + + # Don't reload proxy_worker + to_be_cleared_from_cache = set([ + module_name for module_name in not_builtin + if not module_name.startswith('proxy_worker') + ]) + + for module_name in to_be_cleared_from_cache: + module = sys.modules.get(module_name) + if not isinstance(module, ModuleType): + continue + + # Module path can be actual file path or a pure namespace path. + # Both of these has the module path placed in __path__ property + # The property .__path__ can be None or does not exist in module + try: + module_paths = set(getattr(module, '__path__', None) or []) + if hasattr(module, '__file__') and module.__file__: + module_paths.add(module.__file__) + + if any([p for p in module_paths if p.startswith(path)]): + sys.modules.pop(module_name) + except Exception as e: + logger.warning( + 'Attempt to remove module cache for %s but failed with ' + '%s. Using the original module cache.', + module_name, e) \ No newline at end of file diff --git a/proxy_worker/version.py b/proxy_worker/version.py new file mode 100644 index 000000000..a56028d71 --- /dev/null +++ b/proxy_worker/version.py @@ -0,0 +1 @@ +VERSION="1.0.0" \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index c0b46e4b1..1f159484f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,15 +26,20 @@ classifiers = [ "Intended Audience :: Developers" ] dependencies = [ - "azure-functions==1.23.0b3", - "python-dateutil ~=2.9.0", + "azure-functions==1.23.0b1", + "python-dateutil~=2.9.0", "protobuf~=3.19.3; python_version == '3.7'", - "protobuf~=4.25.3; python_version >= '3.8'", + "protobuf~=4.25.3; python_version >= '3.8' and python_version < '3.13'", + "protobuf~=5.29.0; python_version >= '3.13'", "grpcio-tools~=1.43.0; python_version == '3.7'", - "grpcio-tools~=1.59.0; python_version >= '3.8'", + "grpcio-tools~=1.59.0; python_version >= '3.8' and python_version < '3.13'", + 
"grpcio-tools~=1.70.0; python_version >= '3.13'", "grpcio~=1.43.0; python_version == '3.7'", - "grpcio~=1.59.0; python_version >= '3.8'", - "azurefunctions-extensions-base; python_version >= '3.8'" + "grpcio ~=1.59.0; python_version >= '3.8' and python_version < '3.13'", + "grpcio~=1.70.0; python_version >= '3.13'", + "azurefunctions-extensions-base; python_version >= '3.8'", + "test-worker==1.0.0a38; python_version >= '3.13'", + "test-worker-v1==1.0.0a11; python_version >= '3.13'" ] [project.urls] @@ -45,7 +50,6 @@ Repository = "https://github.com/Azure/azure-functions-python-worker" dev = [ "azure-eventhub", # Used for EventHub E2E tests "azure-functions-durable", # Used for Durable E2E tests - "azure-monitor-opentelemetry; python_version >= '3.8'", # Used for Azure Monitor unit tests "flask", "fastapi~=0.103.2", "pydantic", @@ -56,7 +60,6 @@ dev = [ "requests==2.*", "coverage", "pytest-sugar", - "opentelemetry-api; python_version >= '3.8'", # Used for OpenTelemetry unit tests "pytest-cov", "pytest-xdist", "pytest-randomly", @@ -74,7 +77,7 @@ dev = [ "cryptography" ] test-http-v2 = [ - "azurefunctions-extensions-http-fastapi==1.0.0b2", + "azurefunctions-extensions-http-fastapi==1.0.0b1", "ujson", "orjson" ] @@ -83,7 +86,7 @@ test-deferred-bindings = [ ] [build-system] -requires = ["setuptools>=62", "wheel"] +requires = ["setuptools>=42", "wheel"] build-backend = "setuptools.build_meta" diff --git a/python/prodV4/worker.config.json b/python/prodV4/worker.config.json index 3e431ac4d..98f1e56db 100644 --- a/python/prodV4/worker.config.json +++ b/python/prodV4/worker.config.json @@ -3,16 +3,15 @@ "language":"python", "defaultRuntimeVersion":"3.11", "supportedOperatingSystems":["LINUX", "OSX", "WINDOWS"], - "supportedRuntimeVersions":["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"], + "supportedRuntimeVersions":["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"], "supportedArchitectures":["X64", "X86", "Arm64"], "extensions":[".py"], 
"defaultExecutablePath":"python", "defaultWorkerPath":"%FUNCTIONS_WORKER_RUNTIME_VERSION%/{os}/{architecture}/worker.py", - "workerIndexing": "true", - "arguments": ["-X no_debug_ranges"] + "workerIndexing": "true" }, "processOptions": { "initializationTimeout": "00:02:00", "environmentReloadTimeout": "00:02:00" } -} +} \ No newline at end of file diff --git a/python/proxyV4/worker.py b/python/proxyV4/worker.py new file mode 100644 index 000000000..dce5d51e6 --- /dev/null +++ b/python/proxyV4/worker.py @@ -0,0 +1,67 @@ +import os +import pathlib +import sys + + +PKGS_PATH = "/home/site/wwwroot/.python_packages" +PKGS = "lib/site-packages" + +# Azure environment variables +AZURE_WEBSITE_INSTANCE_ID = "WEBSITE_INSTANCE_ID" +AZURE_CONTAINER_NAME = "CONTAINER_NAME" +AZURE_WEBJOBS_SCRIPT_ROOT = "AzureWebJobsScriptRoot" + + + +def is_azure_environment(): + """Check if the function app is running on the cloud""" + return (AZURE_CONTAINER_NAME in os.environ + or AZURE_WEBSITE_INSTANCE_ID in os.environ) + + +def validate_python_version(): + minor_version = sys.version_info[1] + if not (13 <= minor_version < 14): + raise RuntimeError(f'Unsupported Python version: 3.{minor_version}') + + +def determine_user_pkg_paths(): + """This finds the user packages when function apps are running on the cloud + User packages are defined in: + /home/site/wwwroot/.python_packages/lib/site-packages + """ + usr_packages_path = [os.path.join(PKGS_PATH, PKGS)] + return usr_packages_path + + +def add_script_root_to_sys_path(): + """Append function project root to module finding sys.path""" + functions_script_root = os.getenv(AZURE_WEBJOBS_SCRIPT_ROOT) + if functions_script_root is not None: + sys.path.append(functions_script_root) + + +if __name__ == '__main__': + validate_python_version() + func_worker_dir = str(pathlib.Path(__file__).absolute().parent) + env = os.environ + + # Setting up python path for all environments to prioritize + # third-party user packages over worker packages in 
PYTHONPATH + user_pkg_paths = determine_user_pkg_paths() + joined_pkg_paths = os.pathsep.join(user_pkg_paths) + env['PYTHONPATH'] = f'{joined_pkg_paths}:{func_worker_dir}' + + project_root = os.path.abspath(os.path.dirname(__file__)) + if project_root not in sys.path: + sys.path.append(project_root) + + if is_azure_environment(): + os.execve(sys.executable, + [sys.executable, '-m', 'proxy_worker'] + + sys.argv[1:], + env) + else: + add_script_root_to_sys_path() + from proxy_worker import start_worker + start_worker.start() diff --git a/python/test/worker.config.json b/python/test/worker.config.json index f778e45f3..deb726a05 100644 --- a/python/test/worker.config.json +++ b/python/test/worker.config.json @@ -2,13 +2,12 @@ "description":{ "language":"python", "extensions":[".py"], - "defaultExecutablePath":"python", + "defaultExecutablePath":"C:\\Users\\victoriahall\\Documents\\repos\\azure.azure-functions-python-worker\\.venv\\Scripts\\python.exe", "defaultWorkerPath":"worker.py", - "workerIndexing": "true", - "arguments": ["-X no_debug_ranges"] + "workerIndexing": "true" }, "processOptions": { "initializationTimeout": "00:02:00", "environmentReloadTimeout": "00:02:00" } -} +} \ No newline at end of file diff --git a/python/test/worker.py b/python/test/worker.py index e2ef12d22..db66b6dea 100644 --- a/python/test/worker.py +++ b/python/test/worker.py @@ -1,19 +1,67 @@ -import sys import os -from azure_functions_worker import main +import pathlib +import sys +PKGS_PATH = "/home/site/wwwroot/.python_packages" +PKGS = "lib/site-packages" + # Azure environment variables +AZURE_WEBSITE_INSTANCE_ID = "WEBSITE_INSTANCE_ID" +AZURE_CONTAINER_NAME = "CONTAINER_NAME" AZURE_WEBJOBS_SCRIPT_ROOT = "AzureWebJobsScriptRoot" + +def is_azure_environment(): + """Check if the function app is running on the cloud""" + return (AZURE_CONTAINER_NAME in os.environ + or AZURE_WEBSITE_INSTANCE_ID in os.environ) + + +def validate_python_version(): + minor_version = sys.version_info[1] + if 
not (11 <= minor_version < 14): + raise RuntimeError(f'Unsupported Python version: 3.{minor_version}') + + +def determine_user_pkg_paths(): + """This finds the user packages when function apps are running on the cloud + User packages are defined in: + /home/site/wwwroot/.python_packages/lib/site-packages + """ + usr_packages_path = [os.path.join(PKGS_PATH, PKGS)] + return usr_packages_path + + def add_script_root_to_sys_path(): - '''Append function project root to module finding sys.path''' + """Append function project root to module finding sys.path""" functions_script_root = os.getenv(AZURE_WEBJOBS_SCRIPT_ROOT) if functions_script_root is not None: sys.path.append(functions_script_root) if __name__ == '__main__': - add_script_root_to_sys_path() - main.main() + validate_python_version() + func_worker_dir = str(pathlib.Path(__file__).absolute().parent) + env = os.environ + + # Setting up python path for all environments to prioritize + # third-party user packages over worker packages in PYTHONPATH + user_pkg_paths = determine_user_pkg_paths() + joined_pkg_paths = os.pathsep.join(user_pkg_paths) + env['PYTHONPATH'] = f'{joined_pkg_paths}:{func_worker_dir}' + + project_root = os.path.abspath(os.path.dirname(__file__)) + if project_root not in sys.path: + sys.path.append(project_root) + + if is_azure_environment(): + os.execve(sys.executable, + [sys.executable, '-m', 'proxy_worker'] + + sys.argv[1:], + env) + else: + add_script_root_to_sys_path() + from proxy_worker import start_worker + start_worker.start() diff --git a/tests/endtoend/test_blueprint_functions.py b/tests/endtoend/test_blueprint_functions.py index c421f583b..0c78b4289 100644 --- a/tests/endtoend/test_blueprint_functions.py +++ b/tests/endtoend/test_blueprint_functions.py @@ -2,6 +2,10 @@ # Licensed under the MIT License. 
from tests.utils import testutils +import os +import logging + +from unittest.mock import patch class TestFunctionInBluePrintOnly(testutils.WebHostTestCase): @@ -10,8 +14,10 @@ def get_script_dir(cls): return testutils.E2E_TESTS_FOLDER / 'blueprint_functions' / \ 'functions_in_blueprint_only' + @patch.dict(os.environ, {"PYAZURE_WEBHOST_DEBUG": '1'}) def test_function_in_blueprint_only(self): r = self.webhost.request('GET', 'default_template') + logging.info(f"R: {r}") self.assertTrue(r.ok) diff --git a/tests/extension_tests/deferred_bindings_tests/test_deferred_bindings.py b/tests/extension_tests/deferred_bindings_tests/test_deferred_bindings.py index c527cb680..1899f9e75 100644 --- a/tests/extension_tests/deferred_bindings_tests/test_deferred_bindings.py +++ b/tests/extension_tests/deferred_bindings_tests/test_deferred_bindings.py @@ -137,7 +137,7 @@ async def test_deferred_bindings_dual_enabled_log(self): "is only supported for 3.9+.") class TestDeferredBindingsHelpers(testutils.AsyncTestCase): - def test_mbd_deferred_bindings_enabled_decode(self): + def test_deferred_bindings_enabled_decode(self): binding = BlobClientConverter pb = protos.ParameterBinding(name='test', data=protos.TypedData( diff --git a/tests/extension_tests/http_v2_tests/test_http_v2.py b/tests/extension_tests/http_v2_tests/test_http_v2.py index 8c1d5b48e..07b204660 100644 --- a/tests/extension_tests/http_v2_tests/test_http_v2.py +++ b/tests/extension_tests/http_v2_tests/test_http_v2.py @@ -8,10 +8,12 @@ import requests from tests.utils import testutils -from azure_functions_worker.utils.common import is_envvar_true +#from azure_functions_worker.utils.common import is_envvar_true +from proxy_worker.utils.common import is_envvar_true from tests.utils.constants import CONSUMPTION_DOCKER_TEST, DEDICATED_DOCKER_TEST -from azure_functions_worker.constants import PYTHON_ENABLE_INIT_INDEXING +#from azure_functions_worker.constants import PYTHON_ENABLE_INIT_INDEXING +from proxy_worker.utils.constants 
import PYTHON_ENABLE_INIT_INDEXING REQUEST_TIMEOUT_SEC = 5 diff --git a/tests/test_setup.py b/tests/test_setup.py index 6f9c3edd5..906642661 100644 --- a/tests/test_setup.py +++ b/tests/test_setup.py @@ -38,6 +38,7 @@ WEBHOST_GITHUB_API = "https://api.github.com/repos/Azure/azure-functions-host" WEBHOST_GIT_REPO = "https://github.com/Azure/azure-functions-host/archive" WEBHOST_TAG_PREFIX = "v4." +WORKER_DIR = "azure_functions_worker" if sys.version_info.minor < 11 else "proxy_worker" def get_webhost_version() -> str: @@ -125,7 +126,7 @@ def compile_webhost(webhost_dir): except subprocess.CalledProcessError: print( f"Failed to compile webhost in {webhost_dir}. " - "A compatible .NET Core SDK is required to build the solution. " + ".NET Core SDK is required to build the solution. " "Please visit https://aka.ms/dotnet-download", file=sys.stderr, ) @@ -134,10 +135,10 @@ def compile_webhost(webhost_dir): def gen_grpc(): - proto_root_dir = ROOT_DIR / "azure_functions_worker" / "protos" + proto_root_dir = ROOT_DIR / WORKER_DIR / "protos" proto_src_dir = proto_root_dir / "_src" / "src" / "proto" staging_root_dir = BUILD_DIR / "protos" - staging_dir = staging_root_dir / "azure_functions_worker" / "protos" + staging_dir = staging_root_dir / WORKER_DIR / "protos" built_protos_dir = BUILD_DIR / "built_protos" if os.path.exists(BUILD_DIR): @@ -159,12 +160,12 @@ def gen_grpc(): "-m", "grpc_tools.protoc", "-I", - os.sep.join(("azure_functions_worker", "protos")), + os.sep.join((WORKER_DIR, "protos")), "--python_out", str(built_protos_dir), "--grpc_python_out", str(built_protos_dir), - os.sep.join(("azure_functions_worker", "protos", proto)), + os.sep.join((WORKER_DIR, "protos", proto)), ], check=True, stdout=sys.stdout, @@ -197,7 +198,7 @@ def make_absolute_imports(compiled_files): # from azure_functions_worker.protos import xxx_pb2 as.. 
p1 = re.sub( r"\nimport (.*?_pb2)", - r"\nfrom azure_functions_worker.protos import \g<1>", + fr"\nfrom {WORKER_DIR}.protos import \g<1>", content, ) # Convert lines of the form: @@ -205,7 +206,7 @@ def make_absolute_imports(compiled_files): # from azure_functions_worker.protos.identity import xxx_pb2.. p2 = re.sub( r"from ([a-z]*) (import.*_pb2)", - r"from azure_functions_worker.protos.\g<1> \g<2>", + fr"from {WORKER_DIR}.protos.\g<1> \g<2>", p1, ) f.write(p2) diff --git a/tests/unittests/test_opentelemetry.py b/tests/unittests/test_opentelemetry.py index 3f560382e..b26334bdf 100644 --- a/tests/unittests/test_opentelemetry.py +++ b/tests/unittests/test_opentelemetry.py @@ -1,9 +1,6 @@ import asyncio import os -import sys import unittest - -from unittest import skipIf from unittest.mock import MagicMock, patch from tests.unittests.test_dispatcher import FUNCTION_APP_DIRECTORY @@ -12,8 +9,6 @@ from azure_functions_worker import protos -@skipIf(sys.version_info.minor == 7, - "Packages are only supported for 3.8+") class TestOpenTelemetry(unittest.TestCase): def setUp(self): @@ -28,9 +23,8 @@ def test_update_opentelemetry_status_import_error(self): # Patch the built-in import mechanism with patch('builtins.__import__', side_effect=ImportError): self.dispatcher.update_opentelemetry_status() - # Verify that context variables are None due to ImportError - self.assertIsNone(self.dispatcher._context_api) - self.assertIsNone(self.dispatcher._trace_context_propagator) + # Verify that otel_libs_available is set to False due to ImportError + self.assertFalse(self.dispatcher._azure_monitor_available) @patch('builtins.__import__') def test_update_opentelemetry_status_success( @@ -60,12 +54,12 @@ def test_initialize_azure_monitor_import_error( with patch('builtins.__import__', side_effect=ImportError): self.dispatcher.initialize_azure_monitor() mock_update_ot.assert_called_once() - # Verify that azure_monitor_available is set to False due to ImportError + # Verify that 
otel_libs_available is set to False due to ImportError self.assertFalse(self.dispatcher._azure_monitor_available) - @patch.dict(os.environ, {'PYTHON_APPLICATIONINSIGHTS_ENABLE_TELEMETRY': 'true'}) + @patch.dict(os.environ, {'PYTHON_ENABLE_OPENTELEMETRY': 'true'}) @patch('builtins.__import__') - def test_init_request_initialize_azure_monitor_enabled_app_setting( + def test_init_request_otel_capability_enabled_app_setting( self, mock_imports, ): @@ -84,45 +78,13 @@ def test_init_request_initialize_azure_monitor_enabled_app_setting( self.assertEqual(init_response.worker_init_response.result.status, protos.StatusResult.Success) - # Verify azure_monitor_available is set to True - self.assertTrue(self.dispatcher._azure_monitor_available) # Verify that WorkerOpenTelemetryEnabled capability is set to _TRUE capabilities = init_response.worker_init_response.capabilities self.assertIn("WorkerOpenTelemetryEnabled", capabilities) self.assertEqual(capabilities["WorkerOpenTelemetryEnabled"], "true") @patch("azure_functions_worker.dispatcher.Dispatcher.initialize_azure_monitor") - def test_init_request_initialize_azure_monitor_default_app_setting( - self, - mock_initialize_azmon, - ): - - init_request = protos.StreamingMessage( - worker_init_request=protos.WorkerInitRequest( - host_version="2.3.4", - function_app_directory=str(FUNCTION_APP_DIRECTORY) - ) - ) - - init_response = self.loop.run_until_complete( - self.dispatcher._handle__worker_init_request(init_request)) - - self.assertEqual(init_response.worker_init_response.result.status, - protos.StatusResult.Success) - - # Azure monitor initialized not called - # Since default behavior is not enabled - mock_initialize_azmon.assert_not_called() - - # Verify azure_monitor_available is set to False - self.assertFalse(self.dispatcher._azure_monitor_available) - # Verify that WorkerOpenTelemetryEnabled capability is not set - capabilities = init_response.worker_init_response.capabilities - 
self.assertNotIn("WorkerOpenTelemetryEnabled", capabilities) - - @patch.dict(os.environ, {'PYTHON_APPLICATIONINSIGHTS_ENABLE_TELEMETRY': 'false'}) - @patch("azure_functions_worker.dispatcher.Dispatcher.initialize_azure_monitor") - def test_init_request_initialize_azure_monitor_disabled_app_setting( + def test_init_request_otel_capability_disabled_app_setting( self, mock_initialize_azmon, ): @@ -143,81 +105,6 @@ def test_init_request_initialize_azure_monitor_disabled_app_setting( # Azure monitor initialized not called mock_initialize_azmon.assert_not_called() - # Verify azure_monitor_available is set to False - self.assertFalse(self.dispatcher._azure_monitor_available) - # Verify that WorkerOpenTelemetryEnabled capability is not set - capabilities = init_response.worker_init_response.capabilities - self.assertNotIn("WorkerOpenTelemetryEnabled", capabilities) - - @patch.dict(os.environ, {'PYTHON_ENABLE_OPENTELEMETRY': 'true'}) - def test_init_request_enable_opentelemetry_enabled_app_setting( - self, - ): - - init_request = protos.StreamingMessage( - worker_init_request=protos.WorkerInitRequest( - host_version="2.3.4", - function_app_directory=str(FUNCTION_APP_DIRECTORY) - ) - ) - - init_response = self.loop.run_until_complete( - self.dispatcher._handle__worker_init_request(init_request)) - - self.assertEqual(init_response.worker_init_response.result.status, - protos.StatusResult.Success) - - # Verify otel_libs_available is set to True - self.assertTrue(self.dispatcher._otel_libs_available) - # Verify that WorkerOpenTelemetryEnabled capability is set to _TRUE - capabilities = init_response.worker_init_response.capabilities - self.assertIn("WorkerOpenTelemetryEnabled", capabilities) - self.assertEqual(capabilities["WorkerOpenTelemetryEnabled"], "true") - - @patch.dict(os.environ, {'PYTHON_ENABLE_OPENTELEMETRY': 'false'}) - def test_init_request_enable_opentelemetry_default_app_setting( - self, - ): - - init_request = protos.StreamingMessage( - 
worker_init_request=protos.WorkerInitRequest( - host_version="2.3.4", - function_app_directory=str(FUNCTION_APP_DIRECTORY) - ) - ) - - init_response = self.loop.run_until_complete( - self.dispatcher._handle__worker_init_request(init_request)) - - self.assertEqual(init_response.worker_init_response.result.status, - protos.StatusResult.Success) - - # Verify otel_libs_available is set to False by default - self.assertFalse(self.dispatcher._otel_libs_available) - # Verify that WorkerOpenTelemetryEnabled capability is not set - capabilities = init_response.worker_init_response.capabilities - self.assertNotIn("WorkerOpenTelemetryEnabled", capabilities) - - @patch.dict(os.environ, {'PYTHON_APPLICATIONINSIGHTS_ENABLE_TELEMETRY': 'false'}) - def test_init_request_enable_azure_monitor_disabled_app_setting( - self, - ): - - init_request = protos.StreamingMessage( - worker_init_request=protos.WorkerInitRequest( - host_version="2.3.4", - function_app_directory=str(FUNCTION_APP_DIRECTORY) - ) - ) - - init_response = self.loop.run_until_complete( - self.dispatcher._handle__worker_init_request(init_request)) - - self.assertEqual(init_response.worker_init_response.result.status, - protos.StatusResult.Success) - - # Verify otel_libs_available is set to False by default - self.assertFalse(self.dispatcher._otel_libs_available) # Verify that WorkerOpenTelemetryEnabled capability is not set capabilities = init_response.worker_init_response.capabilities self.assertNotIn("WorkerOpenTelemetryEnabled", capabilities) diff --git a/tests/unittests/test_types.py b/tests/unittests/test_types.py index 406510c22..963f26914 100644 --- a/tests/unittests/test_types.py +++ b/tests/unittests/test_types.py @@ -194,26 +194,3 @@ def test_model_binding_data_td_ok(self): mbd_datum = datumdef.Datum.from_typed_data(mock_mbd) self.assertEqual(mbd_datum.type, 'model_binding_data') - - def test_collection_model_binding_data_datum_ok(self): - sample_mbd = MockMBD(version="1.0", - source="AzureStorageBlobs", - 
content_type="application/json", - content="{\"Connection\":\"python-worker-tests\"," - "\"ContainerName\":\"test-blob\"," - "\"BlobName\":\"test.txt\"}") - sample_cmbd = [sample_mbd, sample_mbd] - - datum: bind_meta.Datum = bind_meta.Datum(value=sample_cmbd, - type='collection_model_binding_data') - - self.assertEqual(datum.value, sample_cmbd) - self.assertEqual(datum.type, "collection_model_binding_data") - - def test_collection_model_binding_data_td_ok(self): - mock_cmbd = protos.TypedData( - collection_model_binding_data={'model_binding_data': [{'version': '1.0'}]} - ) - cmbd_datum = datumdef.Datum.from_typed_data(mock_cmbd) - - self.assertEqual(cmbd_datum.type, 'collection_model_binding_data') diff --git a/tests/utils/testutils.py b/tests/utils/testutils.py index c04b134c5..98368c963 100644 --- a/tests/utils/testutils.py +++ b/tests/utils/testutils.py @@ -50,18 +50,22 @@ WebHostDedicated, ) -from azure_functions_worker import dispatcher, protos -from azure_functions_worker.bindings.shared_memory_data_transfer import ( - FileAccessorFactory, -) -from azure_functions_worker.bindings.shared_memory_data_transfer import ( - SharedMemoryConstants as consts, -) -from azure_functions_worker.constants import ( - FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED, - UNIX_SHARED_MEMORY_DIRECTORIES, -) -from azure_functions_worker.utils.common import get_app_setting, is_envvar_true +if sys.version_info.minor < 11: + from azure_functions_worker import dispatcher, protos +else: + from proxy_worker import dispatcher, protos + +# from azure_functions_worker.bindings.shared_memory_data_transfer import ( +# FileAccessorFactory, +# ) +# from azure_functions_worker.bindings.shared_memory_data_transfer import ( +# SharedMemoryConstants as consts, +# ) +# from azure_functions_worker.constants import ( +# FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED, +# UNIX_SHARED_MEMORY_DIRECTORIES, +# ) +# from azure_functions_worker.utils.common import get_app_setting, is_envvar_true 
TESTS_ROOT = PROJECT_ROOT / 'tests' E2E_TESTS_FOLDER = pathlib.Path('endtoend') @@ -115,6 +119,19 @@ } """ +def is_true_like(setting: str) -> bool: + if setting is None: + return False + + return setting.lower().strip() in {'1', 'true', 't', 'yes', 'y'} + + +def is_envvar_true(env_key: str) -> bool: + if os.getenv(env_key) is None: + return False + + return is_true_like(os.environ[env_key]) + class AsyncTestCaseMeta(type(unittest.TestCase)): @@ -157,7 +174,7 @@ def wrapper(self, *args, __meth__=test_case, __check_log__=check_log_case, **kwargs): if (__check_log__ is not None and callable(__check_log__) - and not is_envvar_true(PYAZURE_WEBHOST_DEBUG)): + and not True): # Check logging output for unit test scenarios result = self._run_test(__meth__, *args, **kwargs) @@ -231,7 +248,7 @@ def setUpClass(cls): docker_tests_enabled, sku = cls.docker_tests_enabled() - cls.host_stdout = None if is_envvar_true(PYAZURE_WEBHOST_DEBUG) \ + cls.host_stdout = None if True \ else tempfile.NamedTemporaryFile('w+t') try: @@ -247,13 +264,17 @@ def setUpClass(cls): cls.webhost = \ WebHostDedicated(docker_configs).spawn_container() else: + cls.host_stdout_logger.info("Starting to setup function app") _setup_func_app(TESTS_ROOT / script_dir, is_unit_test) + cls.host_stdout_logger.info("Finished setup function app") try: + cls.host_stdout_logger.info("Starting webhost") cls.webhost = start_webhost(script_dir=script_dir, stdout=cls.host_stdout) except Exception: raise + time.sleep(5) if not cls.webhost.is_healthy() and cls.host_stdout is not None: cls.host_out = cls.host_stdout.read() if cls.host_out is not None and len(cls.host_out) > 0: @@ -294,6 +315,7 @@ def tearDownClass(cls): _teardown_func_app(TESTS_ROOT / script_dir) def _run_test(self, test, *args, **kwargs): + self.host_stdout_logger.info(f"Starting to run test. 
Test: {test}, Args: {args}, Kwargs: {kwargs}") if self.host_stdout is None: test(self, *args, **kwargs) else: @@ -321,124 +343,123 @@ def _run_test(self, test, *args, **kwargs): raise test_exception -class SharedMemoryTestCase(unittest.TestCase): - """ - For tests involving shared memory data transfer usage. - """ - - def setUp(self): - self.was_shmem_env_true = is_envvar_true( - FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED) - os.environ.update( - {FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED: '1'}) - - os_name = platform.system() - if os_name == 'Darwin': - # If an existing AppSetting is specified, save it so it can be - # restored later - self.was_shmem_dirs = get_app_setting( - UNIX_SHARED_MEMORY_DIRECTORIES - ) - self._setUpDarwin() - elif os_name == 'Linux': - self._setUpLinux() - self.file_accessor = FileAccessorFactory.create_file_accessor() - - def tearDown(self): - os_name = platform.system() - if os_name == 'Darwin': - self._tearDownDarwin() - if self.was_shmem_dirs is not None: - # If an AppSetting was set before the tests ran, restore it back - os.environ.update( - {UNIX_SHARED_MEMORY_DIRECTORIES: self.was_shmem_dirs}) - elif os_name == 'Linux': - self._tearDownLinux() - - if not self.was_shmem_env_true: - os.environ.update( - {FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED: '0'}) - - def get_new_mem_map_name(self): - return str(uuid.uuid4()) - - def get_random_bytes(self, num_bytes): - return bytearray(random.getrandbits(8) for _ in range(num_bytes)) - - def get_random_string(self, num_chars): - return ''.join(random.choices(string.ascii_uppercase + string.digits, - k=num_chars)) - - def is_valid_uuid(self, uuid_to_test: str, version: int = 4) -> bool: - """ - Check if uuid_to_test is a valid UUID. 
- Reference: https://stackoverflow.com/a/33245493/3132415 - """ - try: - uuid_obj = uuid.UUID(uuid_to_test, version=version) - except ValueError: - return False - return str(uuid_obj) == uuid_to_test - - def _createSharedMemoryDirectories(self, directories): - for temp_dir in directories: - temp_dir_path = os.path.join(temp_dir, consts.UNIX_TEMP_DIR_SUFFIX) - if not os.path.exists(temp_dir_path): - os.makedirs(temp_dir_path) - - def _deleteSharedMemoryDirectories(self, directories): - for temp_dir in directories: - temp_dir_path = os.path.join(temp_dir, consts.UNIX_TEMP_DIR_SUFFIX) - shutil.rmtree(temp_dir_path) - - def _setUpLinux(self): - self._createSharedMemoryDirectories(consts.UNIX_TEMP_DIRS) - - def _tearDownLinux(self): - self._deleteSharedMemoryDirectories(consts.UNIX_TEMP_DIRS) - - def _setUpDarwin(self): - """ - Create a RAM disk on macOS. - Ref: https://stackoverflow.com/a/2033417/3132415 - """ - size_in_mb = consts.MAX_BYTES_FOR_SHARED_MEM_TRANSFER / (1024 * 1024) - size = 2048 * size_in_mb - # The following command returns the name of the created disk - cmd = ['hdiutil', 'attach', '-nomount', f'ram://{size}'] - result = subprocess.run(cmd, stdout=subprocess.PIPE) - if result.returncode != 0: - raise IOError(f'Cannot create ram disk with command: {cmd} - ' - f'{result.stdout} - {result.stderr}') - disk_name = result.stdout.strip().decode() - # We create a volume on the disk created above and mount it - volume_name = 'shm' - cmd = ['diskutil', 'eraseVolume', 'HFS+', volume_name, disk_name] - result = subprocess.run(cmd, stdout=subprocess.PIPE) - if result.returncode != 0: - raise IOError(f'Cannot create volume with command: {cmd} - ' - f'{result.stdout} - {result.stderr}') - directory = f'/Volumes/{volume_name}' - self.created_directories = [directory] - # Create directories in the volume for shared memory maps - self._createSharedMemoryDirectories(self.created_directories) - # Override the AppSetting for the duration of this test so the - # 
FileAccessorUnix can use these directories for creating memory maps - os.environ.update( - {UNIX_SHARED_MEMORY_DIRECTORIES: ','.join(self.created_directories)} - ) - - def _tearDownDarwin(self): - # Delete the directories containing shared memory maps - self._deleteSharedMemoryDirectories(self.created_directories) - # Unmount the volume used for shared memory maps - volume_name = 'shm' - cmd = f"find /Volumes -type d -name '{volume_name}*' -print0 " \ - "| xargs -0 umount -f" - result = subprocess.run(cmd, stdout=subprocess.PIPE, shell=True) - if result.returncode != 0: - raise IOError(f'Cannot delete volume with command: {cmd} - ' - f'{result.stdout} - {result.stderr}') +# class SharedMemoryTestCase(unittest.TestCase): +# """ +# For tests involving shared memory data transfer usage. +# """ + +# def setUp(self): +# self.was_shmem_env_true = is_envvar_true( +# FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED) +# os.environ.update( +# {FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED: '1'}) + +# os_name = platform.system() +# if os_name == 'Darwin': +# # If an existing AppSetting is specified, save it so it can be +# # restored later +# self.was_shmem_dirs = get_app_setting( +# UNIX_SHARED_MEMORY_DIRECTORIES +# ) +# self._setUpDarwin() +# elif os_name == 'Linux': +# self._setUpLinux() + +# def tearDown(self): +# os_name = platform.system() +# if os_name == 'Darwin': +# self._tearDownDarwin() +# if self.was_shmem_dirs is not None: +# # If an AppSetting was set before the tests ran, restore it back +# os.environ.update( +# {UNIX_SHARED_MEMORY_DIRECTORIES: self.was_shmem_dirs}) +# elif os_name == 'Linux': +# self._tearDownLinux() + +# if not self.was_shmem_env_true: +# os.environ.update( +# {FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED: '0'}) + +# def get_new_mem_map_name(self): +# return str(uuid.uuid4()) + +# def get_random_bytes(self, num_bytes): +# return bytearray(random.getrandbits(8) for _ in range(num_bytes)) + +# def get_random_string(self, 
num_chars): +# return ''.join(random.choices(string.ascii_uppercase + string.digits, +# k=num_chars)) + +# def is_valid_uuid(self, uuid_to_test: str, version: int = 4) -> bool: +# """ +# Check if uuid_to_test is a valid UUID. +# Reference: https://stackoverflow.com/a/33245493/3132415 +# """ +# try: +# uuid_obj = uuid.UUID(uuid_to_test, version=version) +# except ValueError: +# return False +# return str(uuid_obj) == uuid_to_test + +# def _createSharedMemoryDirectories(self, directories): +# for temp_dir in directories: +# temp_dir_path = os.path.join(temp_dir, consts.UNIX_TEMP_DIR_SUFFIX) +# if not os.path.exists(temp_dir_path): +# os.makedirs(temp_dir_path) + +# def _deleteSharedMemoryDirectories(self, directories): +# for temp_dir in directories: +# temp_dir_path = os.path.join(temp_dir, consts.UNIX_TEMP_DIR_SUFFIX) +# shutil.rmtree(temp_dir_path) + +# def _setUpLinux(self): +# self._createSharedMemoryDirectories(consts.UNIX_TEMP_DIRS) + +# def _tearDownLinux(self): +# self._deleteSharedMemoryDirectories(consts.UNIX_TEMP_DIRS) + +# def _setUpDarwin(self): +# """ +# Create a RAM disk on macOS. 
+# Ref: https://stackoverflow.com/a/2033417/3132415 +# """ +# size_in_mb = consts.MAX_BYTES_FOR_SHARED_MEM_TRANSFER / (1024 * 1024) +# size = 2048 * size_in_mb +# # The following command returns the name of the created disk +# cmd = ['hdiutil', 'attach', '-nomount', f'ram://{size}'] +# result = subprocess.run(cmd, stdout=subprocess.PIPE) +# if result.returncode != 0: +# raise IOError(f'Cannot create ram disk with command: {cmd} - ' +# f'{result.stdout} - {result.stderr}') +# disk_name = result.stdout.strip().decode() +# # We create a volume on the disk created above and mount it +# volume_name = 'shm' +# cmd = ['diskutil', 'eraseVolume', 'HFS+', volume_name, disk_name] +# result = subprocess.run(cmd, stdout=subprocess.PIPE) +# if result.returncode != 0: +# raise IOError(f'Cannot create volume with command: {cmd} - ' +# f'{result.stdout} - {result.stderr}') +# directory = f'/Volumes/{volume_name}' +# self.created_directories = [directory] +# # Create directories in the volume for shared memory maps +# self._createSharedMemoryDirectories(self.created_directories) +# # Override the AppSetting for the duration of this test so the +# # FileAccessorUnix can use these directories for creating memory maps +# os.environ.update( +# {UNIX_SHARED_MEMORY_DIRECTORIES: ','.join(self.created_directories)} +# ) + +# def _tearDownDarwin(self): +# # Delete the directories containing shared memory maps +# self._deleteSharedMemoryDirectories(self.created_directories) +# # Unmount the volume used for shared memory maps +# volume_name = 'shm' +# cmd = f"find /Volumes -type d -name '{volume_name}*' -print0 " \ +# "| xargs -0 umount -f" +# result = subprocess.run(cmd, stdout=subprocess.PIPE, shell=True) +# if result.returncode != 0: +# raise IOError(f'Cannot delete volume with command: {cmd} - ' +# f'{result.stdout} - {result.stderr}') class _MockWebHostServicer(protos.FunctionRpcServicer): @@ -799,6 +820,7 @@ def __init__(self, proc, addr): self._addr = addr def is_healthy(self): + 
time.sleep(3) r = self.request('GET', '', no_prefix=True) return 200 <= r.status_code < 300 @@ -967,7 +989,7 @@ def popen_webhost(*, stdout, stderr, script_root=FUNCS_PATH, port=None): def start_webhost(*, script_dir=None, stdout=None): script_root = TESTS_ROOT / script_dir if script_dir else FUNCS_PATH if stdout is None: - if is_envvar_true(PYAZURE_WEBHOST_DEBUG): + if True: stdout = sys.stdout else: stdout = subprocess.DEVNULL From 91494ba6e806fb9f3cfdf66bb5724901ff9b82e9 Mon Sep 17 00:00:00 2001 From: hallvictoria <59299039+hallvictoria@users.noreply.github.com> Date: Wed, 9 Apr 2025 13:50:35 -0500 Subject: [PATCH 04/11] Update Python SDK Version to 1.23.0 (#1663) Co-authored-by: AzureFunctionsPython --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index c0b46e4b1..8182e1106 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,7 +26,7 @@ classifiers = [ "Intended Audience :: Developers" ] dependencies = [ - "azure-functions==1.23.0b3", + "azure-functions==1.23.0", "python-dateutil ~=2.9.0", "protobuf~=3.19.3; python_version == '3.7'", "protobuf~=4.25.3; python_version >= '3.8'", From 15ab86ed0418d42eb7fd5b313552690ddcd4f6db Mon Sep 17 00:00:00 2001 From: hallvictoria Date: Wed, 9 Apr 2025 15:19:15 -0500 Subject: [PATCH 05/11] merges from ADO --- eng/ci/emulator-tests.yml | 1 - eng/ci/official-build.yml | 52 +-- eng/ci/worker-release.yml | 33 -- eng/scripts/test-setup.sh | 2 +- eng/templates/jobs/ci-emulator-tests.yml | 290 +++---------- eng/templates/jobs/ci-unit-tests.yml | 2 + .../official/jobs/build-artifacts.yml | 178 +++++++- eng/templates/official/jobs/ci-e2e-tests.yml | 56 ++- .../official/jobs/publish-release.yml | 386 ------------------ ...oft.Azure.Functions.V4.PythonWorker.nuspec | 34 +- python/test/worker.config.json | 2 +- python/test/worker.py | 57 +-- setup.cfg | 6 + tests/endtoend/test_blueprint_functions.py | 6 - .../http_v2_tests/test_http_v2.py | 11 +- tests/test_setup.py | 
2 +- tests/unittests/test_code_quality.py | 2 +- tests/utils/testutils.py | 288 ++++++------- 18 files changed, 520 insertions(+), 888 deletions(-) delete mode 100644 eng/ci/worker-release.yml delete mode 100644 eng/templates/official/jobs/publish-release.yml diff --git a/eng/ci/emulator-tests.yml b/eng/ci/emulator-tests.yml index c6ee3a318..b2e789c16 100644 --- a/eng/ci/emulator-tests.yml +++ b/eng/ci/emulator-tests.yml @@ -34,7 +34,6 @@ variables: - template: /ci/variables/build.yml@eng - template: /ci/variables/cfs.yml@eng - template: /eng/templates/utils/variables.yml@self - - template: /eng/templates/utils/official-variables.yml@self extends: template: v1/1ES.Unofficial.PipelineTemplate.yml@1es diff --git a/eng/ci/official-build.yml b/eng/ci/official-build.yml index 34d8b6c07..568fdf16b 100644 --- a/eng/ci/official-build.yml +++ b/eng/ci/official-build.yml @@ -50,29 +50,29 @@ extends: jobs: - template: /eng/templates/official/jobs/build-artifacts.yml@self - # - stage: RunE2ETests - # dependsOn: Build - # jobs: - # - template: /eng/templates/official/jobs/ci-e2e-tests.yml@self - # - stage: RunEmulatorTests - # dependsOn: Build - # jobs: - # - template: /eng/templates/jobs/ci-emulator-tests.yml@self - # parameters: - # PoolName: 1es-pool-azfunc - # - stage: RunUnitTests - # dependsOn: Build - # jobs: - # - template: /eng/templates/jobs/ci-unit-tests.yml@self - # - stage: RunDockerConsumptionTests - # dependsOn: Build - # jobs: - # - template: /eng/templates/official/jobs/ci-docker-consumption-tests.yml@self - # - stage: RunDockerDedicatedTests - # dependsOn: Build - # jobs: - # - template: /eng/templates/official/jobs/ci-docker-dedicated-tests.yml@self - # - stage: RunLinuxConsumptionTests - # dependsOn: Build - # jobs: - # - template: /eng/templates/official/jobs/ci-lc-tests.yml@self + - stage: RunE2ETests + dependsOn: Build + jobs: + - template: /eng/templates/official/jobs/ci-e2e-tests.yml@self + - stage: RunEmulatorTests + dependsOn: Build + jobs: + - 
template: /eng/templates/jobs/ci-emulator-tests.yml@self + parameters: + PoolName: 1es-pool-azfunc + - stage: RunUnitTests + dependsOn: Build + jobs: + - template: /eng/templates/jobs/ci-unit-tests.yml@self + - stage: RunDockerConsumptionTests + dependsOn: Build + jobs: + - template: /eng/templates/official/jobs/ci-docker-consumption-tests.yml@self + - stage: RunDockerDedicatedTests + dependsOn: Build + jobs: + - template: /eng/templates/official/jobs/ci-docker-dedicated-tests.yml@self + - stage: RunLinuxConsumptionTests + dependsOn: Build + jobs: + - template: /eng/templates/official/jobs/ci-lc-tests.yml@self diff --git a/eng/ci/worker-release.yml b/eng/ci/worker-release.yml deleted file mode 100644 index 987569c61..000000000 --- a/eng/ci/worker-release.yml +++ /dev/null @@ -1,33 +0,0 @@ -pr: none - -resources: - repositories: - - repository: 1es - type: git - name: 1ESPipelineTemplates/1ESPipelineTemplates - ref: refs/tags/release - - repository: eng - type: git - name: engineering - ref: refs/tags/release - -variables: - - name: codeql.excludePathPatterns - value: deps/,build/ - -extends: - template: v1/1ES.Official.PipelineTemplate.yml@1es - parameters: - pool: - name: 1es-pool-azfunc - image: 1es-windows-2022 - os: windows - sdl: - codeSignValidation: - enabled: true - break: true - - stages: - - stage: Release - jobs: - - template: /eng/templates/official/jobs/publish-release.yml@self diff --git a/eng/scripts/test-setup.sh b/eng/scripts/test-setup.sh index cac37c96a..d062021dc 100644 --- a/eng/scripts/test-setup.sh +++ b/eng/scripts/test-setup.sh @@ -2,5 +2,5 @@ cd tests python -m invoke -c test_setup build-protos -python -m invoke -c test_setup webhost --branch-name=gaaguiar/test_py_worker +python -m invoke -c test_setup webhost --branch-name=dev python -m invoke -c test_setup extensions \ No newline at end of file diff --git a/eng/templates/jobs/ci-emulator-tests.yml b/eng/templates/jobs/ci-emulator-tests.yml index 83a9a58db..968585017 100644 --- 
a/eng/templates/jobs/ci-emulator-tests.yml +++ b/eng/templates/jobs/ci-emulator-tests.yml @@ -1,23 +1,28 @@ jobs: - job: "TestPython" - displayName: "Run Python E2E Tests" + displayName: "Run Python Emulator Tests" pool: - name: 1es-pool-azfunc + name: ${{ parameters.PoolName }} image: 1es-ubuntu-22.04 os: linux strategy: matrix: + Python37: + PYTHON_VERSION: '3.7' + Python38: + PYTHON_VERSION: '3.8' + Python39: + PYTHON_VERSION: '3.9' + Python310: + PYTHON_VERSION: '3.10' + Python311: + PYTHON_VERSION: '3.11' Python312: + PYTHON_VERSION: '3.12' + Python313: PYTHON_VERSION: '3.13' - STORAGE_CONNECTION: $(LinuxStorageConnectionString312) - COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString312) - EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString312) - SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString312) - SQL_CONNECTION: $(LinuxSqlConnectionString312) - EVENTGRID_URI: $(LinuxEventGridTopicUriString312) - EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString312) steps: - task: UsePythonVersion@0 inputs: @@ -29,228 +34,69 @@ jobs: - bash: | chmod +x eng/scripts/install-dependencies.sh chmod +x eng/scripts/test-setup.sh - + eng/scripts/install-dependencies.sh $(PYTHON_VERSION) eng/scripts/test-setup.sh displayName: 'Install dependencies and the worker' condition: and(eq(variables.isSdkRelease, false), eq(variables.isExtensionsRelease, false), eq(variables['USETESTPYTHONSDK'], false), eq(variables['USETESTPYTHONEXTENSIONS'], false)) + - task: DownloadPipelineArtifact@2 + displayName: 'Download Python SDK Artifact' + inputs: + buildType: specific + artifactName: 'azure-functions' + project: 'internal' + definition: 679 + buildVersionToDownload: latest + targetPath: '$(Pipeline.Workspace)/PythonSdkArtifact' + condition: or(eq(variables.isSdkRelease, true), eq(variables['USETESTPYTHONSDK'], true)) + - bash: | + chmod +x eng/scripts/test-sdk.sh + chmod +x eng/scripts/test-setup.sh + + eng/scripts/test-sdk.sh $(Pipeline.Workspace) $(PYTHON_VERSION) + 
eng/scripts/test-setup.sh + displayName: 'Install test python sdk, dependencies and the worker' + condition: or(eq(variables.isSdkRelease, true), eq(variables['USETESTPYTHONSDK'], true)) + - task: DownloadPipelineArtifact@2 + displayName: 'Download Python Extension Artifact' + inputs: + buildType: specific + artifactName: $(PYTHONEXTENSIONNAME) + project: 'internal' + definition: 798 + buildVersionToDownload: latest + targetPath: '$(Pipeline.Workspace)/PythonExtensionArtifact' + condition: or(eq(variables.isExtensionsRelease, true), eq(variables['USETESTPYTHONEXTENSIONS'], true)) - bash: | - python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend/test_blueprint_functions.py - env: - AzureWebJobsStorage: $(STORAGE_CONNECTION) - AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) - AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) - AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) - AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) - AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) - AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) - skipTest: $(skipTest) - displayName: "Running $(PYTHON_VERSION) Python Blueprint E2E Tests" - - bash: | - python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend/test_cosmosdb_functions.py - env: - AzureWebJobsStorage: $(STORAGE_CONNECTION) - AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) - AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) - AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) - AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) - AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) - AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) - skipTest: $(skipTest) - displayName: "Running $(PYTHON_VERSION) Python CosmosDB E2E Tests" - condition: always() - - bash: | - 
python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend/test_dependency_isolation_functions.py - env: - AzureWebJobsStorage: $(STORAGE_CONNECTION) - AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) - AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) - AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) - AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) - AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) - AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) - skipTest: $(skipTest) - displayName: "Running $(PYTHON_VERSION) Python Dependency Isolation E2E Tests" - condition: always() - - bash: | - python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend/test_durable_functions.py - env: - AzureWebJobsStorage: $(STORAGE_CONNECTION) - AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) - AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) - AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) - AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) - AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) - AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) - skipTest: $(skipTest) - displayName: "Running $(PYTHON_VERSION) Python Durable E2E Tests" - condition: always() - - bash: | - python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend/test_eventgrid_functions.py - env: - AzureWebJobsStorage: $(STORAGE_CONNECTION) - AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) - AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) - AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) - AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) - AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) - AzureWebJobsEventGridConnectionKey: 
$(EVENTGRID_CONNECTION) - skipTest: $(skipTest) - displayName: "Running $(PYTHON_VERSION) Python EventGrid E2E Tests" - condition: always() - - bash: | - python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend/test_file_name_functions.py - env: - AzureWebJobsStorage: $(STORAGE_CONNECTION) - AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) - AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) - AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) - AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) - AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) - AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) - skipTest: $(skipTest) - displayName: "Running $(PYTHON_VERSION) Python FileName E2E Tests" - condition: always() - - bash: | - python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend/test_http_functions.py - env: - AzureWebJobsStorage: $(STORAGE_CONNECTION) - AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) - AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) - AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) - AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) - AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) - AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) - skipTest: $(skipTest) - displayName: "Running $(PYTHON_VERSION) Python HTTP E2E Tests" - condition: always() - - bash: | - python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend/test_retry_policy_functions.py - env: - AzureWebJobsStorage: $(STORAGE_CONNECTION) - AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) - AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) - AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) - 
AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) - AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) - AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) - skipTest: $(skipTest) - displayName: "Running $(PYTHON_VERSION) Python Retry Policy E2E Tests" - condition: always() - - bash: | - python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend/test_sql_functions.py - env: - AzureWebJobsStorage: $(STORAGE_CONNECTION) - AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) - AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) - AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) - AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) - AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) - AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) - skipTest: $(skipTest) - displayName: "Running $(PYTHON_VERSION) Python SQL E2E Tests" - condition: always() - - bash: | - python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend/test_third_party_http_functions.py - env: - AzureWebJobsStorage: $(STORAGE_CONNECTION) - AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) - AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) - AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) - AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) - AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) - AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) - skipTest: $(skipTest) - displayName: "Running $(PYTHON_VERSION) Python Third Party HTTP E2E Tests" - condition: always() - - bash: | - python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend/test_threadpool_thread_count_functions.py - env: - AzureWebJobsStorage: $(STORAGE_CONNECTION) - AzureWebJobsCosmosDBConnectionString: 
$(COSMOSDB_CONNECTION) - AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) - AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) - AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) - AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) - AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) - skipTest: $(skipTest) - displayName: "Running $(PYTHON_VERSION) Python ThreadPool Thread Count E2E Tests" - condition: always() - - bash: | - python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend/test_timer_functions.py - env: - AzureWebJobsStorage: $(STORAGE_CONNECTION) - AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) - AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) - AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) - AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) - AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) - AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) - skipTest: $(skipTest) - displayName: "Running $(PYTHON_VERSION) Python Timer E2E Tests" - condition: always() - - bash: | - python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend/test_warmup_functions.py - env: - AzureWebJobsStorage: $(STORAGE_CONNECTION) - AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) - AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) - AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) - AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) - AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) - AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) - skipTest: $(skipTest) - displayName: "Running $(PYTHON_VERSION) Python Warmup E2E Tests" - condition: always() + chmod +x eng/scripts/test-setup.sh + chmod +x eng/scripts/test-extensions.sh + + eng/scripts/test-extensions.sh $(Pipeline.Workspace) 
$(PYTHON_VERSION) $(PYTHONEXTENSIONNAME) + eng/scripts/test-setup.sh + displayName: 'Install test python extension, dependencies and the worker' + condition: or(eq(variables.isExtensionsRelease, true), eq(variables['USETESTPYTHONEXTENSIONS'], true)) - bash: | - python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend/test_worker_process_count_functions.py - env: - AzureWebJobsStorage: $(STORAGE_CONNECTION) - AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) - AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) - AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) - AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) - AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) - AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) - skipTest: $(skipTest) - displayName: "Running $(PYTHON_VERSION) Python Worker Process Count E2E Tests" - condition: always() + docker compose -f tests/emulator_tests/utils/eventhub/docker-compose.yml pull + docker compose -f tests/emulator_tests/utils/eventhub/docker-compose.yml up -d + displayName: 'Install Azurite and Start EventHub Emulator' - bash: | - python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend/test_worker_process_count_functions.py + python -m pytest -q -n auto --dist loadfile --reruns 4 --ignore=tests/emulator_tests/test_servicebus_functions.py tests/emulator_tests env: - AzureWebJobsStorage: $(STORAGE_CONNECTION) - AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) - AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) - AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) - AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) - AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) - AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) - skipTest: $(skipTest) - displayName: "Running 
$(PYTHON_VERSION) Python Worker Process Count E2E Tests" - condition: always() + AzureWebJobsStorage: "UseDevelopmentStorage=true" + AzureWebJobsEventHubConnectionString: "Endpoint=sb://localhost;SharedAccessKeyName=RootManageSharedAccessKey;SharedAccessKey=SAS_KEY_VALUE;UseDevelopmentEmulator=true;" + displayName: "Running $(PYTHON_VERSION) Python Linux Emulator Tests" - bash: | - python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/extension_tests/http_v2_tests + # Stop and remove EventHub Emulator container to free up the port + docker stop eventhubs-emulator + docker container rm --force eventhubs-emulator + docker compose -f tests/emulator_tests/utils/servicebus/docker-compose.yml pull + docker compose -f tests/emulator_tests/utils/servicebus/docker-compose.yml up -d env: - AzureWebJobsStorage: $(STORAGE_CONNECTION) - AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) - AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) - AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) - AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) - AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) - AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) - skipTest: $(skipTest) - displayName: "Running $(PYTHON_VERSION) Python Worker Http v2 Tests" - condition: always() + AzureWebJobsSQLPassword: $(AzureWebJobsSQLPassword) + displayName: 'Install Azurite and Start ServiceBus Emulator' - bash: | - python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/extension_tests/deferred_bindings_tests + python -m pytest -q -n auto --dist loadfile --reruns 4 tests/emulator_tests/test_servicebus_functions.py env: - AzureWebJobsStorage: $(STORAGE_CONNECTION) - AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) - AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) - 
AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) - AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) - AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) - AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) - skipTest: $(skipTest) - displayName: "Running $(PYTHON_VERSION) Python Worker DB Tests" - condition: always() + AzureWebJobsStorage: "UseDevelopmentStorage=true" + AzureWebJobsServiceBusConnectionString: "Endpoint=sb://localhost;SharedAccessKeyName=RootManageSharedAccessKey;SharedAccessKey=SAS_KEY_VALUE;UseDevelopmentEmulator=true;" + displayName: "Running $(PYTHON_VERSION) Python ServiceBus Linux Emulator Tests" diff --git a/eng/templates/jobs/ci-unit-tests.yml b/eng/templates/jobs/ci-unit-tests.yml index 5ff54888c..e013207c3 100644 --- a/eng/templates/jobs/ci-unit-tests.yml +++ b/eng/templates/jobs/ci-unit-tests.yml @@ -16,6 +16,8 @@ jobs: PYTHON_VERSION: '3.11' Python312: PYTHON_VERSION: '3.12' + Python313: + PYTHON_VERSION: '3.13' steps: - task: UsePythonVersion@0 diff --git a/eng/templates/official/jobs/build-artifacts.yml b/eng/templates/official/jobs/build-artifacts.yml index 1eabc4c1c..bc0c5de1a 100644 --- a/eng/templates/official/jobs/build-artifacts.yml +++ b/eng/templates/official/jobs/build-artifacts.yml @@ -1,4 +1,86 @@ jobs: +- job: Build_WINDOWS_X64 + pool: + name: 1es-pool-azfunc + image: 1es-windows-2022 + os: windows + strategy: + matrix: + Python37V4: + pythonVersion: '3.7' + workerPath: 'python/prodV4/worker.py' + Python38V4: + pythonVersion: '3.8' + workerPath: 'python/prodV4/worker.py' + Python39V4: + pythonVersion: '3.9' + workerPath: 'python/prodV4/worker.py' + Python310V4: + pythonVersion: '3.10' + workerPath: 'python/prodV4/worker.py' + Python311V4: + pythonVersion: '3.11' + workerPath: 'python/prodV4/worker.py' + Python312V4: + pythonVersion: '3.12' + workerPath: 'python/prodV4/worker.py' + Python313V4: + pythonVersion: '3.13' + workerPath: 'python/proxyV4/worker.py' + templateContext: + outputParentDirectory: 
$(Build.ArtifactStagingDirectory) + outputs: + - output: pipelineArtifact + targetPath: $(Build.ArtifactStagingDirectory) + artifactName: "$(pythonVersion)_WINDOWS_X64" + steps: + - template: ../../../../pack/templates/win_env_gen.yml + parameters: + pythonVersion: '$(pythonVersion)' + workerPath: '$(workerPath)' + architecture: 'x64' + artifactName: '$(pythonVersion)_WINDOWS_X64' +- job: Build_WINDOWS_X86 + pool: + name: 1es-pool-azfunc + image: 1es-windows-2022 + os: windows + strategy: + matrix: + Python37V4: + pythonVersion: '3.7' + workerPath: 'python/prodV4/worker.py' + Python38V4: + pythonVersion: '3.8' + workerPath: 'python/prodV4/worker.py' + Python39V4: + pythonVersion: '3.9' + workerPath: 'python/prodV4/worker.py' + Python310V4: + pythonVersion: '3.10' + workerPath: 'python/prodV4/worker.py' + Python311V4: + pythonVersion: '3.11' + workerPath: 'python/prodV4/worker.py' + Python312V4: + pythonVersion: '3.12' + workerPath: 'python/prodV4/worker.py' + Python313V4: + pythonVersion: '3.13' + workerPath: 'python/proxyV4/worker.py' + templateContext: + outputParentDirectory: $(Build.ArtifactStagingDirectory) + outputs: + - output: pipelineArtifact + targetPath: $(Build.ArtifactStagingDirectory) + artifactName: "$(pythonVersion)_WINDOWS_X86" + steps: + - template: ../../../../pack/templates/win_env_gen.yml + parameters: + pythonVersion: '$(pythonVersion)' + workerPath: '$(workerPath)' + architecture: 'x86' + artifactName: '$(pythonVersion)_WINDOWS_x86' - job: Build_LINUX_X64 pool: name: 1es-pool-azfunc @@ -6,6 +88,24 @@ jobs: os: linux strategy: matrix: + Python37V4: + pythonVersion: '3.7' + workerPath: 'python/prodV4/worker.py' + Python38V4: + pythonVersion: '3.8' + workerPath: 'python/prodV4/worker.py' + Python39V4: + pythonVersion: '3.9' + workerPath: 'python/prodV4/worker.py' + Python310V4: + pythonVersion: '3.10' + workerPath: 'python/prodV4/worker.py' + Python311V4: + pythonVersion: '3.11' + workerPath: 'python/prodV4/worker.py' + Python312V4: + 
pythonVersion: '3.12' + workerPath: 'python/prodV4/worker.py' Python313V4: pythonVersion: '3.13' workerPath: 'python/proxyV4/worker.py' @@ -21,9 +121,83 @@ jobs: pythonVersion: '$(pythonVersion)' workerPath: '$(workerPath)' artifactName: '$(pythonVersion)_LINUX_X64' +- job: Build_OSX_X64 + pool: + name: Azure Pipelines + image: macOS-latest + os: macOS + strategy: + matrix: + Python37V4: + pythonVersion: '3.7' + workerPath: 'python/prodV4/worker.py' + Python38V4: + pythonVersion: '3.8' + workerPath: 'python/prodV4/worker.py' + Python39V4: + pythonVersion: '3.9' + workerPath: 'python/prodV4/worker.py' + Python310V4: + pythonVersion: '3.10' + workerPath: 'python/prodV4/worker.py' + Python311V4: + pythonVersion: '3.11' + workerPath: 'python/prodV4/worker.py' + Python312V4: + pythonVersion: '3.12' + workerPath: 'python/prodV4/worker.py' + Python313V4: + pythonVersion: '3.13' + workerPath: 'python/proxyV4/worker.py' + templateContext: + outputParentDirectory: $(Build.ArtifactStagingDirectory) + outputs: + - output: pipelineArtifact + targetPath: $(Build.ArtifactStagingDirectory) + artifactName: "$(pythonVersion)_OSX_X64" + steps: + - template: ../../../../pack/templates/nix_env_gen.yml + parameters: + pythonVersion: '$(pythonVersion)' + workerPath: '$(workerPath)' + artifactName: '$(pythonVersion)_OSX_X64' +- job: Build_OSX_ARM64 + pool: + name: Azure Pipelines + image: macOS-latest + os: macOS + strategy: + matrix: + Python39V4: + pythonVersion: '3.9' + workerPath: 'python/prodV4/worker.py' + Python310V4: + pythonVersion: '3.10' + workerPath: 'python/prodV4/worker.py' + Python311V4: + pythonVersion: '3.11' + workerPath: 'python/prodV4/worker.py' + Python312V4: + pythonVersion: '3.12' + workerPath: 'python/prodV4/worker.py' + Python313V4: + pythonVersion: '3.13' + workerPath: 'python/proxyV4/worker.py' + templateContext: + outputParentDirectory: $(Build.ArtifactStagingDirectory) + outputs: + - output: pipelineArtifact + targetPath: $(Build.ArtifactStagingDirectory) + 
artifactName: "$(pythonVersion)_OSX_ARM4" + steps: + - template: ../../../../pack/templates/macos_64_env_gen.yml + parameters: + pythonVersion: '$(pythonVersion)' + workerPath: '$(workerPath)' + artifactName: '$(pythonVersion)_OSX_ARM64' - job: PackageWorkers - dependsOn: ['Build_LINUX_X64'] + dependsOn: ['Build_WINDOWS_X64', 'Build_WINDOWS_X86', 'Build_LINUX_X64', 'Build_OSX_X64', 'Build_OSX_ARM64'] templateContext: outputParentDirectory: $(Build.ArtifactStagingDirectory) outputs: @@ -52,7 +226,7 @@ jobs: LATEST_TAG=$(curl https://api.github.com/repos/Azure/azure-functions-python-worker/tags -s | jq '.[0].name' | sed 's/\"//g' | cut -d'.' -f-2) NUSPEC="pack\Microsoft.Azure.Functions.V4.PythonWorker.nuspec" # Only required for Integration Test. Version number contains date (e.g. 3.1.2.20211028-dev) - WKVERSION="3.$LATEST_TAG-$(BUILD_BUILDID)-TEST25" + WKVERSION="3.$LATEST_TAG-$(BUILD_BUILDID)-TEST" echo "No Matching Release Tag For $BUILD_SOURCEBRANCH" fi diff --git a/eng/templates/official/jobs/ci-e2e-tests.yml b/eng/templates/official/jobs/ci-e2e-tests.yml index b7050196a..33148bf94 100644 --- a/eng/templates/official/jobs/ci-e2e-tests.yml +++ b/eng/templates/official/jobs/ci-e2e-tests.yml @@ -9,6 +9,60 @@ jobs: strategy: matrix: + Python37: + PYTHON_VERSION: '3.7' + STORAGE_CONNECTION: $(LinuxStorageConnectionString37) + COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString37) + EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString37) + SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString37) + SQL_CONNECTION: $(LinuxSqlConnectionString37) + EVENTGRID_URI: $(LinuxEventGridTopicUriString37) + EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString37) + Python38: + PYTHON_VERSION: '3.8' + STORAGE_CONNECTION: $(LinuxStorageConnectionString38) + COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString38) + EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString38) + SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString38) + SQL_CONNECTION: 
$(LinuxSqlConnectionString38) + EVENTGRID_URI: $(LinuxEventGridTopicUriString38) + EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString38) + Python39: + PYTHON_VERSION: '3.9' + STORAGE_CONNECTION: $(LinuxStorageConnectionString39) + COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString39) + EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString39) + SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString39) + SQL_CONNECTION: $(LinuxSqlConnectionString39) + EVENTGRID_URI: $(LinuxEventGridTopicUriString39) + EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString39) + Python310: + PYTHON_VERSION: '3.10' + STORAGE_CONNECTION: $(LinuxStorageConnectionString310) + COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString310) + EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString310) + SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString310) + SQL_CONNECTION: $(LinuxSqlConnectionString310) + EVENTGRID_URI: $(LinuxEventGridTopicUriString310) + EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString310) + Python311: + PYTHON_VERSION: '3.11' + STORAGE_CONNECTION: $(LinuxStorageConnectionString311) + COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString311) + EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString311) + SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString311) + SQL_CONNECTION: $(LinuxSqlConnectionString311) + EVENTGRID_URI: $(LinuxEventGridTopicUriString311) + EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString311) + Python312: + PYTHON_VERSION: '3.12' + STORAGE_CONNECTION: $(LinuxStorageConnectionString312) + COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString312) + EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString312) + SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString312) + SQL_CONNECTION: $(LinuxSqlConnectionString312) + EVENTGRID_URI: $(LinuxEventGridTopicUriString312) + EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString312) Python313: PYTHON_VERSION: '3.13' STORAGE_CONNECTION: $(LinuxStorageConnectionString312) @@ -90,7 
+144,7 @@ jobs: displayName: 'Display skipTest variable' condition: or(eq(variables.isSdkRelease, true), eq(variables['USETESTPYTHONSDK'], true)) - bash: | - python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend/test_blueprint_functions.py + python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend tests/extension_tests/deferred_bindings_tests tests/extension_tests/http_v2_tests env: AzureWebJobsStorage: $(STORAGE_CONNECTION) AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) diff --git a/eng/templates/official/jobs/publish-release.yml b/eng/templates/official/jobs/publish-release.yml deleted file mode 100644 index 0f1004898..000000000 --- a/eng/templates/official/jobs/publish-release.yml +++ /dev/null @@ -1,386 +0,0 @@ -jobs: - -- job: "CreateReleaseBranch" - displayName: 'Create Release Branch' - pool: - name: 1es-pool-azfunc - image: 1es-ubuntu-22.04 - os: linux - steps: - - powershell: | - $githubToken = "$(GithubPat)" - $newWorkerVersion = "$(NewWorkerVersion)" - $versionFile = "azure_functions_worker/version.py" - - if($newWorkerVersion -match '(\d)+.(\d)+.(\d)+') { - # Create GitHub credential - git config --global user.name "AzureFunctionsPython" - git config --global user.email "funcdisc@microsoft.com" - - # Heading to Artifact Repository - Write-Host "Operating based on $stagingDirectory/azure-functions-python-worker" - git checkout -b "release/$newWorkerVersion" - - # Change azure_functions_worker/version.py version - Write-Host "Change version number in version.py to $newWorkerVersion" - ((Get-Content $versionFile) -replace "VERSION = '(\d+).(\d+).*'", "VERSION = '$newWorkerVersion'" -join "`n") + "`n" | Set-Content -NoNewline $versionFile - git add $versionFile - git commit -m "build: update Python Worker Version to $newWorkerVersion" - - # Create release branch 
release/X.Y.Z - Write-Host "Creating release branch release/$newWorkerVersion" - git push --repo="https://$githubToken@github.com/Azure/azure-functions-python-worker.git" - } else { - Write-Host "NewWorkerVersion $newWorkerVersion is malformed (example: 1.1.8)" - exit -1 - } - displayName: 'Push release/x.y.z' - -- job: "CheckReleaseBranch" - dependsOn: ['CreateReleaseBranch'] - displayName: '(Manual) Check Release Branch' - pool: server - steps: - - task: ManualValidation@1 - displayName: '(Optional) Modify release/x.y.z branch' - inputs: - notifyUsers: '' # No email notifications sent - instructions: | - 1. Check if the https://github.com/Azure/azure-functions-python-worker/tree/release/$(NewWorkerVersion) passes all unit tests. - 2. If not, modify the release/$(NewWorkerVersion) branch. - 3. Ensure release/$(NewWorkerVersion) branch contains all necessary changes since it will be propagated to v4 workers. - -- job: "CreateReleaseTag" - dependsOn: ['CheckReleaseBranch'] - steps: - - powershell: | - $githubToken = "$(GithubPat)" - $newWorkerVersion = "$(NewWorkerVersion)" - - if($newWorkerVersion -match '(\d)+.(\d)+.(\d)+') { - # Create GitHub credential - git config --global user.name "AzureFunctionsPython" - git config --global user.email "funcdisc@microsoft.com" - - # Clone Repository - git clone https://$githubToken@github.com/Azure/azure-functions-python-worker - Write-Host "Cloned azure-functions-python-worker into local" - Set-Location "azure-functions-python-worker" - git checkout "origin/release/$newWorkerVersion" - - # Create release tag X.Y.Z - Write-Host "Creating release tag $newWorkerVersion" - git tag -a "$newWorkerVersion" -m "$newWorkerVersion" - - # Push tag to remote - git push origin $newWorkerVersion - } else { - Write-Host "NewWorkerVersion $newWorkerVersion is malformed (example: 1.1.8)" - exit -1 - } - displayName: 'Create and push release tag x.y.z' - - powershell: | - $githubUser = "$(GithubUser)" - $githubToken = 
"$(GithubPat)" - $newWorkerVersion = "$(NewWorkerVersion)" - - if($newWorkerVersion -match '(\d)+.(\d)+.(\d)+') { - # Create GitHub credential - $credential = [System.Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes("${githubUser}:${githubToken}")) - - # Create Release Note - Write-Host "Creating release note in GitHub" - $body = (@{tag_name="$newWorkerVersion";name="Release $newWorkerVersion";body="- Fill in Release Note Here";draft=$true} | ConvertTo-Json -Compress) - $response = Invoke-WebRequest -Headers @{"Cache-Control"="no-cache";"Content-Type"="application/json";"Authorization"="Basic $credential"} -Method Post -Body "$body" -Uri "https://api.github.com/repos/Azure/azure-functions-python-worker/releases" - - # Return Value - if ($response.StatusCode -ne 201) { - Write-Host "Failed to create release note in GitHub" - exit -1 - } - - $draftUrl = $response | ConvertFrom-Json | Select -expand url - Write-Host "Release draft created in $draftUrl" - } else { - Write-Host "NewWorkerVersion $newWorkerVersion is malformed (example: 1.1.8)" - exit -1 - } - displayName: 'Create GitHub release draft' - -- job: "CheckGitHubRelease" - dependsOn: ['CreateReleaseTag'] - displayName: '(Manual) Check GitHub release note' - pool: server - steps: - - task: ManualValidation@1 - displayName: 'Write GitHub release note' - inputs: - notifyUsers: '' - instructions: 'Please head to https://github.com/Azure/azure-functions-python-worker/releases to finish the release note' - -- job: "WaitForPythonWorkerBuild" - dependsOn: ['CheckGitHubRelease'] - displayName: '(Manual) Wait For Python Worker Build' - pool: server - steps: - - task: ManualValidation@1 - displayName: 'Wait For Python Worker Build' - inputs: - notifyUsers: '' - instructions: 'Ensure the build of release/4.x.y.z finishes in https://dev.azure.com/azfunc/internal/_build?definitionId=652 and verify if PackageWorkers task is completed.' 
- - -- job: "PublishNuget" - dependsOn: ['WaitForPythonWorkerBuild'] - displayName: 'Publish Nuget' - templateContext: - outputs: - - output: nuget - packagesToPush: '$(Pipeline.Workspace)/PythonWorkerArtifact/**/*.nupkg;!$(Pipeline.Workspace)/PythonWorkerArtifact/**/*.symbols.nupkg' - publishVstsFeed: 'e6a70c92-4128-439f-8012-382fe78d6396/eb652719-f36a-4e78-8541-e13a3cd655f9' - allowPackageConflicts: true - packageParentPath: '$(Pipeline.Workspace)' - steps: - - task: DownloadPipelineArtifact@2 - displayName: 'Download Python Worker release/4.x.y.z Artifact' - inputs: - buildType: specific - project: '3f99e810-c336-441f-8892-84983093ad7f' - definition: 652 - specificBuildWithTriggering: true - buildVersionToDownload: latestFromBranch - branchName: 'refs/heads/release/$(NewWorkerVersion)' - allowPartiallySucceededBuilds: true - allowFailedBuilds: true - targetPath: '$(Pipeline.Workspace)/PythonWorkerArtifact' - - -- job: "CheckNugetPackageContent" - dependsOn: ['PublishNuget'] - displayName: '(Manual) Check Nuget Package Content' - pool: server - steps: - - task: ManualValidation@1 - displayName: 'Check nuget package content' - inputs: - notifyUsers: '' - instructions: | - Please check the latest release package at - https://azfunc.visualstudio.com/Azure%20Functions/_artifacts/feed/AzureFunctionsRelease/NuGet/Microsoft.Azure.Functions.PythonWorker/overview - -- job: "HostRepoPRs" - dependsOn: ['CheckNugetPackageContent'] - displayName: 'Create Host PRs' - steps: - - powershell: | - $githubUser = "$(GithubUser)" - $githubToken = "$(GithubPat)" - $newWorkerVersion = "$(NewWorkerVersion)" - $newBranch = "python/$newWorkerVersion" - - if($newWorkerVersion -match '(\d)+.(\d)+.(\d)+') { - # Create GitHub credential - git config --global user.name "AzureFunctionsPython" - git config --global user.email "funcdisc@microsoft.com" - - # Create GitHub credential - $credential = 
[System.Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes("${githubUser}:${githubToken}")) - - # Clone Repository - git clone https://$githubToken@github.com/Azure/azure-functions-host - Write-Host "Cloned azure-functions-host into local and checkout $newBranch branch" - Set-Location "azure-functions-host" - git checkout -b $newBranch "origin/dev" - - # Modify Python Worker Version in eng\build\python.props - Write-Host "Replacing eng\build\python.props" - ((Get-Content eng\build\Workers.Python.props) -replace "PythonWorker`" Version=`"(\d)+.(\d)+.(\d)+.?(\d)*`"","PythonWorker`" Version=`"$newWorkerVersion`"" -join "`n") +"`n" | Set-Content -NoNewline eng\build\Workers.Python.props - - # Modify Python Worker Version in test\WebJobs.Script.Tests\WebJobs.Script.Tests.csproj - Write-Host "Replacing test\WebJobs.Script.Tests\WebJobs.Script.Tests.csproj" - ((Get-Content test\WebJobs.Script.Tests\WebJobs.Script.Tests.csproj) -replace "PythonWorker`" Version=`"(\d)+.(\d)+.(\d)+.?(\d)*`"","PythonWorker`" Version=`"$newWorkerVersion`"" -join "`n") + "`n" | Set-Content -NoNewline test\WebJobs.Script.Tests\WebJobs.Script.Tests.csproj - - # Modify release_notes.md - Write-Host "Adding a new entry in release_note.md" - ((Get-Content release_notes.md) -replace "-->","$&`n- Update Python Worker Version to [$newWorkerVersion](https://github.com/Azure/azure-functions-python-worker/releases/tag/$newWorkerVersion)" -join "`n") + "`n" | Set-Content -NoNewline release_notes.md - - # Commit Python Version - Write-Host "Pushing $newBranch to host repo" - git add eng\build\Workers.Python.props - git add test\WebJobs.Script.Tests\WebJobs.Script.Tests.csproj - git add release_notes.md - git commit -m "Update Python Worker Version to $newWorkerVersion" - git push origin $newBranch - - # Create PR - Write-Host "Creating PR draft in GitHub" - $prTemplateContent = @" - ### Issue describing the changes in this PR - - Update Python Worker Version to $newWorkerVersion - - 
Python Worker Release note [$newWorkerVersion](https://github.com/Azure/azure-functions-python-worker/releases/tag/$newWorkerVersion) - - ### Pull request checklist - - **IMPORTANT**: Currently, changes must be backported to the `in-proc` branch to be included in Core Tools and non-Flex deployments. - - * [ ] Backporting to the `in-proc` branch is not required - * [x]Otherwise: Link to backporting PR - * [x] My changes **do not** require documentation changes - * [ ] Otherwise: Documentation issue linked to PR - * [ ] My changes **should not** be added to the release notes for the next release - * [x] Otherwise: I've added my notes to `release_notes.md` - * [x] My changes **do not** need to be backported to a previous version - * [ ] Otherwise: Backport tracked by issue/PR #issue_or_pr - * [x] My changes **do not** require diagnostic events changes - * Otherwise: I have added/updated all related diagnostic events and their documentation (Documentation issue linked to PR) - * [x] I have added all required tests (Unit tests, E2E tests) - - "@ - - $body = (@{head="$newBranch";base="dev";body=$prTemplateContent;draft=$true;maintainer_can_modify=$true;title="Update Python Worker Version to $newWorkerVersion"} | ConvertTo-Json -Compress)$response = Invoke-WebRequest -Headers @{"Cache-Control"="no-cache";"Content-Type"="application/json";"Authorization"="Basic $credential";"Accept"="application/vnd.github.v3+json"} -Method Post -Body "$body" -Uri "https://api.github.com/repos/Azure/azure-functions-host/pulls" - - # Return Value - if ($response.StatusCode -ne 201) { - Write-Host "Failed to create a PR in Azure Functions Host" - exit -1 - } - - $draftUrl = $response | ConvertFrom-Json | Select -expand url - Write-Host "PR draft created in $draftUrl" - } else { - Write-Host "NewWorkerVersion $newWorkerVersion is malformed (example: 1.1.8)" - exit -1 - } - displayName: 'Create Host PR for dev' - - powershell: | - $githubUser = "$(GithubUser)" - $githubToken = 
"$(GithubPat)" - $newWorkerVersion = "$(NewWorkerVersion)" - $newBranch = "python/$newWorkerVersion" - - if($newWorkerVersion -match '(\d)+.(\d)+.(\d)+') { - # Create GitHub credential - git config --global user.name "AzureFunctionsPython" - git config --global user.email "funcdisc@microsoft.com" - - # Create GitHub credential - $credential = [System.Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes("${githubUser}:${githubToken}")) - - # Clone Repository - git clone https://$githubToken@github.com/Azure/azure-functions-host - Write-Host "Cloned azure-functions-host into local and checkout $newBranch branch" - Set-Location "azure-functions-host" - git checkout -b backport/$newBranch "origin/in-proc" - - # Modify Python Worker Version in eng\build\python.props - Write-Host "Replacing eng\build\python.props" - ((Get-Content eng\build\Workers.Python.props) -replace "PythonWorker`" Version=`"(\d)+.(\d)+.(\d)+.?(\d)*`"","PythonWorker`" Version=`"$newWorkerVersion`"" -join "`n") +"`n" | Set-Content -NoNewline eng\build\Workers.Python.props - - # Modify Python Worker Version in test\WebJobs.Script.Tests\WebJobs.Script.Tests.csproj - Write-Host "Replacing test\WebJobs.Script.Tests\WebJobs.Script.Tests.csproj" - ((Get-Content test\WebJobs.Script.Tests\WebJobs.Script.Tests.csproj) -replace "PythonWorker`" Version=`"(\d)+.(\d)+.(\d)+.?(\d)*`"","PythonWorker`" Version=`"$newWorkerVersion`"" -join "`n") + "`n" | Set-Content -NoNewline test\WebJobs.Script.Tests\WebJobs.Script.Tests.csproj - - # Modify release_notes.md - Write-Host "Adding a new entry in release_note.md" - ((Get-Content release_notes.md) -replace "-->","$&`n- Update Python Worker Version to [$newWorkerVersion](https://github.com/Azure/azure-functions-python-worker/releases/tag/$newWorkerVersion)" -join "`n") + "`n" | Set-Content -NoNewline release_notes.md - - # Commit Python Version - Write-Host "Pushing $newBranch to host repo" - git add eng\build\Workers.Python.props - git add 
test\WebJobs.Script.Tests\WebJobs.Script.Tests.csproj - git add release_notes.md - git commit -m "[Backport] Update Python Worker Version to $newWorkerVersion" - git push origin $newBranch - - # Create PR - Write-Host "Creating PR draft in GitHub" - $prTemplateContent = @" - ### Issue describing the changes in this PR - - [Backport] Update Python Worker Version to $newWorkerVersion - - Python Worker Release note [$newWorkerVersion](https://github.com/Azure/azure-functions-python-worker/releases/tag/$newWorkerVersion) - - ### Pull request checklist - - **IMPORTANT**: Currently, changes must be backported to the `in-proc` branch to be included in Core Tools and non-Flex deployments. - - * [ ] Backporting to the `in-proc` branch is not required - * [ ]Otherwise: Link to backporting PR - * [x] My changes **do not** require documentation changes - * [ ] Otherwise: Documentation issue linked to PR - * [ ] My changes **should not** be added to the release notes for the next release - * [x] Otherwise: I've added my notes to `release_notes.md` - * [x] My changes **do not** need to be backported to a previous version - * [ ] Otherwise: Backport tracked by issue/PR #issue_or_pr - * [x] My changes **do not** require diagnostic events changes - * Otherwise: I have added/updated all related diagnostic events and their documentation (Documentation issue linked to PR) - * [x] I have added all required tests (Unit tests, E2E tests) - - "@ - - $body = (@{head="$newBranch";base="in-proc";body=$prTemplateContent;draft=$true;maintainer_can_modify=$true;title="Update Python Worker Version to $newWorkerVersion"} | ConvertTo-Json -Compress)$response = Invoke-WebRequest -Headers @{"Cache-Control"="no-cache";"Content-Type"="application/json";"Authorization"="Basic $credential";"Accept"="application/vnd.github.v3+json"} -Method Post -Body "$body" -Uri "https://api.github.com/repos/Azure/azure-functions-host/pulls" - - # Return Value - if ($response.StatusCode -ne 201) { - Write-Host 
"Failed to create a PR in Azure Functions Host" - exit -1 - } - - $draftUrl = $response | ConvertFrom-Json | Select -expand url - Write-Host "PR draft created in $draftUrl" - } else { - Write-Host "NewWorkerVersion $newWorkerVersion is malformed (example: 1.1.8)" - exit -1 - } - displayName: 'Create Host PR for in-proc' - -- job: "CheckHostPRs" - dependsOn: ['HostRepoPRs'] - displayName: '(Manual) Check Host PRs' - pool: server - steps: - - task: ManualValidation@1 - displayName: 'Finish Host PRs' - inputs: - notifyUsers: '' - instructions: | - Go to https://github.com/Azure/azure-functions-host/pulls and finish the host v4 PR. - If the content misses something, checkout "python/x.y.z" from remote and make new commits to it. - -- job: "MergeToMainAndDev" - dependsOn: ['CheckHostPRs'] - displayName: 'Merge release/x.y.z back to main & dev' - steps: - - powershell: | - $githubToken = "$(GithubPat)" - $newWorkerVersion = "$(NewWorkerVersion)" - - if($newWorkerVersion -match '(\d)+.(\d)+.(\d)+') { - # Create GitHub credential - git config --global user.name "AzureFunctionsPython" - git config --global user.email "funcdisc@microsoft.com" - - # Clone Repository - git clone https://$githubToken@github.com/Azure/azure-functions-python-worker - Write-Host "Cloned azure-functions-python-worker into local" - Set-Location "azure-functions-python-worker" - - # Merge back to main - Write-Host "Merging release/$newWorkerVersion back to main" - git checkout main - git merge "origin/release/$newWorkerVersion" - git push origin main - - # Merge back to dev - Write-Host "Merging release/$newWorkerVersion back to dev" - git checkout dev - git merge "origin/release/$newWorkerVersion" - git push origin dev - } else { - Write-Host "NewWorkerVersion $newWorkerVersion is malformed (example: 1.1.8)" - exit -1 - } - displayName: 'Merge release/x.y.z back to main & dev' diff --git a/pack/Microsoft.Azure.Functions.V4.PythonWorker.nuspec 
b/pack/Microsoft.Azure.Functions.V4.PythonWorker.nuspec index 466397e49..98b0c832a 100644 --- a/pack/Microsoft.Azure.Functions.V4.PythonWorker.nuspec +++ b/pack/Microsoft.Azure.Functions.V4.PythonWorker.nuspec @@ -9,8 +9,40 @@ Microsoft Azure Functions Python Worker © .NET Foundation. All rights reserved. - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/python/test/worker.config.json b/python/test/worker.config.json index deb726a05..a0b0ad3fe 100644 --- a/python/test/worker.config.json +++ b/python/test/worker.config.json @@ -2,7 +2,7 @@ "description":{ "language":"python", "extensions":[".py"], - "defaultExecutablePath":"C:\\Users\\victoriahall\\Documents\\repos\\azure.azure-functions-python-worker\\.venv\\Scripts\\python.exe", + "defaultExecutablePath":"python", "defaultWorkerPath":"worker.py", "workerIndexing": "true" }, diff --git a/python/test/worker.py b/python/test/worker.py index db66b6dea..95790083f 100644 --- a/python/test/worker.py +++ b/python/test/worker.py @@ -1,67 +1,24 @@ -import os -import pathlib import sys +import os -PKGS_PATH = "/home/site/wwwroot/.python_packages" -PKGS = "lib/site-packages" - # Azure environment variables -AZURE_WEBSITE_INSTANCE_ID = "WEBSITE_INSTANCE_ID" -AZURE_CONTAINER_NAME = "CONTAINER_NAME" AZURE_WEBJOBS_SCRIPT_ROOT = "AzureWebJobsScriptRoot" - -def is_azure_environment(): - """Check if the function app is running on the cloud""" - return (AZURE_CONTAINER_NAME in os.environ - or AZURE_WEBSITE_INSTANCE_ID in os.environ) - - -def validate_python_version(): - minor_version = sys.version_info[1] - if not (11 <= minor_version < 14): - raise RuntimeError(f'Unsupported Python version: 3.{minor_version}') - - -def determine_user_pkg_paths(): - """This finds the user packages when function apps are running on the cloud - User packages are defined in: - /home/site/wwwroot/.python_packages/lib/site-packages - """ - usr_packages_path = [os.path.join(PKGS_PATH, PKGS)] - return usr_packages_path - - def 
add_script_root_to_sys_path(): - """Append function project root to module finding sys.path""" + '''Append function project root to module finding sys.path''' functions_script_root = os.getenv(AZURE_WEBJOBS_SCRIPT_ROOT) if functions_script_root is not None: sys.path.append(functions_script_root) if __name__ == '__main__': - validate_python_version() - func_worker_dir = str(pathlib.Path(__file__).absolute().parent) - env = os.environ - - # Setting up python path for all environments to prioritize - # third-party user packages over worker packages in PYTHONPATH - user_pkg_paths = determine_user_pkg_paths() - joined_pkg_paths = os.pathsep.join(user_pkg_paths) - env['PYTHONPATH'] = f'{joined_pkg_paths}:{func_worker_dir}' - - project_root = os.path.abspath(os.path.dirname(__file__)) - if project_root not in sys.path: - sys.path.append(project_root) - - if is_azure_environment(): - os.execve(sys.executable, - [sys.executable, '-m', 'proxy_worker'] - + sys.argv[1:], - env) + add_script_root_to_sys_path() + minor_version = sys.version_info[1] + if minor_version < 13: + from azure_functions_worker import main + main.main() else: - add_script_root_to_sys_path() from proxy_worker import start_worker start_worker.start() diff --git a/setup.cfg b/setup.cfg index 6f5a7fb98..5dde99ef5 100644 --- a/setup.cfg +++ b/setup.cfg @@ -18,3 +18,9 @@ ignore_errors = True [mypy-azure_functions_worker._thirdparty.typing_inspect] ignore_errors = True + +[mypy-proxy_worker.protos.*] +ignore_errors = True + +[mypy-proxy_worker._thirdparty.typing_inspect] +ignore_errors = True \ No newline at end of file diff --git a/tests/endtoend/test_blueprint_functions.py b/tests/endtoend/test_blueprint_functions.py index 0c78b4289..c421f583b 100644 --- a/tests/endtoend/test_blueprint_functions.py +++ b/tests/endtoend/test_blueprint_functions.py @@ -2,10 +2,6 @@ # Licensed under the MIT License. 
from tests.utils import testutils -import os -import logging - -from unittest.mock import patch class TestFunctionInBluePrintOnly(testutils.WebHostTestCase): @@ -14,10 +10,8 @@ def get_script_dir(cls): return testutils.E2E_TESTS_FOLDER / 'blueprint_functions' / \ 'functions_in_blueprint_only' - @patch.dict(os.environ, {"PYAZURE_WEBHOST_DEBUG": '1'}) def test_function_in_blueprint_only(self): r = self.webhost.request('GET', 'default_template') - logging.info(f"R: {r}") self.assertTrue(r.ok) diff --git a/tests/extension_tests/http_v2_tests/test_http_v2.py b/tests/extension_tests/http_v2_tests/test_http_v2.py index 07b204660..514633743 100644 --- a/tests/extension_tests/http_v2_tests/test_http_v2.py +++ b/tests/extension_tests/http_v2_tests/test_http_v2.py @@ -8,12 +8,15 @@ import requests from tests.utils import testutils -#from azure_functions_worker.utils.common import is_envvar_true -from proxy_worker.utils.common import is_envvar_true from tests.utils.constants import CONSUMPTION_DOCKER_TEST, DEDICATED_DOCKER_TEST -#from azure_functions_worker.constants import PYTHON_ENABLE_INIT_INDEXING -from proxy_worker.utils.constants import PYTHON_ENABLE_INIT_INDEXING +# This app setting is only present for Python < 3.13 +from azure_functions_worker.constants import PYTHON_ENABLE_INIT_INDEXING + +if sys.version_info.minor < 13: + from azure_functions_worker.utils.common import is_envvar_true +else: + from proxy_worker.utils.common import is_envvar_true REQUEST_TIMEOUT_SEC = 5 diff --git a/tests/test_setup.py b/tests/test_setup.py index 9dae3b49e..38491d650 100644 --- a/tests/test_setup.py +++ b/tests/test_setup.py @@ -38,7 +38,7 @@ WEBHOST_GITHUB_API = "https://api.github.com/repos/Azure/azure-functions-host" WEBHOST_GIT_REPO = "https://github.com/Azure/azure-functions-host/archive" WEBHOST_TAG_PREFIX = "v4." 
-WORKER_DIR = "azure_functions_worker" if sys.version_info.minor < 11 else "proxy_worker" +WORKER_DIR = "azure_functions_worker" if sys.version_info.minor < 13 else "proxy_worker" def get_webhost_version() -> str: diff --git a/tests/unittests/test_code_quality.py b/tests/unittests/test_code_quality.py index 54d1cc725..45f7bda47 100644 --- a/tests/unittests/test_code_quality.py +++ b/tests/unittests/test_code_quality.py @@ -17,7 +17,7 @@ def test_mypy(self): try: subprocess.run( - [sys.executable, '-m', 'mypy', '-m', 'azure_functions_worker'], + [sys.executable, '-m', 'mypy', '-m', 'azure_functions_worker', 'proxy_worker'], check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, diff --git a/tests/utils/testutils.py b/tests/utils/testutils.py index 98368c963..360fca2e5 100644 --- a/tests/utils/testutils.py +++ b/tests/utils/testutils.py @@ -50,22 +50,23 @@ WebHostDedicated, ) -if sys.version_info.minor < 11: +if sys.version_info.minor < 13: from azure_functions_worker import dispatcher, protos + from azure_functions_worker.bindings.shared_memory_data_transfer import ( + FileAccessorFactory, + ) + from azure_functions_worker.bindings.shared_memory_data_transfer import ( + SharedMemoryConstants as consts, + ) + from azure_functions_worker.constants import ( + FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED, + UNIX_SHARED_MEMORY_DIRECTORIES, + ) + from azure_functions_worker.utils.common import get_app_setting, is_envvar_true else: from proxy_worker import dispatcher, protos - -# from azure_functions_worker.bindings.shared_memory_data_transfer import ( -# FileAccessorFactory, -# ) -# from azure_functions_worker.bindings.shared_memory_data_transfer import ( -# SharedMemoryConstants as consts, -# ) -# from azure_functions_worker.constants import ( -# FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED, -# UNIX_SHARED_MEMORY_DIRECTORIES, -# ) -# from azure_functions_worker.utils.common import get_app_setting, is_envvar_true + from proxy_worker.utils.common 
import is_envvar_true + from proxy_worker.utils.app_settings import get_app_setting TESTS_ROOT = PROJECT_ROOT / 'tests' E2E_TESTS_FOLDER = pathlib.Path('endtoend') @@ -119,19 +120,6 @@ } """ -def is_true_like(setting: str) -> bool: - if setting is None: - return False - - return setting.lower().strip() in {'1', 'true', 't', 'yes', 'y'} - - -def is_envvar_true(env_key: str) -> bool: - if os.getenv(env_key) is None: - return False - - return is_true_like(os.environ[env_key]) - class AsyncTestCaseMeta(type(unittest.TestCase)): @@ -174,7 +162,7 @@ def wrapper(self, *args, __meth__=test_case, __check_log__=check_log_case, **kwargs): if (__check_log__ is not None and callable(__check_log__) - and not True): + and not is_envvar_true(PYAZURE_WEBHOST_DEBUG)): # Check logging output for unit test scenarios result = self._run_test(__meth__, *args, **kwargs) @@ -248,7 +236,7 @@ def setUpClass(cls): docker_tests_enabled, sku = cls.docker_tests_enabled() - cls.host_stdout = None if True \ + cls.host_stdout = None if is_envvar_true(PYAZURE_WEBHOST_DEBUG) \ else tempfile.NamedTemporaryFile('w+t') try: @@ -264,17 +252,13 @@ def setUpClass(cls): cls.webhost = \ WebHostDedicated(docker_configs).spawn_container() else: - cls.host_stdout_logger.info("Starting to setup function app") _setup_func_app(TESTS_ROOT / script_dir, is_unit_test) - cls.host_stdout_logger.info("Finished setup function app") try: - cls.host_stdout_logger.info("Starting webhost") cls.webhost = start_webhost(script_dir=script_dir, stdout=cls.host_stdout) except Exception: raise - time.sleep(5) if not cls.webhost.is_healthy() and cls.host_stdout is not None: cls.host_out = cls.host_stdout.read() if cls.host_out is not None and len(cls.host_out) > 0: @@ -315,7 +299,6 @@ def tearDownClass(cls): _teardown_func_app(TESTS_ROOT / script_dir) def _run_test(self, test, *args, **kwargs): - self.host_stdout_logger.info(f"Starting to run test. 
Test: {test}, Args: {args}, Kwargs: {kwargs}") if self.host_stdout is None: test(self, *args, **kwargs) else: @@ -342,124 +325,126 @@ def _run_test(self, test, *args, **kwargs): if test_exception is not None: raise test_exception +# This is not supported in 3.13+ +if sys.version_info.minor < 13: + class SharedMemoryTestCase(unittest.TestCase): + """ + For tests involving shared memory data transfer usage. + """ -# class SharedMemoryTestCase(unittest.TestCase): -# """ -# For tests involving shared memory data transfer usage. -# """ - -# def setUp(self): -# self.was_shmem_env_true = is_envvar_true( -# FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED) -# os.environ.update( -# {FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED: '1'}) - -# os_name = platform.system() -# if os_name == 'Darwin': -# # If an existing AppSetting is specified, save it so it can be -# # restored later -# self.was_shmem_dirs = get_app_setting( -# UNIX_SHARED_MEMORY_DIRECTORIES -# ) -# self._setUpDarwin() -# elif os_name == 'Linux': -# self._setUpLinux() - -# def tearDown(self): -# os_name = platform.system() -# if os_name == 'Darwin': -# self._tearDownDarwin() -# if self.was_shmem_dirs is not None: -# # If an AppSetting was set before the tests ran, restore it back -# os.environ.update( -# {UNIX_SHARED_MEMORY_DIRECTORIES: self.was_shmem_dirs}) -# elif os_name == 'Linux': -# self._tearDownLinux() - -# if not self.was_shmem_env_true: -# os.environ.update( -# {FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED: '0'}) - -# def get_new_mem_map_name(self): -# return str(uuid.uuid4()) - -# def get_random_bytes(self, num_bytes): -# return bytearray(random.getrandbits(8) for _ in range(num_bytes)) - -# def get_random_string(self, num_chars): -# return ''.join(random.choices(string.ascii_uppercase + string.digits, -# k=num_chars)) - -# def is_valid_uuid(self, uuid_to_test: str, version: int = 4) -> bool: -# """ -# Check if uuid_to_test is a valid UUID. 
-# Reference: https://stackoverflow.com/a/33245493/3132415 -# """ -# try: -# uuid_obj = uuid.UUID(uuid_to_test, version=version) -# except ValueError: -# return False -# return str(uuid_obj) == uuid_to_test - -# def _createSharedMemoryDirectories(self, directories): -# for temp_dir in directories: -# temp_dir_path = os.path.join(temp_dir, consts.UNIX_TEMP_DIR_SUFFIX) -# if not os.path.exists(temp_dir_path): -# os.makedirs(temp_dir_path) - -# def _deleteSharedMemoryDirectories(self, directories): -# for temp_dir in directories: -# temp_dir_path = os.path.join(temp_dir, consts.UNIX_TEMP_DIR_SUFFIX) -# shutil.rmtree(temp_dir_path) - -# def _setUpLinux(self): -# self._createSharedMemoryDirectories(consts.UNIX_TEMP_DIRS) - -# def _tearDownLinux(self): -# self._deleteSharedMemoryDirectories(consts.UNIX_TEMP_DIRS) - -# def _setUpDarwin(self): -# """ -# Create a RAM disk on macOS. -# Ref: https://stackoverflow.com/a/2033417/3132415 -# """ -# size_in_mb = consts.MAX_BYTES_FOR_SHARED_MEM_TRANSFER / (1024 * 1024) -# size = 2048 * size_in_mb -# # The following command returns the name of the created disk -# cmd = ['hdiutil', 'attach', '-nomount', f'ram://{size}'] -# result = subprocess.run(cmd, stdout=subprocess.PIPE) -# if result.returncode != 0: -# raise IOError(f'Cannot create ram disk with command: {cmd} - ' -# f'{result.stdout} - {result.stderr}') -# disk_name = result.stdout.strip().decode() -# # We create a volume on the disk created above and mount it -# volume_name = 'shm' -# cmd = ['diskutil', 'eraseVolume', 'HFS+', volume_name, disk_name] -# result = subprocess.run(cmd, stdout=subprocess.PIPE) -# if result.returncode != 0: -# raise IOError(f'Cannot create volume with command: {cmd} - ' -# f'{result.stdout} - {result.stderr}') -# directory = f'/Volumes/{volume_name}' -# self.created_directories = [directory] -# # Create directories in the volume for shared memory maps -# self._createSharedMemoryDirectories(self.created_directories) -# # Override the AppSetting for 
the duration of this test so the -# # FileAccessorUnix can use these directories for creating memory maps -# os.environ.update( -# {UNIX_SHARED_MEMORY_DIRECTORIES: ','.join(self.created_directories)} -# ) - -# def _tearDownDarwin(self): -# # Delete the directories containing shared memory maps -# self._deleteSharedMemoryDirectories(self.created_directories) -# # Unmount the volume used for shared memory maps -# volume_name = 'shm' -# cmd = f"find /Volumes -type d -name '{volume_name}*' -print0 " \ -# "| xargs -0 umount -f" -# result = subprocess.run(cmd, stdout=subprocess.PIPE, shell=True) -# if result.returncode != 0: -# raise IOError(f'Cannot delete volume with command: {cmd} - ' -# f'{result.stdout} - {result.stderr}') + def setUp(self): + self.was_shmem_env_true = is_envvar_true( + FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED) + os.environ.update( + {FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED: '1'}) + + os_name = platform.system() + if os_name == 'Darwin': + # If an existing AppSetting is specified, save it so it can be + # restored later + self.was_shmem_dirs = get_app_setting( + UNIX_SHARED_MEMORY_DIRECTORIES + ) + self._setUpDarwin() + elif os_name == 'Linux': + self._setUpLinux() + self.file_accessor = FileAccessorFactory.create_file_accessor() + + def tearDown(self): + os_name = platform.system() + if os_name == 'Darwin': + self._tearDownDarwin() + if self.was_shmem_dirs is not None: + # If an AppSetting was set before the tests ran, restore it back + os.environ.update( + {UNIX_SHARED_MEMORY_DIRECTORIES: self.was_shmem_dirs}) + elif os_name == 'Linux': + self._tearDownLinux() + + if not self.was_shmem_env_true: + os.environ.update( + {FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED: '0'}) + + def get_new_mem_map_name(self): + return str(uuid.uuid4()) + + def get_random_bytes(self, num_bytes): + return bytearray(random.getrandbits(8) for _ in range(num_bytes)) + + def get_random_string(self, num_chars): + return 
''.join(random.choices(string.ascii_uppercase + string.digits, + k=num_chars)) + + def is_valid_uuid(self, uuid_to_test: str, version: int = 4) -> bool: + """ + Check if uuid_to_test is a valid UUID. + Reference: https://stackoverflow.com/a/33245493/3132415 + """ + try: + uuid_obj = uuid.UUID(uuid_to_test, version=version) + except ValueError: + return False + return str(uuid_obj) == uuid_to_test + + def _createSharedMemoryDirectories(self, directories): + for temp_dir in directories: + temp_dir_path = os.path.join(temp_dir, consts.UNIX_TEMP_DIR_SUFFIX) + if not os.path.exists(temp_dir_path): + os.makedirs(temp_dir_path) + + def _deleteSharedMemoryDirectories(self, directories): + for temp_dir in directories: + temp_dir_path = os.path.join(temp_dir, consts.UNIX_TEMP_DIR_SUFFIX) + shutil.rmtree(temp_dir_path) + + def _setUpLinux(self): + self._createSharedMemoryDirectories(consts.UNIX_TEMP_DIRS) + + def _tearDownLinux(self): + self._deleteSharedMemoryDirectories(consts.UNIX_TEMP_DIRS) + + def _setUpDarwin(self): + """ + Create a RAM disk on macOS. 
+ Ref: https://stackoverflow.com/a/2033417/3132415 + """ + size_in_mb = consts.MAX_BYTES_FOR_SHARED_MEM_TRANSFER / (1024 * 1024) + size = 2048 * size_in_mb + # The following command returns the name of the created disk + cmd = ['hdiutil', 'attach', '-nomount', f'ram://{size}'] + result = subprocess.run(cmd, stdout=subprocess.PIPE) + if result.returncode != 0: + raise IOError(f'Cannot create ram disk with command: {cmd} - ' + f'{result.stdout} - {result.stderr}') + disk_name = result.stdout.strip().decode() + # We create a volume on the disk created above and mount it + volume_name = 'shm' + cmd = ['diskutil', 'eraseVolume', 'HFS+', volume_name, disk_name] + result = subprocess.run(cmd, stdout=subprocess.PIPE) + if result.returncode != 0: + raise IOError(f'Cannot create volume with command: {cmd} - ' + f'{result.stdout} - {result.stderr}') + directory = f'/Volumes/{volume_name}' + self.created_directories = [directory] + # Create directories in the volume for shared memory maps + self._createSharedMemoryDirectories(self.created_directories) + # Override the AppSetting for the duration of this test so the + # FileAccessorUnix can use these directories for creating memory maps + os.environ.update( + {UNIX_SHARED_MEMORY_DIRECTORIES: ','.join(self.created_directories)} + ) + + def _tearDownDarwin(self): + # Delete the directories containing shared memory maps + self._deleteSharedMemoryDirectories(self.created_directories) + # Unmount the volume used for shared memory maps + volume_name = 'shm' + cmd = f"find /Volumes -type d -name '{volume_name}*' -print0 " \ + "| xargs -0 umount -f" + result = subprocess.run(cmd, stdout=subprocess.PIPE, shell=True) + if result.returncode != 0: + raise IOError(f'Cannot delete volume with command: {cmd} - ' + f'{result.stdout} - {result.stderr}') class _MockWebHostServicer(protos.FunctionRpcServicer): @@ -820,7 +805,6 @@ def __init__(self, proc, addr): self._addr = addr def is_healthy(self): - time.sleep(3) r = self.request('GET', '', 
no_prefix=True) return 200 <= r.status_code < 300 @@ -989,7 +973,7 @@ def popen_webhost(*, stdout, stderr, script_root=FUNCS_PATH, port=None): def start_webhost(*, script_dir=None, stdout=None): script_root = TESTS_ROOT / script_dir if script_dir else FUNCS_PATH if stdout is None: - if True: + if is_envvar_true(PYAZURE_WEBHOST_DEBUG): stdout = sys.stdout else: stdout = subprocess.DEVNULL From 34bb6d38ade525b83f6781fe88d7f2d505c848d2 Mon Sep 17 00:00:00 2001 From: hallvictoria Date: Thu, 10 Apr 2025 10:29:14 -0500 Subject: [PATCH 06/11] merge fixes --- azure_functions_worker/constants.py | 16 +- azure_functions_worker/dispatcher.py | 41 +- .../utils/app_setting_manager.py | 4 +- azure_functions_worker/version.py | 2 +- eng/ci/integration-tests.yml | 9 +- eng/ci/official-build.yml | 9 +- eng/ci/public-build.yml | 2 + eng/ci/worker-release.yml | 33 ++ eng/templates/jobs/build.yml | 2 - .../official/jobs/publish-release.yml | 368 ++++++++++++++++++ 10 files changed, 450 insertions(+), 36 deletions(-) create mode 100644 eng/ci/worker-release.yml create mode 100644 eng/templates/official/jobs/publish-release.yml diff --git a/azure_functions_worker/constants.py b/azure_functions_worker/constants.py index b916252cf..97dabbd68 100644 --- a/azure_functions_worker/constants.py +++ b/azure_functions_worker/constants.py @@ -82,14 +82,18 @@ BASE_EXT_SUPPORTED_PY_MINOR_VERSION = 8 # Appsetting to turn on OpenTelemetry support/features -# Includes turning on Azure monitor distro to send telemetry to AppInsights +# A value of "true" enables the setting PYTHON_ENABLE_OPENTELEMETRY = "PYTHON_ENABLE_OPENTELEMETRY" -PYTHON_ENABLE_OPENTELEMETRY_DEFAULT = False + +# Appsetting to turn on ApplicationInsights support/features +# A value of "true" enables the setting +PYTHON_APPLICATIONINSIGHTS_ENABLE_TELEMETRY = \ + "PYTHON_APPLICATIONINSIGHTS_ENABLE_TELEMETRY" # Appsetting to specify root logger name of logger to collect telemetry for -# Used by Azure monitor distro 
-PYTHON_AZURE_MONITOR_LOGGER_NAME = "PYTHON_AZURE_MONITOR_LOGGER_NAME" -PYTHON_AZURE_MONITOR_LOGGER_NAME_DEFAULT = "" +# Used by Azure monitor distro (Application Insights) +PYTHON_APPLICATIONINSIGHTS_LOGGER_NAME = "PYTHON_APPLICATIONINSIGHTS_LOGGER_NAME" +PYTHON_APPLICATIONINSIGHTS_LOGGER_NAME_DEFAULT = "" # Appsetting to specify AppInsights connection string -APPLICATIONINSIGHTS_CONNECTION_STRING = "APPLICATIONINSIGHTS_CONNECTION_STRING" +APPLICATIONINSIGHTS_CONNECTION_STRING = "APPLICATIONINSIGHTS_CONNECTION_STRING" \ No newline at end of file diff --git a/azure_functions_worker/dispatcher.py b/azure_functions_worker/dispatcher.py index 897a3499a..b815bef1c 100644 --- a/azure_functions_worker/dispatcher.py +++ b/azure_functions_worker/dispatcher.py @@ -26,12 +26,12 @@ APPLICATIONINSIGHTS_CONNECTION_STRING, HTTP_URI, METADATA_PROPERTIES_WORKER_INDEXED, - PYTHON_AZURE_MONITOR_LOGGER_NAME, - PYTHON_AZURE_MONITOR_LOGGER_NAME_DEFAULT, + PYTHON_APPLICATIONINSIGHTS_LOGGER_NAME, + PYTHON_APPLICATIONINSIGHTS_LOGGER_NAME_DEFAULT, PYTHON_ENABLE_DEBUG_LOGGING, PYTHON_ENABLE_INIT_INDEXING, + PYTHON_APPLICATIONINSIGHTS_ENABLE_TELEMETRY, PYTHON_ENABLE_OPENTELEMETRY, - PYTHON_ENABLE_OPENTELEMETRY_DEFAULT, PYTHON_LANGUAGE_RUNTIME, PYTHON_ROLLBACK_CWD_PATH, PYTHON_SCRIPT_FILE_NAME, @@ -103,8 +103,10 @@ def __init__(self, loop: BaseEventLoop, host: str, port: int, self._function_metadata_result = None self._function_metadata_exception = None - # Used for checking if open telemetry is enabled + # Used for checking if appinsights is enabled self._azure_monitor_available = False + # Used for checking if open telemetry is enabled + self._otel_libs_available = False self._context_api = None self._trace_context_propagator = None @@ -318,8 +320,8 @@ def initialize_azure_monitor(self): setting=APPLICATIONINSIGHTS_CONNECTION_STRING ), logger_name=get_app_setting( - setting=PYTHON_AZURE_MONITOR_LOGGER_NAME, - default_value=PYTHON_AZURE_MONITOR_LOGGER_NAME_DEFAULT + 
setting=PYTHON_APPLICATIONINSIGHTS_LOGGER_NAME, + default_value=PYTHON_APPLICATIONINSIGHTS_LOGGER_NAME_DEFAULT ), ) self._azure_monitor_available = True @@ -381,12 +383,15 @@ async def _handle__worker_init_request(self, request): constants.RPC_HTTP_TRIGGER_METADATA_REMOVED: _TRUE, constants.SHARED_MEMORY_DATA_TRANSFER: _TRUE, } - if get_app_setting(setting=PYTHON_ENABLE_OPENTELEMETRY, - default_value=PYTHON_ENABLE_OPENTELEMETRY_DEFAULT): + + if is_envvar_true(PYTHON_APPLICATIONINSIGHTS_ENABLE_TELEMETRY): self.initialize_azure_monitor() - if self._azure_monitor_available: - capabilities[constants.WORKER_OPEN_TELEMETRY_ENABLED] = _TRUE + if is_envvar_true(PYTHON_ENABLE_OPENTELEMETRY): + self._otel_libs_available = True + + if self._azure_monitor_available or self._otel_libs_available: + capabilities[constants.WORKER_OPEN_TELEMETRY_ENABLED] = _TRUE if DependencyManager.should_load_cx_dependencies(): DependencyManager.prioritize_customer_dependencies() @@ -662,7 +667,7 @@ async def _handle__invocation_request(self, request): args[name] = bindings.Out() if fi.is_async: - if self._azure_monitor_available: + if self._azure_monitor_available or self._otel_libs_available: self.configure_opentelemetry(fi_context) call_result = \ @@ -779,14 +784,14 @@ async def _handle__function_environment_reload_request(self, request): bindings.load_binding_registry() capabilities = {} - if get_app_setting( - setting=PYTHON_ENABLE_OPENTELEMETRY, - default_value=PYTHON_ENABLE_OPENTELEMETRY_DEFAULT): + if is_envvar_true(PYTHON_ENABLE_OPENTELEMETRY): + self._otel_libs_available = True + if is_envvar_true(PYTHON_APPLICATIONINSIGHTS_ENABLE_TELEMETRY): self.initialize_azure_monitor() - if self._azure_monitor_available: - capabilities[constants.WORKER_OPEN_TELEMETRY_ENABLED] = ( - _TRUE) + if self._azure_monitor_available or self._otel_libs_available: + capabilities[constants.WORKER_OPEN_TELEMETRY_ENABLED] = ( + _TRUE) if is_envvar_true(PYTHON_ENABLE_INIT_INDEXING): try: @@ -996,7 +1001,7 @@ def 
_run_sync_func(self, invocation_id, context, func, params): # invocation_id from ThreadPoolExecutor's threads. context.thread_local_storage.invocation_id = invocation_id try: - if self._azure_monitor_available: + if self._azure_monitor_available or self._otel_libs_available: self.configure_opentelemetry(context) return ExtensionManager.get_sync_invocation_wrapper(context, func)(params) diff --git a/azure_functions_worker/utils/app_setting_manager.py b/azure_functions_worker/utils/app_setting_manager.py index 3d8ccbb45..ee43ccd62 100644 --- a/azure_functions_worker/utils/app_setting_manager.py +++ b/azure_functions_worker/utils/app_setting_manager.py @@ -7,6 +7,7 @@ FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED, PYTHON_ENABLE_DEBUG_LOGGING, PYTHON_ENABLE_INIT_INDEXING, + PYTHON_APPLICATIONINSIGHTS_ENABLE_TELEMETRY, PYTHON_ENABLE_OPENTELEMETRY, PYTHON_ENABLE_WORKER_EXTENSIONS, PYTHON_ENABLE_WORKER_EXTENSIONS_DEFAULT, @@ -29,7 +30,8 @@ def get_python_appsetting_state(): FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED, PYTHON_SCRIPT_FILE_NAME, PYTHON_ENABLE_INIT_INDEXING, - PYTHON_ENABLE_OPENTELEMETRY] + PYTHON_ENABLE_OPENTELEMETRY, + PYTHON_APPLICATIONINSIGHTS_ENABLE_TELEMETRY] app_setting_states = "".join( f"{app_setting}: {current_vars[app_setting]} | " diff --git a/azure_functions_worker/version.py b/azure_functions_worker/version.py index adb421530..d91e24c1f 100644 --- a/azure_functions_worker/version.py +++ b/azure_functions_worker/version.py @@ -1,4 +1,4 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. 
-VERSION = '4.34.0' +VERSION = '4.35.0' diff --git a/eng/ci/integration-tests.yml b/eng/ci/integration-tests.yml index 6f8f69d9e..40594ab27 100644 --- a/eng/ci/integration-tests.yml +++ b/eng/ci/integration-tests.yml @@ -47,7 +47,8 @@ extends: dependsOn: [] jobs: - template: /eng/templates/official/jobs/ci-e2e-tests.yml@self - - stage: RunLCTests - dependsOn: [] - jobs: - - template: /eng/templates/official/jobs/ci-lc-tests.yml@self +# Skipping consumption tests till pipeline is fixed +# - stage: RunLCTests +# dependsOn: [] +# jobs: +# - template: /eng/templates/official/jobs/ci-lc-tests.yml@self diff --git a/eng/ci/official-build.yml b/eng/ci/official-build.yml index 568fdf16b..7a555e8a4 100644 --- a/eng/ci/official-build.yml +++ b/eng/ci/official-build.yml @@ -72,7 +72,8 @@ extends: dependsOn: Build jobs: - template: /eng/templates/official/jobs/ci-docker-dedicated-tests.yml@self - - stage: RunLinuxConsumptionTests - dependsOn: Build - jobs: - - template: /eng/templates/official/jobs/ci-lc-tests.yml@self +# Skipping consumption tests till pipeline is fixed +# - stage: RunLinuxConsumptionTests +# dependsOn: Build +# jobs: +# - template: /eng/templates/official/jobs/ci-lc-tests.yml@self diff --git a/eng/ci/public-build.yml b/eng/ci/public-build.yml index a8456b721..df27d5842 100644 --- a/eng/ci/public-build.yml +++ b/eng/ci/public-build.yml @@ -52,6 +52,8 @@ extends: - stage: Build jobs: - template: /eng/templates/jobs/build.yml@self + # Skip the build stage for SDK and Extensions release branches. 
This stage will fail because pyproject.toml contains the updated (and unreleased) library version + condition: and(eq(variables.isSdkRelease, false), eq(variables.isExtensionsRelease, false), eq(variables['USETESTPYTHONSDK'], false), eq(variables['USETESTPYTHONEXTENSIONS'], false)) - stage: RunUnitTests dependsOn: Build jobs: diff --git a/eng/ci/worker-release.yml b/eng/ci/worker-release.yml new file mode 100644 index 000000000..08e58b226 --- /dev/null +++ b/eng/ci/worker-release.yml @@ -0,0 +1,33 @@ +pr: none + +resources: + repositories: + - repository: 1es + type: git + name: 1ESPipelineTemplates/1ESPipelineTemplates + ref: refs/tags/release + - repository: eng + type: git + name: engineering + ref: refs/tags/release + +variables: + - name: codeql.excludePathPatterns + value: deps/,build/ + +extends: + template: v1/1ES.Official.PipelineTemplate.yml@1es + parameters: + pool: + name: 1es-pool-azfunc + image: 1es-windows-2022 + os: windows + sdl: + codeSignValidation: + enabled: true + break: true + + stages: + - stage: Release + jobs: + - template: /eng/templates/official/jobs/publish-release.yml@self \ No newline at end of file diff --git a/eng/templates/jobs/build.yml b/eng/templates/jobs/build.yml index dd422f4fa..132e4fb6e 100644 --- a/eng/templates/jobs/build.yml +++ b/eng/templates/jobs/build.yml @@ -20,8 +20,6 @@ jobs: python -m pip install --upgrade pip python -m pip install . displayName: 'Build python worker' - # Skip the build stage for SDK and Extensions release branches. 
This stage will fail because pyproject.toml contains the updated (and unreleased) library version - condition: and(eq(variables.isSdkRelease, false), eq(variables.isExtensionsRelease, false), eq(variables['USETESTPYTHONSDK'], false), eq(variables['USETESTPYTHONEXTENSIONS'], false)) - bash: | pip install pip-audit pip-audit -r requirements.txt diff --git a/eng/templates/official/jobs/publish-release.yml b/eng/templates/official/jobs/publish-release.yml new file mode 100644 index 000000000..e90d40c57 --- /dev/null +++ b/eng/templates/official/jobs/publish-release.yml @@ -0,0 +1,368 @@ +jobs: + +- job: "CreateReleaseBranch" + displayName: 'Create Release Branch' + pool: + name: 1es-pool-azfunc + image: 1es-ubuntu-22.04 + os: linux + steps: + - powershell: | + $githubToken = "$(GithubPat)" + $newWorkerVersion = "$(NewWorkerVersion)" + $versionFile = "azure_functions_worker/version.py" + + if($newWorkerVersion -match '(\d)+.(\d)+.(\d)+') { + # Create GitHub credential + git config --global user.name "AzureFunctionsPython" + git config --global user.email "azfunc@microsoft.com" + + # Heading to Artifact Repository + Write-Host "Operating based on $stagingDirectory/azure-functions-python-worker" + git checkout -b "release/$newWorkerVersion" + + # Change azure_functions_worker/version.py version + Write-Host "Change version number in version.py to $newWorkerVersion" + ((Get-Content $versionFile) -replace "VERSION = '(\d+).(\d+).*'", "VERSION = '$newWorkerVersion'" -join "`n") + "`n" | Set-Content -NoNewline $versionFile + git add $versionFile + git commit -m "build: update Python Worker Version to $newWorkerVersion" + + # Create release branch release/X.Y.Z + Write-Host "Creating release branch release/$newWorkerVersion" + git push --repo="https://$githubToken@github.com/Azure/azure-functions-python-worker.git" + } else { + Write-Host "NewWorkerVersion $newWorkerVersion is malformed (example: 1.1.8)" + exit -1 + } + displayName: 'Push release/x.y.z' + +- job: 
"CheckReleaseBranch" + dependsOn: ['CreateReleaseBranch'] + displayName: '(Manual) Check Release Branch' + pool: server + steps: + - task: ManualValidation@1 + displayName: '(Optional) Modify release/x.y.z branch' + inputs: + notifyUsers: '' # No email notifications sent + instructions: | + 1. Check if the https://github.com/Azure/azure-functions-python-worker/tree/release/$(NewWorkerVersion) passes all unit tests. + 2. If not, modify the release/$(NewWorkerVersion) branch. + 3. Ensure release/$(NewWorkerVersion) branch contains all necessary changes since it will be propagated to v4 workers. +- job: "CreateReleaseTag" + dependsOn: ['CheckReleaseBranch'] + steps: + - powershell: | + $githubToken = "$(GithubPat)" + $newWorkerVersion = "$(NewWorkerVersion)" + + if($newWorkerVersion -match '(\d)+.(\d)+.(\d)+') { + # Create GitHub credential + git config --global user.name "AzureFunctionsPython" + git config --global user.email "azfunc@microsoft.com" + + # Clone Repository + git clone https://$githubToken@github.com/Azure/azure-functions-python-worker + Write-Host "Cloned azure-functions-python-worker into local" + Set-Location "azure-functions-python-worker" + git checkout "origin/release/$newWorkerVersion" + + # Create release tag X.Y.Z + Write-Host "Creating release tag $newWorkerVersion" + git tag -a "$newWorkerVersion" -m "$newWorkerVersion" + + # Push tag to remote + git push origin $newWorkerVersion + } else { + Write-Host "NewWorkerVersion $newWorkerVersion is malformed (example: 1.1.8)" + exit -1 + } + displayName: 'Create and push release tag x.y.z' + - powershell: | + $githubUser = "$(GithubUser)" + $githubToken = "$(GithubPat)" + $newWorkerVersion = "$(NewWorkerVersion)" + + if($newWorkerVersion -match '(\d)+.(\d)+.(\d)+') { + # Create GitHub credential + $credential = [System.Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes("${githubUser}:${githubToken}")) + + # Create Release Note + Write-Host "Creating release note in GitHub" + 
$body = (@{tag_name="$newWorkerVersion";name="Release $newWorkerVersion";body="- Fill in Release Note Here";draft=$true} | ConvertTo-Json -Compress) + $response = Invoke-WebRequest -Headers @{"Cache-Control"="no-cache";"Content-Type"="application/json";"Authorization"="Basic $credential"} -Method Post -Body "$body" -Uri "https://api.github.com/repos/Azure/azure-functions-python-worker/releases" + + # Return Value + if ($response.StatusCode -ne 201) { + Write-Host "Failed to create release note in GitHub" + exit -1 + } + + $draftUrl = $response | ConvertFrom-Json | Select -expand url + Write-Host "Release draft created in $draftUrl" + } else { + Write-Host "NewWorkerVersion $newWorkerVersion is malformed (example: 1.1.8)" + exit -1 + } + displayName: 'Create GitHub release draft' + +- job: "CheckGitHubRelease" + dependsOn: ['CreateReleaseTag'] + displayName: '(Manual) Check GitHub release note' + pool: server + steps: + - task: ManualValidation@1 + displayName: 'Write GitHub release note' + inputs: + notifyUsers: '' + instructions: 'Please head to https://github.com/Azure/azure-functions-python-worker/releases to finish the release note' + +- job: "WaitForPythonWorkerBuild" + dependsOn: ['CheckGitHubRelease'] + displayName: '(Manual) Wait For Python Worker Build' + pool: server + steps: + - task: ManualValidation@1 + displayName: 'Wait For Python Worker Build' + inputs: + notifyUsers: '' + instructions: 'Ensure the build of release/4.x.y.z finishes in https://dev.azure.com/azfunc/internal/_build?definitionId=652 and verify that the PackageWorkers task has completed.' 
+ + +- job: "PublishNuget" + dependsOn: ['WaitForPythonWorkerBuild'] + displayName: 'Publish Nuget' + templateContext: + outputs: + - output: nuget + packagesToPush: '$(Pipeline.Workspace)/PythonWorkerArtifact/**/*.nupkg;!$(Pipeline.Workspace)/PythonWorkerArtifact/**/*.symbols.nupkg' + publishVstsFeed: 'e6a70c92-4128-439f-8012-382fe78d6396/eb652719-f36a-4e78-8541-e13a3cd655f9' + allowPackageConflicts: true + packageParentPath: '$(Pipeline.Workspace)' + steps: + - task: DownloadPipelineArtifact@2 + displayName: 'Download Python Worker release/4.x.y.z Artifact' + inputs: + buildType: specific + project: '3f99e810-c336-441f-8892-84983093ad7f' + definition: 652 + specificBuildWithTriggering: true + buildVersionToDownload: latestFromBranch + branchName: 'refs/heads/release/$(NewWorkerVersion)' + allowPartiallySucceededBuilds: true + allowFailedBuilds: true + targetPath: '$(Pipeline.Workspace)/PythonWorkerArtifact' + + +- job: "CheckNugetPackageContent" + dependsOn: ['PublishNuget'] + displayName: '(Manual) Check Nuget Package Content' + pool: server + steps: + - task: ManualValidation@1 + displayName: 'Check nuget package content' + inputs: + notifyUsers: '' + instructions: | + Please check the latest release package at + https://azfunc.visualstudio.com/Azure%20Functions/_artifacts/feed/AzureFunctionsRelease/NuGet/Microsoft.Azure.Functions.PythonWorker/overview +- job: "HostRepoPRs" + dependsOn: ['CheckNugetPackageContent'] + displayName: 'Create Host PRs' + steps: + - powershell: | + $githubUser = "$(GithubUser)" + $githubToken = "$(GithubPat)" + $newWorkerVersion = "$(NewWorkerVersion)" + $newBranch = "python/$newWorkerVersion" + + if($newWorkerVersion -match '(\d)+.(\d)+.(\d)+') { + # Create GitHub credential + git config --global user.name "AzureFunctionsPython" + git config --global user.email "azfunc@microsoft.com" + + # Create GitHub credential + $credential = [System.Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes("${githubUser}:${githubToken}")) 
+ + # Clone Repository + git clone https://$githubToken@github.com/Azure/azure-functions-host + Write-Host "Cloned azure-functions-host into local and checkout $newBranch branch" + Set-Location "azure-functions-host" + git checkout -b $newBranch "origin/dev" + + # Modify Python Worker Version in eng\build\python.props + Write-Host "Replacing eng\build\python.props" + ((Get-Content eng\build\Workers.Python.props) -replace "PythonWorker`" Version=`"(\d)+.(\d)+.(\d)+.?(\d)*`"","PythonWorker`" Version=`"$newWorkerVersion`"" -join "`n") +"`n" | Set-Content -NoNewline eng\build\Workers.Python.props + + # Modify Python Worker Version in test\WebJobs.Script.Tests\WebJobs.Script.Tests.csproj + Write-Host "Replacing test\WebJobs.Script.Tests\WebJobs.Script.Tests.csproj" + ((Get-Content test\WebJobs.Script.Tests\WebJobs.Script.Tests.csproj) -replace "PythonWorker`" Version=`"(\d)+.(\d)+.(\d)+.?(\d)*`"","PythonWorker`" Version=`"$newWorkerVersion`"" -join "`n") + "`n" | Set-Content -NoNewline test\WebJobs.Script.Tests\WebJobs.Script.Tests.csproj + + # Modify release_notes.md + Write-Host "Adding a new entry in release_note.md" + ((Get-Content release_notes.md) -replace "-->","$&`n- Update Python Worker Version to [$newWorkerVersion](https://github.com/Azure/azure-functions-python-worker/releases/tag/$newWorkerVersion)" -join "`n") + "`n" | Set-Content -NoNewline release_notes.md + + # Commit Python Version + Write-Host "Pushing $newBranch to host repo" + git add eng\build\Workers.Python.props + git add test\WebJobs.Script.Tests\WebJobs.Script.Tests.csproj + git add release_notes.md + git commit -m "Update Python Worker Version to $newWorkerVersion" + git push origin $newBranch + + # Create PR + Write-Host "Creating PR draft in GitHub" + $prTemplateContent = @" + ### Issue describing the changes in this PR + Update Python Worker Version to $newWorkerVersion + Python Worker Release note 
[$newWorkerVersion](https://github.com/Azure/azure-functions-python-worker/releases/tag/$newWorkerVersion) + ### Pull request checklist + **IMPORTANT**: Currently, changes must be backported to the `in-proc` branch to be included in Core Tools and non-Flex deployments. + * [ ] Backporting to the `in-proc` branch is not required + * [x]Otherwise: Link to backporting PR + * [x] My changes **do not** require documentation changes + * [ ] Otherwise: Documentation issue linked to PR + * [ ] My changes **should not** be added to the release notes for the next release + * [x] Otherwise: I've added my notes to `release_notes.md` + * [x] My changes **do not** need to be backported to a previous version + * [ ] Otherwise: Backport tracked by issue/PR #issue_or_pr + * [x] My changes **do not** require diagnostic events changes + * Otherwise: I have added/updated all related diagnostic events and their documentation (Documentation issue linked to PR) + * [x] I have added all required tests (Unit tests, E2E tests) + "@ + $body = (@{head="$newBranch";base="dev";body=$prTemplateContent;draft=$true;maintainer_can_modify=$true;title="Update Python Worker Version to $newWorkerVersion"} | ConvertTo-Json -Compress)$response = Invoke-WebRequest -Headers @{"Cache-Control"="no-cache";"Content-Type"="application/json";"Authorization"="Basic $credential";"Accept"="application/vnd.github.v3+json"} -Method Post -Body "$body" -Uri "https://api.github.com/repos/Azure/azure-functions-host/pulls" + + # Return Value + if ($response.StatusCode -ne 201) { + Write-Host "Failed to create a PR in Azure Functions Host" + exit -1 + } + + $draftUrl = $response | ConvertFrom-Json | Select -expand url + Write-Host "PR draft created in $draftUrl" + } else { + Write-Host "NewWorkerVersion $newWorkerVersion is malformed (example: 1.1.8)" + exit -1 + } + displayName: 'Create Host PR for dev' + - powershell: | + $githubUser = "$(GithubUser)" + $githubToken = "$(GithubPat)" + $newWorkerVersion = 
"$(NewWorkerVersion)" + $newBranch = "python/$newWorkerVersion" + + if($newWorkerVersion -match '(\d)+.(\d)+.(\d)+') { + # Create GitHub credential + git config --global user.name "AzureFunctionsPython" + git config --global user.email "azfunc@microsoft.com" + + # Create GitHub credential + $credential = [System.Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes("${githubUser}:${githubToken}")) + + # Clone Repository + git clone https://$githubToken@github.com/Azure/azure-functions-host + Write-Host "Cloned azure-functions-host into local and checkout $newBranch branch" + Set-Location "azure-functions-host" + git checkout -b backport/$newBranch "origin/in-proc" + + # Modify Python Worker Version in eng\build\python.props + Write-Host "Replacing eng\build\python.props" + ((Get-Content eng\build\Workers.Python.props) -replace "PythonWorker`" Version=`"(\d)+.(\d)+.(\d)+.?(\d)*`"","PythonWorker`" Version=`"$newWorkerVersion`"" -join "`n") +"`n" | Set-Content -NoNewline eng\build\Workers.Python.props + + # Modify Python Worker Version in test\WebJobs.Script.Tests\WebJobs.Script.Tests.csproj + Write-Host "Replacing test\WebJobs.Script.Tests\WebJobs.Script.Tests.csproj" + ((Get-Content test\WebJobs.Script.Tests\WebJobs.Script.Tests.csproj) -replace "PythonWorker`" Version=`"(\d)+.(\d)+.(\d)+.?(\d)*`"","PythonWorker`" Version=`"$newWorkerVersion`"" -join "`n") + "`n" | Set-Content -NoNewline test\WebJobs.Script.Tests\WebJobs.Script.Tests.csproj + + # Modify release_notes.md + Write-Host "Adding a new entry in release_note.md" + ((Get-Content release_notes.md) -replace "-->","$&`n- Update Python Worker Version to [$newWorkerVersion](https://github.com/Azure/azure-functions-python-worker/releases/tag/$newWorkerVersion)" -join "`n") + "`n" | Set-Content -NoNewline release_notes.md + + # Commit Python Version + Write-Host "Pushing $newBranch to host repo" + git add eng\build\Workers.Python.props + git add 
test\WebJobs.Script.Tests\WebJobs.Script.Tests.csproj + git add release_notes.md + git commit -m "[Backport] Update Python Worker Version to $newWorkerVersion" + git push origin $newBranch + + # Create PR + Write-Host "Creating PR draft in GitHub" + $prTemplateContent = @" + ### Issue describing the changes in this PR + [Backport] Update Python Worker Version to $newWorkerVersion + Python Worker Release note [$newWorkerVersion](https://github.com/Azure/azure-functions-python-worker/releases/tag/$newWorkerVersion) + ### Pull request checklist + **IMPORTANT**: Currently, changes must be backported to the `in-proc` branch to be included in Core Tools and non-Flex deployments. + * [ ] Backporting to the `in-proc` branch is not required + * [ ]Otherwise: Link to backporting PR + * [x] My changes **do not** require documentation changes + * [ ] Otherwise: Documentation issue linked to PR + * [ ] My changes **should not** be added to the release notes for the next release + * [x] Otherwise: I've added my notes to `release_notes.md` + * [x] My changes **do not** need to be backported to a previous version + * [ ] Otherwise: Backport tracked by issue/PR #issue_or_pr + * [x] My changes **do not** require diagnostic events changes + * Otherwise: I have added/updated all related diagnostic events and their documentation (Documentation issue linked to PR) + * [x] I have added all required tests (Unit tests, E2E tests) + "@ + $body = (@{head="$newBranch";base="in-proc";body=$prTemplateContent;draft=$true;maintainer_can_modify=$true;title="Update Python Worker Version to $newWorkerVersion"} | ConvertTo-Json -Compress)$response = Invoke-WebRequest -Headers @{"Cache-Control"="no-cache";"Content-Type"="application/json";"Authorization"="Basic $credential";"Accept"="application/vnd.github.v3+json"} -Method Post -Body "$body" -Uri "https://api.github.com/repos/Azure/azure-functions-host/pulls" + + # Return Value + if ($response.StatusCode -ne 201) { + Write-Host "Failed to create 
a PR in Azure Functions Host" + exit -1 + } + + $draftUrl = $response | ConvertFrom-Json | Select -expand url + Write-Host "PR draft created in $draftUrl" + } else { + Write-Host "NewWorkerVersion $newWorkerVersion is malformed (example: 1.1.8)" + exit -1 + } + displayName: 'Create Host PR for in-proc' +- job: "CheckHostPRs" + dependsOn: ['HostRepoPRs'] + displayName: '(Manual) Check Host PRs' + pool: server + steps: + - task: ManualValidation@1 + displayName: 'Finish Host PRs' + inputs: + notifyUsers: '' + instructions: | + Go to https://github.com/Azure/azure-functions-host/pulls and finish the host v4 PR. + If the content misses something, checkout "python/x.y.z" from remote and make new commits to it. +- job: "MergeToMainAndDev" + dependsOn: ['CheckHostPRs'] + displayName: 'Merge release/x.y.z back to main & dev' + steps: + - powershell: | + $githubToken = "$(GithubPat)" + $newWorkerVersion = "$(NewWorkerVersion)" + + if($newWorkerVersion -match '(\d)+.(\d)+.(\d)+') { + # Create GitHub credential + git config --global user.name "AzureFunctionsPython" + git config --global user.email "azfunc@microsoft.com" + + # Clone Repository + git clone https://$githubToken@github.com/Azure/azure-functions-python-worker + Write-Host "Cloned azure-functions-python-worker into local" + Set-Location "azure-functions-python-worker" + + # Merge back to main + Write-Host "Merging release/$newWorkerVersion back to main" + git checkout main + git merge "origin/release/$newWorkerVersion" + git push origin main + + # Merge back to dev + Write-Host "Merging release/$newWorkerVersion back to dev" + git checkout dev + git merge "origin/release/$newWorkerVersion" + git push origin dev + } else { + Write-Host "NewWorkerVersion $newWorkerVersion is malformed (example: 1.1.8)" + exit -1 + } + displayName: 'Merge release/x.y.z back to main & dev' \ No newline at end of file From 9f61768810ef0bcacb0a4e59fbc7d701b416356f Mon Sep 17 00:00:00 2001 From: hallvictoria Date: Thu, 10 Apr 2025 
10:32:40 -0500 Subject: [PATCH 07/11] merge fixes --- azure_functions_worker/constants.py | 2 +- eng/ci/public-build.yml | 4 ++-- eng/ci/worker-release.yml | 2 +- eng/templates/jobs/build.yml | 2 +- .../official/jobs/publish-release.yml | 20 ++++++++++++++++++- 5 files changed, 24 insertions(+), 6 deletions(-) diff --git a/azure_functions_worker/constants.py b/azure_functions_worker/constants.py index 97dabbd68..6110752e2 100644 --- a/azure_functions_worker/constants.py +++ b/azure_functions_worker/constants.py @@ -96,4 +96,4 @@ PYTHON_APPLICATIONINSIGHTS_LOGGER_NAME_DEFAULT = "" # Appsetting to specify AppInsights connection string -APPLICATIONINSIGHTS_CONNECTION_STRING = "APPLICATIONINSIGHTS_CONNECTION_STRING" \ No newline at end of file +APPLICATIONINSIGHTS_CONNECTION_STRING = "APPLICATIONINSIGHTS_CONNECTION_STRING" diff --git a/eng/ci/public-build.yml b/eng/ci/public-build.yml index df27d5842..470a94f9c 100644 --- a/eng/ci/public-build.yml +++ b/eng/ci/public-build.yml @@ -52,8 +52,8 @@ extends: - stage: Build jobs: - template: /eng/templates/jobs/build.yml@self - # Skip the build stage for SDK and Extensions release branches. This stage will fail because pyproject.toml contains the updated (and unreleased) library version - condition: and(eq(variables.isSdkRelease, false), eq(variables.isExtensionsRelease, false), eq(variables['USETESTPYTHONSDK'], false), eq(variables['USETESTPYTHONEXTENSIONS'], false)) + # Skip the build stage for SDK and Extensions release branches. 
This stage will fail because pyproject.toml contains the updated (and unreleased) library version + condition: and(eq(variables.isSdkRelease, false), eq(variables.isExtensionsRelease, false), eq(variables['USETESTPYTHONSDK'], false), eq(variables['USETESTPYTHONEXTENSIONS'], false)) - stage: RunUnitTests dependsOn: Build jobs: diff --git a/eng/ci/worker-release.yml b/eng/ci/worker-release.yml index 08e58b226..987569c61 100644 --- a/eng/ci/worker-release.yml +++ b/eng/ci/worker-release.yml @@ -30,4 +30,4 @@ extends: stages: - stage: Release jobs: - - template: /eng/templates/official/jobs/publish-release.yml@self \ No newline at end of file + - template: /eng/templates/official/jobs/publish-release.yml@self diff --git a/eng/templates/jobs/build.yml b/eng/templates/jobs/build.yml index 132e4fb6e..c5b989c7c 100644 --- a/eng/templates/jobs/build.yml +++ b/eng/templates/jobs/build.yml @@ -23,4 +23,4 @@ jobs: - bash: | pip install pip-audit pip-audit -r requirements.txt - displayName: 'Run vulnerability scan' \ No newline at end of file + displayName: 'Run vulnerability scan' diff --git a/eng/templates/official/jobs/publish-release.yml b/eng/templates/official/jobs/publish-release.yml index e90d40c57..4c04d5779 100644 --- a/eng/templates/official/jobs/publish-release.yml +++ b/eng/templates/official/jobs/publish-release.yml @@ -49,6 +49,7 @@ jobs: 1. Check if the https://github.com/Azure/azure-functions-python-worker/tree/release/$(NewWorkerVersion) passes all unit tests. 2. If not, modify the release/$(NewWorkerVersion) branch. 3. Ensure release/$(NewWorkerVersion) branch contains all necessary changes since it will be propagated to v4 workers. 
+ - job: "CreateReleaseTag" dependsOn: ['CheckReleaseBranch'] steps: @@ -166,6 +167,7 @@ jobs: instructions: | Please check the latest release package at https://azfunc.visualstudio.com/Azure%20Functions/_artifacts/feed/AzureFunctionsRelease/NuGet/Microsoft.Azure.Functions.PythonWorker/overview + - job: "HostRepoPRs" dependsOn: ['CheckNugetPackageContent'] displayName: 'Create Host PRs' @@ -214,10 +216,15 @@ jobs: Write-Host "Creating PR draft in GitHub" $prTemplateContent = @" ### Issue describing the changes in this PR + Update Python Worker Version to $newWorkerVersion + Python Worker Release note [$newWorkerVersion](https://github.com/Azure/azure-functions-python-worker/releases/tag/$newWorkerVersion) + ### Pull request checklist + **IMPORTANT**: Currently, changes must be backported to the `in-proc` branch to be included in Core Tools and non-Flex deployments. + * [ ] Backporting to the `in-proc` branch is not required * [x]Otherwise: Link to backporting PR * [x] My changes **do not** require documentation changes @@ -229,7 +236,9 @@ jobs: * [x] My changes **do not** require diagnostic events changes * Otherwise: I have added/updated all related diagnostic events and their documentation (Documentation issue linked to PR) * [x] I have added all required tests (Unit tests, E2E tests) + "@ + $body = (@{head="$newBranch";base="dev";body=$prTemplateContent;draft=$true;maintainer_can_modify=$true;title="Update Python Worker Version to $newWorkerVersion"} | ConvertTo-Json -Compress)$response = Invoke-WebRequest -Headers @{"Cache-Control"="no-cache";"Content-Type"="application/json";"Authorization"="Basic $credential";"Accept"="application/vnd.github.v3+json"} -Method Post -Body "$body" -Uri "https://api.github.com/repos/Azure/azure-functions-host/pulls" # Return Value @@ -289,10 +298,15 @@ jobs: Write-Host "Creating PR draft in GitHub" $prTemplateContent = @" ### Issue describing the changes in this PR + [Backport] Update Python Worker Version to 
$newWorkerVersion + Python Worker Release note [$newWorkerVersion](https://github.com/Azure/azure-functions-python-worker/releases/tag/$newWorkerVersion) + ### Pull request checklist + **IMPORTANT**: Currently, changes must be backported to the `in-proc` branch to be included in Core Tools and non-Flex deployments. + * [ ] Backporting to the `in-proc` branch is not required * [ ]Otherwise: Link to backporting PR * [x] My changes **do not** require documentation changes @@ -304,7 +318,9 @@ jobs: * [x] My changes **do not** require diagnostic events changes * Otherwise: I have added/updated all related diagnostic events and their documentation (Documentation issue linked to PR) * [x] I have added all required tests (Unit tests, E2E tests) + "@ + $body = (@{head="$newBranch";base="in-proc";body=$prTemplateContent;draft=$true;maintainer_can_modify=$true;title="Update Python Worker Version to $newWorkerVersion"} | ConvertTo-Json -Compress)$response = Invoke-WebRequest -Headers @{"Cache-Control"="no-cache";"Content-Type"="application/json";"Authorization"="Basic $credential";"Accept"="application/vnd.github.v3+json"} -Method Post -Body "$body" -Uri "https://api.github.com/repos/Azure/azure-functions-host/pulls" # Return Value @@ -320,6 +336,7 @@ jobs: exit -1 } displayName: 'Create Host PR for in-proc' + - job: "CheckHostPRs" dependsOn: ['HostRepoPRs'] displayName: '(Manual) Check Host PRs' @@ -332,6 +349,7 @@ jobs: instructions: | Go to https://github.com/Azure/azure-functions-host/pulls and finish the host v4 PR. If the content misses something, checkout "python/x.y.z" from remote and make new commits to it. 
+ - job: "MergeToMainAndDev" dependsOn: ['CheckHostPRs'] displayName: 'Merge release/x.y.z back to main & dev' @@ -365,4 +383,4 @@ jobs: Write-Host "NewWorkerVersion $newWorkerVersion is malformed (example: 1.1.8)" exit -1 } - displayName: 'Merge release/x.y.z back to main & dev' \ No newline at end of file + displayName: 'Merge release/x.y.z back to main & dev' From e59792f06327be8229a783d39cf75c17a19ca06c Mon Sep 17 00:00:00 2001 From: hallvictoria Date: Thu, 10 Apr 2025 10:36:50 -0500 Subject: [PATCH 08/11] merge fixes --- proxy_worker/utils/dependency.py | 11 +-- pyproject.toml | 6 +- python/prodV4/worker.config.json | 5 +- python/test/worker.config.json | 5 +- tests/unittests/test_opentelemetry.py | 125 ++++++++++++++++++++++++-- 5 files changed, 135 insertions(+), 17 deletions(-) diff --git a/proxy_worker/utils/dependency.py b/proxy_worker/utils/dependency.py index 536e5595c..e80613815 100644 --- a/proxy_worker/utils/dependency.py +++ b/proxy_worker/utils/dependency.py @@ -69,7 +69,7 @@ def is_in_linux_consumption(cls): @classmethod def should_load_cx_dependencies(cls): """ - Customer dependencies should be loaded when + Customer dependencies should be loaded when 1) App is a dedicated app 2) App is linux consumption but not in placeholder mode. 
This can happen when the worker restarts for any reason @@ -131,14 +131,14 @@ def prioritize_customer_dependencies(cls, cx_working_dir=None): # Try to get the latest customer's dependency path cx_deps_path: str = cls._get_cx_deps_path() - + if not cx_deps_path: - cx_deps_path = cls.cx_deps_path - + cx_deps_path = cls.cx_deps_path + logger.info( 'Applying prioritize_customer_dependencies: ' 'worker_dependencies_path: %s, customer_dependencies_path: %s, ' - 'working_directory: %s, Linux Consumption: %s, Placeholder: %s, sys.path: %s', + 'working_directory: %s, Linux Consumption: %s, Placeholder: %s, sys.path: %s', cls.worker_deps_path, cx_deps_path, working_directory, DependencyManager.is_in_linux_consumption(), is_envvar_true("WEBSITE_PLACEHOLDER_MODE"), sys.path) @@ -148,6 +148,7 @@ def prioritize_customer_dependencies(cls, cx_working_dir=None): cls._add_to_sys_path(cls.cx_deps_path, True) cls._add_to_sys_path(working_directory, False) + logger.info(f'Finished prioritize_customer_dependencies: {sys.path}') diff --git a/pyproject.toml b/pyproject.toml index d0d9b079f..ce8a5063b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,6 +50,7 @@ Repository = "https://github.com/Azure/azure-functions-python-worker" dev = [ "azure-eventhub", # Used for EventHub E2E tests "azure-functions-durable", # Used for Durable E2E tests + "azure-monitor-opentelemetry; python_version >= '3.8'", # Used for Azure Monitor unit tests "flask", "fastapi~=0.103.2", "pydantic", @@ -60,6 +61,7 @@ dev = [ "requests==2.*", "coverage", "pytest-sugar", + "opentelemetry-api; python_version >= '3.8'", # Used for OpenTelemetry unit tests "pytest-cov", "pytest-xdist", "pytest-randomly", @@ -78,7 +80,7 @@ dev = [ "cryptography" ] test-http-v2 = [ - "azurefunctions-extensions-http-fastapi==1.0.0b1", + "azurefunctions-extensions-http-fastapi==1.0.0b2", "ujson", "orjson" ] @@ -87,7 +89,7 @@ test-deferred-bindings = [ ] [build-system] -requires = ["setuptools>=42", "wheel"] +requires = 
["setuptools>=62", "wheel"] build-backend = "setuptools.build_meta" diff --git a/python/prodV4/worker.config.json b/python/prodV4/worker.config.json index 98f1e56db..d01e5fe1c 100644 --- a/python/prodV4/worker.config.json +++ b/python/prodV4/worker.config.json @@ -8,10 +8,11 @@ "extensions":[".py"], "defaultExecutablePath":"python", "defaultWorkerPath":"%FUNCTIONS_WORKER_RUNTIME_VERSION%/{os}/{architecture}/worker.py", - "workerIndexing": "true" + "workerIndexing": "true", + "arguments": ["-X no_debug_ranges"] }, "processOptions": { "initializationTimeout": "00:02:00", "environmentReloadTimeout": "00:02:00" } -} \ No newline at end of file +} diff --git a/python/test/worker.config.json b/python/test/worker.config.json index a0b0ad3fe..f778e45f3 100644 --- a/python/test/worker.config.json +++ b/python/test/worker.config.json @@ -4,10 +4,11 @@ "extensions":[".py"], "defaultExecutablePath":"python", "defaultWorkerPath":"worker.py", - "workerIndexing": "true" + "workerIndexing": "true", + "arguments": ["-X no_debug_ranges"] }, "processOptions": { "initializationTimeout": "00:02:00", "environmentReloadTimeout": "00:02:00" } -} \ No newline at end of file +} diff --git a/tests/unittests/test_opentelemetry.py b/tests/unittests/test_opentelemetry.py index b26334bdf..3f560382e 100644 --- a/tests/unittests/test_opentelemetry.py +++ b/tests/unittests/test_opentelemetry.py @@ -1,6 +1,9 @@ import asyncio import os +import sys import unittest + +from unittest import skipIf from unittest.mock import MagicMock, patch from tests.unittests.test_dispatcher import FUNCTION_APP_DIRECTORY @@ -9,6 +12,8 @@ from azure_functions_worker import protos +@skipIf(sys.version_info.minor == 7, + "Packages are only supported for 3.8+") class TestOpenTelemetry(unittest.TestCase): def setUp(self): @@ -23,8 +28,9 @@ def test_update_opentelemetry_status_import_error(self): # Patch the built-in import mechanism with patch('builtins.__import__', side_effect=ImportError): 
self.dispatcher.update_opentelemetry_status() - # Verify that otel_libs_available is set to False due to ImportError - self.assertFalse(self.dispatcher._azure_monitor_available) + # Verify that context variables are None due to ImportError + self.assertIsNone(self.dispatcher._context_api) + self.assertIsNone(self.dispatcher._trace_context_propagator) @patch('builtins.__import__') def test_update_opentelemetry_status_success( @@ -54,12 +60,12 @@ def test_initialize_azure_monitor_import_error( with patch('builtins.__import__', side_effect=ImportError): self.dispatcher.initialize_azure_monitor() mock_update_ot.assert_called_once() - # Verify that otel_libs_available is set to False due to ImportError + # Verify that azure_monitor_available is set to False due to ImportError self.assertFalse(self.dispatcher._azure_monitor_available) - @patch.dict(os.environ, {'PYTHON_ENABLE_OPENTELEMETRY': 'true'}) + @patch.dict(os.environ, {'PYTHON_APPLICATIONINSIGHTS_ENABLE_TELEMETRY': 'true'}) @patch('builtins.__import__') - def test_init_request_otel_capability_enabled_app_setting( + def test_init_request_initialize_azure_monitor_enabled_app_setting( self, mock_imports, ): @@ -78,13 +84,45 @@ def test_init_request_otel_capability_enabled_app_setting( self.assertEqual(init_response.worker_init_response.result.status, protos.StatusResult.Success) + # Verify azure_monitor_available is set to True + self.assertTrue(self.dispatcher._azure_monitor_available) # Verify that WorkerOpenTelemetryEnabled capability is set to _TRUE capabilities = init_response.worker_init_response.capabilities self.assertIn("WorkerOpenTelemetryEnabled", capabilities) self.assertEqual(capabilities["WorkerOpenTelemetryEnabled"], "true") @patch("azure_functions_worker.dispatcher.Dispatcher.initialize_azure_monitor") - def test_init_request_otel_capability_disabled_app_setting( + def test_init_request_initialize_azure_monitor_default_app_setting( + self, + mock_initialize_azmon, + ): + + init_request = 
protos.StreamingMessage( + worker_init_request=protos.WorkerInitRequest( + host_version="2.3.4", + function_app_directory=str(FUNCTION_APP_DIRECTORY) + ) + ) + + init_response = self.loop.run_until_complete( + self.dispatcher._handle__worker_init_request(init_request)) + + self.assertEqual(init_response.worker_init_response.result.status, + protos.StatusResult.Success) + + # Azure monitor initialized not called + # Since default behavior is not enabled + mock_initialize_azmon.assert_not_called() + + # Verify azure_monitor_available is set to False + self.assertFalse(self.dispatcher._azure_monitor_available) + # Verify that WorkerOpenTelemetryEnabled capability is not set + capabilities = init_response.worker_init_response.capabilities + self.assertNotIn("WorkerOpenTelemetryEnabled", capabilities) + + @patch.dict(os.environ, {'PYTHON_APPLICATIONINSIGHTS_ENABLE_TELEMETRY': 'false'}) + @patch("azure_functions_worker.dispatcher.Dispatcher.initialize_azure_monitor") + def test_init_request_initialize_azure_monitor_disabled_app_setting( self, mock_initialize_azmon, ): @@ -105,6 +143,81 @@ def test_init_request_otel_capability_disabled_app_setting( # Azure monitor initialized not called mock_initialize_azmon.assert_not_called() + # Verify azure_monitor_available is set to False + self.assertFalse(self.dispatcher._azure_monitor_available) + # Verify that WorkerOpenTelemetryEnabled capability is not set + capabilities = init_response.worker_init_response.capabilities + self.assertNotIn("WorkerOpenTelemetryEnabled", capabilities) + + @patch.dict(os.environ, {'PYTHON_ENABLE_OPENTELEMETRY': 'true'}) + def test_init_request_enable_opentelemetry_enabled_app_setting( + self, + ): + + init_request = protos.StreamingMessage( + worker_init_request=protos.WorkerInitRequest( + host_version="2.3.4", + function_app_directory=str(FUNCTION_APP_DIRECTORY) + ) + ) + + init_response = self.loop.run_until_complete( + self.dispatcher._handle__worker_init_request(init_request)) + + 
self.assertEqual(init_response.worker_init_response.result.status, + protos.StatusResult.Success) + + # Verify otel_libs_available is set to True + self.assertTrue(self.dispatcher._otel_libs_available) + # Verify that WorkerOpenTelemetryEnabled capability is set to _TRUE + capabilities = init_response.worker_init_response.capabilities + self.assertIn("WorkerOpenTelemetryEnabled", capabilities) + self.assertEqual(capabilities["WorkerOpenTelemetryEnabled"], "true") + + @patch.dict(os.environ, {'PYTHON_ENABLE_OPENTELEMETRY': 'false'}) + def test_init_request_enable_opentelemetry_default_app_setting( + self, + ): + + init_request = protos.StreamingMessage( + worker_init_request=protos.WorkerInitRequest( + host_version="2.3.4", + function_app_directory=str(FUNCTION_APP_DIRECTORY) + ) + ) + + init_response = self.loop.run_until_complete( + self.dispatcher._handle__worker_init_request(init_request)) + + self.assertEqual(init_response.worker_init_response.result.status, + protos.StatusResult.Success) + + # Verify otel_libs_available is set to False by default + self.assertFalse(self.dispatcher._otel_libs_available) + # Verify that WorkerOpenTelemetryEnabled capability is not set + capabilities = init_response.worker_init_response.capabilities + self.assertNotIn("WorkerOpenTelemetryEnabled", capabilities) + + @patch.dict(os.environ, {'PYTHON_APPLICATIONINSIGHTS_ENABLE_TELEMETRY': 'false'}) + def test_init_request_enable_azure_monitor_disabled_app_setting( + self, + ): + + init_request = protos.StreamingMessage( + worker_init_request=protos.WorkerInitRequest( + host_version="2.3.4", + function_app_directory=str(FUNCTION_APP_DIRECTORY) + ) + ) + + init_response = self.loop.run_until_complete( + self.dispatcher._handle__worker_init_request(init_request)) + + self.assertEqual(init_response.worker_init_response.result.status, + protos.StatusResult.Success) + + # Verify otel_libs_available is set to False by default + self.assertFalse(self.dispatcher._otel_libs_available) # 
Verify that WorkerOpenTelemetryEnabled capability is not set capabilities = init_response.worker_init_response.capabilities self.assertNotIn("WorkerOpenTelemetryEnabled", capabilities) From d16a705bc2f9e8dce095116021ad213a23738d38 Mon Sep 17 00:00:00 2001 From: hallvictoria Date: Thu, 10 Apr 2025 10:37:57 -0500 Subject: [PATCH 09/11] merge fixes --- python/proxyV4/worker.py | 1 - 1 file changed, 1 deletion(-) diff --git a/python/proxyV4/worker.py b/python/proxyV4/worker.py index dce5d51e6..2f899f37e 100644 --- a/python/proxyV4/worker.py +++ b/python/proxyV4/worker.py @@ -2,7 +2,6 @@ import pathlib import sys - PKGS_PATH = "/home/site/wwwroot/.python_packages" PKGS = "lib/site-packages" From 191fa28948e6020b2e8374d8e71778651f70b556 Mon Sep 17 00:00:00 2001 From: hallvictoria Date: Thu, 10 Apr 2025 14:59:09 -0500 Subject: [PATCH 10/11] don't run 313 unit tests yet --- eng/templates/jobs/ci-unit-tests.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/eng/templates/jobs/ci-unit-tests.yml b/eng/templates/jobs/ci-unit-tests.yml index e013207c3..5ff54888c 100644 --- a/eng/templates/jobs/ci-unit-tests.yml +++ b/eng/templates/jobs/ci-unit-tests.yml @@ -16,8 +16,6 @@ jobs: PYTHON_VERSION: '3.11' Python312: PYTHON_VERSION: '3.12' - Python313: - PYTHON_VERSION: '3.13' steps: - task: UsePythonVersion@0 From 2ce96574c5d138a53dd7114c426a599b502542c0 Mon Sep 17 00:00:00 2001 From: hallvictoria Date: Fri, 11 Apr 2025 10:58:47 -0500 Subject: [PATCH 11/11] changes for builds --- pack/templates/macos_64_env_gen.yml | 11 +++++++++-- pack/templates/nix_env_gen.yml | 11 +++++++++-- pack/templates/win_env_gen.yml | 12 ++++++++++-- 3 files changed, 28 insertions(+), 6 deletions(-) diff --git a/pack/templates/macos_64_env_gen.yml b/pack/templates/macos_64_env_gen.yml index 3e54ab812..75f33bc5f 100644 --- a/pack/templates/macos_64_env_gen.yml +++ b/pack/templates/macos_64_env_gen.yml @@ -8,6 +8,13 @@ steps: inputs: versionSpec: ${{ parameters.pythonVersion }} addToPath: true +- bash: | 
+ major=$(echo $(pythonVersion) | cut -d. -f1) + minor=$(echo $(pythonVersion) | cut -d. -f2) + echo "##vso[task.setvariable variable=pythonMajor]$major" + echo "##vso[task.setvariable variable=pythonMinor]$minor" + echo $pythonMinor + displayName: 'Parse pythonVersion' - task: ShellScript@2 inputs: disableAutoCwd: true @@ -45,7 +52,7 @@ steps: !werkzeug/debug/shared/debugger.js !proxy_worker/** targetFolder: '$(Build.ArtifactStagingDirectory)' - condition: lt(variables['pythonVersion'], '3.13') + condition: in(variables['pythonMinor'], '7', '8', '9', '10', '11', '12') displayName: 'Copy azure_functions_worker files' - task: CopyFiles@2 inputs: @@ -68,5 +75,5 @@ steps: !azure_functions_worker/** !dateutil/** targetFolder: '$(Build.ArtifactStagingDirectory)' - condition: ge(variables['pythonVersion'], '3.13') + condition: in(variables['pythonMinor'], '13') displayName: 'Copy proxy_worker files' diff --git a/pack/templates/nix_env_gen.yml b/pack/templates/nix_env_gen.yml index 5de754ecd..db3820153 100644 --- a/pack/templates/nix_env_gen.yml +++ b/pack/templates/nix_env_gen.yml @@ -8,6 +8,13 @@ steps: inputs: versionSpec: ${{ parameters.pythonVersion }} addToPath: true +- bash: | + major=$(echo $(pythonVersion) | cut -d. -f1) + minor=$(echo $(pythonVersion) | cut -d. 
-f2) + echo "##vso[task.setvariable variable=pythonMajor]$major" + echo "##vso[task.setvariable variable=pythonMinor]$minor" + echo $pythonMinor + displayName: 'Parse pythonVersion' - task: ShellScript@2 inputs: disableAutoCwd: true @@ -45,7 +52,7 @@ steps: !werkzeug/debug/shared/debugger.js !proxy_worker/** targetFolder: '$(Build.ArtifactStagingDirectory)' - condition: lt(variables['pythonVersion'], '3.13') + condition: in(variables['pythonMinor'], '7', '8', '9', '10', '11', '12') displayName: 'Copy azure_functions_worker files' - task: CopyFiles@2 inputs: @@ -68,5 +75,5 @@ steps: !dateutil/** !azure_functions_worker/** targetFolder: '$(Build.ArtifactStagingDirectory)' - condition: ge(variables['pythonVersion'], '3.13') + condition: in(variables['pythonMinor'], '13') displayName: 'Copy proxy_worker files' diff --git a/pack/templates/win_env_gen.yml b/pack/templates/win_env_gen.yml index 167d56e10..b85bf9f89 100644 --- a/pack/templates/win_env_gen.yml +++ b/pack/templates/win_env_gen.yml @@ -9,6 +9,13 @@ steps: versionSpec: ${{ parameters.pythonVersion }} architecture: ${{ parameters.architecture }} addToPath: true +- bash: | + major=$(echo $(pythonVersion) | cut -d. -f1) + minor=$(echo $(pythonVersion) | cut -d. 
-f2) + echo "##vso[task.setvariable variable=pythonMajor]$major" + echo "##vso[task.setvariable variable=pythonMinor]$minor" + echo $pythonMinor + displayName: 'Parse pythonVersion' - task: PowerShell@2 inputs: filePath: 'pack\scripts\win_deps.ps1' @@ -44,7 +51,7 @@ steps: !werkzeug\debug\shared\debugger.js !proxy_worker\** targetFolder: '$(Build.ArtifactStagingDirectory)' - condition: lt(variables['pythonVersion'], '3.13') + condition: in(variables['pythonMinor'], '7', '8', '9', '10', '11', '12') displayName: 'Copy azure_functions_worker files' - task: CopyFiles@2 inputs: @@ -65,6 +72,7 @@ steps: !*.dist-info\** !werkzeug\debug\shared\debugger.js !dateutil\** + !azure_functions_worker\** targetFolder: '$(Build.ArtifactStagingDirectory)' - condition: ge(variables['pythonVersion'], '3.13') + condition: in(variables['pythonMinor'], '13') displayName: 'Copy proxy_worker files'