diff --git a/.flake8 b/.flake8 index 6e7ffa642..caba1c751 100644 --- a/.flake8 +++ b/.flake8 @@ -8,6 +8,8 @@ ignore = W503,E402,E731 exclude = .git, __pycache__, build, dist, .eggs, .github, .local, docs/, Samples, azure_functions_worker/protos/, azure_functions_worker/_thirdparty/typing_inspect.py, - tests/unittests/test_typing_inspect.py, .venv*, .env*, .vscode, venv* + tests/unittests/test_typing_inspect.py, + tests/unittests/broken_functions/syntax_error/main.py, + .env*, .vscode, venv*, *.venv*, -max-line-length = 80 \ No newline at end of file +max-line-length = 80 diff --git a/.github/workflows/ci_e2e_workflow.yml b/.github/workflows/ci_e2e_workflow.yml index 006f8b466..f0aad3634 100644 --- a/.github/workflows/ci_e2e_workflow.yml +++ b/.github/workflows/ci_e2e_workflow.yml @@ -38,6 +38,10 @@ jobs: uses: actions/setup-dotnet@v1 with: dotnet-version: '5.0.x' + - name: Set up Dotnet 6.x + uses: actions/setup-dotnet@v1 + with: + dotnet-version: '6.x' - name: Install dependencies and the worker run: | retry() { diff --git a/.github/workflows/ut_ci_workflow.yml b/.github/workflows/ut_ci_workflow.yml index 6fc13a455..ea2dd0b4f 100644 --- a/.github/workflows/ut_ci_workflow.yml +++ b/.github/workflows/ut_ci_workflow.yml @@ -35,6 +35,10 @@ jobs: uses: actions/setup-dotnet@v1 with: dotnet-version: '5.0.x' + - name: Set up Dotnet 6.x + uses: actions/setup-dotnet@v1 + with: + dotnet-version: '6.x' - name: Install dependencies and the worker run: | retry() { diff --git a/.gitignore b/.gitignore index 4d233c0fe..e8d9736a3 100644 --- a/.gitignore +++ b/.gitignore @@ -125,4 +125,9 @@ prof/ # E2E Integration Test Core Tools .ci/e2e_integration_test/Azure.Functions.Cli/ -.ci/e2e_integration_test/Azure.Functions.Cli* \ No newline at end of file +.ci/e2e_integration_test/Azure.Functions.Cli* + +# Lingering test files +tests/**/host.json +tests/**/bin +tests/**/extensions.csproj diff --git a/README.md b/README.md index ff526c582..7cdbb0e72 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ |master|[![Build Status](https://azfunc.visualstudio.com/Azure%20Functions/_apis/build/status/Azure.azure-functions-python-worker?branchName=master)](https://azfunc.visualstudio.com/Azure%20Functions/_build/latest?definitionId=57&branchName=master)|[![codecov](https://codecov.io/gh/Azure/azure-functions-python-worker/branch/master/graph/badge.svg)](https://codecov.io/gh/Azure/azure-functions-python-worker)|![CI Unit tests](https://github.com/Azure/azure-functions-python-worker/workflows/CI%20Unit%20tests/badge.svg?branch=master)|![CI E2E tests](https://github.com/Azure/azure-functions-python-worker/workflows/CI%20E2E%20tests/badge.svg?branch=master) |dev|[![Build Status](https://azfunc.visualstudio.com/Azure%20Functions/_apis/build/status/Azure.azure-functions-python-worker?branchName=dev)](https://azfunc.visualstudio.com/Azure%20Functions/_build/latest?definitionId=57&branchName=dev)|[![codecov](https://codecov.io/gh/Azure/azure-functions-python-worker/branch/dev/graph/badge.svg)](https://codecov.io/gh/Azure/azure-functions-python-worker)|![CI Unit tests](https://github.com/Azure/azure-functions-python-worker/workflows/CI%20Unit%20tests/badge.svg?branch=dev)|![CI E2E tests](https://github.com/Azure/azure-functions-python-worker/workflows/CI%20E2E%20tests/badge.svg?branch=dev) -Python support for Azure Functions is based on Python 3.6, Python 3.7, and Python 3.8, serverless hosting on Linux and the Functions 2.0 and 3.0 runtime. 
+Python support for Azure Functions is based on Python 3.6, Python 3.7, Python 3.8, and Python 3.9 serverless hosting on Linux and the Functions 2.0, 3.0 and 4.0 runtime. Here is the current status of Python in Azure Functions: @@ -14,9 +14,12 @@ What are the supported Python versions? |Azure Functions Runtime|Python 3.6|Python 3.7|Python 3.8|Python 3.9| |---|---|---|---|---| |Azure Functions 2.0|✔|✔|-|-| -|Azure Functions 3.0|✔|✔|✔|(preview)| +|Azure Functions 3.0|✔|✔|✔|✔| +|Azure Functions 4.0|-|✔|✔|✔| -What's available? +For information about Azure Functions Runtime, please refer to [Azure Functions runtime versions overview](https://docs.microsoft.com/en-us/azure/azure-functions/functions-versions) page. + +### What's available? - Build, test, debug and publish using Azure Functions Core Tools (CLI) or Visual Studio Code - Deploy Python Function project onto consumption, dedicated, or elastic premium plan. @@ -28,7 +31,7 @@ What's coming? - [Durable Functions For Python](https://github.com/Azure/azure-functions-durable-python) -# Get Started +###Get Started - [Create your first Python function](https://docs.microsoft.com/en-us/azure/azure-functions/functions-create-first-function-python) - [Developer guide](https://docs.microsoft.com/en-us/azure/azure-functions/functions-reference-python) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 0e3228904..bfc57a6f2 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -1,14 +1,17 @@ name: $(Date:yyyyMMdd).$(Rev:r) trigger: -- release/2.* - release/3.* +- release/4.* - dev variables: DOTNET_VERSION: '3.1.405' DOTNET_VERSION_5: '5.0.x' + DOTNET_VERSION_6: '6.x' patchBuildNumberForDev: $(Build.BuildNumber) + PROD_V3_WORKER_PY : 'python/prodV3/worker.py' + PROD_V4_WORKER_PY : 'python/prodV4/worker.py' jobs: - job: Tests @@ -18,14 +21,14 @@ jobs: - ImageOverride -equals MMSUbuntu20.04TLS strategy: matrix: - Python36: - pythonVersion: '3.6' Python37: pythonVersion: '3.7' Python38: pythonVersion: '3.8' Python39: pythonVersion: '3.9' + Python310: + pythonVersion: '3.10' steps: - task: UsePythonVersion@0 inputs: @@ -41,6 +44,11 @@ jobs: inputs: packageType: 'sdk' version: $(DOTNET_VERSION_5) + - task: UseDotNet@2 + displayName: 'Install DotNet 6.x' + inputs: + packageType: 'sdk' + version: $(DOTNET_VERSION_6) - task: ShellScript@2 inputs: disableAutoCwd: true @@ -55,24 +63,27 @@ jobs: - ImageOverride -equals MMS2019TLS strategy: matrix: - Python36V2: - pythonVersion: '3.6' - workerPath: 'python/prodV2/worker.py' - Python37V2: - pythonVersion: '3.7' - workerPath: 'python/prodV2/worker.py' Python36V3: pythonVersion: '3.6' - workerPath: 'python/prodV3/worker.py' + workerPath: $(PROD_V3_WORKER_PY) Python37V3: pythonVersion: '3.7' - workerPath: 'python/prodV3/worker.py' + workerPath: $(PROD_V3_WORKER_PY) Python38V3: pythonVersion: '3.8' - workerPath: 'python/prodV3/worker.py' + workerPath: $(PROD_V3_WORKER_PY) Python39V3: pythonVersion: '3.9' - workerPath: 'python/prodV3/worker.py' + workerPath: $(PROD_V3_WORKER_PY) + Python37V4: + pythonVersion: '3.7' + workerPath: $(PROD_V4_WORKER_PY) + Python38V4: + pythonVersion: '3.8' + workerPath: $(PROD_V4_WORKER_PY) + Python39V4: + pythonVersion: '3.9' + workerPath: $(PROD_V4_WORKER_PY) steps: - template: pack/templates/win_env_gen.yml parameters: @@ -88,18 +99,24 @@ jobs: - ImageOverride -equals MMS2019TLS strategy: matrix: - Python37V2: - pythonVersion: '3.7' - workerPath: 'python/prodV2/worker.py' Python37V3: pythonVersion: '3.7' - workerPath: 'python/prodV3/worker.py' + workerPath: 
$(PROD_V3_WORKER_PY) Python38V3: pythonVersion: '3.8' - workerPath: 'python/prodV3/worker.py' + workerPath: $(PROD_V3_WORKER_PY) Python39V3: pythonVersion: '3.9' - workerPath: 'python/prodV3/worker.py' + workerPath: $(PROD_V3_WORKER_PY) + Python37V4: + pythonVersion: '3.7' + workerPath: $(PROD_V4_WORKER_PY) + Python38V4: + pythonVersion: '3.8' + workerPath: $(PROD_V4_WORKER_PY) + Python39V4: + pythonVersion: '3.9' + workerPath: $(PROD_V4_WORKER_PY) steps: - template: pack/templates/win_env_gen.yml parameters: @@ -115,24 +132,27 @@ jobs: - ImageOverride -equals MMSUbuntu20.04TLS strategy: matrix: - Python36V2: - pythonVersion: '3.6' - workerPath: 'python/prodV2/worker.py' - Python37V2: - pythonVersion: '3.7' - workerPath: 'python/prodV2/worker.py' Python36V3: pythonVersion: '3.6' - workerPath: 'python/prodV3/worker.py' + workerPath: $(PROD_V3_WORKER_PY) Python37V3: pythonVersion: '3.7' - workerPath: 'python/prodV3/worker.py' + workerPath: $(PROD_V3_WORKER_PY) Python38V3: pythonVersion: '3.8' - workerPath: 'python/prodV3/worker.py' + workerPath: $(PROD_V3_WORKER_PY) Python39V3: pythonVersion: '3.9' - workerPath: 'python/prodV3/worker.py' + workerPath: $(PROD_V3_WORKER_PY) + Python37V4: + pythonVersion: '3.7' + workerPath: $(PROD_V4_WORKER_PY) + Python38V4: + pythonVersion: '3.8' + workerPath: $(PROD_V4_WORKER_PY) + Python39V4: + pythonVersion: '3.9' + workerPath: $(PROD_V4_WORKER_PY) steps: - template: pack/templates/nix_env_gen.yml parameters: @@ -145,24 +165,27 @@ jobs: vmImage: 'macOS-10.15' strategy: matrix: - Python36V2: - pythonVersion: '3.6' - workerPath: 'python/prodV2/worker.py' - Python37V2: - pythonVersion: '3.7' - workerPath: 'python/prodV2/worker.py' Python36V3: pythonVersion: '3.6' - workerPath: 'python/prodV3/worker.py' + workerPath: $(PROD_V3_WORKER_PY) Python37V3: pythonVersion: '3.7' - workerPath: 'python/prodV3/worker.py' + workerPath: $(PROD_V3_WORKER_PY) Python38V3: pythonVersion: '3.8' - workerPath: 'python/prodV3/worker.py' + workerPath: $(PROD_V3_WORKER_PY) Python39V3: pythonVersion: '3.9' - workerPath: 'python/prodV3/worker.py' + workerPath: $(PROD_V3_WORKER_PY) + Python37V4: + pythonVersion: '3.7' + workerPath: $(PROD_V4_WORKER_PY) + Python38V4: + pythonVersion: '3.8' + workerPath: $(PROD_V4_WORKER_PY) + Python39V4: + pythonVersion: '3.9' + workerPath: $(PROD_V4_WORKER_PY) steps: - template: pack/templates/nix_env_gen.yml parameters: @@ -174,8 +197,8 @@ jobs: dependsOn: ['Build_WINDOWS_X64', 'Build_WINDOWS_X86', 'Build_LINUX_X64', 'Build_OSX_X64'] condition: or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), eq(variables['Build.SourceBranch'], 'refs/heads/dev')) pool: - #vmImage: 'vs2017-win2016' - name: '1ES-Hosted-AzFunc' #MMS2019TLS for Windows2019 or MMSUbuntu20.04TLS for ubuntu + # vmImage: 'vs2017-win2016' + name: '1ES-Hosted-AzFunc' # MMS2019TLS for Windows2019 or MMSUbuntu20.04TLS for ubuntu demands: - ImageOverride -equals MMS2019TLS steps: @@ -183,23 +206,22 @@ jobs: echo "Releasing from $BUILD_SOURCEBRANCHNAME" apt install jq - if [[ $BUILD_SOURCEBRANCHNAME = 2\.* ]] - then - echo "Generating V2 Release Package for $BUILD_SOURCEBRANCHNAME" - NUSPEC="pack\Microsoft.Azure.Functions.V2.PythonWorker.nuspec" - WKVERSION="$BUILD_SOURCEBRANCHNAME" - elif [[ $BUILD_SOURCEBRANCHNAME = 3\.* ]] + if [[ $BUILD_SOURCEBRANCHNAME = 3\.* ]] then echo "Generating V3 Release Package for $BUILD_SOURCEBRANCHNAME" NUSPEC="pack\Microsoft.Azure.Functions.V3.PythonWorker.nuspec" WKVERSION="$BUILD_SOURCEBRANCHNAME" + elif [[ $BUILD_SOURCEBRANCHNAME = 4\.* ]] + 
then + echo "Generating V4 Release Package for $BUILD_SOURCEBRANCHNAME" + NUSPEC="pack\Microsoft.Azure.Functions.V4.PythonWorker.nuspec" + WKVERSION="$BUILD_SOURCEBRANCHNAME" elif [[ $BUILD_SOURCEBRANCHNAME = dev ]] then - echo "Generating V3 Integration Test Package for $BUILD_SOURCEBRANCHNAME" - LATEST_TAG=$(curl https://api.github.com/repos/Azure/azure-functions-python-worker/tags -s | jq '.[0].name' | sed 's/\"//g' | cut -d'.' -f-2) - NUSPEC="pack\Microsoft.Azure.Functions.V3.PythonWorker.nuspec" - # Only required for Integration Test. Version number contains date (e.g. 3.1.2.20211028-dev) - WKVERSION="3.$LATEST_TAG-$(patchBuildNumberForDev)" + echo "Generating V4 Integration Test Package for $BUILD_SOURCEBRANCHNAME" + VERSION=$(cat azure_functions_worker/version.py | tail -1 | cut -d' ' -f3 | sed "s/'//g") + NUSPEC="pack\Microsoft.Azure.Functions.V4.PythonWorker.nuspec" + WKVERSION="$VERSION-$(patchBuildNumberForDev)" else echo "No Matching Release Tag For $BUILD_SOURCEBRANCH" fi @@ -236,4 +258,4 @@ jobs: nuGetFeedType: 'internal' publishVstsFeed: 'e6a70c92-4128-439f-8012-382fe78d6396/f37f760c-aebd-443e-9714-ce725cd427df' allowPackageConflicts: true - displayName: '[Integration Test] Push NuGet package to the AzureFunctionsPreRelease feed' \ No newline at end of file + displayName: '[Integration Test] Push NuGet package to the AzureFunctionsPreRelease feed' diff --git a/azure_functions_worker/__init__.py b/azure_functions_worker/__init__.py index 2e6e647f0..5b7f7a925 100644 --- a/azure_functions_worker/__init__.py +++ b/azure_functions_worker/__init__.py @@ -1,4 +1,2 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. - -__version__ = '1.2.6' diff --git a/azure_functions_worker/bindings/shared_memory_data_transfer/file_accessor.py b/azure_functions_worker/bindings/shared_memory_data_transfer/file_accessor.py index a1e45d749..e5ea15597 100644 --- a/azure_functions_worker/bindings/shared_memory_data_transfer/file_accessor.py +++ b/azure_functions_worker/bindings/shared_memory_data_transfer/file_accessor.py @@ -82,3 +82,16 @@ def _set_mem_map_initialized(self, mem_map: mmap.mmap): mem_map.write(consts.HeaderFlags.Initialized) # Seek back the memory map to the original position mem_map.seek(original_pos) + + +class DummyFileAccessor(FileAccessor): + def open_mem_map(self, mem_map_name: str, mem_map_size: int, + access: int = mmap.ACCESS_READ) -> Optional[mmap.mmap]: + pass + + def create_mem_map(self, mem_map_name: str, + mem_map_size: int) -> Optional[mmap.mmap]: + pass + + def delete_mem_map(self, mem_map_name: str, mem_map: mmap.mmap) -> bool: + pass diff --git a/azure_functions_worker/bindings/shared_memory_data_transfer/file_accessor_factory.py b/azure_functions_worker/bindings/shared_memory_data_transfer/file_accessor_factory.py index e3b528132..1c0340222 100644 --- a/azure_functions_worker/bindings/shared_memory_data_transfer/file_accessor_factory.py +++ b/azure_functions_worker/bindings/shared_memory_data_transfer/file_accessor_factory.py @@ -2,8 +2,13 @@ # Licensed under the MIT License. 
import os +import sys + +from .file_accessor import DummyFileAccessor from .file_accessor_unix import FileAccessorUnix from .file_accessor_windows import FileAccessorWindows +from ...constants import FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED +from ...utils.common import is_envvar_true class FileAccessorFactory: @@ -13,6 +18,9 @@ class FileAccessorFactory: """ @staticmethod def create_file_accessor(): - if os.name == 'nt': + if sys.platform == "darwin" and not is_envvar_true( + FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED): + return DummyFileAccessor() + elif os.name == 'nt': return FileAccessorWindows() return FileAccessorUnix() diff --git a/azure_functions_worker/dispatcher.py b/azure_functions_worker/dispatcher.py index cf177f930..03c29478b 100644 --- a/azure_functions_worker/dispatcher.py +++ b/azure_functions_worker/dispatcher.py @@ -18,7 +18,6 @@ import grpc -from . import __version__ from . import bindings from . import constants from . import functions @@ -39,6 +38,8 @@ from .utils.dependency import DependencyManager from .utils.tracing import marshall_exception_trace from .utils.wrappers import disable_feature_by +from .version import VERSION + _TRUE = "true" @@ -262,7 +263,7 @@ async def _dispatch_grpc_request(self, request): async def _handle__worker_init_request(self, req): logger.info('Received WorkerInitRequest, ' 'python version %s, worker version %s, request ID %s', - sys.version, __version__, self.request_id) + sys.version, VERSION, self.request_id) enable_debug_logging_recommendation() worker_init_request = req.worker_init_request diff --git a/azure_functions_worker/testutils.py b/azure_functions_worker/testutils.py index 91656cf81..e92108c5a 100644 --- a/azure_functions_worker/testutils.py +++ b/azure_functions_worker/testutils.py @@ -71,26 +71,39 @@ HOST_JSON_TEMPLATE = """\ { "version": "2.0", - "logging": { - "logLevel": { - "default": "Trace" - } - }, - "http": { - "routePrefix": "api" - }, - "swagger": { - "enabled": true - }, - "eventHub": { - "maxBatchSize": 1000, - "prefetchCount": 1000, - "batchCheckpointFrequency": 1 - }, - "functionTimeout": "00:05:00" + "logging": {"logLevel": {"default": "Trace"}} } """ +EXTENSION_CSPROJ_TEMPLATE = """\ + + + netcoreapp3.1 + + ** + + + + + + + + + + + +""" + SECRETS_TEMPLATE = """\ { "masterKey": { @@ -246,7 +259,8 @@ def _run_test(self, test, *args, **kwargs): self.host_stdout.seek(last_pos) self.host_out = self.host_stdout.read() self.host_stdout_logger.error( - f'Captured WebHost stdout:\n{self.host_out}') + f'Captured WebHost stdout from {self.host_stdout.name} ' + f':\n{self.host_out}') finally: if test_exception is not None: raise test_exception @@ -731,7 +745,10 @@ def close(self): self._proc.stderr.close() self._proc.terminate() - self._proc.wait() + try: + self._proc.wait(20) + except subprocess.TimeoutExpired: + self._proc.kill() def _find_open_port(): @@ -882,7 +899,8 @@ def start_webhost(*, script_dir=None, stdout=None): addr = f'http://{LOCALHOST}:{port}' health_check_endpoint = f'{addr}/api/ping' - for _ in range(10): + host_out = stdout.readlines(100) + for _ in range(5): try: r = requests.get(health_check_endpoint, params={'code': 'testFunctionKey'}) @@ -891,13 +909,14 @@ def start_webhost(*, script_dir=None, stdout=None): if 200 <= r.status_code < 300: # Give the host a bit more time to settle - time.sleep(2) + time.sleep(1) break else: - print(f'Failed to ping {health_check_endpoint}', flush=True) + print(f'Failed to ping {health_check_endpoint}, status code: ' + f'{r.status_code}', 
flush=True) except requests.exceptions.ConnectionError: pass - time.sleep(2) + time.sleep(1) else: proc.terminate() try: @@ -905,7 +924,8 @@ def start_webhost(*, script_dir=None, stdout=None): except subprocess.TimeoutExpired: proc.kill() raise RuntimeError('could not start the webworker in time. Please' - f' check the log file for details: {stdout.name} ') + f' check the log file for details: {stdout.name} \n' + f' Captured WebHost stdout:\n{host_out}') return _WebHostProxy(proc, addr) @@ -963,11 +983,16 @@ def _setup_func_app(app_root): extensions = app_root / 'bin' ping_func = app_root / 'ping' host_json = app_root / 'host.json' + extensions_csproj_file = app_root / 'extensions.csproj' if not os.path.isfile(host_json): with open(host_json, 'w') as f: f.write(HOST_JSON_TEMPLATE) + if not os.path.isfile(extensions_csproj_file): + with open(extensions_csproj_file, 'w') as f: + f.write(EXTENSION_CSPROJ_TEMPLATE) + _symlink_dir(TESTS_ROOT / 'common' / 'ping', ping_func) _symlink_dir(EXTENSIONS_PATH, extensions) @@ -976,8 +1001,11 @@ def _teardown_func_app(app_root): extensions = app_root / 'bin' ping_func = app_root / 'ping' host_json = app_root / 'host.json' + extensions_csproj_file = app_root / 'extensions.csproj' + extensions_obj_file = app_root / 'obj' - for path in (extensions, ping_func, host_json): + for path in (extensions, ping_func, host_json, extensions_csproj_file, + extensions_obj_file): remove_path(path) diff --git a/azure_functions_worker/version.py b/azure_functions_worker/version.py new file mode 100644 index 000000000..e64b0c047 --- /dev/null +++ b/azure_functions_worker/version.py @@ -0,0 +1,4 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. + +VERSION = '4.0.0' diff --git a/pack/Microsoft.Azure.Functions.V4.PythonWorker.nuspec b/pack/Microsoft.Azure.Functions.V4.PythonWorker.nuspec new file mode 100644 index 000000000..65c26eb0c --- /dev/null +++ b/pack/Microsoft.Azure.Functions.V4.PythonWorker.nuspec @@ -0,0 +1,28 @@ + + + + Microsoft.Azure.Functions.PythonWorker + 1.1.0 + Microsoft + Microsoft + false + Microsoft Azure Functions Python Worker + © .NET Foundation. All rights reserved. 
+ + + + + + + + + + + + + + + + + + diff --git a/python/prodV4/worker.config.json b/python/prodV4/worker.config.json new file mode 100644 index 000000000..d19d43b99 --- /dev/null +++ b/python/prodV4/worker.config.json @@ -0,0 +1,12 @@ +{ + "description":{ + "language":"python", + "defaultRuntimeVersion":"3.9", + "supportedOperatingSystems":["LINUX", "OSX", "WINDOWS"], + "supportedRuntimeVersions":["3.7", "3.8", "3.9"], + "supportedArchitectures":["X64", "X86"], + "extensions":[".py"], + "defaultExecutablePath":"python", + "defaultWorkerPath":"%FUNCTIONS_WORKER_RUNTIME_VERSION%/{os}/{architecture}/worker.py" + } +} \ No newline at end of file diff --git a/python/prodV4/worker.py b/python/prodV4/worker.py new file mode 100644 index 000000000..d9f4a697e --- /dev/null +++ b/python/prodV4/worker.py @@ -0,0 +1,74 @@ +import os +import sys + +from pathlib import Path + +# User packages +PKGS_PATH = "site/wwwroot/.python_packages" +VENV_PKGS_PATH = "site/wwwroot/worker_venv" + +PKGS = "lib/site-packages" + +# Azure environment variables +AZURE_WEBSITE_INSTANCE_ID = "WEBSITE_INSTANCE_ID" +AZURE_CONTAINER_NAME = "CONTAINER_NAME" +AZURE_WEBJOBS_SCRIPT_ROOT = "AzureWebJobsScriptRoot" + + +def is_azure_environment(): + """Check if the function app is running on the cloud""" + return (AZURE_CONTAINER_NAME in os.environ + or AZURE_WEBSITE_INSTANCE_ID in os.environ) + + +def add_script_root_to_sys_path(): + """Append function project root to module finding sys.path""" + functions_script_root = os.getenv(AZURE_WEBJOBS_SCRIPT_ROOT) + if functions_script_root is not None: + sys.path.append(functions_script_root) + + +def determine_user_pkg_paths(): + """This finds the user packages when function apps are running on the cloud + + For Python 3.7+, we only accept: + /home/site/wwwroot/.python_packages/lib/site-packages + """ + minor_version = sys.version_info[1] + + home = Path.home() + pkgs_path = os.path.join(home, PKGS_PATH) + user_pkg_paths = [] + + if minor_version in (7, 8, 9): + user_pkg_paths.append(os.path.join(pkgs_path, PKGS)) + else: + raise RuntimeError(f'Unsupported Python version: 3.{minor_version}') + + return user_pkg_paths + + +if __name__ == '__main__': + # worker.py lives in the same directory as azure_functions_worker + func_worker_dir = str(Path(__file__).absolute().parent) + env = os.environ + + if is_azure_environment(): + user_pkg_paths = determine_user_pkg_paths() + joined_pkg_paths = os.pathsep.join(user_pkg_paths) + + # On cloud, we prioritize third-party user packages + # over worker packages in PYTHONPATH + env['PYTHONPATH'] = f'{joined_pkg_paths}:{func_worker_dir}' + os.execve(sys.executable, + [sys.executable, '-m', 'azure_functions_worker'] + + sys.argv[1:], + env) + else: + # On local development, we prioritize worker packages over + # third-party user packages (in .venv) + sys.path.insert(1, func_worker_dir) + add_script_root_to_sys_path() + from azure_functions_worker import main + + main.main() diff --git a/setup.py b/setup.py index e4de0862c..f35c5142c 100644 --- a/setup.py +++ b/setup.py @@ -1,84 +1,138 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
+ import distutils.cmd import glob +import json import os import pathlib +import re import shutil import subprocess import sys -import json import tempfile import urllib.request import zipfile -import re from distutils import dir_util from distutils.command import build +from distutils.dist import Distribution from setuptools import setup from setuptools.command import develop -from azure_functions_worker import __version__ +from azure_functions_worker.version import VERSION # The GitHub repository of the Azure Functions Host WEBHOST_GITHUB_API = "https://api.github.com/repos/Azure/azure-functions-host" -WEBHOST_TAG_PREFIX = "v3." +WEBHOST_TAG_PREFIX = "v4." # Extensions necessary for non-core bindings. AZURE_EXTENSIONS = """\ + - - netcoreapp3.1 - v3 - - ** - - - - - - - - - + + netcoreapp3.1 + v4 + + ** + + + + + + + + + + """ NUGET_CONFIG = """\ - + - - - - - - - + + + + + + + """ +CLASSIFIERS = [ + "Development Status :: 5 - Production/Stable", + 'Programming Language :: Python', + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Operating System :: Microsoft :: Windows", + "Operating System :: POSIX", + "Operating System :: MacOS :: MacOS X", + "Environment :: Web Environment", + "License :: OSI Approved :: MIT License", + "Intended Audience :: Developers", +] + + +PACKAGES = [ + "azure_functions_worker", + "azure_functions_worker.protos", + "azure_functions_worker.protos.identity", + "azure_functions_worker.protos.shared", + "azure_functions_worker.bindings", + "azure_functions_worker.bindings.shared_memory_data_transfer", + "azure_functions_worker.utils", + "azure_functions_worker._thirdparty" +] + + +INSTALL_REQUIRES = [ + "grpcio~=1.43.0", + "grpcio-tools~=1.43.0", + "protobuf~=3.19.3", + "azure-functions==1.8.0" +] + + +EXTRA_REQUIRES = { + "dev": [ + "azure-eventhub~=5.1.0", + "python-dateutil~=2.8.1", + "pycryptodome~=3.10.1", + "flake8~=3.7.9", + "mypy", + "pytest", + "requests==2.*", + "coverage", + "pytest-sugar", + "pytest-cov", + "pytest-xdist", + "pytest-randomly", + "pytest-instafail", + "pytest-rerunfailures", + "ptvsd" + ] +} + + class BuildGRPC: """Generate gRPC bindings.""" def _gen_grpc(self): @@ -128,9 +182,10 @@ def _gen_grpc(self): # https://github.com/protocolbuffers/protobuf/issues/1491 self.make_absolute_imports(compiled_files) - dir_util.copy_tree(built_protos_dir, str(proto_root_dir)) + dir_util.copy_tree(str(built_protos_dir), str(proto_root_dir)) - def make_absolute_imports(self, compiled_files): + @staticmethod + def make_absolute_imports(compiled_files): for compiled in compiled_files: with open(compiled, 'r+') as f: content = f.read() @@ -153,19 +208,19 @@ def make_absolute_imports(self, compiled_files): f.truncate() -class build(build.build, BuildGRPC): +class BuildProtos(build.build, BuildGRPC): def run(self, *args, **kwargs): self._gen_grpc() - super().run(*args, **kwargs) + super().run() -class develop(develop.develop, BuildGRPC): +class Development(develop.develop, BuildGRPC): def run(self, *args, **kwargs): self._gen_grpc() - super().run(*args, **kwargs) + super().run() -class extension(distutils.cmd.Command): +class Extension(distutils.cmd.Command): description = ( 'Resolve WebJobs Extensions from AZURE_EXTENSIONS and NUGET_CONFIG.' 
) @@ -174,9 +229,13 @@ class extension(distutils.cmd.Command): 'A path to the directory where extension should be installed') ] - def initialize_options(self): + def __init__(self, dist: Distribution): + super().__init__(dist) self.extensions_dir = None + def initialize_options(self): + pass + def finalize_options(self): if self.extensions_dir is None: self.extensions_dir = \ @@ -204,7 +263,7 @@ def _install_extensions(self): args=['dotnet', 'build', '-o', '.'], check=True, cwd=str(self.extensions_dir), stdout=sys.stdout, stderr=sys.stderr, env=env) - except Exception: + except Exception: # NoQA print(".NET Core SDK is required to build the extensions. " "Please visit https://aka.ms/dotnet-download") sys.exit(1) @@ -213,7 +272,7 @@ def run(self): self._install_extensions() -class webhost(distutils.cmd.Command): +class Webhost(distutils.cmd.Command): description = 'Download and setup Azure Functions Web Host.' user_options = [ ('webhost-version', None, @@ -222,9 +281,13 @@ class webhost(distutils.cmd.Command): 'A path to the directory where Azure Web Host will be installed.'), ] - def initialize_options(self): - self.webhost_version = None + def __init__(self, dist: Distribution): + super().__init__(dist) self.webhost_dir = None + self.webhost_version = None + + def initialize_options(self): + pass def finalize_options(self): if self.webhost_version is None: @@ -234,7 +297,8 @@ def finalize_options(self): self.webhost_dir = \ pathlib.Path(__file__).parent / 'build' / 'webhost' - def _get_webhost_version(self) -> str: + @staticmethod + def _get_webhost_version() -> str: # Return the latest matched version (e.g. 3.0.15278) github_api_url = f'{WEBHOST_GITHUB_API}/tags?page=1&per_page=10' print(f'Checking latest webhost version from {github_api_url}') @@ -243,12 +307,13 @@ def _get_webhost_version(self) -> str: # As tags are placed in time desending order, the latest v3 # tag should be the first occurance starts with 'v3.' 
string - latest_v3 = [ + latest = [ gt for gt in tags if gt['name'].startswith(WEBHOST_TAG_PREFIX) ] - return latest_v3[0]['name'].replace('v', '') + return latest[0]['name'].replace('v', '') - def _download_webhost_zip(self, version: str) -> str: + @staticmethod + def _download_webhost_zip(version: str) -> str: # Return the path of the downloaded host temporary_file = tempfile.NamedTemporaryFile() zip_url = ( @@ -269,13 +334,15 @@ def _download_webhost_zip(self, version: str) -> str: print(f'Functions Host is downloaded into {temporary_file.name}') return temporary_file.name - def _create_webhost_folder(self, dest_folder: pathlib.Path): + @staticmethod + def _create_webhost_folder(dest_folder: pathlib.Path): if dest_folder.exists(): shutil.rmtree(dest_folder) os.makedirs(dest_folder, exist_ok=True) print(f'Functions Host folder is created in {dest_folder}') - def _extract_webhost_zip(self, version: str, src_zip: str, dest: str): + @staticmethod + def _extract_webhost_zip(version: str, src_zip: str, dest: str): print(f'Extracting Functions Host from {src_zip}') with zipfile.ZipFile(src_zip) as archive: @@ -311,7 +378,8 @@ def _extract_webhost_zip(self, version: str, src_zip: str, dest: str): print(f'Functions Host is extracted into {dest}') - def _chmod_protobuf_generation_script(self, webhost_dir: pathlib.Path): + @staticmethod + def _chmod_protobuf_generation_script(webhost_dir: pathlib.Path): # This script is needed to set to executable in order to build the # WebJobs.Script.Grpc project in Linux and MacOS script_path = ( @@ -321,7 +389,8 @@ def _chmod_protobuf_generation_script(self, webhost_dir: pathlib.Path): print('Change generate_protos.sh script permission') os.chmod(script_path, 0o555) - def _compile_webhost(self, webhost_dir: pathlib.Path): + @staticmethod + def _compile_webhost(webhost_dir: pathlib.Path): print(f'Compiling Functions Host from {webhost_dir}') try: @@ -330,7 +399,7 @@ def _compile_webhost(self, webhost_dir: pathlib.Path): check=True, cwd=str(webhost_dir), stdout=sys.stdout, stderr=sys.stderr) - except Exception: + except Exception: # NoQA print(f"Failed to compile webhost in {webhost_dir}. " ".NET Core SDK is required to build the solution. 
" "Please visit https://aka.ms/dotnet-download", @@ -350,73 +419,54 @@ def run(self): self._compile_webhost(self.webhost_dir) -with open("README.md") as readme: - long_description = readme.read() +class Clean(distutils.cmd.Command): + description = 'Clean up build generated files' + user_options = [] + + def __init__(self, dist: Distribution): + super().__init__(dist) + self.dir_list_to_delete = [ + "build" + ] + + def initialize_options(self) -> None: + pass + + def finalize_options(self) -> None: + pass + + def run(self) -> None: + for dir_to_delete in self.dir_list_to_delete: + dir_delete = pathlib.Path(dir_to_delete) + if dir_delete.exists(): + dir_util.remove_tree(str(dir_delete)) + + +COMMAND_CLASS = { + 'develop': Development, + 'build': BuildProtos, + 'webhost': Webhost, + 'extension': Extension, + 'clean': Clean +} setup( - name='azure-functions-worker', - version=__version__, - description='Python Language Worker for Azure Functions Host', - author="Microsoft Corp.", + name="azure-functions-worker", + version=VERSION, + description="Python Language Worker for Azure Functions Host", + author="Azure Functions team at Microsoft Corp.", author_email="azurefunctions@microsoft.com", - keywords="azure azurefunctions python", + keywords="azure functions azurefunctions python serverless", url="https://github.com/Azure/azure-functions-python-worker", - long_description=long_description, - long_description_content_type='text/markdown', - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'License :: OSI Approved :: MIT License', - 'Intended Audience :: Developers', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Operating System :: Microsoft :: Windows', - 'Operating System :: POSIX', - 'Operating System :: MacOS :: MacOS X', - 'Environment :: Web Environment', - ], - license='MIT', - packages=['azure_functions_worker', - 'azure_functions_worker.protos', - 'azure_functions_worker.protos.identity', - 'azure_functions_worker.protos.shared', - 'azure_functions_worker.bindings', - 'azure_functions_worker.bindings.shared_memory_data_transfer', - 'azure_functions_worker.utils', - 'azure_functions_worker._thirdparty'], - install_requires=[ - 'grpcio~=1.33.2', - 'grpcio-tools~=1.33.2', - ], - extras_require={ - 'dev': [ - 'azure-functions==1.8.0', - 'azure-eventhub~=5.1.0', - 'python-dateutil~=2.8.1', - 'pycryptodome~=3.10.1', - 'flake8~=3.7.9', - 'mypy', - 'pytest', - 'requests==2.*', - 'coverage', - 'pytest-sugar', - 'pytest-cov', - 'pytest-xdist', - 'pytest-randomly', - 'pytest-instafail', - 'pytest-rerunfailures', - 'ptvsd' - ] - }, + long_description=open("README.md").read(), + long_description_content_type="text/markdown", + classifiers=CLASSIFIERS, + license="MIT", + packages=PACKAGES, + install_requires=INSTALL_REQUIRES, + extras_require=EXTRA_REQUIRES, include_package_data=True, - cmdclass={ - 'develop': develop, - 'build': build, - 'webhost': webhost, - 'extension': extension - }, + cmdclass=COMMAND_CLASS, test_suite='tests' ) diff --git a/tests/endtoend/blob_functions/put_get_multiple_blobs_as_bytes_return_http_response/function.json b/tests/endtoend/blob_functions/put_get_multiple_blobs_as_bytes_return_http_response/function.json index 52a8f89a7..5449a6921 100644 --- a/tests/endtoend/blob_functions/put_get_multiple_blobs_as_bytes_return_http_response/function.json +++ 
b/tests/endtoend/blob_functions/put_get_multiple_blobs_as_bytes_return_http_response/function.json @@ -9,7 +9,7 @@ { "type": "blob", "direction": "in", - "name": "input_file_1", + "name": "inputfile1", "dataType": "binary", "connection": "AzureWebJobsStorage", "path": "python-worker-tests/shmem-test-bytes-1.txt" @@ -17,7 +17,7 @@ { "type": "blob", "direction": "in", - "name": "input_file_2", + "name": "inputfile2", "dataType": "binary", "connection": "AzureWebJobsStorage", "path": "python-worker-tests/shmem-test-bytes-2.txt" @@ -25,7 +25,7 @@ { "type": "blob", "direction": "out", - "name": "output_file_1", + "name": "outputfile1", "dataType": "binary", "connection": "AzureWebJobsStorage", "path": "python-worker-tests/shmem-test-bytes-out-1.txt" @@ -33,7 +33,7 @@ { "type": "blob", "direction": "out", - "name": "output_file_2", + "name": "outputfile2", "dataType": "binary", "connection": "AzureWebJobsStorage", "path": "python-worker-tests/shmem-test-bytes-out-2.txt" diff --git a/tests/endtoend/blob_functions/put_get_multiple_blobs_as_bytes_return_http_response/main.py b/tests/endtoend/blob_functions/put_get_multiple_blobs_as_bytes_return_http_response/main.py index 224d33d02..abb87fbe1 100644 --- a/tests/endtoend/blob_functions/put_get_multiple_blobs_as_bytes_return_http_response/main.py +++ b/tests/endtoend/blob_functions/put_get_multiple_blobs_as_bytes_return_http_response/main.py @@ -15,10 +15,10 @@ def _generate_content_and_digest(content_size): def main( req: azf.HttpRequest, - input_file_1: bytes, - input_file_2: bytes, - output_file_1: azf.Out[bytes], - output_file_2: azf.Out[bytes]) -> azf.HttpResponse: + inputfile1: bytes, + inputfile2: bytes, + outputfile1: azf.Out[bytes], + outputfile2: azf.Out[bytes]) -> azf.HttpResponse: """ Read two blobs (bytes) and respond back (in HTTP response) with the number of bytes read from each blob and the MD5 digest of the content of each. @@ -26,11 +26,11 @@ def main( bytes written in each blob and the MD5 digest of the content of each. The number of bytes to write are specified in the input HTTP request. 
""" - input_content_size_1 = len(input_file_1) - input_content_size_2 = len(input_file_2) + input_content_size_1 = len(inputfile1) + input_content_size_2 = len(inputfile2) - input_content_md5_1 = hashlib.md5(input_file_1).hexdigest() - input_content_md5_2 = hashlib.md5(input_file_2).hexdigest() + input_content_md5_1 = hashlib.md5(inputfile1).hexdigest() + input_content_md5_2 = hashlib.md5(inputfile2).hexdigest() output_content_size_1 = int(req.params['output_content_size_1']) output_content_size_2 = int(req.params['output_content_size_2']) @@ -40,8 +40,8 @@ def main( output_content_2, output_content_md5_2 = \ _generate_content_and_digest(output_content_size_2) - output_file_1.set(output_content_1) - output_file_2.set(output_content_2) + outputfile1.set(output_content_1) + outputfile2.set(output_content_2) response_dict = { 'input_content_size_1': input_content_size_1, diff --git a/tests/endtoend/test_eventhub_batch_functions.py b/tests/endtoend/test_eventhub_batch_functions.py index 5d452757a..e0851174b 100644 --- a/tests/endtoend/test_eventhub_batch_functions.py +++ b/tests/endtoend/test_eventhub_batch_functions.py @@ -94,7 +94,7 @@ def test_eventhub_multiple_with_metadata(self): r = self.webhost.request('GET', 'get_metadata_batch_triggered') self.assertEqual(r.status_code, 200) - # Check metadata and events length, events should be batch processed + # Check metadata and events length, events should be batched processed events = r.json() self.assertIsInstance(events, list) self.assertGreater(len(events), 1) @@ -104,7 +104,8 @@ def test_eventhub_multiple_with_metadata(self): event = events[event_index] # Check if the event is enqueued between start_time and end_time - enqueued_time = parser.isoparse(event['enqueued_time']) + enqueued_time = parser.isoparse(event['enqueued_time']).astimezone( + tz=tz.UTC) self.assertTrue(start_time < enqueued_time < end_time) # Check if event properties are properly set @@ -120,7 +121,8 @@ def test_eventhub_multiple_with_metadata(self): enqueued_time = parser.isoparse(sys_props['EnqueuedTimeUtc']) # Check event trigger time and other system properties - self.assertTrue(start_time < enqueued_time < end_time) + self.assertTrue( + start_time.timestamp() < enqueued_time.timestamp() < end_time.timestamp()) # NoQA self.assertIsNone(sys_props['PartitionKey']) self.assertGreaterEqual(sys_props['SequenceNumber'], 0) self.assertIsNotNone(sys_props['Offset']) diff --git a/tests/endtoend/test_eventhub_functions.py b/tests/endtoend/test_eventhub_functions.py index 0c7e298fe..250e43122 100644 --- a/tests/endtoend/test_eventhub_functions.py +++ b/tests/endtoend/test_eventhub_functions.py @@ -89,8 +89,11 @@ def test_eventhub_trigger_with_metadata(self): self.assertIsNotNone(event['metadata']) metadata = event['metadata'] sys_props = metadata['SystemProperties'] - enqueued_time = parser.isoparse(metadata['EnqueuedTimeUtc']) - self.assertTrue(start_time < enqueued_time < end_time) + enqueued_time = parser.isoparse(metadata['EnqueuedTimeUtc']).astimezone( + tz=tz.UTC) + + self.assertTrue( + start_time.timestamp() < enqueued_time.timestamp() < end_time.timestamp()) # NoQA self.assertIsNone(sys_props['PartitionKey']) self.assertGreaterEqual(sys_props['SequenceNumber'], 0) self.assertIsNotNone(sys_props['Offset']) diff --git a/tests/endtoend/test_queue_functions.py b/tests/endtoend/test_queue_functions.py index 023ca0917..dba16287e 100644 --- a/tests/endtoend/test_queue_functions.py +++ b/tests/endtoend/test_queue_functions.py @@ -76,7 +76,9 @@ def 
test_queue_untyped_return(self): def test_queue_return_multiple(self): r = self.webhost.request('POST', 'put_queue_return_multiple', data='foo') - self.assertTrue(200 <= r.status_code < 300) + self.assertTrue(200 <= r.status_code < 300, + f"Returned status code {r.status_code}, " + "not in the 200-300 range.") # wait for queue_trigger to process the queue item time.sleep(1) @@ -85,5 +87,7 @@ def test_queue_return_multiple(self): def test_queue_return_multiple_outparam(self): r = self.webhost.request('POST', 'put_queue_multiple_out', data='foo') - self.assertTrue(200 <= r.status_code < 300) + self.assertTrue(200 <= r.status_code < 300, + f"Returned status code {r.status_code}, " + "not in the 200-300 range.") self.assertEqual(r.text, 'HTTP response: foo') diff --git a/tests/unittests/test_file_accessor.py b/tests/unittests/test_file_accessor.py index 238ce9e52..7fcd2a156 100644 --- a/tests/unittests/test_file_accessor.py +++ b/tests/unittests/test_file_accessor.py @@ -2,12 +2,18 @@ # Licensed under the MIT License. import os +import sys import unittest +from unittest import skipIf + from azure_functions_worker import testutils from azure_functions_worker.bindings.shared_memory_data_transfer \ import SharedMemoryException +@skipIf(sys.platform == 'darwin', 'MacOS M1 machines do not correctly test the' + 'shared memory filesystems and thus skipping' + ' these tests for the time being') class TestFileAccessor(testutils.SharedMemoryTestCase): """ Tests for FileAccessor. diff --git a/tests/unittests/test_file_accessor_factory.py b/tests/unittests/test_file_accessor_factory.py index e3bc225a2..3c83f2310 100644 --- a/tests/unittests/test_file_accessor_factory.py +++ b/tests/unittests/test_file_accessor_factory.py @@ -2,7 +2,10 @@ # Licensed under the MIT License. import os +import sys import unittest +from unittest.mock import patch + from azure_functions_worker.bindings.shared_memory_data_transfer \ import FileAccessorFactory from azure_functions_worker.bindings.\ @@ -15,6 +18,15 @@ class TestFileAccessorFactory(unittest.TestCase): """ Tests for FileAccessorFactory. 
""" + def setUp(self): + env = os.environ.copy() + env['FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED'] = "true" + self.mock_environ = patch.dict('os.environ', env) + self.mock_environ.start() + + def tearDown(self): + self.mock_environ.stop() + @unittest.skipIf(os.name != 'nt', 'FileAccessorWindows is only valid on Windows') def test_file_accessor_windows_created(self): @@ -24,7 +36,7 @@ def test_file_accessor_windows_created(self): file_accessor = FileAccessorFactory.create_file_accessor() self.assertTrue(type(file_accessor) is FileAccessorWindows) - @unittest.skipIf(os.name == 'nt', + @unittest.skipIf(os.name == 'nt' or sys.platform == 'darwin', 'FileAccessorUnix is only valid on Unix') def test_file_accessor_unix_created(self): """ diff --git a/tests/unittests/test_mock_blob_shared_memory_functions.py b/tests/unittests/test_mock_blob_shared_memory_functions.py index 487b665bd..60204ad39 100644 --- a/tests/unittests/test_mock_blob_shared_memory_functions.py +++ b/tests/unittests/test_mock_blob_shared_memory_functions.py @@ -3,6 +3,10 @@ import json import hashlib +import time +from unittest import skipIf +import sys + from azure_functions_worker.bindings.shared_memory_data_transfer \ import SharedMemoryMap from azure_functions_worker.bindings.shared_memory_data_transfer \ @@ -11,6 +15,9 @@ from azure_functions_worker import testutils +@skipIf(sys.platform == 'darwin', 'MacOS M1 machines do not correctly test the' + 'shared memory filesystems and thus skipping' + ' these tests for the time being') class TestMockBlobSharedMemoryFunctions(testutils.SharedMemoryTestCase, testutils.AsyncTestCase): """ @@ -440,14 +447,15 @@ async def test_multiple_input_output_blobs(self): method='GET', query=http_params))), protos.ParameterBinding( - name='input_file_1', + name='inputfile1', rpc_shared_memory=input_value_1 ), protos.ParameterBinding( - name='input_file_2', + name='inputfile2', rpc_shared_memory=input_value_2 ) ]) + time.sleep(1) # Dispose the shared memory map since the function is done using it input_shared_mem_map_1.dispose() diff --git a/tests/unittests/test_shared_memory_manager.py b/tests/unittests/test_shared_memory_manager.py index 0cdb7c234..016a0fbac 100644 --- a/tests/unittests/test_shared_memory_manager.py +++ b/tests/unittests/test_shared_memory_manager.py @@ -5,6 +5,7 @@ import os import json import sys +from unittest import skipIf from unittest.mock import patch from azure_functions_worker.utils.common import is_envvar_true from azure.functions import meta as bind_meta @@ -17,12 +18,17 @@ import FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED +@skipIf(sys.platform == 'darwin', 'MacOS M1 machines do not correctly test the' + 'shared memory filesystems and thus skipping' + ' these tests for the time being') class TestSharedMemoryManager(testutils.SharedMemoryTestCase): """ Tests for SharedMemoryManager. """ def setUp(self): - self.mock_environ = patch.dict('os.environ', os.environ.copy()) + env = os.environ.copy() + env['FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED'] = "true" + self.mock_environ = patch.dict('os.environ', env) self.mock_sys_module = patch.dict('sys.modules', sys.modules.copy()) self.mock_sys_path = patch('sys.path', sys.path.copy()) self.mock_environ.start() diff --git a/tests/unittests/test_shared_memory_map.py b/tests/unittests/test_shared_memory_map.py index 86680474c..c8ac94948 100644 --- a/tests/unittests/test_shared_memory_map.py +++ b/tests/unittests/test_shared_memory_map.py @@ -2,7 +2,10 @@ # Licensed under the MIT License. 
import os +import sys import unittest +from unittest import skipIf + from azure_functions_worker import testutils from azure_functions_worker.bindings.shared_memory_data_transfer \ import SharedMemoryMap @@ -12,6 +15,9 @@ import SharedMemoryException +@skipIf(sys.platform == 'darwin', 'MacOS M1 machines do not correctly test the' + 'shared memory filesystems and thus skipping' + ' these tests for the time being') class TestSharedMemoryMap(testutils.SharedMemoryTestCase): """ Tests for SharedMemoryMap. diff --git a/tests/unittests/test_utilities_dependency.py b/tests/unittests/test_utilities_dependency.py index 59fb7a6b6..2f1ba56a1 100644 --- a/tests/unittests/test_utilities_dependency.py +++ b/tests/unittests/test_utilities_dependency.py @@ -98,7 +98,7 @@ def test_initialize_in_linux_dedicated(self): def test_initialize_in_windows_core_tools(self): os.environ['AzureWebJobsScriptRoot'] = 'C:\\FunctionApp' sys.path.extend([ - 'C:\\Users\\hazeng\\AppData\\Roaming\\npm\\' + 'C:\\Users\\user\\AppData\\Roaming\\npm\\' 'node_modules\\azure-functions-core-tools\\bin\\' 'workers\\python\\3.6\\WINDOWS\\X64', 'C:\\FunctionApp\\.venv38\\lib\\site-packages', @@ -115,7 +115,7 @@ def test_initialize_in_windows_core_tools(self): ) self.assertEqual( DependencyManager.worker_deps_path, - 'C:\\Users\\hazeng\\AppData\\Roaming\\npm\\node_modules\\' + 'C:\\Users\\user\\AppData\\Roaming\\npm\\node_modules\\' 'azure-functions-core-tools\\bin\\workers\\python\\3.6\\WINDOWS' '\\X64' ) @@ -183,12 +183,12 @@ def test_get_worker_deps_path_with_no_worker_sys_path(self): def test_get_worker_deps_path_from_windows_core_tools(self): # Test for Windows Core Tools Environment - sys.path.append('C:\\Users\\hazeng\\AppData\\Roaming\\npm\\' + sys.path.append('C:\\Users\\user\\AppData\\Roaming\\npm\\' 'node_modules\\azure-functions-core-tools\\bin\\' 'workers\\python\\3.6\\WINDOWS\\X64') result = DependencyManager._get_worker_deps_path() self.assertEqual(result, - 'C:\\Users\\hazeng\\AppData\\Roaming\\npm\\' + 'C:\\Users\\user\\AppData\\Roaming\\npm\\' 'node_modules\\azure-functions-core-tools\\bin\\' 'workers\\python\\3.6\\WINDOWS\\X64') @@ -267,7 +267,7 @@ def test_add_to_sys_path_disallow_module_resolution_from_namespace(self): def test_add_to_sys_path_allow_resolution_from_import_statement(self): """The standard Python import mechanism allows deriving a specific - module in a import statement, e.g. + module in an import statement, e.g. from azure import functions # OK """
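
Note on the new version plumbing (an addendum, not part of the diff above): `azure_functions_worker/__init__.py` no longer carries `__version__`; `dispatcher.py` and `setup.py` now read `VERSION` from the new `azure_functions_worker/version.py`, and the dev branch of `azure-pipelines.yml` derives the integration-test NuGet package version from that same constant. A minimal sketch of that relationship follows, assuming the worker package is importable from the repository root; the helper function name is illustrative only and does not exist in this change.

    # Sketch only: mirrors WKVERSION="$VERSION-$(patchBuildNumberForDev)"
    # from the dev-branch path of azure-pipelines.yml.
    from azure_functions_worker.version import VERSION  # '4.0.0' in this change

    def integration_test_package_version(build_number: str) -> str:
        # Build.BuildNumber follows $(Date:yyyyMMdd).$(Rev:r), so for example
        # integration_test_package_version('20211028.1') -> '4.0.0-20211028.1'
        return f"{VERSION}-{build_number}"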