Merge from aws/aws-sam-cli/develop
aws-sam-cli-bot authored Dec 12, 2023
2 parents 53694c0 + aa132f8 commit 809e5ab
Showing 18 changed files with 108 additions and 50 deletions.
6 changes: 3 additions & 3 deletions .github/workflows/pr-labeler.yml
@@ -25,9 +25,9 @@ jobs:
script: |
const maintainers = [
'aws-sam-cli-bot',
- 'jfuss', 'hoffa', 'awood45', 'aahung', 'hawflau', 'mndeveci', 'ssenchenko',
- 'qingchm', 'moelasmar', 'xazhao', 'mildaniel', 'marekaiv', 'torresxb1',
- 'lucashuy', 'hnnasit', 'sriram-mv','dependabot[bot]'
+ 'hawflau', 'mndeveci',
+ 'mildaniel', 'marekaiv',
+ 'lucashuy', 'hnnasit', 'jysheng123', 'bentvelj', 'sidhujus', 'dependabot[bot]'
]
if (maintainers.includes(context.payload.sender.login)) {
github.rest.issues.addLabels({
2 changes: 1 addition & 1 deletion .github/workflows/validate_pyinstaller.yml
@@ -51,7 +51,7 @@ jobs:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
- python-version: "3.7"
+ python-version: "3.8"
- name: Set up Go
uses: actions/setup-go@v4
with:
3 changes: 1 addition & 2 deletions appveyor-linux-binary.yml
@@ -215,8 +215,7 @@ for:
- sh: "sudo mv /opt/terraform/terraform /usr/local/bin/"
- sh: "terraform -version"

- - sh: "pytest -vv -n 4 tests/integration/buildcmd/test_build_terraform_applications.py --json-report --json-report-file=TEST_REPORT-integration-buildcmd.json"
- - sh: "pytest -vv -n 4 tests/integration/buildcmd/test_build_terraform_applications_other_cases.py --json-report --json-report-file=TEST_REPORT-integration-buildcmd.json"
+ - sh: "pytest -vv -n 4 --reruns 4 tests/integration/buildcmd/test_build_terraform_applications.py tests/integration/buildcmd/test_build_terraform_applications_other_cases.py --json-report --json-report-file=TEST_REPORT-integration-terraform.json"

# Integ testing package & delete
-
3 changes: 1 addition & 2 deletions appveyor-ubuntu.yml
@@ -204,8 +204,7 @@ for:
- sh: "sudo mv /opt/terraform/terraform /usr/local/bin/"
- sh: "terraform -version"

- - sh: "pytest -vv -n 4 tests/integration/buildcmd/test_build_terraform_applications.py --json-report --json-report-file=TEST_REPORT-integration-buildcmd.json"
- - sh: "pytest -vv -n 4 tests/integration/buildcmd/test_build_terraform_applications_other_cases.py --json-report --json-report-file=TEST_REPORT-integration-buildcmd.json"
+ - sh: "pytest -vv -n 4 --reruns 4 tests/integration/buildcmd/test_build_terraform_applications.py tests/integration/buildcmd/test_build_terraform_applications_other_cases.py --json-report --json-report-file=TEST_REPORT-integration-buildcmd.json"

# Integ testing package & delete
-
3 changes: 1 addition & 2 deletions appveyor-windows-binary.yml
@@ -245,8 +245,7 @@ for:
- "choco install terraform"
- "terraform -version"

- - ps: "pytest -vv -n 4 tests/integration/buildcmd/test_build_terraform_applications.py --json-report --json-report-file=TEST_REPORT-integration-buildcmd.json"
- - ps: "pytest -vv -n 4 tests/integration/buildcmd/test_build_terraform_applications_other_cases.py --json-report --json-report-file=TEST_REPORT-integration-buildcmd.json"
+ - ps: "pytest -vv -n 4 --reruns 4 tests/integration/buildcmd/test_build_terraform_applications.py tests/integration/buildcmd/test_build_terraform_applications_other_cases.py --json-report --json-report-file=TEST_REPORT-integration-buildcmd.json"

# Integ testing package, delete and deploy
- matrix:
3 changes: 1 addition & 2 deletions appveyor-windows.yml
@@ -236,8 +236,7 @@ for:
- "choco install terraform"
- "terraform -version"

- - ps: "pytest -vv -n 4 tests/integration/buildcmd/test_build_terraform_applications.py --json-report --json-report-file=TEST_REPORT-integration-buildcmd.json"
- - ps: "pytest -vv -n 4 tests/integration/buildcmd/test_build_terraform_applications_other_cases.py --json-report --json-report-file=TEST_REPORT-integration-buildcmd.json"
+ - ps: "pytest -vv -n 4 --reruns 4 tests/integration/buildcmd/test_build_terraform_applications.py tests/integration/buildcmd/test_build_terraform_applications_other_cases.py --json-report --json-report-file=TEST_REPORT-integration-buildcmd.json"

# Integ testing package, delete and deploy
- matrix:
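
The --reruns 4 flag added to the consolidated Terraform test command in the four AppVeyor configs above comes from the pytest-rerunfailures plugin, which retries failing tests before marking them as failures. As a rough sketch, the same behaviour can be requested per test with the plugin's flaky marker; the test below is purely illustrative and not part of the SAM CLI suite:

# Illustrative only: requires "pip install pytest pytest-rerunfailures"
import random

import pytest


@pytest.mark.flaky(reruns=4, reruns_delay=2)  # retry up to 4 times, waiting 2 seconds between attempts
def test_flaky_terraform_style_integration():
    # Fails on some attempts to show the rerun behaviour; a real test would exercise the build under test.
    assert random.random() < 0.5
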
2 changes: 1 addition & 1 deletion requirements/base.txt
@@ -6,7 +6,7 @@ jmespath~=1.0.1
ruamel_yaml~=0.18.5
PyYAML~=6.0,>=6.0.1
cookiecutter~=2.5.0
- aws-sam-translator==1.81.0
+ aws-sam-translator==1.82.0
#docker minor version updates can include breaking changes. Auto update micro version only.
docker~=6.1.0
dateparser~=1.2
2 changes: 1 addition & 1 deletion requirements/pre-dev.txt
@@ -1 +1 @@
- ruff==0.1.6
+ ruff==0.1.7
6 changes: 3 additions & 3 deletions requirements/reproducible-linux.txt
@@ -24,9 +24,9 @@ aws-lambda-builders==1.43.0 \
--hash=sha256:15c4b497824a296690e9497dba93186544fbe8cf6af9afb1da2d3834ab84ab20 \
--hash=sha256:9a2bc476f6fd86fe32584f083436e862b124fd71ae307a8a1a85fae458589d09
# via aws-sam-cli (setup.py)
- aws-sam-translator==1.81.0 \
- --hash=sha256:a5e268673d4d17cd1609f823fa9779aaf0103c47971a03a4eab6414b08c702fa \
- --hash=sha256:da9afd9b1d9b6bb9bc3ee7b265af420bce4188bab4d2f97e14586d80a0372f36
+ aws-sam-translator==1.82.0 \
+ --hash=sha256:29ba61f2a70b2b1cf0c76b92b78a23c7cdd19ea1b0a5992753180b56d040d20b \
+ --hash=sha256:f78e58194461635aef6255d04e82a9b690e331ca9fd669d1401bf7f9a93ae49f
# via
# aws-sam-cli (setup.py)
# cfn-lint
6 changes: 3 additions & 3 deletions requirements/reproducible-mac.txt
@@ -24,9 +24,9 @@ aws-lambda-builders==1.43.0 \
--hash=sha256:15c4b497824a296690e9497dba93186544fbe8cf6af9afb1da2d3834ab84ab20 \
--hash=sha256:9a2bc476f6fd86fe32584f083436e862b124fd71ae307a8a1a85fae458589d09
# via aws-sam-cli (setup.py)
- aws-sam-translator==1.81.0 \
- --hash=sha256:a5e268673d4d17cd1609f823fa9779aaf0103c47971a03a4eab6414b08c702fa \
- --hash=sha256:da9afd9b1d9b6bb9bc3ee7b265af420bce4188bab4d2f97e14586d80a0372f36
+ aws-sam-translator==1.82.0 \
+ --hash=sha256:29ba61f2a70b2b1cf0c76b92b78a23c7cdd19ea1b0a5992753180b56d040d20b \
+ --hash=sha256:f78e58194461635aef6255d04e82a9b690e331ca9fd669d1401bf7f9a93ae49f
# via
# aws-sam-cli (setup.py)
# cfn-lint
6 changes: 3 additions & 3 deletions requirements/reproducible-win.txt
@@ -24,9 +24,9 @@ aws-lambda-builders==1.43.0 \
--hash=sha256:15c4b497824a296690e9497dba93186544fbe8cf6af9afb1da2d3834ab84ab20 \
--hash=sha256:9a2bc476f6fd86fe32584f083436e862b124fd71ae307a8a1a85fae458589d09
# via aws-sam-cli (setup.py)
- aws-sam-translator==1.81.0 \
- --hash=sha256:a5e268673d4d17cd1609f823fa9779aaf0103c47971a03a4eab6414b08c702fa \
- --hash=sha256:da9afd9b1d9b6bb9bc3ee7b265af420bce4188bab4d2f97e14586d80a0372f36
+ aws-sam-translator==1.82.0 \
+ --hash=sha256:29ba61f2a70b2b1cf0c76b92b78a23c7cdd19ea1b0a5992753180b56d040d20b \
+ --hash=sha256:f78e58194461635aef6255d04e82a9b690e331ca9fd669d1401bf7f9a93ae49f
# via
# aws-sam-cli (setup.py)
# cfn-lint
2 changes: 1 addition & 1 deletion samcli/__init__.py
@@ -2,4 +2,4 @@
SAM CLI version
"""

__version__ = "1.104.0"
__version__ = "1.105.0"
2 changes: 1 addition & 1 deletion samcli/lib/utils/preview_runtimes.py
@@ -4,4 +4,4 @@
"""
from typing import Set

- PREVIEW_RUNTIMES: Set[str] = {"python3.12"}
+ PREVIEW_RUNTIMES: Set[str] = set()
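
With python3.12 removed, PREVIEW_RUNTIMES is now empty, so any code that gates a warning on that set simply stops firing for it. A minimal sketch of that kind of guard, assuming a hypothetical warn_if_preview helper (SAM CLI's real call sites differ):

import logging
from typing import Set

LOG = logging.getLogger(__name__)

# Mirrors preview_runtimes.py after this commit: nothing is in preview.
PREVIEW_RUNTIMES: Set[str] = set()


def warn_if_preview(runtime: str) -> None:
    # Hypothetical guard: only warns while a runtime is still listed as preview.
    if runtime in PREVIEW_RUNTIMES:
        LOG.warning("Runtime %s is a preview runtime and may change before GA.", runtime)


warn_if_preview("python3.12")  # no longer emits a warning
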
40 changes: 31 additions & 9 deletions samcli/local/docker/container.py
@@ -6,6 +6,7 @@
import logging
import os
import pathlib
+ import re
import shutil
import socket
import tempfile
@@ -113,6 +114,7 @@ def __init__(
self._container_opts = container_opts
self._additional_volumes = additional_volumes
self._logs_thread = None
+ self._logs_thread_event = None

# Use the given Docker client or create new one
self.docker_client = docker_client or docker.from_env(version=DOCKER_MIN_API_VERSION)
@@ -217,6 +219,7 @@ def create(self):
self.id = real_container.id

self._logs_thread = None
+ self._logs_thread_event = None

if self.network_id and self.network_id != "host":
try:
@@ -384,7 +387,10 @@ def wait_for_result(self, full_path, event, stdout, stderr, start_timer=None):
# the log thread will not be closed until the container itself got deleted,
# so as long as the container is still there, no need to start a new log thread
if not self._logs_thread or not self._logs_thread.is_alive():
- self._logs_thread = threading.Thread(target=self.wait_for_logs, args=(stderr, stderr), daemon=True)
+ self._logs_thread_event = self._create_threading_event()
+ self._logs_thread = threading.Thread(
+ target=self.wait_for_logs, args=(stderr, stderr, self._logs_thread_event), daemon=True
+ )
self._logs_thread.start()

# wait_for_http_response will attempt to establish a connection to the socket
@@ -398,21 +404,21 @@ def wait_for_result(self, full_path, event, stdout, stderr, start_timer=None):
if timer:
timer.cancel()

- # NOTE(jfuss): Adding a sleep after we get a response from the contianer but before we
- # we write the response to ensure the last thing written to stdout is the container response
- time.sleep(1)
+ self._logs_thread_event.wait(timeout=1)
if isinstance(response, str):
stdout.write_str(response)
elif isinstance(response, bytes):
stdout.write_str(response.decode("utf-8"))
stdout.flush()
stderr.write_str("\n")
stderr.flush()
+ self._logs_thread_event.clear()

def wait_for_logs(
self,
stdout: Optional[Union[StreamWriter, io.BytesIO, io.TextIOWrapper]] = None,
stderr: Optional[Union[StreamWriter, io.BytesIO, io.TextIOWrapper]] = None,
+ event: Optional[threading.Event] = None,
):
# Return instantly if we don't have to fetch any logs
if not stdout and not stderr:
@@ -425,7 +431,7 @@ def wait_for_logs(

# Fetch both stdout and stderr streams from Docker as a single iterator.
logs_itr = real_container.attach(stream=True, logs=True, demux=True)
- self._write_container_output(logs_itr, stdout=stdout, stderr=stderr)
+ self._write_container_output(logs_itr, event=event, stdout=stdout, stderr=stderr)

def _wait_for_socket_connection(self) -> None:
"""
@@ -479,6 +485,7 @@ def _write_container_output(
output_itr: Iterator[Tuple[bytes, bytes]],
stdout: Optional[Union[StreamWriter, io.BytesIO, io.TextIOWrapper]] = None,
stderr: Optional[Union[StreamWriter, io.BytesIO, io.TextIOWrapper]] = None,
+ event: Optional[threading.Event] = None,
):
"""
Based on the data returned from the Container output, via the iterator, write it to the appropriate streams
@@ -498,21 +505,25 @@
# Iterator returns a tuple of (stdout, stderr)
for stdout_data, stderr_data in output_itr:
if stdout_data and stdout:
- Container._handle_data_writing(stdout, stdout_data)
+ Container._handle_data_writing(stdout, stdout_data, event)

if stderr_data and stderr:
- Container._handle_data_writing(stderr, stderr_data)
-
+ Container._handle_data_writing(stderr, stderr_data, event)
except Exception as ex:
LOG.debug("Failed to get the logs from the container", exc_info=ex)

@staticmethod
- def _handle_data_writing(output_stream: Union[StreamWriter, io.BytesIO, io.TextIOWrapper], output_data: bytes):
+ def _handle_data_writing(
+ output_stream: Union[StreamWriter, io.BytesIO, io.TextIOWrapper],
+ output_data: bytes,
+ event: Optional[threading.Event],
+ ):
# Decode the output and strip the string of carriage return characters. Stack traces are returned
# with carriage returns from the RIE. If these are left in the string then only the last line after
# the carriage return will be printed instead of the entire stack trace. Encode the string after cleaning
# to be printed by the correct output stream
output_str = output_data.decode("utf-8").replace("\r", os.linesep)
+ pattern = r"(.*\s)?REPORT RequestId:\s.+ Duration:\s.+\sMemory Size:\s.+\sMax Memory Used:\s.+"
if isinstance(output_stream, StreamWriter):
output_stream.write_str(output_str)
output_stream.flush()
@@ -522,6 +533,17 @@ def _handle_data_writing(output_stream: Union[StreamWriter, io.BytesIO, io.TextI

if isinstance(output_stream, io.TextIOWrapper):
output_stream.buffer.write(output_str.encode("utf-8"))
+ if re.match(pattern, output_str) is not None and event:
+ event.set()
+
+ # This method exists because otherwise when writing tests patching/mocking threading.Event breaks everything
+ # this allows for the tests to exist as they do currently without any major refactoring
+ @staticmethod
+ def _create_threading_event():
+ """
+ returns a new threading.Event object.
+ """
+ return threading.Event()

@property
def network_id(self):
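
The container.py hunks above swap the fixed one-second sleep for a threading.Event handshake: the log thread sets the event once it has flushed a line matching the RIE REPORT pattern, and wait_for_result waits on that event (capped at one second) before writing the invoke response, so the response is the last thing printed. A standalone sketch of that handshake, assuming a plain list of log lines instead of a Docker log stream:

import re
import threading
from typing import Iterable

# Same shape as the pattern added in container.py: the RIE end-of-invoke REPORT line.
REPORT_PATTERN = r"(.*\s)?REPORT RequestId:\s.+ Duration:\s.+\sMemory Size:\s.+\sMax Memory Used:\s.+"


def stream_logs(lines: Iterable[str], done: threading.Event) -> None:
    # Write each log line, then signal once the REPORT line has been flushed.
    for line in lines:
        print(line)
        if re.match(REPORT_PATTERN, line):
            done.set()


def wait_for_result(lines: Iterable[str], response: str) -> None:
    done = threading.Event()
    threading.Thread(target=stream_logs, args=(lines, done), daemon=True).start()
    # Wait up to 1s for the REPORT line instead of sleeping unconditionally.
    done.wait(timeout=1)
    print(response)
    done.clear()


fake_logs = [
    "START RequestId: 1234 Version: $LATEST",
    "END RequestId: 1234",
    "REPORT RequestId: 1234 Duration: 5.0 ms Billed Duration: 6 ms Memory Size: 128 MB Max Memory Used: 30 MB",
]
wait_for_result(fake_logs, '{"statusCode": 200}')
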
20 changes: 16 additions & 4 deletions samcli/local/docker/lambda_container.py
@@ -2,6 +2,7 @@
Represents Lambda runtime containers.
"""
import logging
+ import os
from typing import List

from samcli.lib.utils.packagetype import IMAGE
@@ -12,6 +13,8 @@

LOG = logging.getLogger(__name__)

+ RIE_LOG_LEVEL_ENV_VAR = "SAM_CLI_RIE_DEV"
+

class LambdaContainer(Container):
"""
@@ -21,7 +24,6 @@ class LambdaContainer(Container):
"""

_WORKING_DIR = "/var/task"
- _DEFAULT_ENTRYPOINT = ["/var/rapid/aws-lambda-rie", "--log-level", "error"]

# The Volume Mount path for debug files in docker
_DEBUGGER_VOLUME_MOUNT_PATH = "/tmp/lambci_debug_files"
@@ -115,10 +117,10 @@ def __init__(
_additional_entrypoint_args = (image_config.get("EntryPoint") if image_config else None) or config.get(
"Entrypoint"
)
- _entrypoint = entry or self._DEFAULT_ENTRYPOINT
+ _entrypoint = entry or self._get_default_entry_point()
# NOTE(sriram-mv): Only add entrypoint specified in the image configuration if the entrypoint
# has not changed for debugging.
- if isinstance(_additional_entrypoint_args, list) and entry == self._DEFAULT_ENTRYPOINT:
+ if isinstance(_additional_entrypoint_args, list) and entry == self._get_default_entry_point():
_entrypoint = _entrypoint + _additional_entrypoint_args
_work_dir = (image_config.get("WorkingDirectory") if image_config else None) or config.get("WorkingDir")

@@ -138,6 +140,16 @@ def __init__(
container_host_interface=container_host_interface,
)

+ @staticmethod
+ def _get_default_entry_point() -> List[str]:
+ """
+ Returns default entry point for lambda container, which is the path of the RIE executable with its debugging
+ configuration. If SAM_CLI_RIE_DEV is set to 1, RIE log level is set to 'debug', otherwise it is kept as 'error'.
+ """
+ #
+ rie_log_level = "debug" if os.environ.get(RIE_LOG_LEVEL_ENV_VAR, "0") == "1" else "error"
+ return ["/var/rapid/aws-lambda-rie", "--log-level", rie_log_level]
+
@staticmethod
def _get_exposed_ports(debug_options):
"""
@@ -253,7 +265,7 @@ def _get_debug_settings(runtime, debug_options=None): # pylint: disable=too-man
ie. if command is ``node index.js arg1 arg2``, then this list will be ["node", "index.js", "arg1", "arg2"]
"""

- entry = LambdaContainer._DEFAULT_ENTRYPOINT
+ entry = LambdaContainer._get_default_entry_point()
if not debug_options:
return entry, {}

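
In lambda_container.py the hard-coded _DEFAULT_ENTRYPOINT constant becomes _get_default_entry_point(), which checks the SAM_CLI_RIE_DEV environment variable so the emulator's own log level can be raised without editing code. A minimal sketch of the same selection logic outside the class; presumably setting SAM_CLI_RIE_DEV=1 before running sam local invoke would then surface the RIE's debug output:

import os
from typing import List

RIE_LOG_LEVEL_ENV_VAR = "SAM_CLI_RIE_DEV"


def get_default_entry_point() -> List[str]:
    # "debug" only when SAM_CLI_RIE_DEV=1, otherwise keep the quieter "error" level.
    rie_log_level = "debug" if os.environ.get(RIE_LOG_LEVEL_ENV_VAR, "0") == "1" else "error"
    return ["/var/rapid/aws-lambda-rie", "--log-level", rie_log_level]


os.environ[RIE_LOG_LEVEL_ENV_VAR] = "1"
assert get_default_entry_point() == ["/var/rapid/aws-lambda-rie", "--log-level", "debug"]
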
@@ -4,7 +4,7 @@ terraform {
required_providers {
aws = {
source = "hashicorp/aws"
version = "~> 4.51.0"
version = "~> 5.29.0"
}
}
}
@@ -45,7 +45,8 @@ resource "aws_lambda_function" "this" {
function_name = "hello-world-function"
role = aws_iam_role.this.arn

runtime = "provided.al2"
# use provided.al2023 if it is on Linux, use provided.al2 if it is on Windows
runtime = substr(pathexpand("~"), 0, 1) == "/"? "provided.al2023" : "provided.al2"
handler = "bootstrap"
filename = "hello_world.zip"
timeout = 30
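
The Terraform fixture now picks its runtime from the host OS: pathexpand("~") starts with "/" on Linux and macOS but with a drive letter on Windows, so Linux workers build against provided.al2023 while Windows stays on provided.al2. The same heuristic expressed in Python, purely for clarity (the fixture itself remains Terraform):

from pathlib import Path


def pick_provided_runtime() -> str:
    # Same check as the Terraform conditional: a home directory starting with "/" implies a Unix-like host.
    return "provided.al2023" if str(Path.home()).startswith("/") else "provided.al2"


print(pick_provided_runtime())
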
8 changes: 7 additions & 1 deletion tests/unit/local/docker/test_container.py
@@ -608,6 +608,8 @@ def test_wait_for_result_no_error(self, response_deserializable, rie_response, m
output_itr = Mock()
real_container_mock.attach.return_value = output_itr
self.container._write_container_output = Mock()
+ self.container._create_threading_event = Mock()
+ self.container._create_threading_event.return_value = Mock()

stdout_mock = Mock()
stdout_mock.write_str = Mock()
@@ -705,6 +707,8 @@ def test_wait_for_result_error(self, patched_sleep, mock_requests, patched_socke
output_itr = Mock()
real_container_mock.attach.return_value = output_itr
self.container._write_container_output = Mock()
+ self.container._create_threading_event = Mock()
+ self.container._create_threading_event.return_value = Mock()

stdout_mock = Mock()
stderr_mock = Mock()
@@ -794,7 +798,9 @@ def test_must_fetch_stdout_and_stderr_data(self):
self.container.wait_for_logs(stdout=stdout_mock, stderr=stderr_mock)

real_container_mock.attach.assert_called_with(stream=True, logs=True, demux=True)
- self.container._write_container_output.assert_called_with(output_itr, stdout=stdout_mock, stderr=stderr_mock)
+ self.container._write_container_output.assert_called_with(
+ output_itr, stdout=stdout_mock, stderr=stderr_mock, event=None
+ )

def test_must_skip_if_no_stdout_and_stderr(self):
self.container.wait_for_logs()
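
The updated tests stub the new _create_threading_event factory rather than patching threading.Event globally, which the comment in container.py notes would break unrelated threading machinery. A small sketch of that seam under test, with a toy Worker class standing in for Container (the names here are illustrative, not SAM CLI's):

import threading
from unittest.mock import Mock


class Worker:
    # Toy stand-in for Container: events are obtained through an overridable factory.

    @staticmethod
    def _create_threading_event() -> threading.Event:
        return threading.Event()

    def run(self) -> bool:
        event = self._create_threading_event()
        event.set()
        return event.wait(timeout=1)


def test_run_waits_on_the_event():
    worker = Worker()
    fake_event = Mock()  # only this worker's event is a mock, not threading.Event itself
    fake_event.wait.return_value = True
    worker._create_threading_event = Mock(return_value=fake_event)

    assert worker.run() is True
    fake_event.wait.assert_called_once_with(timeout=1)


test_run_waits_on_the_event()
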