diff --git a/src/apify_client/_http_client.py b/src/apify_client/_http_client.py
index 2a23516f..1b675507 100644
--- a/src/apify_client/_http_client.py
+++ b/src/apify_client/_http_client.py
@@ -190,17 +190,17 @@ def _make_request(stop_retrying: Callable, attempt: int) -> httpx.Response:
                     self.stats.add_rate_limit_error(attempt)
 
             except Exception as e:
-                logger.debug('Request threw exception', exc_info=e)
+                logger.warning('Request threw exception', exc_info=e)
                 if not is_retryable_error(e):
-                    logger.debug('Exception is not retryable', exc_info=e)
+                    logger.warning('Exception is not retryable', exc_info=e)
                     stop_retrying()
                 raise
 
             # We want to retry only requests which are server errors (status >= 500) and could resolve on their own,
             # and also retry rate limited requests that throw 429 Too Many Requests errors
-            logger.debug('Request unsuccessful', extra={'status_code': response.status_code})
+            logger.warning('Request unsuccessful', extra={'status_code': response.status_code})
             if response.status_code < 500 and response.status_code != HTTPStatus.TOO_MANY_REQUESTS:  # noqa: PLR2004
-                logger.debug('Status code is not retryable', extra={'status_code': response.status_code})
+                logger.warning('Status code is not retryable', extra={'status_code': response.status_code})
                 stop_retrying()
 
             raise ApifyApiError(response, attempt)
@@ -269,17 +269,17 @@ async def _make_request(stop_retrying: Callable, attempt: int) -> httpx.Response:
                     self.stats.add_rate_limit_error(attempt)
 
             except Exception as e:
-                logger.debug('Request threw exception', exc_info=e)
+                logger.warning('Request threw exception', exc_info=e)
                 if not is_retryable_error(e):
-                    logger.debug('Exception is not retryable', exc_info=e)
+                    logger.warning('Exception is not retryable', exc_info=e)
                     stop_retrying()
                 raise
 
             # We want to retry only requests which are server errors (status >= 500) and could resolve on their own,
             # and also retry rate limited requests that throw 429 Too Many Requests errors
-            logger.debug('Request unsuccessful', extra={'status_code': response.status_code})
+            logger.warning('Request unsuccessful', extra={'status_code': response.status_code})
             if response.status_code < 500 and response.status_code != HTTPStatus.TOO_MANY_REQUESTS:  # noqa: PLR2004
-                logger.debug('Status code is not retryable', extra={'status_code': response.status_code})
+                logger.warning('Status code is not retryable', extra={'status_code': response.status_code})
                 stop_retrying()
 
             raise ApifyApiError(response, attempt)
diff --git a/src/apify_client/_utils.py b/src/apify_client/_utils.py
index 0bbc04e8..fa427e2b 100644
--- a/src/apify_client/_utils.py
+++ b/src/apify_client/_utils.py
@@ -3,13 +3,15 @@
 import asyncio
 import base64
 import json
+import logging
 import random
 import time
 from http import HTTPStatus
 from typing import TYPE_CHECKING, Any, Callable, TypeVar, cast
 
 from apify_shared.utils import is_file_or_bytes, maybe_extract_enum_member_value
-
+from apify_client._logging import log_context, logger_name
+logger = logging.getLogger(logger_name)
 if TYPE_CHECKING:
     from collections.abc import Awaitable
@@ -73,6 +75,7 @@ def stop_retrying() -> None:
         backoff_exp_factor = backoff_factor ** (attempt - 1)
 
         sleep_time_secs = random_sleep_factor * backoff_base_secs * backoff_exp_factor
+        logger.warning(f'Backoff sleep {sleep_time_secs}. for attempt {attempt}')
         time.sleep(sleep_time_secs)
 
     return func(stop_retrying, max_retries + 1)
@@ -96,7 +99,11 @@ def stop_retrying() -> None:
 
     for attempt in range(1, max_retries + 1):
         try:
-            return await async_func(stop_retrying, attempt)
+            response = await async_func(stop_retrying, attempt)
+            if attempt > 1 :
+                logger.warning(response)
+                logger.warning(response.text)
+            return response
         except Exception:
             if not swallow:
                 raise
@@ -106,6 +113,7 @@ def stop_retrying() -> None:
         backoff_exp_factor = backoff_factor ** (attempt - 1)
 
         sleep_time_secs = random_sleep_factor * backoff_base_secs * backoff_exp_factor
+        logger.warning(f'Backoff sleep {sleep_time_secs}. for attempt {attempt}. Max retries: {max_retries}')
         await asyncio.sleep(sleep_time_secs)
 
     return await async_func(stop_retrying, max_retries + 1)