# NOTE(review): removed non-Python web-scrape artifact ("Files / 144 lines /
# 3.9 KiB / Python / Executable File") that preceded the module docstring and
# broke the script with a SyntaxError.
"""
Simple Python script to check URLs
"""
import time
import logging
import argparse
from typing import Optional, Dict, Any
import requests
# -------------------------------------------------------------------
# Web request with retries
# -------------------------------------------------------------------
def fetch_url(
    url: str,
    timeout: int = 5,
    max_retries: int = 3,
) -> Dict[str, Any]:
    """
    Fetch a URL with retry logic and logging.

    Attempts are spaced with linear backoff: the first attempt runs
    immediately, then waits of 3s, 6s, 9s, ... precede each retry.

    Args:
        url: Target URL to GET.
        timeout: Per-request timeout in seconds.
        max_retries: Total number of attempts (including the first).

    Returns:
        dict with:
            - success (bool)
            - status_code (int | None)
            - response_text (str | None)
            - response_time_ms (float | None)
            - error (str | None)
            - attempts (int)
    """
    attempts = 0
    last_exception: Optional[str] = None
    for attempt in range(max_retries):
        attempts += 1
        # Linear backoff 0s, 3s, 6s, ... — generalizes the original fixed
        # [0, 3, 6] table, which raised IndexError when max_retries > 3.
        delay = 3 * attempt
        if delay > 0:
            time.sleep(delay)
        try:
            logging.info("Attempt %s - Requesting %s", attempts, url)
            response = requests.get(url, timeout=timeout)
            # Raise exception for HTTP 4xx / 5xx
            response.raise_for_status()
            # Time between sending the request and receiving the response headers
            elapsed_ms = response.elapsed.total_seconds() * 1000
            logging.info(
                "Attempt %s - SUCCESS (status_code=%s, time=%.2f ms)",
                attempts,
                response.status_code,
                elapsed_ms,
            )
            return {
                "success": True,
                "status_code": response.status_code,
                "response_text": response.text,
                "response_time_ms": elapsed_ms,
                "error": None,
                "attempts": attempts,
            }
        except requests.exceptions.Timeout:
            last_exception = "Timeout"
            logging.warning("Attempt %s - TIMEOUT", attempts)
        except requests.exceptions.HTTPError as e:
            # BUG FIX: the original assigned a tuple
            # ("HTTP error: %s", status) instead of a formatted string,
            # so the returned "error" field was a tuple, not a message.
            last_exception = "HTTP error: %s" % e.response.status_code
            logging.warning(
                "Attempt %s - HTTP ERROR (status_code=%s)",
                attempts,
                e.response.status_code,
            )
        except requests.exceptions.RequestException as e:
            last_exception = str(e)
            logging.error("Attempt %s - NETWORK ERROR: %s", attempts, e)
    # All retries failed
    logging.error("FAILED after %s attempts - Last error: %s", attempts, last_exception)
    return {
        "success": False,
        "status_code": None,
        "response_text": None,
        "response_time_ms": None,
        "error": last_exception,
        "attempts": attempts,
    }
# -------------------------------------------------------------------
# CLI handling
# -------------------------------------------------------------------
def main() -> None:
    """Parse CLI arguments, configure file logging, and perform one fetch.

    Accepts a positional URL plus optional --timeout (seconds, default 5)
    and --debug (lowers the log threshold to DEBUG).
    """
    arg_parser = argparse.ArgumentParser(
        description="Make a web request with retries and logging"
    )
    arg_parser.add_argument(
        "url",
        help="URL to request (e.g. https://example.com)",
    )
    arg_parser.add_argument(
        "--timeout",
        type=int,
        default=5,
        help="Request timeout in seconds (default: 5)",
    )
    arg_parser.add_argument(
        "--debug",
        action="store_true",
        help="Enable debug output",
    )
    cli_args = arg_parser.parse_args()

    # Log to a file rather than stderr; --debug enables verbose records.
    log_level = logging.DEBUG if cli_args.debug else logging.INFO
    logging.basicConfig(
        filename="simple-requests.log",
        level=log_level,
        format="%(asctime)s [%(levelname)s] %(message)s",
    )

    outcome = fetch_url(
        url=cli_args.url,
        timeout=cli_args.timeout,
    )
    logging.debug("Result: %s", outcome)
# -------------------------------------------------------------------
# Entrypoint
# -------------------------------------------------------------------
if __name__ == "__main__":
    # Run the CLI only when executed as a script, not when imported.
    main()