
Commit 286c32e

Fix PR comments
This commit addresses the comments on pull request #7.

- Increased cache expiry to 31 days.
- Added support for nep_mode in caching.
- Restored `tests/test_docker_utils.py` to the version in the PR.
- Kept `setuptools` as a dependency as it is required for Python < 3.9.
1 parent da0a212 commit 286c32e
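As a quick illustration of the caching change described above, here is a minimal usage sketch. It assumes the module is importable as `python_eol.cache`, matching the file path in this diff; the keyword-only `nep_mode` flag, the separate `eol_data_nep.json` cache file, and the 31-day expiry all come from the code further down.

```python
from python_eol.cache import get_eol_data

# Default mode: cached in eol_data.json, refreshed once the file is older than 31 days.
cpython_data = get_eol_data()

# NEP 29 mode: cached separately in eol_data_nep.json so the two datasets never mix.
nep_data = get_eol_data(nep_mode=True)

if nep_data is not None:
    for entry in nep_data:
        # Entries produced by _fetch_nep_data carry these two keys.
        print(entry["Version"], entry["End of Life"])
```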

3 files changed: +53 / -12 lines changed


pyproject.toml

Lines changed: 1 addition & 0 deletions
@@ -15,6 +15,7 @@ dependencies = [
     "appdirs",
     "requests",
     "setuptools",
+    "beautifulsoup4",
 ]
 classifiers = [
     "License :: OSI Approved :: MIT License",

python_eol/cache.py

Lines changed: 51 additions & 11 deletions
@@ -9,12 +9,14 @@

 import appdirs
 import requests
+from bs4 import BeautifulSoup

 logger = logging.getLogger(__name__)

 CACHE_DIR = Path(appdirs.user_cache_dir("python-eol"))
 CACHE_FILE = CACHE_DIR / "eol_data.json"
-CACHE_EXPIRY = timedelta(days=1)
+CACHE_FILE_NEP = CACHE_DIR / "eol_data_nep.json"
+CACHE_EXPIRY = timedelta(days=31)


 def _fetch_eol_data() -> list[dict[str, Any]] | None:
@@ -39,44 +41,82 @@ def _fetch_eol_data() -> list[dict[str, Any]] | None:
     return processed_data


-def _read_cache() -> list[dict[str, Any]] | None:
+def _fetch_nep_data() -> list[dict[str, Any]] | None:
+    """Fetch NEP 29 EOL data."""
+    url = "https://numpy.org/neps/nep-0029-deprecation_policy.html#support-table"
+    try:
+        response = requests.get(url, timeout=10)
+        response.raise_for_status()
+    except requests.RequestException as e:
+        logger.warning(f"Failed to fetch NEP data: {e}")
+        return None
+
+    soup = BeautifulSoup(response.content, "html.parser")
+    table = soup.find("table")
+
+    data = []
+    for row in table.find_all("tr")[1:]:
+        columns = row.find_all("td")
+        end_of_life = columns[0].text.strip()
+        version = columns[1].text.strip().rstrip("+")
+        version_number = version.split(".")
+        version_number[-1] = str(int(version_number[-1]) - 1)
+        parsed_version = ".".join(version_number)
+        end_of_life_date = datetime.strptime(end_of_life, "%b %d, %Y").date()
+
+        existing_data = next((d for d in data if d["Version"] == parsed_version), None)
+        if existing_data:
+            existing_data["End of Life"] = min(
+                existing_data["End of Life"],
+                str(end_of_life_date),
+            )
+        else:
+            row_data = {"Version": parsed_version, "End of Life": str(end_of_life_date)}
+            data.append(row_data)
+    return data
+
+
+def _read_cache(*, nep_mode: bool = False) -> list[dict[str, Any]] | None:
     """Read EOL data from cache."""
-    if not CACHE_FILE.exists():
+    cache_file = CACHE_FILE_NEP if nep_mode else CACHE_FILE
+    if not cache_file.exists():
         return None

-    if datetime.fromtimestamp(CACHE_FILE.stat().st_mtime) < datetime.now() - CACHE_EXPIRY:
+    if datetime.fromtimestamp(cache_file.stat().st_mtime) < datetime.now() - CACHE_EXPIRY:
         logger.debug("Cache is expired.")
         return None

     try:
-        with CACHE_FILE.open() as f:
+        with cache_file.open() as f:
             return json.load(f)
     except (IOError, json.JSONDecodeError) as e:
         logger.warning(f"Failed to read cache: {e}")
         return None


-def _write_cache(data: list[dict[str, Any]]) -> None:
+def _write_cache(data: list[dict[str, Any]], *, nep_mode: bool = False) -> None:
     """Write EOL data to cache."""
+    cache_file = CACHE_FILE_NEP if nep_mode else CACHE_FILE
     try:
         CACHE_DIR.mkdir(parents=True, exist_ok=True)
-        with CACHE_FILE.open("w") as f:
+        with cache_file.open("w") as f:
             json.dump(data, f, indent=4)
     except IOError as e:
         logger.warning(f"Failed to write cache: {e}")


-def get_eol_data() -> list[dict[str, Any]] | None:
+def get_eol_data(*, nep_mode: bool = False) -> list[dict[str, Any]] | None:
     """Get EOL data from cache or fetch if stale."""
-    cached_data = _read_cache()
+    cached_data = _read_cache(nep_mode=nep_mode)
     if cached_data:
         logger.debug("Using cached EOL data.")
         return cached_data

     logger.debug("Fetching new EOL data.")
-    fetched_data = _fetch_eol_data()
+    fetch_function = _fetch_nep_data if nep_mode else _fetch_eol_data
+    fetched_data = fetch_function()
     if fetched_data:
-        _write_cache(fetched_data)
+        _write_cache(fetched_data, nep_mode=nep_mode)
         return fetched_data

     return None
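The arithmetic in `_fetch_nep_data` deserves a note: each row of the NEP 29 support table pairs a date with the minimum supported version as of that date (e.g. `3.9+`), so the release one minor version lower is the one reaching end of life on that date. Below is a standalone sketch of that transformation, using a made-up sample row rather than the live table.

```python
from datetime import datetime

# Hypothetical table row: from this date on, Python 3.9+ is required,
# which means 3.8 reaches end of life on that date.
end_of_life, version = "Apr 05, 2023", "3.9+"

parts = version.rstrip("+").split(".")   # ["3", "9"]
parts[-1] = str(int(parts[-1]) - 1)      # ["3", "8"]
parsed_version = ".".join(parts)         # "3.8"
eol_date = datetime.strptime(end_of_life, "%b %d, %Y").date()

print({"Version": parsed_version, "End of Life": str(eol_date)})
# {'Version': '3.8', 'End of Life': '2023-04-05'}
```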

python_eol/main.py

Lines changed: 1 addition & 1 deletion
@@ -81,7 +81,7 @@ def _check_python_eol(
     check_docker_files: bool = False,
     nep_mode: bool = False,
 ) -> int:
-    eol_data = get_eol_data()
+    eol_data = get_eol_data(nep_mode=nep_mode)
     if eol_data is None:
         logger.debug("Falling back to packaged EOL data.")
         db_file = _get_db_file_path(nep_mode=nep_mode)
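For context, the unchanged lines above show the caller's fallback path: try the cached or freshly fetched data first, and only then read a packaged data file resolved by `_get_db_file_path(nep_mode=nep_mode)`. The sketch below mirrors that pattern in a self-contained form; passing the two callables in as parameters, and treating the packaged file as JSON, are assumptions made for illustration rather than details confirmed by this diff.

```python
import json
from pathlib import Path
from typing import Any, Callable


def load_eol_data_with_fallback(
    fetch: Callable[..., list[dict[str, Any]] | None],
    db_path_for: Callable[..., Path],
    *,
    nep_mode: bool = False,
) -> list[dict[str, Any]]:
    """Return live EOL data, falling back to a bundled data file."""
    data = fetch(nep_mode=nep_mode)
    if data is None:
        # Mirrors _check_python_eol: resolve the packaged file for the selected mode.
        with db_path_for(nep_mode=nep_mode).open() as f:
            data = json.load(f)  # assumes the packaged file has the same shape as the cache
    return data
```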
