 from datetime import datetime, timedelta
 from typing import TYPE_CHECKING

-from django.conf import settings
-from redis.client import StrictRedis
-from rediscluster import RedisCluster
-
 from sentry.constants import UPTIME_AUTODETECTION
 from sentry.uptime.models import get_active_auto_monitor_count_for_org
 from sentry.uptime.subscriptions.subscriptions import (
     MAX_AUTO_SUBSCRIPTIONS_PER_ORG,
     MaxUrlsForDomainReachedException,
     check_url_limits,
 )
-from sentry.utils import metrics, redis
+from sentry.uptime.utils import get_cluster
+from sentry.utils import metrics

 if TYPE_CHECKING:
     from sentry.models.organization import Organization
@@ -37,10 +34,6 @@
 KEY_EXPIRY = ORGANIZATION_FLUSH_FREQUENCY * 2


-def _get_cluster() -> RedisCluster | StrictRedis:
-    return redis.redis_clusters.get(settings.SENTRY_UPTIME_DETECTOR_CLUSTER)
-
-
 def add_base_url_to_rank(project: Project, base_url: str):
     """
     Takes a project and valid base url and stores ranking information about it in Redis.
@@ -59,7 +52,7 @@ def add_base_url_to_rank(project: Project, base_url: str):
     larger than `RANKED_MAX_SIZE`. That shouldn't cause us problems, and is preferable to
     trimming it on every call.
     """
-    cluster = _get_cluster()
+    cluster = get_cluster()
     org_projects_key = build_org_projects_key(project.organization)
     pipeline = cluster.pipeline()
     pipeline.zincrby(org_projects_key, 1, str(project.id))
@@ -91,7 +84,7 @@ def get_candidate_projects_for_org(org: Organization) -> list[tuple[int, int]]:
     Project ids are sorted by `total_urls_seen` desc.
     """
     key = build_org_projects_key(org)
-    cluster = _get_cluster()
+    cluster = get_cluster()
     return [
         (int(project_id), count)
         for project_id, count in cluster.zrange(
@@ -105,7 +98,7 @@ def delete_candidate_projects_for_org(org: Organization) -> None:
     Deletes candidate projects related to the organization that have seen urls.
     """
     key = build_org_projects_key(org)
-    cluster = _get_cluster()
+    cluster = get_cluster()
     cluster.delete(key)


@@ -115,7 +108,7 @@ def get_candidate_urls_for_project(project: Project, limit=5) -> list[tuple[str,
     `times_url_seen` desc.
     """
     key = get_project_base_url_rank_key(project)
-    cluster = _get_cluster()
+    cluster = get_cluster()
     candidate_urls = cluster.zrange(key, 0, -1, desc=True, withscores=True, score_cast_func=int)
     urls = []
     for candidate_url, url_count in candidate_urls:
@@ -134,7 +127,7 @@ def delete_candidate_urls_for_project(project: Project) -> None:
     Deletes all current candidate rules for a project.
     """
     key = get_project_base_url_rank_key(project)
-    cluster = _get_cluster()
+    cluster = get_cluster()
     cluster.delete(key)


@@ -166,7 +159,7 @@ def get_organization_bucket(bucket: datetime) -> set[int]:
     that have projects that have seen urls.
     """
     key = get_organization_bucket_key_for_datetime(bucket)
-    cluster = _get_cluster()
+    cluster = get_cluster()
     return {int(organization_id) for organization_id in cluster.smembers(key)}


@@ -175,7 +168,7 @@ def delete_organization_bucket(bucket: datetime) -> None:
     Delete all organizations from a specific datetime bucket.
     """
     key = get_organization_bucket_key_for_datetime(bucket)
-    cluster = _get_cluster()
+    cluster = get_cluster()
     cluster.delete(key)


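
For context on the refactor above: the module-private `_get_cluster()` helper is removed and every call site now uses a shared `get_cluster()` imported from `sentry.uptime.utils`. The contents of that module are not shown in this diff; the sketch below is a minimal reconstruction that assumes the helper simply relocates the removed body unchanged.

```python
# sentry/uptime/utils.py -- hypothetical reconstruction based on the removed
# _get_cluster() body shown in the diff; the real module may differ.
from django.conf import settings
from redis.client import StrictRedis
from rediscluster import RedisCluster

from sentry.utils import redis


def get_cluster() -> RedisCluster | StrictRedis:
    # Resolve the Redis cluster configured for the uptime detector, so that
    # every uptime module shares a single lookup helper.
    return redis.redis_clusters.get(settings.SENTRY_UPTIME_DETECTOR_CLUSTER)
```

Centralizing the lookup means callers such as `add_base_url_to_rank` and `get_candidate_urls_for_project` no longer need to know which settings key selects the detector cluster.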