diff --git a/app/calculator.py b/app/calculator.py index 1e2af2a..41ebcd4 100644 --- a/app/calculator.py +++ b/app/calculator.py @@ -19,4 +19,13 @@ def multiplyBy6(x, y): return x * y * 66 def multiplyBy62(x, y): + return x * y * 12412 + 213 + + def multiplyBy622(x, y): return x * y * 12412 + + + def multiplyBy623(x, y): + return x * y * 12412 + print("delete") + diff --git a/app/longFileOne.py b/app/longFileOne.py new file mode 100644 index 0000000..9faa329 --- /dev/null +++ b/app/longFileOne.py @@ -0,0 +1,130 @@ +import os, sys, re, math, time, random, threading, multiprocessing, asyncio +import sqlite3, json, requests +import pandas as pd +import numpy as np +import matplotlib.pyplot as plt +from functools import wraps +from argparse import ArgumentParser + +DB_FILE = "data.db" + +def log_time(func): + @wraps(func) + def wrapper(*args, **kwargs): + start = time.time() + result = func(*args, **kwargs) + print(f"{func.__name__} executed in {time.time() - start:.2f}s") + return result + return wrapper + +class DataProcessor: + def __init__(self, db_path): + self.db_path = db_path + self.conn = sqlite3.connect(db_path) + self.create_table() + + def create_table(self): + c = self.conn.cursor() + c.execute('''CREATE TABLE IF NOT EXISTS entries (id INTEGER PRIMARY KEY, text TEXT, value REAL)''') + self.conn.commit() + + def insert_entry(self, text, value): + c = self.conn.cursor() + c.execute('INSERT INTO entries (text, value) VALUES (?, ?)', (text, value)) + self.conn.commit() + + def query_entries(self): + c = self.conn.cursor() + return c.execute('SELECT * FROM entries').fetchall() + + def close(self): + self.conn.close() + +@log_time +def simulate_web_fetch(): + r = requests.get("https://httpbin.org/get") + return r.json() + +def regex_filter(lines, pattern): + return [line for line in lines if re.search(pattern, line)] + +def generate_data(n=1000): + return pd.DataFrame({ + "x": np.linspace(0, 10, n), + "y": np.sin(np.linspace(0, 10, n)) + np.random.normal(0, 0.1, 
n) + }) + +@log_time +def plot_data(df): + plt.plot(df['x'], df['y']) + plt.title("Noisy Sine Wave") + plt.savefig("plot.png") + plt.close() + +async def async_fetch(url): + loop = asyncio.get_event_loop() + return await loop.run_in_executor(None, requests.get, url) + +@log_time +def run_async_tasks(): + urls = [f"https://httpbin.org/delay/{i%3}" for i in range(5)] + results = asyncio.run(asyncio.gather(*[async_fetch(url) for url in urls])) + return [r.status_code for r in results] + +def thread_task(n): + print(f"Thread {n} sleeping...") + time.sleep(random.uniform(0.5, 2.0)) + print(f"Thread {n} done.") + +def run_threads(): + threads = [threading.Thread(target=thread_task, args=(i,)) for i in range(5)] + [t.start() for t in threads] + [t.join() for t in threads] + +def process_task(x): + return math.sqrt(x ** 2 + 1) + +def run_processes(): + with multiprocessing.Pool(4) as pool: + return pool.map(process_task, range(10000)) + +def write_large_file(filename, n=100000): + with open(filename, "w") as f: + for i in range(n): + f.write(f"{i},value={random.random()}\n") + +def read_and_process_file(filename): + with open(filename) as f: + return sum(1 for line in f if float(line.split('=')[1]) > 0.5) + +def main(): + parser = ArgumentParser() + parser.add_argument("--file", type=str, default="large.txt") + parser.add_argument("--insert", action="store_true") + args = parser.parse_args() + + write_large_file(args.file) + print("File written.") + count = read_and_process_file(args.file) + print(f"Lines with value > 0.5: {count}") + + dp = DataProcessor(DB_FILE) + if args.insert: + for _ in range(100): + dp.insert_entry("sample", random.random()) + entries = dp.query_entries() + print(f"Loaded {len(entries)} DB entries.") + dp.close() + + data = generate_data() + plot_data(data) + print("Plot saved.") + run_threads() + print("Threading done.") + result = run_processes() + print(f"Process result sample: {result[:5]}") + print("Async fetch status codes:", 
run_async_tasks()) + print("Web fetch sample:", simulate_web_fetch()) + +if __name__ == "__main__": + main() diff --git a/app/longFileTwo.py b/app/longFileTwo.py new file mode 100644 index 0000000..5b8feca --- /dev/null +++ b/app/longFileTwo.py @@ -0,0 +1,128 @@ +import random, time, json, math, logging +from typing import List, Dict +from functools import lru_cache + +logging.basicConfig(level=logging.INFO, format="%(message)s") +logger = logging.getLogger("GalacticSim") + +class ResourceNotFound(Exception): pass + +class Resource: + def __init__(self, name: str, base_price: float): + self.name = name + self.base_price = base_price + + def price_fluctuation(self): + return round(self.base_price * random.uniform(0.8, 1.2), 2) + +class Planet: + def __init__(self, name: str, richness: float, population: int): + self.name = name + self.richness = richness + self.population = population + self.resources: Dict[str, float] = {} + self._generate_resources() + + def _generate_resources(self): + for res in ["metal", "gas", "spice", "water"]: + self.resources[res] = max(0.1, self.richness * random.uniform(0.5, 2)) + + def consume(self, resource: str, amount: float): + if self.resources.get(resource, 0) < amount: + raise ResourceNotFound(f"{self.name} lacks {resource}") + self.resources[resource] -= amount + + def produce(self, resource: str, amount: float): + self.resources[resource] = self.resources.get(resource, 0) + amount + + def __repr__(self): + return f"" + +class TradeRoute: + def __init__(self, source: Planet, target: Planet, resource: str): + self.source = source + self.target = target + self.resource = resource + self.distance = self.compute_distance() + + def compute_distance(self): + return random.uniform(1.0, 100.0) + + def transfer(self): + try: + amt = min(5.0, self.source.resources.get(self.resource, 0)) + if amt <= 0: + return 0 + self.source.consume(self.resource, amt) + self.target.produce(self.resource, amt) + logger.info(f"{amt} {self.resource} 
transferred {self.source.name} -> {self.target.name}") + return amt + except ResourceNotFound as e: + logger.warning(str(e)) + return 0 + +class Galaxy: + def __init__(self, n=5): + self.planets = [Planet(f"Planet-{i}", random.uniform(0.5, 2.0), random.randint(1000, 1000000)) for i in range(n)] + self.routes: List[TradeRoute] = [] + self.resources = [Resource("metal", 10), Resource("gas", 20), Resource("spice", 100), Resource("water", 5)] + self._generate_routes() + + def _generate_routes(self): + for _ in range(10): + p1, p2 = random.sample(self.planets, 2) + r = random.choice(["metal", "gas", "spice", "water"]) + self.routes.append(TradeRoute(p1, p2, r)) + + def tick(self): + logger.info("=== GALAXY TICK ===") + for r in self.routes: + r.transfer() + self._update_prices() + self._simulate_population() + + @lru_cache(maxsize=16) + def get_resource_price(self, name: str): + for r in self.resources: + if r.name == name: + return r.price_fluctuation() + raise ResourceNotFound(name) + + def _update_prices(self): + for r in self.resources: + price = r.price_fluctuation() + logger.info(f"Market update: {r.name} = {price} credits") + + def _simulate_population(self): + for p in self.planets: + growth = int(p.population * random.uniform(-0.01, 0.02)) + p.population += growth + logger.info(f"{p.name} population change: {growth:+}") + + def save_state(self, file="galaxy.json"): + state = { + "planets": [{ + "name": p.name, + "population": p.population, + "richness": p.richness, + "resources": p.resources + } for p in self.planets] + } + with open(file, "w") as f: + json.dump(state, f, indent=2) + + def load_state(self, file="galaxy.json"): + with open(file) as f: + state = json.load(f) + self.planets = [] + for pd in state["planets"]: + p = Planet(pd["name"], pd["richness"], pd["population"]) + p.resources = pd["resources"] + self.planets.append(p) + +if __name__ == "__main__": + g = Galaxy(n=8) + for _ in range(5): + g.tick() + time.sleep(1) + g.save_state() diff --git 
a/sample_app/tests/__init__.py b/sample_app/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sample_app/tests/cassetes/test_ai_pr_review/test_perform_duplicate_review.yaml b/sample_app/tests/cassetes/test_ai_pr_review/test_perform_duplicate_review.yaml new file mode 100644 index 0000000..1663c43 --- /dev/null +++ b/sample_app/tests/cassetes/test_ai_pr_review/test_perform_duplicate_review.yaml @@ -0,0 +1,85 @@ +interactions: +- request: + body: '' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + host: + - api.github.com + user-agent: + - Default + method: GET + uri: https://api.github.com/repos/scott-codecov/codecov-test/pulls/40 + response: + content: '{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40","id":1609199716,"node_id":"PR_kwDOHO5Jtc5f6nBk","html_url":"https://github.com/scott-codecov/codecov-test/pull/40","diff_url":"https://github.com/scott-codecov/codecov-test/pull/40.diff","patch_url":"https://github.com/scott-codecov/codecov-test/pull/40.patch","issue_url":"https://api.github.com/repos/scott-codecov/codecov-test/issues/40","number":40,"state":"open","locked":false,"title":"Test + AI PR 
review","user":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":"https://avatars.githubusercontent.com/u/103445133?v=4","gravatar_id":"","url":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptions","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url":"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"body":null,"created_at":"2023-11-20T14:17:17Z","updated_at":"2023-11-20T14:53:55Z","closed_at":null,"merged_at":null,"merge_commit_sha":"388cc84b5f6a167db13df1f139f4305e32f9f1eb","assignee":null,"assignees":[],"requested_reviewers":[],"requested_teams":[],"labels":[],"milestone":null,"draft":false,"commits_url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40/commits","review_comments_url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40/comments","review_comment_url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments{/number}","comments_url":"https://api.github.com/repos/scott-codecov/codecov-test/issues/40/comments","statuses_url":"https://api.github.com/repos/scott-codecov/codecov-test/statuses/b607bb0e17e1b8d8699272a26e32986a933f9946","head":{"label":"scott-codecov:scott-codecov-patch-3","ref":"scott-codecov-patch-3","sha":"b607bb0e17e1b8d8699272a26e32986a933f9946","user":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":
"https://avatars.githubusercontent.com/u/103445133?v=4","gravatar_id":"","url":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptions","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url":"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"repo":{"id":485378485,"node_id":"R_kgDOHO5JtQ","name":"codecov-test","full_name":"scott-codecov/codecov-test","private":true,"owner":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":"https://avatars.githubusercontent.com/u/103445133?v=4","gravatar_id":"","url":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptions","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url":"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"html_url":"https://
github.com/scott-codecov/codecov-test","description":null,"fork":false,"url":"https://api.github.com/repos/scott-codecov/codecov-test","forks_url":"https://api.github.com/repos/scott-codecov/codecov-test/forks","keys_url":"https://api.github.com/repos/scott-codecov/codecov-test/keys{/key_id}","collaborators_url":"https://api.github.com/repos/scott-codecov/codecov-test/collaborators{/collaborator}","teams_url":"https://api.github.com/repos/scott-codecov/codecov-test/teams","hooks_url":"https://api.github.com/repos/scott-codecov/codecov-test/hooks","issue_events_url":"https://api.github.com/repos/scott-codecov/codecov-test/issues/events{/number}","events_url":"https://api.github.com/repos/scott-codecov/codecov-test/events","assignees_url":"https://api.github.com/repos/scott-codecov/codecov-test/assignees{/user}","branches_url":"https://api.github.com/repos/scott-codecov/codecov-test/branches{/branch}","tags_url":"https://api.github.com/repos/scott-codecov/codecov-test/tags","blobs_url":"https://api.github.com/repos/scott-codecov/codecov-test/git/blobs{/sha}","git_tags_url":"https://api.github.com/repos/scott-codecov/codecov-test/git/tags{/sha}","git_refs_url":"https://api.github.com/repos/scott-codecov/codecov-test/git/refs{/sha}","trees_url":"https://api.github.com/repos/scott-codecov/codecov-test/git/trees{/sha}","statuses_url":"https://api.github.com/repos/scott-codecov/codecov-test/statuses/{sha}","languages_url":"https://api.github.com/repos/scott-codecov/codecov-test/languages","stargazers_url":"https://api.github.com/repos/scott-codecov/codecov-test/stargazers","contributors_url":"https://api.github.com/repos/scott-codecov/codecov-test/contributors","subscribers_url":"https://api.github.com/repos/scott-codecov/codecov-test/subscribers","subscription_url":"https://api.github.com/repos/scott-codecov/codecov-test/subscription","commits_url":"https://api.github.com/repos/scott-codecov/codecov-test/commits{/sha}","git_commits_url":"https://api.github.com/repos/scott
-codecov/codecov-test/git/commits{/sha}","comments_url":"https://api.github.com/repos/scott-codecov/codecov-test/comments{/number}","issue_comment_url":"https://api.github.com/repos/scott-codecov/codecov-test/issues/comments{/number}","contents_url":"https://api.github.com/repos/scott-codecov/codecov-test/contents/{+path}","compare_url":"https://api.github.com/repos/scott-codecov/codecov-test/compare/{base}...{head}","merges_url":"https://api.github.com/repos/scott-codecov/codecov-test/merges","archive_url":"https://api.github.com/repos/scott-codecov/codecov-test/{archive_format}{/ref}","downloads_url":"https://api.github.com/repos/scott-codecov/codecov-test/downloads","issues_url":"https://api.github.com/repos/scott-codecov/codecov-test/issues{/number}","pulls_url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls{/number}","milestones_url":"https://api.github.com/repos/scott-codecov/codecov-test/milestones{/number}","notifications_url":"https://api.github.com/repos/scott-codecov/codecov-test/notifications{?since,all,participating}","labels_url":"https://api.github.com/repos/scott-codecov/codecov-test/labels{/name}","releases_url":"https://api.github.com/repos/scott-codecov/codecov-test/releases{/id}","deployments_url":"https://api.github.com/repos/scott-codecov/codecov-test/deployments","created_at":"2022-04-25T13:15:44Z","updated_at":"2022-04-29T10:48:46Z","pushed_at":"2023-11-20T14:17:18Z","git_url":"git://github.com/scott-codecov/codecov-test.git","ssh_url":"git@github.com:scott-codecov/codecov-test.git","clone_url":"https://github.com/scott-codecov/codecov-test.git","svn_url":"https://github.com/scott-codecov/codecov-test","homepage":null,"size":239,"stargazers_count":0,"watchers_count":0,"language":"Python","has_issues":true,"has_projects":true,"has_downloads":true,"has_wiki":false,"has_pages":false,"has_discussions":false,"forks_count":0,"mirror_url":null,"archived":false,"disabled":false,"open_issues_count":14,"license":null,"allow_forking":tru
e,"is_template":false,"web_commit_signoff_required":false,"topics":[],"visibility":"private","forks":0,"open_issues":14,"watchers":0,"default_branch":"master"}},"base":{"label":"scott-codecov:master","ref":"master","sha":"ece177a1e98a568a5428751b21e9c2530ab16927","user":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":"https://avatars.githubusercontent.com/u/103445133?v=4","gravatar_id":"","url":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptions","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url":"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"repo":{"id":485378485,"node_id":"R_kgDOHO5JtQ","name":"codecov-test","full_name":"scott-codecov/codecov-test","private":true,"owner":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":"https://avatars.githubusercontent.com/u/103445133?v=4","gravatar_id":"","url":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptio
ns","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url":"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"html_url":"https://github.com/scott-codecov/codecov-test","description":null,"fork":false,"url":"https://api.github.com/repos/scott-codecov/codecov-test","forks_url":"https://api.github.com/repos/scott-codecov/codecov-test/forks","keys_url":"https://api.github.com/repos/scott-codecov/codecov-test/keys{/key_id}","collaborators_url":"https://api.github.com/repos/scott-codecov/codecov-test/collaborators{/collaborator}","teams_url":"https://api.github.com/repos/scott-codecov/codecov-test/teams","hooks_url":"https://api.github.com/repos/scott-codecov/codecov-test/hooks","issue_events_url":"https://api.github.com/repos/scott-codecov/codecov-test/issues/events{/number}","events_url":"https://api.github.com/repos/scott-codecov/codecov-test/events","assignees_url":"https://api.github.com/repos/scott-codecov/codecov-test/assignees{/user}","branches_url":"https://api.github.com/repos/scott-codecov/codecov-test/branches{/branch}","tags_url":"https://api.github.com/repos/scott-codecov/codecov-test/tags","blobs_url":"https://api.github.com/repos/scott-codecov/codecov-test/git/blobs{/sha}","git_tags_url":"https://api.github.com/repos/scott-codecov/codecov-test/git/tags{/sha}","git_refs_url":"https://api.github.com/repos/scott-codecov/codecov-test/git/refs{/sha}","trees_url":"https://api.github.com/repos/scott-codecov/codecov-test/git/trees{/sha}","statuses_url":"https://api.github.com/repos/scott-codecov/codecov-test/statuses/{sha}","languages_url":"https://api.github.com/repos/scott-codecov/codecov-test/languages","stargazers_url":"https://api.github.com/repos/scott-codecov/codecov-test/stargazers","contributors_url":"https://api.github.com/repos/scott-cod
ecov/codecov-test/contributors","subscribers_url":"https://api.github.com/repos/scott-codecov/codecov-test/subscribers","subscription_url":"https://api.github.com/repos/scott-codecov/codecov-test/subscription","commits_url":"https://api.github.com/repos/scott-codecov/codecov-test/commits{/sha}","git_commits_url":"https://api.github.com/repos/scott-codecov/codecov-test/git/commits{/sha}","comments_url":"https://api.github.com/repos/scott-codecov/codecov-test/comments{/number}","issue_comment_url":"https://api.github.com/repos/scott-codecov/codecov-test/issues/comments{/number}","contents_url":"https://api.github.com/repos/scott-codecov/codecov-test/contents/{+path}","compare_url":"https://api.github.com/repos/scott-codecov/codecov-test/compare/{base}...{head}","merges_url":"https://api.github.com/repos/scott-codecov/codecov-test/merges","archive_url":"https://api.github.com/repos/scott-codecov/codecov-test/{archive_format}{/ref}","downloads_url":"https://api.github.com/repos/scott-codecov/codecov-test/downloads","issues_url":"https://api.github.com/repos/scott-codecov/codecov-test/issues{/number}","pulls_url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls{/number}","milestones_url":"https://api.github.com/repos/scott-codecov/codecov-test/milestones{/number}","notifications_url":"https://api.github.com/repos/scott-codecov/codecov-test/notifications{?since,all,participating}","labels_url":"https://api.github.com/repos/scott-codecov/codecov-test/labels{/name}","releases_url":"https://api.github.com/repos/scott-codecov/codecov-test/releases{/id}","deployments_url":"https://api.github.com/repos/scott-codecov/codecov-test/deployments","created_at":"2022-04-25T13:15:44Z","updated_at":"2022-04-29T10:48:46Z","pushed_at":"2023-11-20T14:17:18Z","git_url":"git://github.com/scott-codecov/codecov-test.git","ssh_url":"git@github.com:scott-codecov/codecov-test.git","clone_url":"https://github.com/scott-codecov/codecov-test.git","svn_url":"https://github.com/scott-cod
ecov/codecov-test","homepage":null,"size":239,"stargazers_count":0,"watchers_count":0,"language":"Python","has_issues":true,"has_projects":true,"has_downloads":true,"has_wiki":false,"has_pages":false,"has_discussions":false,"forks_count":0,"mirror_url":null,"archived":false,"disabled":false,"open_issues_count":14,"license":null,"allow_forking":true,"is_template":false,"web_commit_signoff_required":false,"topics":[],"visibility":"private","forks":0,"open_issues":14,"watchers":0,"default_branch":"master"}},"_links":{"self":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40"},"html":{"href":"https://github.com/scott-codecov/codecov-test/pull/40"},"issue":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/issues/40"},"comments":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/issues/40/comments"},"review_comments":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40/comments"},"review_comment":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments{/number}"},"commits":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40/commits"},"statuses":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/statuses/b607bb0e17e1b8d8699272a26e32986a933f9946"}},"author_association":"OWNER","auto_merge":null,"active_lock_reason":null,"merged":false,"mergeable":true,"rebaseable":true,"mergeable_state":"unstable","merged_by":null,"comments":0,"review_comments":16,"maintainer_can_modify":false,"commits":1,"additions":2,"deletions":11,"changed_files":1}' + headers: + Access-Control-Allow-Origin: + - '*' + Access-Control-Expose-Headers: + - ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, + X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, + X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, X-GitHub-SSO, + X-GitHub-Request-Id, Deprecation, Sunset + Cache-Control: + - private, 
max-age=60, s-maxage=60 + Content-Encoding: + - gzip + Content-Security-Policy: + - default-src 'none' + Content-Type: + - application/json; charset=utf-8 + Date: + - Mon, 20 Nov 2023 14:53:56 GMT + ETag: + - W/"e23f5a9033ae5e1901f888c916718b425573074a64aebd9178c14b0f2f106cc4" + Last-Modified: + - Mon, 20 Nov 2023 14:53:55 GMT + Referrer-Policy: + - origin-when-cross-origin, strict-origin-when-cross-origin + Server: + - GitHub.com + Strict-Transport-Security: + - max-age=31536000; includeSubdomains; preload + Transfer-Encoding: + - chunked + Vary: + - Accept, Authorization, Cookie, X-GitHub-OTP + - Accept-Encoding, Accept, X-Requested-With + X-Accepted-OAuth-Scopes: + - '' + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-GitHub-Media-Type: + - github.v3 + X-GitHub-Request-Id: + - FA13:7260:467D957:9373AB6:655B7304 + X-OAuth-Scopes: + - '' + X-RateLimit-Limit: + - '5000' + X-RateLimit-Remaining: + - '4988' + X-RateLimit-Reset: + - '1700494278' + X-RateLimit-Resource: + - core + X-RateLimit-Used: + - '12' + X-XSS-Protection: + - '0' + x-accepted-github-permissions: + - pull_requests=read; contents=read + x-github-api-version-selected: + - '2022-11-28' + x-oauth-client-id: + - Iv1.88e0c58abd4e2e45 + http_version: HTTP/1.1 + status_code: 200 +version: 1 diff --git a/sample_app/tests/cassetes/test_ai_pr_review/test_perform_initial_review.yaml b/sample_app/tests/cassetes/test_ai_pr_review/test_perform_initial_review.yaml new file mode 100644 index 0000000..b7b618a --- /dev/null +++ b/sample_app/tests/cassetes/test_ai_pr_review/test_perform_initial_review.yaml @@ -0,0 +1,423 @@ +interactions: +- request: + body: '' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + host: + - api.github.com + user-agent: + - Default + method: GET + uri: https://api.github.com/repos/scott-codecov/codecov-test/pulls/40 + response: + content: 
'{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40","id":1609199716,"node_id":"PR_kwDOHO5Jtc5f6nBk","html_url":"https://github.com/scott-codecov/codecov-test/pull/40","diff_url":"https://github.com/scott-codecov/codecov-test/pull/40.diff","patch_url":"https://github.com/scott-codecov/codecov-test/pull/40.patch","issue_url":"https://api.github.com/repos/scott-codecov/codecov-test/issues/40","number":40,"state":"open","locked":false,"title":"Test + AI PR review","user":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":"https://avatars.githubusercontent.com/u/103445133?v=4","gravatar_id":"","url":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptions","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url":"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"body":null,"created_at":"2023-11-20T14:17:17Z","updated_at":"2023-11-20T14:53:19Z","closed_at":null,"merged_at":null,"merge_commit_sha":"388cc84b5f6a167db13df1f139f4305e32f9f1eb","assignee":null,"assignees":[],"requested_reviewers":[],"requested_teams":[],"labels":[],"milestone":null,"draft":false,"commits_url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40/commits","review_comments_url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40/comments","review_comment_url":"https://api.github.com/repos/scott
-codecov/codecov-test/pulls/comments{/number}","comments_url":"https://api.github.com/repos/scott-codecov/codecov-test/issues/40/comments","statuses_url":"https://api.github.com/repos/scott-codecov/codecov-test/statuses/b607bb0e17e1b8d8699272a26e32986a933f9946","head":{"label":"scott-codecov:scott-codecov-patch-3","ref":"scott-codecov-patch-3","sha":"b607bb0e17e1b8d8699272a26e32986a933f9946","user":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":"https://avatars.githubusercontent.com/u/103445133?v=4","gravatar_id":"","url":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptions","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url":"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"repo":{"id":485378485,"node_id":"R_kgDOHO5JtQ","name":"codecov-test","full_name":"scott-codecov/codecov-test","private":true,"owner":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":"https://avatars.githubusercontent.com/u/103445133?v=4","gravatar_id":"","url":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api
.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptions","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url":"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"html_url":"https://github.com/scott-codecov/codecov-test","description":null,"fork":false,"url":"https://api.github.com/repos/scott-codecov/codecov-test","forks_url":"https://api.github.com/repos/scott-codecov/codecov-test/forks","keys_url":"https://api.github.com/repos/scott-codecov/codecov-test/keys{/key_id}","collaborators_url":"https://api.github.com/repos/scott-codecov/codecov-test/collaborators{/collaborator}","teams_url":"https://api.github.com/repos/scott-codecov/codecov-test/teams","hooks_url":"https://api.github.com/repos/scott-codecov/codecov-test/hooks","issue_events_url":"https://api.github.com/repos/scott-codecov/codecov-test/issues/events{/number}","events_url":"https://api.github.com/repos/scott-codecov/codecov-test/events","assignees_url":"https://api.github.com/repos/scott-codecov/codecov-test/assignees{/user}","branches_url":"https://api.github.com/repos/scott-codecov/codecov-test/branches{/branch}","tags_url":"https://api.github.com/repos/scott-codecov/codecov-test/tags","blobs_url":"https://api.github.com/repos/scott-codecov/codecov-test/git/blobs{/sha}","git_tags_url":"https://api.github.com/repos/scott-codecov/codecov-test/git/tags{/sha}","git_refs_url":"https://api.github.com/repos/scott-codecov/codecov-test/git/refs{/sha}","trees_url":"https://api.github.com/repos/scott-codecov/codecov-test/git/trees{/sha}","statuses_url":"https://api.github.com/repos/scott-codecov/codecov-test/statuses/{sha}","languages_url":"https://api.github.com/repos/scott-codecov/codecov-test/languages","stargazers_u
rl":"https://api.github.com/repos/scott-codecov/codecov-test/stargazers","contributors_url":"https://api.github.com/repos/scott-codecov/codecov-test/contributors","subscribers_url":"https://api.github.com/repos/scott-codecov/codecov-test/subscribers","subscription_url":"https://api.github.com/repos/scott-codecov/codecov-test/subscription","commits_url":"https://api.github.com/repos/scott-codecov/codecov-test/commits{/sha}","git_commits_url":"https://api.github.com/repos/scott-codecov/codecov-test/git/commits{/sha}","comments_url":"https://api.github.com/repos/scott-codecov/codecov-test/comments{/number}","issue_comment_url":"https://api.github.com/repos/scott-codecov/codecov-test/issues/comments{/number}","contents_url":"https://api.github.com/repos/scott-codecov/codecov-test/contents/{+path}","compare_url":"https://api.github.com/repos/scott-codecov/codecov-test/compare/{base}...{head}","merges_url":"https://api.github.com/repos/scott-codecov/codecov-test/merges","archive_url":"https://api.github.com/repos/scott-codecov/codecov-test/{archive_format}{/ref}","downloads_url":"https://api.github.com/repos/scott-codecov/codecov-test/downloads","issues_url":"https://api.github.com/repos/scott-codecov/codecov-test/issues{/number}","pulls_url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls{/number}","milestones_url":"https://api.github.com/repos/scott-codecov/codecov-test/milestones{/number}","notifications_url":"https://api.github.com/repos/scott-codecov/codecov-test/notifications{?since,all,participating}","labels_url":"https://api.github.com/repos/scott-codecov/codecov-test/labels{/name}","releases_url":"https://api.github.com/repos/scott-codecov/codecov-test/releases{/id}","deployments_url":"https://api.github.com/repos/scott-codecov/codecov-test/deployments","created_at":"2022-04-25T13:15:44Z","updated_at":"2022-04-29T10:48:46Z","pushed_at":"2023-11-20T14:17:18Z","git_url":"git://github.com/scott-codecov/codecov-test.git","ssh_url":"git@github.com:scot
t-codecov/codecov-test.git","clone_url":"https://github.com/scott-codecov/codecov-test.git","svn_url":"https://github.com/scott-codecov/codecov-test","homepage":null,"size":239,"stargazers_count":0,"watchers_count":0,"language":"Python","has_issues":true,"has_projects":true,"has_downloads":true,"has_wiki":false,"has_pages":false,"has_discussions":false,"forks_count":0,"mirror_url":null,"archived":false,"disabled":false,"open_issues_count":14,"license":null,"allow_forking":true,"is_template":false,"web_commit_signoff_required":false,"topics":[],"visibility":"private","forks":0,"open_issues":14,"watchers":0,"default_branch":"master"}},"base":{"label":"scott-codecov:master","ref":"master","sha":"ece177a1e98a568a5428751b21e9c2530ab16927","user":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":"https://avatars.githubusercontent.com/u/103445133?v=4","gravatar_id":"","url":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptions","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url":"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"repo":{"id":485378485,"node_id":"R_kgDOHO5JtQ","name":"codecov-test","full_name":"scott-codecov/codecov-test","private":true,"owner":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":"https://avatars.githubusercontent.com/u/103445133?v=4","gravatar_id":"","u
rl":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptions","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url":"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"html_url":"https://github.com/scott-codecov/codecov-test","description":null,"fork":false,"url":"https://api.github.com/repos/scott-codecov/codecov-test","forks_url":"https://api.github.com/repos/scott-codecov/codecov-test/forks","keys_url":"https://api.github.com/repos/scott-codecov/codecov-test/keys{/key_id}","collaborators_url":"https://api.github.com/repos/scott-codecov/codecov-test/collaborators{/collaborator}","teams_url":"https://api.github.com/repos/scott-codecov/codecov-test/teams","hooks_url":"https://api.github.com/repos/scott-codecov/codecov-test/hooks","issue_events_url":"https://api.github.com/repos/scott-codecov/codecov-test/issues/events{/number}","events_url":"https://api.github.com/repos/scott-codecov/codecov-test/events","assignees_url":"https://api.github.com/repos/scott-codecov/codecov-test/assignees{/user}","branches_url":"https://api.github.com/repos/scott-codecov/codecov-test/branches{/branch}","tags_url":"https://api.github.com/repos/scott-codecov/codecov-test/tags","blobs_url":"https://api.github.com/repos/scott-codecov/codecov-test/git/blobs{/sha}","git_tags_url":"https://api.github.com/repos/scott-codecov/codecov-test/git/tags{/sha}","git_refs_
url":"https://api.github.com/repos/scott-codecov/codecov-test/git/refs{/sha}","trees_url":"https://api.github.com/repos/scott-codecov/codecov-test/git/trees{/sha}","statuses_url":"https://api.github.com/repos/scott-codecov/codecov-test/statuses/{sha}","languages_url":"https://api.github.com/repos/scott-codecov/codecov-test/languages","stargazers_url":"https://api.github.com/repos/scott-codecov/codecov-test/stargazers","contributors_url":"https://api.github.com/repos/scott-codecov/codecov-test/contributors","subscribers_url":"https://api.github.com/repos/scott-codecov/codecov-test/subscribers","subscription_url":"https://api.github.com/repos/scott-codecov/codecov-test/subscription","commits_url":"https://api.github.com/repos/scott-codecov/codecov-test/commits{/sha}","git_commits_url":"https://api.github.com/repos/scott-codecov/codecov-test/git/commits{/sha}","comments_url":"https://api.github.com/repos/scott-codecov/codecov-test/comments{/number}","issue_comment_url":"https://api.github.com/repos/scott-codecov/codecov-test/issues/comments{/number}","contents_url":"https://api.github.com/repos/scott-codecov/codecov-test/contents/{+path}","compare_url":"https://api.github.com/repos/scott-codecov/codecov-test/compare/{base}...{head}","merges_url":"https://api.github.com/repos/scott-codecov/codecov-test/merges","archive_url":"https://api.github.com/repos/scott-codecov/codecov-test/{archive_format}{/ref}","downloads_url":"https://api.github.com/repos/scott-codecov/codecov-test/downloads","issues_url":"https://api.github.com/repos/scott-codecov/codecov-test/issues{/number}","pulls_url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls{/number}","milestones_url":"https://api.github.com/repos/scott-codecov/codecov-test/milestones{/number}","notifications_url":"https://api.github.com/repos/scott-codecov/codecov-test/notifications{?since,all,participating}","labels_url":"https://api.github.com/repos/scott-codecov/codecov-test/labels{/name}","releases_url":"https:/
/api.github.com/repos/scott-codecov/codecov-test/releases{/id}","deployments_url":"https://api.github.com/repos/scott-codecov/codecov-test/deployments","created_at":"2022-04-25T13:15:44Z","updated_at":"2022-04-29T10:48:46Z","pushed_at":"2023-11-20T14:17:18Z","git_url":"git://github.com/scott-codecov/codecov-test.git","ssh_url":"git@github.com:scott-codecov/codecov-test.git","clone_url":"https://github.com/scott-codecov/codecov-test.git","svn_url":"https://github.com/scott-codecov/codecov-test","homepage":null,"size":239,"stargazers_count":0,"watchers_count":0,"language":"Python","has_issues":true,"has_projects":true,"has_downloads":true,"has_wiki":false,"has_pages":false,"has_discussions":false,"forks_count":0,"mirror_url":null,"archived":false,"disabled":false,"open_issues_count":14,"license":null,"allow_forking":true,"is_template":false,"web_commit_signoff_required":false,"topics":[],"visibility":"private","forks":0,"open_issues":14,"watchers":0,"default_branch":"master"}},"_links":{"self":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40"},"html":{"href":"https://github.com/scott-codecov/codecov-test/pull/40"},"issue":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/issues/40"},"comments":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/issues/40/comments"},"review_comments":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40/comments"},"review_comment":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments{/number}"},"commits":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40/commits"},"statuses":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/statuses/b607bb0e17e1b8d8699272a26e32986a933f9946"}},"author_association":"OWNER","auto_merge":null,"active_lock_reason":null,"merged":false,"mergeable":true,"rebaseable":true,"mergeable_state":"unstable","merged_by":null,"comments":0,"review_comments":11,"maintainer_can_modify":fa
lse,"commits":1,"additions":2,"deletions":11,"changed_files":1}' + headers: + Access-Control-Allow-Origin: + - '*' + Access-Control-Expose-Headers: + - ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, + X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, + X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, X-GitHub-SSO, + X-GitHub-Request-Id, Deprecation, Sunset + Cache-Control: + - private, max-age=60, s-maxage=60 + Content-Encoding: + - gzip + Content-Security-Policy: + - default-src 'none' + Content-Type: + - application/json; charset=utf-8 + Date: + - Mon, 20 Nov 2023 14:53:24 GMT + ETag: + - W/"74fefe96f3867526ebe69e6a7c81a7fb7a083dffa1d84305d52b6d847c07f9ae" + Last-Modified: + - Mon, 20 Nov 2023 14:53:19 GMT + Referrer-Policy: + - origin-when-cross-origin, strict-origin-when-cross-origin + Server: + - GitHub.com + Strict-Transport-Security: + - max-age=31536000; includeSubdomains; preload + Transfer-Encoding: + - chunked + Vary: + - Accept, Authorization, Cookie, X-GitHub-OTP + - Accept-Encoding, Accept, X-Requested-With + X-Accepted-OAuth-Scopes: + - '' + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-GitHub-Media-Type: + - github.v3 + X-GitHub-Request-Id: + - FA0B:6556:2F15AFC:62B3653:655B72E4 + X-OAuth-Scopes: + - '' + X-RateLimit-Limit: + - '5000' + X-RateLimit-Remaining: + - '4991' + X-RateLimit-Reset: + - '1700494278' + X-RateLimit-Resource: + - core + X-RateLimit-Used: + - '9' + X-XSS-Protection: + - '0' + x-accepted-github-permissions: + - pull_requests=read; contents=read + x-github-api-version-selected: + - '2022-11-28' + x-oauth-client-id: + - Iv1.88e0c58abd4e2e45 + http_version: HTTP/1.1 + status_code: 200 +- request: + body: '' + headers: + accept: + - application/vnd.github.v3.diff + accept-encoding: + - gzip, deflate + connection: + - keep-alive + host: + - api.github.com + user-agent: + - Default + method: GET + uri: 
https://api.github.com/repos/scott-codecov/codecov-test/pulls/40 + response: + content: "diff --git a/main/foo.py b/main/foo.py\nindex 9d285a4..41d8fd2 100644\n--- + a/main/foo.py\n+++ b/main/foo.py\n@@ -54,14 +54,5 @@ def mul4(x, y):\n def div4(x, + y):\n return x / y\n \n-def add5(x, y):\n- return x + y\n-\n-def sub5(x, + y):\n- return x - y\n-\n-def mul5(x, y):\n- return x * y\n-\n-def div5(x, + y):\n- return x / y\n+def testing(x, y):\n+ return x % y\n" + headers: + Access-Control-Allow-Origin: + - '*' + Access-Control-Expose-Headers: + - ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, + X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, + X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, X-GitHub-SSO, + X-GitHub-Request-Id, Deprecation, Sunset + Cache-Control: + - private, max-age=60, s-maxage=60 + Content-Length: + - '361' + Content-Security-Policy: + - default-src 'none' + Content-Type: + - application/vnd.github.v3.diff; charset=utf-8 + Date: + - Mon, 20 Nov 2023 14:53:24 GMT + ETag: + - '"668db0d87533fcf3c5093eb6ece232b132b2186efa5bb068a5b1616c422cd009"' + Last-Modified: + - Mon, 20 Nov 2023 14:53:19 GMT + Referrer-Policy: + - origin-when-cross-origin, strict-origin-when-cross-origin + Server: + - GitHub.com + Strict-Transport-Security: + - max-age=31536000; includeSubdomains; preload + Vary: + - Accept, Authorization, Cookie, X-GitHub-OTP + - Accept-Encoding, Accept, X-Requested-With + X-Accepted-OAuth-Scopes: + - '' + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-GitHub-Media-Type: + - github.v3; param=diff + X-GitHub-Request-Id: + - FA0C:1ABD:41426D6:88D917B:655B72E4 + X-OAuth-Scopes: + - '' + X-RateLimit-Limit: + - '5000' + X-RateLimit-Remaining: + - '4990' + X-RateLimit-Reset: + - '1700494278' + X-RateLimit-Resource: + - core + X-RateLimit-Used: + - '10' + X-XSS-Protection: + - '0' + x-accepted-github-permissions: + - pull_requests=read; contents=read 
+ x-github-api-version-selected: + - '2022-11-28' + x-oauth-client-id: + - Iv1.88e0c58abd4e2e45 + http_version: HTTP/1.1 + status_code: 200 +- request: + body: '{"messages": [{"role": "user", "content": "\n Your purpose is to + act as a highly experienced software engineer and provide a thorough\n review + of code changes and suggest improvements. Do not comment on minor style issues,\n missing + comments or documentation. Identify and resolve significant concerns to improve\n overall + code quality.\n\n You will receive a Git diff where each line has been + prefixed with a unique identifer in\n square brackets. When referencing + lines in this diff use that identifier.\n\n Format your output as JSON + such that there is 1 top-level comment that summarizes your review\n and + multiple additional comments addressing specific lines in the code with the + changes you\n deem appropriate.\n\n The output should have this + JSON form:\n\n {\n \"body\": \"This is the summary comment\",\n \"comments\": + [\n {\n \"line_id\": 123,\n \"body\": + \"This is a comment about the code with line ID 123\",\n }\n ]\n }\n\n Limit + the number of comments to 10 at most.\n\n Here is the Git diff on which + you should base your review:\n\n [1] diff --git a/main/foo.py b/main/foo.py\n[2] + index 9d285a4..41d8fd2 100644\n[3] --- a/main/foo.py\n[4] +++ b/main/foo.py\n[5] + @@ -54,14 +54,5 @@ def mul4(x, y):\n[6] def div4(x, y):\n[7] return x + / y\n[8] \n[9] -def add5(x, y):\n[10] - return x + y\n[11] -\n[12] -def + sub5(x, y):\n[13] - return x - y\n[14] -\n[15] -def mul5(x, y):\n[16] - return + x * y\n[17] -\n[18] -def div5(x, y):\n[19] - return x / y\n[20] +def testing(x, + y):\n[21] + return x % y\n[22] \n "}], "model": "gpt-4"}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '1768' + content-type: + - application/json + host: + - api.openai.com + user-agent: + - AsyncOpenAI/Python 1.2.4 + 
x-stainless-arch: + - arm64 + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 1.2.4 + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.10.8 + method: POST + uri: https://api.openai.com/v1/chat/completions + response: + content: "{\n \"id\": \"chatcmpl-8MzwL6ONjJUzs3j8bTnWnwrwSsIze\",\n \"object\": + \"chat.completion\",\n \"created\": 1700492005,\n \"model\": \"gpt-4-0613\",\n + \ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\": + \"assistant\",\n \"content\": \"{\\n \\\"body\\\": \\\"The code quality + seems decent overall, but there are some significant concerns. Specifically, + the removal of the add5, sub5, mul5 and div5 functions and replacing them with + a testing function. Unless there is a specific reason for removing these functions + they should be kept as they may be used elsewhere in the codebase which would + lead to a lack of functionality and potential runtime exceptions. Also, the + 'testing' function is not appropriately named for what it does, it seems to + return the modulus of two numbers which isn't related to testing.\\\",\\n \\\"comments\\\": + [\\n {\\n \\\"line_id\\\": 9,\\n \\\"body\\\": + \\\"It looks like you've removed the add5 method. If this is being used elsewhere + in the codebase then its removal could cause issues. Please ensure that this + method isn't being used elsewhere before removal.\\\"\\n },\\n {\\n + \ \\\"line_id\\\": 12,\\n \\\"body\\\": \\\"You've removed + the sub5 method. Like with the add5 method, make sure that it's not being used + in other places in the codebase which could cause runtime issues.\\\"\\n },\\n + \ {\\n \\\"line_id\\\": 15,\\n \\\"body\\\": \\\"The + mul5 method has been removed. If it is used elsewhere, this could cause potential + problems. 
Validate it before deleting.\\\"\\n },\\n {\\n \\\"line_id\\\": + 18,\\n \\\"body\\\": \\\"You've also removed div5, again ensure it's + not being used anywhere else to prevent bugs and exceptions.\\\"\\n },\\n + \ {\\n \\\"line_id\\\": 20,\\n \\\"body\\\": \\\"This + new 'testing' function's name is not descriptive of its functionality. It looks + like it's performing a modulus operation, not testing. You should name this + function appropriately.\\\"\\n }\\n ]\\n}\"\n },\n \"finish_reason\": + \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": 444,\n \"completion_tokens\": + 367,\n \"total_tokens\": 811\n }\n}\n" + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 829185b7fb102a90-ORD + Cache-Control: + - no-cache, must-revalidate + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Mon, 20 Nov 2023 14:53:53 GMT + Server: + - cloudflare + Set-Cookie: + - __cf_bm=p_3yp63VzSuwNk5.CA3kl8plTqNo8Hsb5QI1YsAr49w-1700492033-0-Ad4nyXhgxgh+XWx3krcrQYQDz60XLTGB/pE0rpbMGIfUx6RqaXdois1+sqtQE3hId9RlIm5JbkS6pRJvSBgzNzg=; + path=/; expires=Mon, 20-Nov-23 15:23:53 GMT; domain=.api.openai.com; HttpOnly; + Secure; SameSite=None + - _cfuvid=W0ReQkot08PZyftr5njoOXMmrUcFDwE6ERJG0iJwwjk-1700492033848-0-604800000; + path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None + Transfer-Encoding: + - chunked + access-control-allow-origin: + - '*' + alt-svc: + - h3=":443"; ma=86400 + openai-model: + - gpt-4-0613 + openai-organization: + - functional-software + openai-processing-ms: + - '28368' + openai-version: + - '2020-10-01' + strict-transport-security: + - max-age=15724800; includeSubDomains + x-ratelimit-limit-requests: + - '10000' + x-ratelimit-limit-tokens: + - '300000' + x-ratelimit-limit-tokens_usage_based: + - '300000' + x-ratelimit-remaining-requests: + - '9999' + x-ratelimit-remaining-tokens: + - '299573' + x-ratelimit-remaining-tokens_usage_based: + - '299573' + x-ratelimit-reset-requests: + - 6ms + 
x-ratelimit-reset-tokens: + - 85ms + x-ratelimit-reset-tokens_usage_based: + - 85ms + x-request-id: + - b4e74fa8a6f29812879968b0cc0b693a + http_version: HTTP/1.1 + status_code: 200 +- request: + body: '{"commit_id": "b607bb0e17e1b8d8699272a26e32986a933f9946", "body": "The + code quality seems decent overall, but there are some significant concerns. + Specifically, the removal of the add5, sub5, mul5 and div5 functions and replacing + them with a testing function. Unless there is a specific reason for removing + these functions they should be kept as they may be used elsewhere in the codebase + which would lead to a lack of functionality and potential runtime exceptions. + Also, the ''testing'' function is not appropriately named for what it does, + it seems to return the modulus of two numbers which isn''t related to testing.", + "event": "COMMENT", "comments": [{"path": "main/foo.py", "position": 4, "body": + "It looks like you''ve removed the add5 method. If this is being used elsewhere + in the codebase then its removal could cause issues. Please ensure that this + method isn''t being used elsewhere before removal."}, {"path": "main/foo.py", + "position": 7, "body": "You''ve removed the sub5 method. Like with the add5 + method, make sure that it''s not being used in other places in the codebase + which could cause runtime issues."}, {"path": "main/foo.py", "position": 10, + "body": "The mul5 method has been removed. If it is used elsewhere, this could + cause potential problems. Validate it before deleting."}, {"path": "main/foo.py", + "position": 13, "body": "You''ve also removed div5, again ensure it''s not being + used anywhere else to prevent bugs and exceptions."}, {"path": "main/foo.py", + "position": 15, "body": "This new ''testing'' function''s name is not descriptive + of its functionality. It looks like it''s performing a modulus operation, not + testing. 
You should name this function appropriately."}]}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '1692' + content-type: + - application/json + host: + - api.github.com + user-agent: + - Default + method: POST + uri: https://api.github.com/repos/scott-codecov/codecov-test/pulls/40/reviews + response: + content: '{"id":1740008775,"node_id":"PRR_kwDOHO5Jtc5ntm1H","user":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":"https://avatars.githubusercontent.com/u/103445133?u=1ea5f79283a26325f56e7cfa9eaca5cff3d538a4&v=4","gravatar_id":"","url":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptions","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url":"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"body":"The + code quality seems decent overall, but there are some significant concerns. + Specifically, the removal of the add5, sub5, mul5 and div5 functions and replacing + them with a testing function. Unless there is a specific reason for removing + these functions they should be kept as they may be used elsewhere in the codebase + which would lead to a lack of functionality and potential runtime exceptions. 
+ Also, the ''testing'' function is not appropriately named for what it does, + it seems to return the modulus of two numbers which isn''t related to testing.","state":"COMMENTED","html_url":"https://github.com/scott-codecov/codecov-test/pull/40#pullrequestreview-1740008775","pull_request_url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40","author_association":"OWNER","_links":{"html":{"href":"https://github.com/scott-codecov/codecov-test/pull/40#pullrequestreview-1740008775"},"pull_request":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40"}},"submitted_at":"2023-11-20T14:53:55Z","commit_id":"b607bb0e17e1b8d8699272a26e32986a933f9946"}' + headers: + Access-Control-Allow-Origin: + - '*' + Access-Control-Expose-Headers: + - ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, + X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, + X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, X-GitHub-SSO, + X-GitHub-Request-Id, Deprecation, Sunset + Cache-Control: + - private, max-age=60, s-maxage=60 + Content-Encoding: + - gzip + Content-Security-Policy: + - default-src 'none' + Content-Type: + - application/json; charset=utf-8 + Date: + - Mon, 20 Nov 2023 14:53:55 GMT + ETag: + - W/"5397475b11e593c33b946a2bf17635c49034f50df65f994892c841de275b3381" + Referrer-Policy: + - origin-when-cross-origin, strict-origin-when-cross-origin + Server: + - GitHub.com + Strict-Transport-Security: + - max-age=31536000; includeSubdomains; preload + Transfer-Encoding: + - chunked + Vary: + - Accept, Authorization, Cookie, X-GitHub-OTP + - Accept-Encoding, Accept, X-Requested-With + X-Accepted-OAuth-Scopes: + - '' + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-GitHub-Media-Type: + - github.v3 + X-GitHub-Request-Id: + - FA12:0A0F:26C7F07:517E47E:655B7302 + X-OAuth-Scopes: + - '' + X-RateLimit-Limit: + - '5000' + X-RateLimit-Remaining: + - '4989' + 
X-RateLimit-Reset: + - '1700494278' + X-RateLimit-Resource: + - core + X-RateLimit-Used: + - '11' + X-XSS-Protection: + - '0' + x-accepted-github-permissions: + - pull_requests=write + x-github-api-version-selected: + - '2022-11-28' + x-oauth-client-id: + - Iv1.88e0c58abd4e2e45 + http_version: HTTP/1.1 + status_code: 200 +version: 1 diff --git a/sample_app/tests/cassetes/test_ai_pr_review/test_perform_new_commit.yaml b/sample_app/tests/cassetes/test_ai_pr_review/test_perform_new_commit.yaml new file mode 100644 index 0000000..8c3a7ae --- /dev/null +++ b/sample_app/tests/cassetes/test_ai_pr_review/test_perform_new_commit.yaml @@ -0,0 +1,835 @@ +interactions: +- request: + body: '' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + host: + - api.github.com + user-agent: + - Default + method: GET + uri: https://api.github.com/repos/scott-codecov/codecov-test/pulls/40 + response: + content: '{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40","id":1609199716,"node_id":"PR_kwDOHO5Jtc5f6nBk","html_url":"https://github.com/scott-codecov/codecov-test/pull/40","diff_url":"https://github.com/scott-codecov/codecov-test/pull/40.diff","patch_url":"https://github.com/scott-codecov/codecov-test/pull/40.patch","issue_url":"https://api.github.com/repos/scott-codecov/codecov-test/issues/40","number":40,"state":"open","locked":false,"title":"Test + AI PR 
review","user":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":"https://avatars.githubusercontent.com/u/103445133?v=4","gravatar_id":"","url":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptions","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url":"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"body":null,"created_at":"2023-11-20T14:17:17Z","updated_at":"2023-11-20T14:56:16Z","closed_at":null,"merged_at":null,"merge_commit_sha":"30103e093dfe0875dcad2b93eb2f2488ece393ea","assignee":null,"assignees":[],"requested_reviewers":[],"requested_teams":[],"labels":[],"milestone":null,"draft":false,"commits_url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40/commits","review_comments_url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40/comments","review_comment_url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments{/number}","comments_url":"https://api.github.com/repos/scott-codecov/codecov-test/issues/40/comments","statuses_url":"https://api.github.com/repos/scott-codecov/codecov-test/statuses/5c64a5143951193dde7b14c14611eebe1025f862","head":{"label":"scott-codecov:scott-codecov-patch-3","ref":"scott-codecov-patch-3","sha":"5c64a5143951193dde7b14c14611eebe1025f862","user":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":
"https://avatars.githubusercontent.com/u/103445133?v=4","gravatar_id":"","url":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptions","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url":"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"repo":{"id":485378485,"node_id":"R_kgDOHO5JtQ","name":"codecov-test","full_name":"scott-codecov/codecov-test","private":true,"owner":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":"https://avatars.githubusercontent.com/u/103445133?v=4","gravatar_id":"","url":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptions","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url":"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"html_url":"https://
github.com/scott-codecov/codecov-test","description":null,"fork":false,"url":"https://api.github.com/repos/scott-codecov/codecov-test","forks_url":"https://api.github.com/repos/scott-codecov/codecov-test/forks","keys_url":"https://api.github.com/repos/scott-codecov/codecov-test/keys{/key_id}","collaborators_url":"https://api.github.com/repos/scott-codecov/codecov-test/collaborators{/collaborator}","teams_url":"https://api.github.com/repos/scott-codecov/codecov-test/teams","hooks_url":"https://api.github.com/repos/scott-codecov/codecov-test/hooks","issue_events_url":"https://api.github.com/repos/scott-codecov/codecov-test/issues/events{/number}","events_url":"https://api.github.com/repos/scott-codecov/codecov-test/events","assignees_url":"https://api.github.com/repos/scott-codecov/codecov-test/assignees{/user}","branches_url":"https://api.github.com/repos/scott-codecov/codecov-test/branches{/branch}","tags_url":"https://api.github.com/repos/scott-codecov/codecov-test/tags","blobs_url":"https://api.github.com/repos/scott-codecov/codecov-test/git/blobs{/sha}","git_tags_url":"https://api.github.com/repos/scott-codecov/codecov-test/git/tags{/sha}","git_refs_url":"https://api.github.com/repos/scott-codecov/codecov-test/git/refs{/sha}","trees_url":"https://api.github.com/repos/scott-codecov/codecov-test/git/trees{/sha}","statuses_url":"https://api.github.com/repos/scott-codecov/codecov-test/statuses/{sha}","languages_url":"https://api.github.com/repos/scott-codecov/codecov-test/languages","stargazers_url":"https://api.github.com/repos/scott-codecov/codecov-test/stargazers","contributors_url":"https://api.github.com/repos/scott-codecov/codecov-test/contributors","subscribers_url":"https://api.github.com/repos/scott-codecov/codecov-test/subscribers","subscription_url":"https://api.github.com/repos/scott-codecov/codecov-test/subscription","commits_url":"https://api.github.com/repos/scott-codecov/codecov-test/commits{/sha}","git_commits_url":"https://api.github.com/repos/scott
-codecov/codecov-test/git/commits{/sha}","comments_url":"https://api.github.com/repos/scott-codecov/codecov-test/comments{/number}","issue_comment_url":"https://api.github.com/repos/scott-codecov/codecov-test/issues/comments{/number}","contents_url":"https://api.github.com/repos/scott-codecov/codecov-test/contents/{+path}","compare_url":"https://api.github.com/repos/scott-codecov/codecov-test/compare/{base}...{head}","merges_url":"https://api.github.com/repos/scott-codecov/codecov-test/merges","archive_url":"https://api.github.com/repos/scott-codecov/codecov-test/{archive_format}{/ref}","downloads_url":"https://api.github.com/repos/scott-codecov/codecov-test/downloads","issues_url":"https://api.github.com/repos/scott-codecov/codecov-test/issues{/number}","pulls_url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls{/number}","milestones_url":"https://api.github.com/repos/scott-codecov/codecov-test/milestones{/number}","notifications_url":"https://api.github.com/repos/scott-codecov/codecov-test/notifications{?since,all,participating}","labels_url":"https://api.github.com/repos/scott-codecov/codecov-test/labels{/name}","releases_url":"https://api.github.com/repos/scott-codecov/codecov-test/releases{/id}","deployments_url":"https://api.github.com/repos/scott-codecov/codecov-test/deployments","created_at":"2022-04-25T13:15:44Z","updated_at":"2022-04-29T10:48:46Z","pushed_at":"2023-11-20T14:56:17Z","git_url":"git://github.com/scott-codecov/codecov-test.git","ssh_url":"git@github.com:scott-codecov/codecov-test.git","clone_url":"https://github.com/scott-codecov/codecov-test.git","svn_url":"https://github.com/scott-codecov/codecov-test","homepage":null,"size":239,"stargazers_count":0,"watchers_count":0,"language":"Python","has_issues":true,"has_projects":true,"has_downloads":true,"has_wiki":false,"has_pages":false,"has_discussions":false,"forks_count":0,"mirror_url":null,"archived":false,"disabled":false,"open_issues_count":14,"license":null,"allow_forking":tru
e,"is_template":false,"web_commit_signoff_required":false,"topics":[],"visibility":"private","forks":0,"open_issues":14,"watchers":0,"default_branch":"master"}},"base":{"label":"scott-codecov:master","ref":"master","sha":"ece177a1e98a568a5428751b21e9c2530ab16927","user":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":"https://avatars.githubusercontent.com/u/103445133?v=4","gravatar_id":"","url":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptions","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url":"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"repo":{"id":485378485,"node_id":"R_kgDOHO5JtQ","name":"codecov-test","full_name":"scott-codecov/codecov-test","private":true,"owner":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":"https://avatars.githubusercontent.com/u/103445133?v=4","gravatar_id":"","url":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptio
ns","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url":"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"html_url":"https://github.com/scott-codecov/codecov-test","description":null,"fork":false,"url":"https://api.github.com/repos/scott-codecov/codecov-test","forks_url":"https://api.github.com/repos/scott-codecov/codecov-test/forks","keys_url":"https://api.github.com/repos/scott-codecov/codecov-test/keys{/key_id}","collaborators_url":"https://api.github.com/repos/scott-codecov/codecov-test/collaborators{/collaborator}","teams_url":"https://api.github.com/repos/scott-codecov/codecov-test/teams","hooks_url":"https://api.github.com/repos/scott-codecov/codecov-test/hooks","issue_events_url":"https://api.github.com/repos/scott-codecov/codecov-test/issues/events{/number}","events_url":"https://api.github.com/repos/scott-codecov/codecov-test/events","assignees_url":"https://api.github.com/repos/scott-codecov/codecov-test/assignees{/user}","branches_url":"https://api.github.com/repos/scott-codecov/codecov-test/branches{/branch}","tags_url":"https://api.github.com/repos/scott-codecov/codecov-test/tags","blobs_url":"https://api.github.com/repos/scott-codecov/codecov-test/git/blobs{/sha}","git_tags_url":"https://api.github.com/repos/scott-codecov/codecov-test/git/tags{/sha}","git_refs_url":"https://api.github.com/repos/scott-codecov/codecov-test/git/refs{/sha}","trees_url":"https://api.github.com/repos/scott-codecov/codecov-test/git/trees{/sha}","statuses_url":"https://api.github.com/repos/scott-codecov/codecov-test/statuses/{sha}","languages_url":"https://api.github.com/repos/scott-codecov/codecov-test/languages","stargazers_url":"https://api.github.com/repos/scott-codecov/codecov-test/stargazers","contributors_url":"https://api.github.com/repos/scott-cod
ecov/codecov-test/contributors","subscribers_url":"https://api.github.com/repos/scott-codecov/codecov-test/subscribers","subscription_url":"https://api.github.com/repos/scott-codecov/codecov-test/subscription","commits_url":"https://api.github.com/repos/scott-codecov/codecov-test/commits{/sha}","git_commits_url":"https://api.github.com/repos/scott-codecov/codecov-test/git/commits{/sha}","comments_url":"https://api.github.com/repos/scott-codecov/codecov-test/comments{/number}","issue_comment_url":"https://api.github.com/repos/scott-codecov/codecov-test/issues/comments{/number}","contents_url":"https://api.github.com/repos/scott-codecov/codecov-test/contents/{+path}","compare_url":"https://api.github.com/repos/scott-codecov/codecov-test/compare/{base}...{head}","merges_url":"https://api.github.com/repos/scott-codecov/codecov-test/merges","archive_url":"https://api.github.com/repos/scott-codecov/codecov-test/{archive_format}{/ref}","downloads_url":"https://api.github.com/repos/scott-codecov/codecov-test/downloads","issues_url":"https://api.github.com/repos/scott-codecov/codecov-test/issues{/number}","pulls_url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls{/number}","milestones_url":"https://api.github.com/repos/scott-codecov/codecov-test/milestones{/number}","notifications_url":"https://api.github.com/repos/scott-codecov/codecov-test/notifications{?since,all,participating}","labels_url":"https://api.github.com/repos/scott-codecov/codecov-test/labels{/name}","releases_url":"https://api.github.com/repos/scott-codecov/codecov-test/releases{/id}","deployments_url":"https://api.github.com/repos/scott-codecov/codecov-test/deployments","created_at":"2022-04-25T13:15:44Z","updated_at":"2022-04-29T10:48:46Z","pushed_at":"2023-11-20T14:56:17Z","git_url":"git://github.com/scott-codecov/codecov-test.git","ssh_url":"git@github.com:scott-codecov/codecov-test.git","clone_url":"https://github.com/scott-codecov/codecov-test.git","svn_url":"https://github.com/scott-cod
ecov/codecov-test","homepage":null,"size":239,"stargazers_count":0,"watchers_count":0,"language":"Python","has_issues":true,"has_projects":true,"has_downloads":true,"has_wiki":false,"has_pages":false,"has_discussions":false,"forks_count":0,"mirror_url":null,"archived":false,"disabled":false,"open_issues_count":14,"license":null,"allow_forking":true,"is_template":false,"web_commit_signoff_required":false,"topics":[],"visibility":"private","forks":0,"open_issues":14,"watchers":0,"default_branch":"master"}},"_links":{"self":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40"},"html":{"href":"https://github.com/scott-codecov/codecov-test/pull/40"},"issue":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/issues/40"},"comments":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/issues/40/comments"},"review_comments":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40/comments"},"review_comment":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments{/number}"},"commits":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40/commits"},"statuses":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/statuses/5c64a5143951193dde7b14c14611eebe1025f862"}},"author_association":"OWNER","auto_merge":null,"active_lock_reason":null,"merged":false,"mergeable":true,"rebaseable":true,"mergeable_state":"unstable","merged_by":null,"comments":0,"review_comments":16,"maintainer_can_modify":false,"commits":2,"additions":6,"deletions":12,"changed_files":2}' + headers: + Access-Control-Allow-Origin: + - '*' + Access-Control-Expose-Headers: + - ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, + X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, + X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, X-GitHub-SSO, + X-GitHub-Request-Id, Deprecation, Sunset + Cache-Control: + - private, 
max-age=60, s-maxage=60 + Content-Encoding: + - gzip + Content-Security-Policy: + - default-src 'none' + Content-Type: + - application/json; charset=utf-8 + Date: + - Mon, 20 Nov 2023 14:56:58 GMT + ETag: + - W/"296b07aa4eea57ba4a7acd389a2bfada2e39b4cc4030cbb54c9a64dacf5ad366" + Last-Modified: + - Mon, 20 Nov 2023 14:56:16 GMT + Referrer-Policy: + - origin-when-cross-origin, strict-origin-when-cross-origin + Server: + - GitHub.com + Strict-Transport-Security: + - max-age=31536000; includeSubdomains; preload + Transfer-Encoding: + - chunked + Vary: + - Accept, Authorization, Cookie, X-GitHub-OTP + - Accept-Encoding, Accept, X-Requested-With + X-Accepted-OAuth-Scopes: + - '' + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-GitHub-Media-Type: + - github.v3 + X-GitHub-Request-Id: + - FA69:9369:200C52D:432C6D9:655B73BA + X-OAuth-Scopes: + - '' + X-RateLimit-Limit: + - '5000' + X-RateLimit-Remaining: + - '4987' + X-RateLimit-Reset: + - '1700494278' + X-RateLimit-Resource: + - core + X-RateLimit-Used: + - '13' + X-XSS-Protection: + - '0' + x-accepted-github-permissions: + - pull_requests=read; contents=read + x-github-api-version-selected: + - '2022-11-28' + x-oauth-client-id: + - Iv1.88e0c58abd4e2e45 + http_version: HTTP/1.1 + status_code: 200 +- request: + body: '' + headers: + accept: + - application/vnd.github.v3.diff + accept-encoding: + - gzip, deflate + connection: + - keep-alive + host: + - api.github.com + user-agent: + - Default + method: GET + uri: https://api.github.com/repos/scott-codecov/codecov-test/pulls/40 + response: + content: "diff --git a/main/bar.py b/main/bar.py\nindex 066a224..5991ec6 100644\n--- + a/main/bar.py\n+++ b/main/bar.py\n@@ -62,4 +62,7 @@ def add6(x, y):\n return + x + y\n \n def sub6(x, y):\n- return x - y\n\\ No newline at end of file\n+ + \ return x - y\n+\n+def add7(x, y):\n+ return x + y\n\\ No newline at end + of file\ndiff --git a/main/foo.py b/main/foo.py\nindex 9d285a4..41d8fd2 100644\n--- + 
a/main/foo.py\n+++ b/main/foo.py\n@@ -54,14 +54,5 @@ def mul4(x, y):\n def div4(x, + y):\n return x / y\n \n-def add5(x, y):\n- return x + y\n-\n-def sub5(x, + y):\n- return x - y\n-\n-def mul5(x, y):\n- return x * y\n-\n-def div5(x, + y):\n- return x / y\n+def testing(x, y):\n+ return x % y\n" + headers: + Access-Control-Allow-Origin: + - '*' + Access-Control-Expose-Headers: + - ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, + X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, + X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, X-GitHub-SSO, + X-GitHub-Request-Id, Deprecation, Sunset + Cache-Control: + - private, max-age=60, s-maxage=60 + Content-Length: + - '666' + Content-Security-Policy: + - default-src 'none' + Content-Type: + - application/vnd.github.v3.diff; charset=utf-8 + Date: + - Mon, 20 Nov 2023 14:56:59 GMT + ETag: + - '"cc2f80a6dfa37a49996d0a1ac913a4b5c35981e3cd3f50fcfde4eb5b197a4f55"' + Last-Modified: + - Mon, 20 Nov 2023 14:56:16 GMT + Referrer-Policy: + - origin-when-cross-origin, strict-origin-when-cross-origin + Server: + - GitHub.com + Strict-Transport-Security: + - max-age=31536000; includeSubdomains; preload + Vary: + - Accept, Authorization, Cookie, X-GitHub-OTP + - Accept-Encoding, Accept, X-Requested-With + X-Accepted-OAuth-Scopes: + - '' + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-GitHub-Media-Type: + - github.v3; param=diff + X-GitHub-Request-Id: + - FA6A:1AA9:8EA6FF:127DB9A:655B73BB + X-OAuth-Scopes: + - '' + X-RateLimit-Limit: + - '5000' + X-RateLimit-Remaining: + - '4986' + X-RateLimit-Reset: + - '1700494278' + X-RateLimit-Resource: + - core + X-RateLimit-Used: + - '14' + X-XSS-Protection: + - '0' + x-accepted-github-permissions: + - pull_requests=read; contents=read + x-github-api-version-selected: + - '2022-11-28' + x-oauth-client-id: + - Iv1.88e0c58abd4e2e45 + http_version: HTTP/1.1 + status_code: 200 +- request: + body: 
'{"messages": [{"role": "user", "content": "\n Your purpose is to + act as a highly experienced software engineer and provide a thorough\n review + of code changes and suggest improvements. Do not comment on minor style issues,\n missing + comments or documentation. Identify and resolve significant concerns to improve\n overall + code quality.\n\n You will receive a Git diff where each line has been + prefixed with a unique identifer in\n square brackets. When referencing + lines in this diff use that identifier.\n\n Format your output as JSON + such that there is 1 top-level comment that summarizes your review\n and + multiple additional comments addressing specific lines in the code with the + changes you\n deem appropriate.\n\n The output should have this + JSON form:\n\n {\n \"body\": \"This is the summary comment\",\n \"comments\": + [\n {\n \"line_id\": 123,\n \"body\": + \"This is a comment about the code with line ID 123\",\n }\n ]\n }\n\n Limit + the number of comments to 10 at most.\n\n Here is the Git diff on which + you should base your review:\n\n [1] diff --git a/main/bar.py b/main/bar.py\n[2] + index 066a224..5991ec6 100644\n[3] --- a/main/bar.py\n[4] +++ b/main/bar.py\n[5] + @@ -62,4 +62,7 @@ def add6(x, y):\n[6] return x + y\n[7] \n[8] def sub6(x, + y):\n[9] - return x - y\n[10] \\ No newline at end of file\n[11] + return + x - y\n[12] +\n[13] +def add7(x, y):\n[14] + return x + y\n[15] \\ No newline + at end of file\n[16] diff --git a/main/foo.py b/main/foo.py\n[17] index 9d285a4..41d8fd2 + 100644\n[18] --- a/main/foo.py\n[19] +++ b/main/foo.py\n[20] @@ -54,14 +54,5 + @@ def mul4(x, y):\n[21] def div4(x, y):\n[22] return x / y\n[23] \n[24] + -def add5(x, y):\n[25] - return x + y\n[26] -\n[27] -def sub5(x, y):\n[28] + - return x - y\n[29] -\n[30] -def mul5(x, y):\n[31] - return x * y\n[32] + -\n[33] -def div5(x, y):\n[34] - return x / y\n[35] +def testing(x, y):\n[36] + + return x % y\n[37] \n "}], "model": "gpt-4"}' + headers: + accept: + - 
application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '2165' + content-type: + - application/json + host: + - api.openai.com + user-agent: + - AsyncOpenAI/Python 1.2.4 + x-stainless-arch: + - arm64 + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 1.2.4 + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.10.8 + method: POST + uri: https://api.openai.com/v1/chat/completions + response: + content: "{\n \"id\": \"chatcmpl-8MzznvBLFYL4gS9jmfv1PAqsYge8n\",\n \"object\": + \"chat.completion\",\n \"created\": 1700492219,\n \"model\": \"gpt-4-0613\",\n + \ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\": + \"assistant\",\n \"content\": \"{\\n \\\"body\\\": \\\"The code changes + reflect transfer of add5, sub5, mul5, div5 functions from main/foo.py to main/bar.py + and they are renamed to add7, sub7, mul7, div7, respectively. There\u2019s + also a new testing function added. While these changes do not result in any + syntax errors, they introduce potential redundancy and confusion in naming of + functions.\\\",\\n \\\"comments\\\": [\\n {\\n \\\"line_id\\\": + 14,\\n \\\"body\\\": \\\"add7 function in bar.py is identically implemented + as add6. Consider removing redundant code.\\\"\\n },\\n {\\n \\\"line_id\\\": + 35,\\n \\\"body\\\": \\\"A new function 'testing' has been added + in foo.py file. Please make sure to add a more descriptive name for the function + that reflects what it does.\\\"\\n },\\n {\\n \\\"line_id\\\": + 35,\\n \\\"body\\\": \\\"The new 'testing' function uses the modulus + operation. 
The name doesn't reflect this, consider renaming it to 'modulus' + or a name that better reflects its functionality.\\\"\\n },\\n {\\n + \ \\\"line_id\\\": 24,\\n \\\"body\\\": \\\"Upon removal + of add5, sub5, mul5, div5 functions from foo.py, remember to update all call + sites that reference these functions.\\\"\\n },\\n {\\n \\\"line_id\\\": + 13,\\n \\\"body\\\": \\\"The new functions you've added named add7, + sub7, mul7, div7 are identical to some existing functions. It is recommended + to avoid duplicate functions, perhaps by creating a utility function if the + implementation across these functions is expected to remain identical.\\\"\\n + \ }\\n ]\\n}\"\n },\n \"finish_reason\": \"stop\"\n }\n + \ ],\n \"usage\": {\n \"prompt_tokens\": 605,\n \"completion_tokens\": + 343,\n \"total_tokens\": 948\n }\n}\n" + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 82918af47f44e20b-ORD + Cache-Control: + - no-cache, must-revalidate + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Mon, 20 Nov 2023 14:57:33 GMT + Server: + - cloudflare + Set-Cookie: + - __cf_bm=IzLHHOSkb3GsbWdl3ba5gtlo01gyLZUsGH0EywjECiI-1700492253-0-AeVW0HsDY7UprERytxSXL4vBLL4IcY0F2HYgOexbuiTq00GMqueTbmQ67ovaywdnb99xkTKnsOLrK4IHTeZuhd8=; + path=/; expires=Mon, 20-Nov-23 15:27:33 GMT; domain=.api.openai.com; HttpOnly; + Secure; SameSite=None + - _cfuvid=WSElFoSEEWNWnQwJ.4vWDd2fmWjMrkwEeP2ktZD2EvM-1700492253622-0-604800000; + path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None + Transfer-Encoding: + - chunked + access-control-allow-origin: + - '*' + alt-svc: + - h3=":443"; ma=86400 + openai-model: + - gpt-4-0613 + openai-organization: + - functional-software + openai-processing-ms: + - '33926' + openai-version: + - '2020-10-01' + strict-transport-security: + - max-age=15724800; includeSubDomains + x-ratelimit-limit-requests: + - '10000' + x-ratelimit-limit-tokens: + - '300000' + x-ratelimit-limit-tokens_usage_based: + - 
'300000' + x-ratelimit-remaining-requests: + - '9999' + x-ratelimit-remaining-tokens: + - '299478' + x-ratelimit-remaining-tokens_usage_based: + - '299478' + x-ratelimit-reset-requests: + - 6ms + x-ratelimit-reset-tokens: + - 104ms + x-ratelimit-reset-tokens_usage_based: + - 104ms + x-request-id: + - aad5ef2ac250b9a3b7774bd5cc43e1b1 + http_version: HTTP/1.1 + status_code: 200 +- request: + body: '' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + host: + - api.github.com + user-agent: + - Default + method: GET + uri: https://api.github.com/repos/scott-codecov/codecov-test/pulls/40/comments?per_page=100&page=1 + response: + content: '[{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399299190","pull_request_review_id":1739975935,"id":1399299190,"node_id":"PRRC_kwDOHO5Jtc5TZ5x2","diff_hunk":"@@ + -54,14 +54,5 @@ def mul4(x, y):\n def div4(x, y):\n return x / y\n \n-def + add5(x, y):","path":"main/foo.py","commit_id":"5c64a5143951193dde7b14c14611eebe1025f862","original_commit_id":"b607bb0e17e1b8d8699272a26e32986a933f9946","user":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":"https://avatars.githubusercontent.com/u/103445133?v=4","gravatar_id":"","url":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptions","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url"
:"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"body":"The + function `add5` has been removed. If there are any dependencies on this function + elsewhere in the codebase, they''ll need to be updated or this can potentially + break your code. Please ensure this function is not needed elsewhere or replaced + by a similar functionality.","created_at":"2023-11-20T14:39:23Z","updated_at":"2023-11-20T14:39:24Z","html_url":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399299190","pull_request_url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40","author_association":"OWNER","_links":{"self":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399299190"},"html":{"href":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399299190"},"pull_request":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40"}},"reactions":{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399299190/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"start_line":null,"original_start_line":null,"start_side":null,"line":57,"original_line":57,"side":"LEFT","original_position":4,"position":4,"subject_type":"line"},{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399299193","pull_request_review_id":1739975935,"id":1399299193,"node_id":"PRRC_kwDOHO5Jtc5TZ5x5","diff_hunk":"@@ + -54,14 +54,5 @@\n def div4(x, y):\n return x / y\n \n-def add5(x, y):\n- return + x + y\n-\n-def sub5(x, 
y):","path":"main/foo.py","commit_id":"5c64a5143951193dde7b14c14611eebe1025f862","original_commit_id":"b607bb0e17e1b8d8699272a26e32986a933f9946","user":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":"https://avatars.githubusercontent.com/u/103445133?v=4","gravatar_id":"","url":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptions","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url":"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"body":"The + function `sub5` has been removed. 
Please ensure this function is not needed + elsewhere or replaced by a similar functionality.","created_at":"2023-11-20T14:39:23Z","updated_at":"2023-11-20T14:39:24Z","html_url":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399299193","pull_request_url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40","author_association":"OWNER","_links":{"self":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399299193"},"html":{"href":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399299193"},"pull_request":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40"}},"reactions":{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399299193/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"start_line":null,"original_start_line":null,"start_side":null,"line":60,"original_line":60,"side":"LEFT","original_position":7,"position":7,"subject_type":"line"},{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399299195","pull_request_review_id":1739975935,"id":1399299195,"node_id":"PRRC_kwDOHO5Jtc5TZ5x7","diff_hunk":"@@ + -54,14 +54,5 @@\n def div4(x, y):\n return x / y\n \n-def add5(x, y):\n- return + x + y\n-\n-def sub5(x, y):\n- return x - y\n-\n-def mul5(x, 
y):","path":"main/foo.py","commit_id":"5c64a5143951193dde7b14c14611eebe1025f862","original_commit_id":"b607bb0e17e1b8d8699272a26e32986a933f9946","user":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":"https://avatars.githubusercontent.com/u/103445133?v=4","gravatar_id":"","url":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptions","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url":"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"body":"The + function `mul5` has been removed. 
Please ensure this function is not needed + elsewhere or replaced by a similar functionality.","created_at":"2023-11-20T14:39:24Z","updated_at":"2023-11-20T14:39:24Z","html_url":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399299195","pull_request_url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40","author_association":"OWNER","_links":{"self":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399299195"},"html":{"href":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399299195"},"pull_request":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40"}},"reactions":{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399299195/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"start_line":null,"original_start_line":null,"start_side":null,"line":63,"original_line":63,"side":"LEFT","original_position":10,"position":10,"subject_type":"line"},{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399299201","pull_request_review_id":1739975935,"id":1399299201,"node_id":"PRRC_kwDOHO5Jtc5TZ5yB","diff_hunk":"@@ + -54,14 +54,5 @@\n def div4(x, y):\n return x / y\n \n-def add5(x, y):\n- return + x + y\n-\n-def sub5(x, y):\n- return x - y\n-\n-def mul5(x, y):\n- return + x * y\n-\n-def div5(x, 
y):","path":"main/foo.py","commit_id":"5c64a5143951193dde7b14c14611eebe1025f862","original_commit_id":"b607bb0e17e1b8d8699272a26e32986a933f9946","user":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":"https://avatars.githubusercontent.com/u/103445133?v=4","gravatar_id":"","url":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptions","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url":"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"body":"The + function `div5` has been removed. 
Please ensure this function is not needed + elsewhere or replaced by a similar functionality.","created_at":"2023-11-20T14:39:24Z","updated_at":"2023-11-20T14:39:24Z","html_url":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399299201","pull_request_url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40","author_association":"OWNER","_links":{"self":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399299201"},"html":{"href":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399299201"},"pull_request":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40"}},"reactions":{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399299201/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"start_line":null,"original_start_line":null,"start_side":null,"line":66,"original_line":66,"side":"LEFT","original_position":13,"position":13,"subject_type":"line"},{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399299203","pull_request_review_id":1739975935,"id":1399299203,"node_id":"PRRC_kwDOHO5Jtc5TZ5yD","diff_hunk":"@@ + -54,14 +54,5 @@\n def div4(x, y):\n return x / y\n \n-def add5(x, y):\n- return + x + y\n-\n-def sub5(x, y):\n- return x - y\n-\n-def mul5(x, y):\n- return + x * y\n-\n-def div5(x, y):\n- return x / y\n+def testing(x, 
y):","path":"main/foo.py","commit_id":"5c64a5143951193dde7b14c14611eebe1025f862","original_commit_id":"b607bb0e17e1b8d8699272a26e32986a933f9946","user":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":"https://avatars.githubusercontent.com/u/103445133?v=4","gravatar_id":"","url":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptions","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url":"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"body":"The + new `testing` function introduced gives the modulus of x and y. 
Make sure to + handle the scenario of a divide by zero error when y equals to zero.","created_at":"2023-11-20T14:39:24Z","updated_at":"2023-11-20T14:39:24Z","html_url":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399299203","pull_request_url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40","author_association":"OWNER","_links":{"self":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399299203"},"html":{"href":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399299203"},"pull_request":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40"}},"reactions":{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399299203/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"start_line":null,"original_start_line":null,"start_side":null,"line":57,"original_line":57,"side":"RIGHT","original_position":15,"position":15,"subject_type":"line"},{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399299206","pull_request_review_id":1739975935,"id":1399299206,"node_id":"PRRC_kwDOHO5Jtc5TZ5yG","diff_hunk":"@@ + -54,14 +54,5 @@\n def div4(x, y):\n return x / y\n \n-def add5(x, y):\n- return + x + y\n-\n-def sub5(x, y):\n- return x - y\n-\n-def mul5(x, y):\n- return + x * y\n-\n-def div5(x, y):\n- return x / y\n+def testing(x, y):\n+ return + x % 
y","path":"main/foo.py","commit_id":"5c64a5143951193dde7b14c14611eebe1025f862","original_commit_id":"b607bb0e17e1b8d8699272a26e32986a933f9946","user":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":"https://avatars.githubusercontent.com/u/103445133?v=4","gravatar_id":"","url":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptions","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url":"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"body":"The + new `testing` function does not handle floating point numbers as expected. The + modulus operator returns a floating-point result instead of rounding down to + the nearest whole number. 
Make sure to handle this as per the requirements.","created_at":"2023-11-20T14:39:24Z","updated_at":"2023-11-20T14:39:24Z","html_url":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399299206","pull_request_url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40","author_association":"OWNER","_links":{"self":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399299206"},"html":{"href":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399299206"},"pull_request":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40"}},"reactions":{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399299206/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"start_line":null,"original_start_line":null,"start_side":null,"line":58,"original_line":58,"side":"RIGHT","original_position":16,"position":16,"subject_type":"line"},{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399316016","pull_request_review_id":1740003613,"id":1399316016,"node_id":"PRRC_kwDOHO5Jtc5TZ94w","diff_hunk":"@@ + -54,14 +54,5 @@ def mul4(x, y):\n def div4(x, y):\n return x / y\n \n-def + add5(x, 
y):","path":"main/foo.py","commit_id":"5c64a5143951193dde7b14c14611eebe1025f862","original_commit_id":"b607bb0e17e1b8d8699272a26e32986a933f9946","user":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":"https://avatars.githubusercontent.com/u/103445133?v=4","gravatar_id":"","url":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptions","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url":"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"body":"Check + if there is any code that is calling the add5 function before removing it. 
If + there is, make sure those instances are handled appropriately.","created_at":"2023-11-20T14:51:31Z","updated_at":"2023-11-20T14:51:32Z","html_url":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399316016","pull_request_url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40","author_association":"OWNER","_links":{"self":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399316016"},"html":{"href":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399316016"},"pull_request":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40"}},"reactions":{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399316016/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"start_line":null,"original_start_line":null,"start_side":null,"line":57,"original_line":57,"side":"LEFT","original_position":4,"position":4,"subject_type":"line"},{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399316021","pull_request_review_id":1740003613,"id":1399316021,"node_id":"PRRC_kwDOHO5Jtc5TZ941","diff_hunk":"@@ + -54,14 +54,5 @@\n def div4(x, y):\n return x / y\n \n-def add5(x, y):\n- return + x + y\n-\n-def sub5(x, 
y):","path":"main/foo.py","commit_id":"5c64a5143951193dde7b14c14611eebe1025f862","original_commit_id":"b607bb0e17e1b8d8699272a26e32986a933f9946","user":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":"https://avatars.githubusercontent.com/u/103445133?v=4","gravatar_id":"","url":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptions","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url":"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"body":"Ensure + that there are no dependencies on the sub5 function before deleting it. 
Analyze + the impact of this deletion before proceeding.","created_at":"2023-11-20T14:51:31Z","updated_at":"2023-11-20T14:51:32Z","html_url":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399316021","pull_request_url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40","author_association":"OWNER","_links":{"self":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399316021"},"html":{"href":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399316021"},"pull_request":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40"}},"reactions":{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399316021/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"start_line":null,"original_start_line":null,"start_side":null,"line":60,"original_line":60,"side":"LEFT","original_position":7,"position":7,"subject_type":"line"},{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399316023","pull_request_review_id":1740003613,"id":1399316023,"node_id":"PRRC_kwDOHO5Jtc5TZ943","diff_hunk":"@@ + -54,14 +54,5 @@\n def div4(x, y):\n return x / y\n \n-def add5(x, y):\n- return + x + y\n-\n-def sub5(x, y):\n- return x - y\n-\n-def mul5(x, 
y):","path":"main/foo.py","commit_id":"5c64a5143951193dde7b14c14611eebe1025f862","original_commit_id":"b607bb0e17e1b8d8699272a26e32986a933f9946","user":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":"https://avatars.githubusercontent.com/u/103445133?v=4","gravatar_id":"","url":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptions","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url":"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"body":"Removal + of the mul5 function might impact existing functionality. 
If there are dependencies, + ensure that they are properly handled.","created_at":"2023-11-20T14:51:31Z","updated_at":"2023-11-20T14:51:32Z","html_url":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399316023","pull_request_url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40","author_association":"OWNER","_links":{"self":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399316023"},"html":{"href":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399316023"},"pull_request":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40"}},"reactions":{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399316023/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"start_line":null,"original_start_line":null,"start_side":null,"line":63,"original_line":63,"side":"LEFT","original_position":10,"position":10,"subject_type":"line"},{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399316026","pull_request_review_id":1740003613,"id":1399316026,"node_id":"PRRC_kwDOHO5Jtc5TZ946","diff_hunk":"@@ + -54,14 +54,5 @@\n def div4(x, y):\n return x / y\n \n-def add5(x, y):\n- return + x + y\n-\n-def sub5(x, y):\n- return x - y\n-\n-def mul5(x, y):\n- return + x * y\n-\n-def div5(x, 
y):","path":"main/foo.py","commit_id":"5c64a5143951193dde7b14c14611eebe1025f862","original_commit_id":"b607bb0e17e1b8d8699272a26e32986a933f9946","user":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":"https://avatars.githubusercontent.com/u/103445133?v=4","gravatar_id":"","url":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptions","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url":"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"body":"Removal + of the div5 function could impact parts of your program that rely on it. 
Re-evaluate + to confirm this is the right course.","created_at":"2023-11-20T14:51:31Z","updated_at":"2023-11-20T14:51:32Z","html_url":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399316026","pull_request_url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40","author_association":"OWNER","_links":{"self":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399316026"},"html":{"href":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399316026"},"pull_request":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40"}},"reactions":{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399316026/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"start_line":null,"original_start_line":null,"start_side":null,"line":66,"original_line":66,"side":"LEFT","original_position":13,"position":13,"subject_type":"line"},{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399316027","pull_request_review_id":1740003613,"id":1399316027,"node_id":"PRRC_kwDOHO5Jtc5TZ947","diff_hunk":"@@ + -54,14 +54,5 @@\n def div4(x, y):\n return x / y\n \n-def add5(x, y):\n- return + x + y\n-\n-def sub5(x, y):\n- return x - y\n-\n-def mul5(x, y):\n- return + x * y\n-\n-def div5(x, y):\n- return x / y\n+def testing(x, 
y):","path":"main/foo.py","commit_id":"5c64a5143951193dde7b14c14611eebe1025f862","original_commit_id":"b607bb0e17e1b8d8699272a26e32986a933f9946","user":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":"https://avatars.githubusercontent.com/u/103445133?v=4","gravatar_id":"","url":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptions","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url":"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"body":"This + new function called ''testing'' performs a modulo operation. 
Consider renaming + the function to something more descriptive, e.g., mod or modulo.","created_at":"2023-11-20T14:51:32Z","updated_at":"2023-11-20T14:51:32Z","html_url":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399316027","pull_request_url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40","author_association":"OWNER","_links":{"self":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399316027"},"html":{"href":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399316027"},"pull_request":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40"}},"reactions":{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399316027/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"start_line":null,"original_start_line":null,"start_side":null,"line":57,"original_line":57,"side":"RIGHT","original_position":15,"position":15,"subject_type":"line"},{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399319213","pull_request_review_id":1740008775,"id":1399319213,"node_id":"PRRC_kwDOHO5Jtc5TZ-qt","diff_hunk":"@@ + -54,14 +54,5 @@ def mul4(x, y):\n def div4(x, y):\n return x / y\n \n-def + add5(x, 
y):","path":"main/foo.py","commit_id":"5c64a5143951193dde7b14c14611eebe1025f862","original_commit_id":"b607bb0e17e1b8d8699272a26e32986a933f9946","user":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":"https://avatars.githubusercontent.com/u/103445133?v=4","gravatar_id":"","url":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptions","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url":"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"body":"It + looks like you''ve removed the add5 method. If this is being used elsewhere + in the codebase then its removal could cause issues. 
Please ensure that this + method isn''t being used elsewhere before removal.","created_at":"2023-11-20T14:53:54Z","updated_at":"2023-11-20T14:53:55Z","html_url":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399319213","pull_request_url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40","author_association":"OWNER","_links":{"self":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399319213"},"html":{"href":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399319213"},"pull_request":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40"}},"reactions":{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399319213/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"start_line":null,"original_start_line":null,"start_side":null,"line":57,"original_line":57,"side":"LEFT","original_position":4,"position":4,"subject_type":"line"},{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399319214","pull_request_review_id":1740008775,"id":1399319214,"node_id":"PRRC_kwDOHO5Jtc5TZ-qu","diff_hunk":"@@ + -54,14 +54,5 @@ def mul4(x, y):\n def div4(x, y):\n return x / y\n \n-def + add5(x, y):\n- return x + y\n-\n-def sub5(x, 
y):","path":"main/foo.py","commit_id":"5c64a5143951193dde7b14c14611eebe1025f862","original_commit_id":"b607bb0e17e1b8d8699272a26e32986a933f9946","user":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":"https://avatars.githubusercontent.com/u/103445133?v=4","gravatar_id":"","url":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptions","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url":"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"body":"You''ve + removed the sub5 method. 
Like with the add5 method, make sure that it''s not + being used in other places in the codebase which could cause runtime issues.","created_at":"2023-11-20T14:53:54Z","updated_at":"2023-11-20T14:53:55Z","html_url":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399319214","pull_request_url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40","author_association":"OWNER","_links":{"self":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399319214"},"html":{"href":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399319214"},"pull_request":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40"}},"reactions":{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399319214/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"start_line":null,"original_start_line":null,"start_side":null,"line":60,"original_line":60,"side":"LEFT","original_position":7,"position":7,"subject_type":"line"},{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399319215","pull_request_review_id":1740008775,"id":1399319215,"node_id":"PRRC_kwDOHO5Jtc5TZ-qv","diff_hunk":"@@ + -54,14 +54,5 @@ def mul4(x, y):\n def div4(x, y):\n return x / y\n \n-def + add5(x, y):\n- return x + y\n-\n-def sub5(x, y):\n- return x - y\n-\n-def + mul5(x, 
y):","path":"main/foo.py","commit_id":"5c64a5143951193dde7b14c14611eebe1025f862","original_commit_id":"b607bb0e17e1b8d8699272a26e32986a933f9946","user":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":"https://avatars.githubusercontent.com/u/103445133?v=4","gravatar_id":"","url":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptions","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url":"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"body":"The + mul5 method has been removed. If it is used elsewhere, this could cause potential + problems. 
Validate it before deleting.","created_at":"2023-11-20T14:53:54Z","updated_at":"2023-11-20T14:53:55Z","html_url":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399319215","pull_request_url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40","author_association":"OWNER","_links":{"self":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399319215"},"html":{"href":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399319215"},"pull_request":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40"}},"reactions":{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399319215/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"start_line":null,"original_start_line":null,"start_side":null,"line":63,"original_line":63,"side":"LEFT","original_position":10,"position":10,"subject_type":"line"},{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399319219","pull_request_review_id":1740008775,"id":1399319219,"node_id":"PRRC_kwDOHO5Jtc5TZ-qz","diff_hunk":"@@ + -54,14 +54,5 @@ def mul4(x, y):\n def div4(x, y):\n return x / y\n \n-def + add5(x, y):\n- return x + y\n-\n-def sub5(x, y):\n- return x - y\n-\n-def + mul5(x, y):\n- return x * y\n-\n-def div5(x, 
y):","path":"main/foo.py","commit_id":"5c64a5143951193dde7b14c14611eebe1025f862","original_commit_id":"b607bb0e17e1b8d8699272a26e32986a933f9946","user":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":"https://avatars.githubusercontent.com/u/103445133?v=4","gravatar_id":"","url":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptions","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url":"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"body":"You''ve + also removed div5, again ensure it''s not being used anywhere else to prevent + bugs and 
exceptions.","created_at":"2023-11-20T14:53:54Z","updated_at":"2023-11-20T14:53:55Z","html_url":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399319219","pull_request_url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40","author_association":"OWNER","_links":{"self":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399319219"},"html":{"href":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399319219"},"pull_request":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40"}},"reactions":{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399319219/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"start_line":null,"original_start_line":null,"start_side":null,"line":66,"original_line":66,"side":"LEFT","original_position":13,"position":13,"subject_type":"line"},{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399319220","pull_request_review_id":1740008775,"id":1399319220,"node_id":"PRRC_kwDOHO5Jtc5TZ-q0","diff_hunk":"@@ + -54,14 +54,5 @@ def mul4(x, y):\n def div4(x, y):\n return x / y\n \n-def + add5(x, y):\n- return x + y\n-\n-def sub5(x, y):\n- return x - y\n-\n-def + mul5(x, y):\n- return x * y\n-\n-def div5(x, y):\n- return x / y\n+def + testing(x, 
y):","path":"main/foo.py","commit_id":"5c64a5143951193dde7b14c14611eebe1025f862","original_commit_id":"b607bb0e17e1b8d8699272a26e32986a933f9946","user":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":"https://avatars.githubusercontent.com/u/103445133?v=4","gravatar_id":"","url":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptions","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url":"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"body":"This + new ''testing'' function''s name is not descriptive of its functionality. It + looks like it''s performing a modulus operation, not testing. 
You should name + this function appropriately.","created_at":"2023-11-20T14:53:55Z","updated_at":"2023-11-20T14:53:55Z","html_url":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399319220","pull_request_url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40","author_association":"OWNER","_links":{"self":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399319220"},"html":{"href":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399319220"},"pull_request":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40"}},"reactions":{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399319220/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"start_line":null,"original_start_line":null,"start_side":null,"line":57,"original_line":57,"side":"RIGHT","original_position":15,"position":15,"subject_type":"line"}]' + headers: + Access-Control-Allow-Origin: + - '*' + Access-Control-Expose-Headers: + - ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, + X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, + X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, X-GitHub-SSO, + X-GitHub-Request-Id, Deprecation, Sunset + Cache-Control: + - private, max-age=60, s-maxage=60 + Content-Encoding: + - gzip + Content-Security-Policy: + - default-src 'none' + Content-Type: + - application/json; charset=utf-8 + Date: + - Mon, 20 Nov 2023 14:57:34 GMT + ETag: + - W/"34bf2a70870a2944dfb55a2babeb2299d1d4add01673eb7b2200f5887a35b4f8" + Referrer-Policy: + - origin-when-cross-origin, strict-origin-when-cross-origin + Server: + - GitHub.com + Strict-Transport-Security: + - max-age=31536000; includeSubdomains; preload + Transfer-Encoding: + - chunked + Vary: + - Accept, Authorization, Cookie, X-GitHub-OTP + - Accept-Encoding, Accept, 
X-Requested-With + X-Accepted-OAuth-Scopes: + - '' + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-GitHub-Media-Type: + - github.v3 + X-GitHub-Request-Id: + - FA79:2E86:93B1E2:1320300:655B73DD + X-OAuth-Scopes: + - '' + X-RateLimit-Limit: + - '5000' + X-RateLimit-Remaining: + - '4985' + X-RateLimit-Reset: + - '1700494278' + X-RateLimit-Resource: + - core + X-RateLimit-Used: + - '15' + X-XSS-Protection: + - '0' + x-accepted-github-permissions: + - pull_requests=read + x-github-api-version-selected: + - '2022-11-28' + x-oauth-client-id: + - Iv1.88e0c58abd4e2e45 + http_version: HTTP/1.1 + status_code: 200 +- request: + body: '' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + host: + - api.github.com + user-agent: + - Default + method: GET + uri: https://api.github.com/repos/scott-codecov/codecov-test/pulls/40/comments?per_page=100&page=2 + response: + content: '[]' + headers: + Access-Control-Allow-Origin: + - '*' + Access-Control-Expose-Headers: + - ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, + X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, + X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, X-GitHub-SSO, + X-GitHub-Request-Id, Deprecation, Sunset + Cache-Control: + - private, max-age=60, s-maxage=60 + Content-Length: + - '2' + Content-Security-Policy: + - default-src 'none' + Content-Type: + - application/json; charset=utf-8 + Date: + - Mon, 20 Nov 2023 14:57:34 GMT + ETag: + - '"75cdf323b1b22c1c1c5eb1332cd50eaeee03e9a548fea0444f4061d44b44dc0d"' + Link: + - ; + rel="prev", ; + rel="last", ; + rel="first" + Referrer-Policy: + - origin-when-cross-origin, strict-origin-when-cross-origin + Server: + - GitHub.com + Strict-Transport-Security: + - max-age=31536000; includeSubdomains; preload + Vary: + - Accept, Authorization, Cookie, X-GitHub-OTP + - Accept-Encoding, Accept, X-Requested-With + 
X-Accepted-OAuth-Scopes: + - '' + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-GitHub-Media-Type: + - github.v3 + X-GitHub-Request-Id: + - FA79:2E86:93B279:1320448:655B73DE + X-OAuth-Scopes: + - '' + X-RateLimit-Limit: + - '5000' + X-RateLimit-Remaining: + - '4984' + X-RateLimit-Reset: + - '1700494278' + X-RateLimit-Resource: + - core + X-RateLimit-Used: + - '16' + X-XSS-Protection: + - '0' + x-accepted-github-permissions: + - pull_requests=read + x-github-api-version-selected: + - '2022-11-28' + x-oauth-client-id: + - Iv1.88e0c58abd4e2e45 + http_version: HTTP/1.1 + status_code: 200 +- request: + body: '{"body": "Upon removal of add5, sub5, mul5, div5 functions from foo.py, + remember to update all call sites that reference these functions."}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '139' + content-type: + - application/json + host: + - api.github.com + user-agent: + - Default + method: PATCH + uri: https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399319213 + response: + content: '{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399319213","pull_request_review_id":1740008775,"id":1399319213,"node_id":"PRRC_kwDOHO5Jtc5TZ-qt","diff_hunk":"@@ + -54,14 +54,5 @@ def mul4(x, y):\n def div4(x, y):\n return x / y\n \n-def + add5(x, 
y):","path":"main/foo.py","commit_id":"5c64a5143951193dde7b14c14611eebe1025f862","original_commit_id":"b607bb0e17e1b8d8699272a26e32986a933f9946","user":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":"https://avatars.githubusercontent.com/u/103445133?v=4","gravatar_id":"","url":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptions","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url":"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"body":"Upon + removal of add5, sub5, mul5, div5 functions from foo.py, remember to update + all call sites that reference these 
functions.","created_at":"2023-11-20T14:53:54Z","updated_at":"2023-11-20T14:57:34Z","html_url":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399319213","pull_request_url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40","author_association":"OWNER","_links":{"self":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399319213"},"html":{"href":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399319213"},"pull_request":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40"}},"reactions":{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399319213/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"start_line":null,"original_start_line":null,"start_side":null,"line":57,"original_line":57,"side":"LEFT","original_position":4,"position":4,"subject_type":"line"}' + headers: + Access-Control-Allow-Origin: + - '*' + Access-Control-Expose-Headers: + - ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, + X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, + X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, X-GitHub-SSO, + X-GitHub-Request-Id, Deprecation, Sunset + Cache-Control: + - private, max-age=60, s-maxage=60 + Content-Encoding: + - gzip + Content-Security-Policy: + - default-src 'none' + Content-Type: + - application/json; charset=utf-8 + Date: + - Mon, 20 Nov 2023 14:57:35 GMT + ETag: + - W/"ee80dc073626d7b6a7c7469e1fe400e969fafccce776eae5b2531d2245a88be8" + Referrer-Policy: + - origin-when-cross-origin, strict-origin-when-cross-origin + Server: + - GitHub.com + Strict-Transport-Security: + - max-age=31536000; includeSubdomains; preload + Transfer-Encoding: + - chunked + Vary: + - Accept, Authorization, Cookie, X-GitHub-OTP + - Accept-Encoding, Accept, X-Requested-With + X-Accepted-OAuth-Scopes: + - '' + 
X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-GitHub-Media-Type: + - github.v3 + X-GitHub-Request-Id: + - FA7A:4171:9023DB:12ACDC0:655B73DE + X-OAuth-Scopes: + - '' + X-RateLimit-Limit: + - '5000' + X-RateLimit-Remaining: + - '4983' + X-RateLimit-Reset: + - '1700494278' + X-RateLimit-Resource: + - core + X-RateLimit-Used: + - '17' + X-XSS-Protection: + - '0' + x-accepted-github-permissions: + - pull_requests=write + x-github-api-version-selected: + - '2022-11-28' + x-oauth-client-id: + - Iv1.88e0c58abd4e2e45 + http_version: HTTP/1.1 + status_code: 200 +- request: + body: '{"body": "The new ''testing'' function uses the modulus operation. The + name doesn''t reflect this, consider renaming it to ''modulus'' or a name that + better reflects its functionality."}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '181' + content-type: + - application/json + host: + - api.github.com + user-agent: + - Default + method: PATCH + uri: https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399319220 + response: + content: '{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399319220","pull_request_review_id":1740008775,"id":1399319220,"node_id":"PRRC_kwDOHO5Jtc5TZ-q0","diff_hunk":"@@ + -54,14 +54,5 @@ def mul4(x, y):\n def div4(x, y):\n return x / y\n \n-def + add5(x, y):\n- return x + y\n-\n-def sub5(x, y):\n- return x - y\n-\n-def + mul5(x, y):\n- return x * y\n-\n-def div5(x, y):\n- return x / y\n+def + testing(x, 
y):","path":"main/foo.py","commit_id":"5c64a5143951193dde7b14c14611eebe1025f862","original_commit_id":"b607bb0e17e1b8d8699272a26e32986a933f9946","user":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":"https://avatars.githubusercontent.com/u/103445133?v=4","gravatar_id":"","url":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptions","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url":"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"body":"The + new ''testing'' function uses the modulus operation. 
The name doesn''t reflect + this, consider renaming it to ''modulus'' or a name that better reflects its + functionality.","created_at":"2023-11-20T14:53:55Z","updated_at":"2023-11-20T14:57:35Z","html_url":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399319220","pull_request_url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40","author_association":"OWNER","_links":{"self":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399319220"},"html":{"href":"https://github.com/scott-codecov/codecov-test/pull/40#discussion_r1399319220"},"pull_request":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40"}},"reactions":{"url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/comments/1399319220/reactions","total_count":0,"+1":0,"-1":0,"laugh":0,"hooray":0,"confused":0,"heart":0,"rocket":0,"eyes":0},"start_line":null,"original_start_line":null,"start_side":null,"line":57,"original_line":57,"side":"RIGHT","original_position":15,"position":15,"subject_type":"line"}' + headers: + Access-Control-Allow-Origin: + - '*' + Access-Control-Expose-Headers: + - ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, + X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, + X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, X-GitHub-SSO, + X-GitHub-Request-Id, Deprecation, Sunset + Cache-Control: + - private, max-age=60, s-maxage=60 + Content-Encoding: + - gzip + Content-Security-Policy: + - default-src 'none' + Content-Type: + - application/json; charset=utf-8 + Date: + - Mon, 20 Nov 2023 14:57:35 GMT + ETag: + - W/"8e9b3e8922c3274894baa8442d145f1321a199f55ba5dd972c351a3bfa6ecacc" + Referrer-Policy: + - origin-when-cross-origin, strict-origin-when-cross-origin + Server: + - GitHub.com + Strict-Transport-Security: + - max-age=31536000; includeSubdomains; preload + Transfer-Encoding: + - chunked + Vary: + - Accept, 
Authorization, Cookie, X-GitHub-OTP + - Accept-Encoding, Accept, X-Requested-With + X-Accepted-OAuth-Scopes: + - '' + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-GitHub-Media-Type: + - github.v3 + X-GitHub-Request-Id: + - FA7B:9369:20108A1:4335182:655B73DF + X-OAuth-Scopes: + - '' + X-RateLimit-Limit: + - '5000' + X-RateLimit-Remaining: + - '4982' + X-RateLimit-Reset: + - '1700494278' + X-RateLimit-Resource: + - core + X-RateLimit-Used: + - '18' + X-XSS-Protection: + - '0' + x-accepted-github-permissions: + - pull_requests=write + x-github-api-version-selected: + - '2022-11-28' + x-oauth-client-id: + - Iv1.88e0c58abd4e2e45 + http_version: HTTP/1.1 + status_code: 200 +- request: + body: '{"commit_id": "5c64a5143951193dde7b14c14611eebe1025f862", "body": "CodecovAI + submitted a new review for 5c64a5143951193dde7b14c14611eebe1025f862", "event": + "COMMENT", "comments": [{"path": "main/bar.py", "position": 9, "body": "add7 + function in bar.py is identically implemented as add6. Consider removing redundant + code."}, {"path": "main/bar.py", "position": 8, "body": "The new functions you''ve + added named add7, sub7, mul7, div7 are identical to some existing functions. 
+ It is recommended to avoid duplicate functions, perhaps by creating a utility + function if the implementation across these functions is expected to remain + identical."}]}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '643' + content-type: + - application/json + host: + - api.github.com + user-agent: + - Default + method: POST + uri: https://api.github.com/repos/scott-codecov/codecov-test/pulls/40/reviews + response: + content: '{"id":1740017976,"node_id":"PRR_kwDOHO5Jtc5ntpE4","user":{"login":"scott-codecov","id":103445133,"node_id":"U_kgDOBipyjQ","avatar_url":"https://avatars.githubusercontent.com/u/103445133?u=1ea5f79283a26325f56e7cfa9eaca5cff3d538a4&v=4","gravatar_id":"","url":"https://api.github.com/users/scott-codecov","html_url":"https://github.com/scott-codecov","followers_url":"https://api.github.com/users/scott-codecov/followers","following_url":"https://api.github.com/users/scott-codecov/following{/other_user}","gists_url":"https://api.github.com/users/scott-codecov/gists{/gist_id}","starred_url":"https://api.github.com/users/scott-codecov/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/scott-codecov/subscriptions","organizations_url":"https://api.github.com/users/scott-codecov/orgs","repos_url":"https://api.github.com/users/scott-codecov/repos","events_url":"https://api.github.com/users/scott-codecov/events{/privacy}","received_events_url":"https://api.github.com/users/scott-codecov/received_events","type":"User","site_admin":false},"body":"CodecovAI + submitted a new review for 
5c64a5143951193dde7b14c14611eebe1025f862","state":"COMMENTED","html_url":"https://github.com/scott-codecov/codecov-test/pull/40#pullrequestreview-1740017976","pull_request_url":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40","author_association":"OWNER","_links":{"html":{"href":"https://github.com/scott-codecov/codecov-test/pull/40#pullrequestreview-1740017976"},"pull_request":{"href":"https://api.github.com/repos/scott-codecov/codecov-test/pulls/40"}},"submitted_at":"2023-11-20T14:57:36Z","commit_id":"5c64a5143951193dde7b14c14611eebe1025f862"}' + headers: + Access-Control-Allow-Origin: + - '*' + Access-Control-Expose-Headers: + - ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, + X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, + X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, X-GitHub-SSO, + X-GitHub-Request-Id, Deprecation, Sunset + Cache-Control: + - private, max-age=60, s-maxage=60 + Content-Encoding: + - gzip + Content-Security-Policy: + - default-src 'none' + Content-Type: + - application/json; charset=utf-8 + Date: + - Mon, 20 Nov 2023 14:57:37 GMT + ETag: + - W/"a4514ce707ba155eb3dbfeaa0d9ad4da4ff272e8c63a4b61bad6cffd4c1b9f3a" + Referrer-Policy: + - origin-when-cross-origin, strict-origin-when-cross-origin + Server: + - GitHub.com + Strict-Transport-Security: + - max-age=31536000; includeSubdomains; preload + Transfer-Encoding: + - chunked + Vary: + - Accept, Authorization, Cookie, X-GitHub-OTP + - Accept-Encoding, Accept, X-Requested-With + X-Accepted-OAuth-Scopes: + - '' + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-GitHub-Media-Type: + - github.v3 + X-GitHub-Request-Id: + - FA7C:7CD2:981A2A:13ABB2E:655B73E0 + X-OAuth-Scopes: + - '' + X-RateLimit-Limit: + - '5000' + X-RateLimit-Remaining: + - '4981' + X-RateLimit-Reset: + - '1700494278' + X-RateLimit-Resource: + - core + X-RateLimit-Used: + - '19' + X-XSS-Protection: + - '0' + 
x-accepted-github-permissions: + - pull_requests=write + x-github-api-version-selected: + - '2022-11-28' + x-oauth-client-id: + - Iv1.88e0c58abd4e2e45 + http_version: HTTP/1.1 + status_code: 200 +version: 1 diff --git a/sample_app/tests/cassetes/test_bots/TestBotsService/test_get_owner_appropriate_bot_token_with_user_with_integration_bot_using_it.yaml b/sample_app/tests/cassetes/test_bots/TestBotsService/test_get_owner_appropriate_bot_token_with_user_with_integration_bot_using_it.yaml new file mode 100644 index 0000000..ad71c44 --- /dev/null +++ b/sample_app/tests/cassetes/test_bots/TestBotsService/test_get_owner_appropriate_bot_token_with_user_with_integration_bot_using_it.yaml @@ -0,0 +1,62 @@ +interactions: +- request: + body: null + headers: + Accept: + - application/vnd.github.machine-man-preview+json + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + User-Agent: + - Codecov + method: POST + uri: https://api.github.com/app/installations/1654873/access_tokens + response: + body: + string: '{"token":"v1.test50wm4qyel2pbtpbusklcarg7c2etcbunnswp","expires_at":"2019-08-26T01:25:56Z","permissions":{"checks":"write","pull_requests":"write","statuses":"write","administration":"read","contents":"read","issues":"read","metadata":"read"},"repository_selection":"selected"}' + headers: + Access-Control-Allow-Origin: + - '*' + Access-Control-Expose-Headers: + - ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, + X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval, + X-GitHub-Media-Type + Cache-Control: + - public, max-age=60, s-maxage=60 + Content-Length: + - '277' + Content-Security-Policy: + - default-src 'none' + Content-Type: + - application/json; charset=utf-8 + Date: + - Mon, 26 Aug 2019 00:25:57 GMT + ETag: + - '"d5bbd7f7363c549c2faa22e8f4419077"' + Referrer-Policy: + - origin-when-cross-origin, strict-origin-when-cross-origin + Server: + - GitHub.com + Status: 
+ - 201 Created + Strict-Transport-Security: + - max-age=31536000; includeSubdomains; preload + Vary: + - Accept + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-GitHub-Media-Type: + - github.machine-man-preview; format=json + X-GitHub-Request-Id: + - 3B5C:44FD:19779AA:3C3CF0D:5D632714 + X-XSS-Protection: + - 1; mode=block + status: + code: 201 + message: Created +version: 1 diff --git a/sample_app/tests/cassetes/test_bots/TestBotsService/test_get_repo_appropriate_bot_token_repo_with_user_with_integration_bot_using_it.yaml b/sample_app/tests/cassetes/test_bots/TestBotsService/test_get_repo_appropriate_bot_token_repo_with_user_with_integration_bot_using_it.yaml new file mode 100644 index 0000000..ad71c44 --- /dev/null +++ b/sample_app/tests/cassetes/test_bots/TestBotsService/test_get_repo_appropriate_bot_token_repo_with_user_with_integration_bot_using_it.yaml @@ -0,0 +1,62 @@ +interactions: +- request: + body: null + headers: + Accept: + - application/vnd.github.machine-man-preview+json + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + User-Agent: + - Codecov + method: POST + uri: https://api.github.com/app/installations/1654873/access_tokens + response: + body: + string: '{"token":"v1.test50wm4qyel2pbtpbusklcarg7c2etcbunnswp","expires_at":"2019-08-26T01:25:56Z","permissions":{"checks":"write","pull_requests":"write","statuses":"write","administration":"read","contents":"read","issues":"read","metadata":"read"},"repository_selection":"selected"}' + headers: + Access-Control-Allow-Origin: + - '*' + Access-Control-Expose-Headers: + - ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, + X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval, + X-GitHub-Media-Type + Cache-Control: + - public, max-age=60, s-maxage=60 + Content-Length: + - '277' + Content-Security-Policy: + - default-src 'none' + Content-Type: + - application/json; charset=utf-8 + 
Date: + - Mon, 26 Aug 2019 00:25:57 GMT + ETag: + - '"d5bbd7f7363c549c2faa22e8f4419077"' + Referrer-Policy: + - origin-when-cross-origin, strict-origin-when-cross-origin + Server: + - GitHub.com + Status: + - 201 Created + Strict-Transport-Security: + - max-age=31536000; includeSubdomains; preload + Vary: + - Accept + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-GitHub-Media-Type: + - github.machine-man-preview; format=json + X-GitHub-Request-Id: + - 3B5C:44FD:19779AA:3C3CF0D:5D632714 + X-XSS-Protection: + - 1; mode=block + status: + code: 201 + message: Created +version: 1 diff --git a/sample_app/tests/cassetes/test_bots/TestBotsService/testget_owner_appropriate_bot_token_with_user_with_integration_bot_using_it.yaml b/sample_app/tests/cassetes/test_bots/TestBotsService/testget_owner_appropriate_bot_token_with_user_with_integration_bot_using_it.yaml new file mode 100644 index 0000000..32968cb --- /dev/null +++ b/sample_app/tests/cassetes/test_bots/TestBotsService/testget_owner_appropriate_bot_token_with_user_with_integration_bot_using_it.yaml @@ -0,0 +1,57 @@ +interactions: +- request: + body: null + headers: + Accept: + - application/vnd.github.machine-man-preview+json + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + User-Agent: + - Codecov + method: POST + uri: https://api.github.com/app/installations/1654873/access_tokens + response: + body: + string: '{"message":"A JSON web token could not be decoded","documentation_url":"https://docs.github.com/rest"}' + headers: + Access-Control-Allow-Origin: + - '*' + Access-Control-Expose-Headers: + - ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, + X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, + X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, X-GitHub-SSO, + X-GitHub-Request-Id, Deprecation, Sunset + Content-Length: + - '102' + Content-Security-Policy: + - default-src 
'none' + Content-Type: + - application/json; charset=utf-8 + Date: + - Wed, 15 May 2024 08:50:34 GMT + Referrer-Policy: + - origin-when-cross-origin, strict-origin-when-cross-origin + Server: + - GitHub.com + Strict-Transport-Security: + - max-age=31536000; includeSubdomains; preload + Vary: + - Accept-Encoding, Accept, X-Requested-With + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-GitHub-Media-Type: + - github.v3; param=machine-man-preview; format=json + X-GitHub-Request-Id: + - D126:1EF29A:631E2B1:63AF5F7:6644775A + X-XSS-Protection: + - '0' + status: + code: 401 + message: Unauthorized +version: 1 diff --git a/sample_app/tests/cassetes/test_bots/TestBotsService/testget_repo_appropriate_bot_token_repo_with_user_with_integration_bot_using_it.yaml b/sample_app/tests/cassetes/test_bots/TestBotsService/testget_repo_appropriate_bot_token_repo_with_user_with_integration_bot_using_it.yaml new file mode 100644 index 0000000..58dc5d3 --- /dev/null +++ b/sample_app/tests/cassetes/test_bots/TestBotsService/testget_repo_appropriate_bot_token_repo_with_user_with_integration_bot_using_it.yaml @@ -0,0 +1,57 @@ +interactions: +- request: + body: null + headers: + Accept: + - application/vnd.github.machine-man-preview+json + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + User-Agent: + - Codecov + method: POST + uri: https://api.github.com/app/installations/1654873/access_tokens + response: + body: + string: '{"message":"A JSON web token could not be decoded","documentation_url":"https://docs.github.com/rest"}' + headers: + Access-Control-Allow-Origin: + - '*' + Access-Control-Expose-Headers: + - ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, + X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, + X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, X-GitHub-SSO, + X-GitHub-Request-Id, Deprecation, Sunset + Content-Length: + - '102' + 
Content-Security-Policy: + - default-src 'none' + Content-Type: + - application/json; charset=utf-8 + Date: + - Wed, 15 May 2024 08:50:34 GMT + Referrer-Policy: + - origin-when-cross-origin, strict-origin-when-cross-origin + Server: + - GitHub.com + Strict-Transport-Security: + - max-age=31536000; includeSubdomains; preload + Vary: + - Accept-Encoding, Accept, X-Requested-With + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-GitHub-Media-Type: + - github.v3; param=machine-man-preview; format=json + X-GitHub-Request-Id: + - D124:231513:6173EA3:6202FCF:6644775A + X-XSS-Protection: + - '0' + status: + code: 401 + message: Unauthorized +version: 1 diff --git a/sample_app/tests/integration/cassetes/test_bots/TestRepositoryServiceIntegration/test_get_repo_appropriate_bot_token_bad_data.yaml b/sample_app/tests/integration/cassetes/test_bots/TestRepositoryServiceIntegration/test_get_repo_appropriate_bot_token_bad_data.yaml new file mode 100644 index 0000000..162e8ab --- /dev/null +++ b/sample_app/tests/integration/cassetes/test_bots/TestRepositoryServiceIntegration/test_get_repo_appropriate_bot_token_bad_data.yaml @@ -0,0 +1,58 @@ +interactions: +- request: + body: null + headers: + Accept: + - application/vnd.github.machine-man-preview+json + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + User-Agent: + - Codecov + method: POST + uri: https://api.github.com/app/installations/5944641/access_tokens + response: + body: + string: '{"message":"Integration must generate a public key","documentation_url":"https://developer.github.com/v3"}' + headers: + Access-Control-Allow-Origin: + - '*' + Access-Control-Expose-Headers: + - ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, + X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval, + X-GitHub-Media-Type, Deprecation, Sunset + Content-Length: + - '106' + Content-Security-Policy: + - default-src 'none' + 
Content-Type: + - application/json; charset=utf-8 + Date: + - Tue, 31 Mar 2020 21:28:13 GMT + Referrer-Policy: + - origin-when-cross-origin, strict-origin-when-cross-origin + Server: + - GitHub.com + Status: + - 401 Unauthorized + Strict-Transport-Security: + - max-age=31536000; includeSubdomains; preload + Vary: + - Accept-Encoding, Accept, X-Requested-With + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-GitHub-Media-Type: + - github.machine-man-preview; format=json + X-GitHub-Request-Id: + - E466:7A72:1F788:2900F:5E83B5ED + X-XSS-Protection: + - 1; mode=block + status: + code: 401 + message: Unauthorized +version: 1 diff --git a/sample_app/tests/integration/cassetes/test_bots/TestRepositoryServiceIntegration/test_get_repo_appropriate_bot_token_non_existing_integration.yaml b/sample_app/tests/integration/cassetes/test_bots/TestRepositoryServiceIntegration/test_get_repo_appropriate_bot_token_non_existing_integration.yaml new file mode 100644 index 0000000..68ea5e9 --- /dev/null +++ b/sample_app/tests/integration/cassetes/test_bots/TestRepositoryServiceIntegration/test_get_repo_appropriate_bot_token_non_existing_integration.yaml @@ -0,0 +1,61 @@ +interactions: +- request: + body: null + headers: + Accept: + - application/vnd.github.machine-man-preview+json + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + User-Agent: + - Codecov + method: POST + uri: https://api.github.com/app/installations/5944641/access_tokens + response: + body: + string: !!binary | + H4sIAAAAAAAAAx3MMQ7CMAwAwK8gszb1wNYHMPIFZBKrjUjsKHbaAfF3UOeT7gOVzWhlWOChfrnr + kAQTJI2jsjh5VnmOXv6+uTdbEBPvXLRxn9fs23jNUSvuN6TWDK+xMzkHCsJHyGJOpZxLcH2zwPcH + 7H7LEnMAAAA= + headers: + Access-Control-Allow-Origin: + - '*' + Access-Control-Expose-Headers: + - ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, + X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval, + X-GitHub-Media-Type, 
Deprecation, Sunset + Content-Encoding: + - gzip + Content-Security-Policy: + - default-src 'none' + Content-Type: + - application/json; charset=utf-8 + Date: + - Tue, 31 Mar 2020 21:26:16 GMT + Referrer-Policy: + - origin-when-cross-origin, strict-origin-when-cross-origin + Server: + - GitHub.com + Status: + - 404 Not Found + Strict-Transport-Security: + - max-age=31536000; includeSubdomains; preload + Vary: + - Accept-Encoding, Accept, X-Requested-With + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-GitHub-Media-Type: + - github.machine-man-preview; format=json + X-GitHub-Request-Id: + - E456:7A74:9D6C2:D3440:5E83B578 + X-XSS-Protection: + - 1; mode=block + status: + code: 404 + message: Not Found +version: 1 diff --git a/sample_app/tests/integration/cassetes/test_bots/TestRepositoryServiceIntegration/test_get_token_type_mapping_bad_data.yaml b/sample_app/tests/integration/cassetes/test_bots/TestRepositoryServiceIntegration/test_get_token_type_mapping_bad_data.yaml new file mode 100644 index 0000000..22c41b0 --- /dev/null +++ b/sample_app/tests/integration/cassetes/test_bots/TestRepositoryServiceIntegration/test_get_token_type_mapping_bad_data.yaml @@ -0,0 +1,57 @@ +interactions: +- request: + body: null + headers: + Accept: + - application/vnd.github.machine-man-preview+json + Accept-Encoding: + - gzip, deflate + Connection: + - keep-alive + Content-Length: + - '0' + User-Agent: + - Codecov + method: POST + uri: https://api.github.com/app/installations/5944641/access_tokens + response: + body: + string: '{"message":"Integration must generate a public key","documentation_url":"https://docs.github.com/rest"}' + headers: + Access-Control-Allow-Origin: + - '*' + Access-Control-Expose-Headers: + - ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, + X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, + X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, X-GitHub-SSO, + 
X-GitHub-Request-Id, Deprecation, Sunset + Content-Length: + - '103' + Content-Security-Policy: + - default-src 'none' + Content-Type: + - application/json; charset=utf-8 + Date: + - Wed, 15 May 2024 08:58:07 GMT + Referrer-Policy: + - origin-when-cross-origin, strict-origin-when-cross-origin + Server: + - GitHub.com + Strict-Transport-Security: + - max-age=31536000; includeSubdomains; preload + Vary: + - Accept-Encoding, Accept, X-Requested-With + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-GitHub-Media-Type: + - github.v3; param=machine-man-preview; format=json + X-GitHub-Request-Id: + - D18C:26966D:614450E:61D4561:6644791F + X-XSS-Protection: + - '0' + status: + code: 401 + message: Unauthorized +version: 1 diff --git a/sample_app/tests/integration/cassetes/test_repository_service/TestRepositoryServiceIntegration/test_get_repo_provider_service_bitbucket.yaml b/sample_app/tests/integration/cassetes/test_repository_service/TestRepositoryServiceIntegration/test_get_repo_provider_service_bitbucket.yaml new file mode 100644 index 0000000..5f677ed --- /dev/null +++ b/sample_app/tests/integration/cassetes/test_repository_service/TestRepositoryServiceIntegration/test_get_repo_provider_service_bitbucket.yaml @@ -0,0 +1,95 @@ +interactions: +- request: + body: null + headers: + Accept: + - application/json + User-Agent: + - Default + method: GET + uri: https://bitbucket.org/api/2.0/repositories/ThiagoCodecov/example-python/commit/6895b64?oauth_consumer_key=testzdcviyi3x7f8h0&oauth_token=H6scSkq7rKZDXtDqe4&oauth_signature_method=HMAC-SHA1&oauth_timestamp=1569615857&oauth_nonce=b98bd6399cff4029ae7cadeef4d7ecd2&oauth_version=1.0&oauth_signature=QoXcCFU5p8sc0mHtAypAkqXi1wQ%3D + response: + content: '{"rendered": {"message": {"raw": "Adding ''include'' term if multiple + sources\n\nbased on a support ticket around multiple sources\r\n\r\nhttps://codecov.freshdesk.com/a/tickets/87", + "markup": "markdown", "html": "

Adding ''include'' term if multiple sources

\n

based + on a support ticket around multiple sources

\n

https://codecov.freshdesk.com/a/tickets/87

", + "type": "rendered"}}, "hash": "6895b6479dbe12b5cb3baa02416c6343ddb888b4", "repository": + {"links": {"self": {"href": "https://bitbucket.org/!api/2.0/repositories/ThiagoCodecov/example-python"}, + "html": {"href": "https://bitbucket.org/ThiagoCodecov/example-python"}, "avatar": + {"href": "https://bytebucket.org/ravatar/%7Ba8c50527-2c3a-480e-afe1-7700e2b00074%7D?ts=default"}}, + "type": "repository", "name": "example-python", "full_name": "ThiagoCodecov/example-python", + "uuid": "{a8c50527-2c3a-480e-afe1-7700e2b00074}"}, "links": {"self": {"href": + "https://bitbucket.org/!api/2.0/repositories/ThiagoCodecov/example-python/commit/6895b6479dbe12b5cb3baa02416c6343ddb888b4"}, + "comments": {"href": "https://bitbucket.org/!api/2.0/repositories/ThiagoCodecov/example-python/commit/6895b6479dbe12b5cb3baa02416c6343ddb888b4/comments"}, + "patch": {"href": "https://bitbucket.org/!api/2.0/repositories/ThiagoCodecov/example-python/patch/6895b6479dbe12b5cb3baa02416c6343ddb888b4"}, + "html": {"href": "https://bitbucket.org/ThiagoCodecov/example-python/commits/6895b6479dbe12b5cb3baa02416c6343ddb888b4"}, + "diff": {"href": "https://bitbucket.org/!api/2.0/repositories/ThiagoCodecov/example-python/diff/6895b6479dbe12b5cb3baa02416c6343ddb888b4"}, + "approve": {"href": "https://bitbucket.org/!api/2.0/repositories/ThiagoCodecov/example-python/commit/6895b6479dbe12b5cb3baa02416c6343ddb888b4/approve"}, + "statuses": {"href": "https://bitbucket.org/!api/2.0/repositories/ThiagoCodecov/example-python/commit/6895b6479dbe12b5cb3baa02416c6343ddb888b4/statuses"}}, + "author": {"raw": "Jerrod ", "type": "author"}, "summary": + {"raw": "Adding ''include'' term if multiple sources\n\nbased on a support ticket + around multiple sources\r\n\r\nhttps://codecov.freshdesk.com/a/tickets/87", + "markup": "markdown", "html": "

Adding ''include'' term if multiple sources

\n

based + on a support ticket around multiple sources

\n

https://codecov.freshdesk.com/a/tickets/87

", + "type": "rendered"}, "participants": [], "parents": [{"hash": "adb252173d2107fad86bcdcbc149884c2dd4c609", + "type": "commit", "links": {"self": {"href": "https://bitbucket.org/!api/2.0/repositories/ThiagoCodecov/example-python/commit/adb252173d2107fad86bcdcbc149884c2dd4c609"}, + "html": {"href": "https://bitbucket.org/ThiagoCodecov/example-python/commits/adb252173d2107fad86bcdcbc149884c2dd4c609"}}}], + "date": "2018-07-09T23:39:20+00:00", "message": "Adding ''include'' term if + multiple sources\n\nbased on a support ticket around multiple sources\r\n\r\nhttps://codecov.freshdesk.com/a/tickets/87", + "type": "commit"}' + headers: + Accept-Ranges: + - bytes + Cache-Control: + - max-age=900 + Connection: + - close + Content-Type: + - application/json; charset=utf-8 + Date: + - Fri, 27 Sep 2019 20:24:18 GMT + Etag: + - '"gz[30b44719f3163e6c11b3ad93a6deddd8]"' + Last-Modified: + - Thu, 26 Sep 2019 00:58:27 GMT + Server: + - nginx + Strict-Transport-Security: + - max-age=31536000; includeSubDomains; preload + Vary: + - Authorization, Accept-Encoding + X-Accepted-Oauth-Scopes: + - repository + X-Cache-Info: + - caching + X-Consumed-Content-Encoding: + - gzip + X-Content-Type-Options: + - nosniff + X-Credential-Type: + - oauth1 + X-Frame-Options: + - SAMEORIGIN + X-Oauth-Scopes: + - pipeline:variable, webhook, snippet:write, wiki, issue:write, pullrequest:write, + repository:delete, repository:admin, project:write, team:write, account:write + X-Reads-Before-Write-From: + - default + X-Render-Time: + - '0.431403160095' + X-Request-Count: + - '76' + X-Served-By: + - app-144 + X-Static-Version: + - ca263699922c + X-Version: + - ca263699922c + status: + code: 200 + message: OK + status_code: 200 + url: 
https://bitbucket.org/api/2.0/repositories/ThiagoCodecov/example-python/commit/6895b64?oauth_consumer_key=testzdcviyi3x7f8h0&oauth_token=H6scSkq7rKZDXtDqe4&oauth_signature_method=HMAC-SHA1&oauth_timestamp=1569615857&oauth_nonce=b98bd6399cff4029ae7cadeef4d7ecd2&oauth_version=1.0&oauth_signature=QoXcCFU5p8sc0mHtAypAkqXi1wQ%3D +version: 1 diff --git a/sample_app/tests/integration/cassetes/test_repository_service/TestRepositoryServiceIntegration/test_get_repo_provider_service_github.yaml b/sample_app/tests/integration/cassetes/test_repository_service/TestRepositoryServiceIntegration/test_get_repo_provider_service_github.yaml new file mode 100644 index 0000000..65ff7e0 --- /dev/null +++ b/sample_app/tests/integration/cassetes/test_repository_service/TestRepositoryServiceIntegration/test_get_repo_provider_service_github.yaml @@ -0,0 +1,92 @@ +interactions: +- request: + body: null + headers: + Accept: + - application/json + User-Agent: + - Default + method: GET + uri: https://api.github.com/repos/ThiagoCodecov/example-python/commits/6895b64 + response: + content: '{"sha":"6895b6479dbe12b5cb3baa02416c6343ddb888b4","node_id":"MDY6Q29tbWl0MTU2NjE3Nzc3OjY4OTViNjQ3OWRiZTEyYjVjYjNiYWEwMjQxNmM2MzQzZGRiODg4YjQ=","commit":{"author":{"name":"Jerrod","email":"jerrod@fundersclub.com","date":"2018-07-09T23:39:20Z"},"committer":{"name":"GitHub","email":"noreply@github.com","date":"2018-07-09T23:39:20Z"},"message":"Adding + ''include'' term if multiple sources\n\nbased on a support ticket around multiple + sources\r\n\r\nhttps://codecov.freshdesk.com/a/tickets/87","tree":{"sha":"3c47e2b9d9791503b56f0e4f78e76b9d061ad529","url":"https://api.github.com/repos/ThiagoCodecov/example-python/git/trees/3c47e2b9d9791503b56f0e4f78e76b9d061ad529"},"url":"https://api.github.com/repos/ThiagoCodecov/example-python/git/commits/6895b6479dbe12b5cb3baa02416c6343ddb888b4","comment_count":0,"verification":{"verified":true,"reason":"valid","signature":"-----BEGIN + PGP 
SIGNATURE-----\n\nwsBcBAABCAAQBQJbQ/IoCRBK7hj4Ov3rIwAAdHIIAGm5AdlM8E0E7TyFKWgwPpjO\nsxiQswFXWosTZnJAn2NN/JF5aNqxUFLa9mo7Z+jztQuxrWsAFQsNFHf/t90iZi4w\ne0CkIHJdI8ukcae5/3eP+9h8GyqEq/RcvxYtvW6zYkWAK3Pyqwrs+qwH1MuLsl6E\n02fgD6T99Pq2V+3S1+dfgU6ot4IrMwT7aR+u9fCM8G4tF4y/5znIzuke6amVt52S\nUfjnHOHbDxdD4Mkxn8107zX1XmQ4BEzhh1kjTVd3Mean6ye7xsFxFGYHA5Zd1iyM\nCsmW5waqonRf03m1bQ9pYleufcwpr72iARLiBFhTOcAF6vpdoshO1qmTtsweFno=\n=vKnQ\n-----END + PGP SIGNATURE-----\n","payload":"tree 3c47e2b9d9791503b56f0e4f78e76b9d061ad529\nparent + adb252173d2107fad86bcdcbc149884c2dd4c609\nauthor Jerrod + 1531179560 -0700\ncommitter GitHub 1531179560 -0700\n\nAdding + ''include'' term if multiple sources\n\nbased on a support ticket around multiple + sources\r\n\r\nhttps://codecov.freshdesk.com/a/tickets/87"}},"url":"https://api.github.com/repos/ThiagoCodecov/example-python/commits/6895b6479dbe12b5cb3baa02416c6343ddb888b4","html_url":"https://github.com/ThiagoCodecov/example-python/commit/6895b6479dbe12b5cb3baa02416c6343ddb888b4","comments_url":"https://api.github.com/repos/ThiagoCodecov/example-python/commits/6895b6479dbe12b5cb3baa02416c6343ddb888b4/comments","author":null,"committer":{"login":"web-flow","id":19864447,"node_id":"MDQ6VXNlcjE5ODY0NDQ3","avatar_url":"https://avatars3.githubusercontent.com/u/19864447?v=4","gravatar_id":"","url":"https://api.github.com/users/web-flow","html_url":"https://github.com/web-flow","followers_url":"https://api.github.com/users/web-flow/followers","following_url":"https://api.github.com/users/web-flow/following{/other_user}","gists_url":"https://api.github.com/users/web-flow/gists{/gist_id}","starred_url":"https://api.github.com/users/web-flow/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/web-flow/subscriptions","organizations_url":"https://api.github.com/users/web-flow/orgs","repos_url":"https://api.github.com/users/web-flow/repos","events_url":"https://api.github.com/users/web-flow/events{/privacy}","received_events_url":"https://a
pi.github.com/users/web-flow/received_events","type":"User","site_admin":false},"parents":[{"sha":"adb252173d2107fad86bcdcbc149884c2dd4c609","url":"https://api.github.com/repos/ThiagoCodecov/example-python/commits/adb252173d2107fad86bcdcbc149884c2dd4c609","html_url":"https://github.com/ThiagoCodecov/example-python/commit/adb252173d2107fad86bcdcbc149884c2dd4c609"}],"stats":{"total":9,"additions":8,"deletions":1},"files":[{"sha":"1fbfc366bd98e0c8df4fd297061a420b674857f4","filename":"README.rst","status":"modified","additions":8,"deletions":1,"changes":9,"blob_url":"https://github.com/ThiagoCodecov/example-python/blob/6895b6479dbe12b5cb3baa02416c6343ddb888b4/README.rst","raw_url":"https://github.com/ThiagoCodecov/example-python/raw/6895b6479dbe12b5cb3baa02416c6343ddb888b4/README.rst","contents_url":"https://api.github.com/repos/ThiagoCodecov/example-python/contents/README.rst?ref=6895b6479dbe12b5cb3baa02416c6343ddb888b4","patch":"@@ + -47,12 +47,19 @@ Below are some examples on how to include coverage tracking + during your tests. C\n \n You may need to configure a ``.coveragerc`` file. + Learn more `here `_. + Start with this `generic .coveragerc `_ + for example.\n \n-We highly suggest adding `source` to your ``.coveragerc`` + which solves a number of issues collecting coverage.\n+We highly suggest adding + `source` to your ``.coveragerc``, which solves a number of issues collecting + coverage.\n \n .. code-block:: ini\n \n [run]\n source=your_package_name\n+ \n+If + there are multiple sources, you instead should add ''include'' to your ``.coveragerc``\n+\n+.. 
+ code-block:: ini\n+\n+ [run]\n+ include=your_package_name/*\n \n unittests\n + ---------"}]}' + headers: + Access-Control-Allow-Origin: + - '*' + Access-Control-Expose-Headers: + - ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, + X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval, + X-GitHub-Media-Type + Cache-Control: + - private, max-age=60, s-maxage=60 + Connection: + - close + Content-Security-Policy: + - default-src 'none' + Content-Type: + - application/json; charset=utf-8 + Date: + - Fri, 27 Sep 2019 20:17:39 GMT + Etag: + - W/"34680c6edd4947802517db4ac91cbc6f" + Last-Modified: + - Mon, 09 Jul 2018 23:39:20 GMT + Referrer-Policy: + - origin-when-cross-origin, strict-origin-when-cross-origin + Server: + - GitHub.com + Status: + - 200 OK + Strict-Transport-Security: + - max-age=31536000; includeSubdomains; preload + Transfer-Encoding: + - chunked + Vary: + - Accept, Authorization, Cookie, X-GitHub-OTP + X-Accepted-Oauth-Scopes: + - '' + X-Consumed-Content-Encoding: + - gzip + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-Github-Media-Type: + - github.v3 + X-Github-Request-Id: + - 464F:069D:1DF8E:28829:5D8E6E63 + X-Oauth-Scopes: + - admin:org, admin:public_key, admin:repo_hook, repo, user, write:discussion + X-Ratelimit-Limit: + - '5000' + X-Ratelimit-Remaining: + - '4998' + X-Ratelimit-Reset: + - '1569618944' + X-Xss-Protection: + - 1; mode=block + status: + code: 200 + message: OK + status_code: 200 + url: https://api.github.com/repos/ThiagoCodecov/example-python/commits/6895b64 +version: 1 diff --git a/sample_app/tests/integration/cassetes/test_repository_service/TestRepositoryServiceIntegration/test_get_repo_provider_service_gitlab.yaml b/sample_app/tests/integration/cassetes/test_repository_service/TestRepositoryServiceIntegration/test_get_repo_provider_service_gitlab.yaml new file mode 100644 index 0000000..0b11652 --- /dev/null +++ 
b/sample_app/tests/integration/cassetes/test_repository_service/TestRepositoryServiceIntegration/test_get_repo_provider_service_gitlab.yaml @@ -0,0 +1,137 @@ +interactions: +- request: + body: null + headers: + Accept: + - application/json + User-Agent: + - Default + method: GET + uri: https://gitlab.com/api/v4/projects/187725/repository/commits/0028015f7fa260f5fd68f78c0deffc15183d955e + response: + content: '{"id":"0028015f7fa260f5fd68f78c0deffc15183d955e","short_id":"0028015f","created_at":"2014-10-19T14:32:33.000Z","parent_ids":["5716de23b27020419d1a40dd93b469c041a1eeef"],"title":"added + large file","message":"added large file\n","author_name":"stevepeak","author_email":"steve@stevepeak.net","authored_date":"2014-10-19T14:32:33.000Z","committer_name":"stevepeak","committer_email":"steve@stevepeak.net","committed_date":"2014-10-19T14:32:33.000Z","stats":{"additions":816,"deletions":0,"total":816},"status":"success","last_pipeline":{"id":558130,"sha":"0028015f7fa260f5fd68f78c0deffc15183d955e","ref":null,"status":"success","web_url":"https://gitlab.com/codecov/ci-repo/pipelines/558130"},"project_id":187725}' + headers: + Cache-Control: + - max-age=0, private, must-revalidate + Connection: + - close + Content-Length: + - '710' + Content-Type: + - application/json + Date: + - Fri, 27 Sep 2019 20:25:56 GMT + Etag: + - W/"14982171f02402a9f16a64fd67c7a652" + Gitlab-Lb: + - fe-10-lb-gprd + Gitlab-Sv: + - localhost + Ratelimit-Limit: + - '600' + Ratelimit-Observed: + - '2' + Ratelimit-Remaining: + - '598' + Ratelimit-Reset: + - '1569616016' + Ratelimit-Resettime: + - Fri, 27 Sep 2019 20:26:56 GMT + Referrer-Policy: + - strict-origin-when-cross-origin + Server: + - nginx + Strict-Transport-Security: + - max-age=31536000 + Vary: + - Origin + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - SAMEORIGIN + X-Request-Id: + - 8gMdbsH0tv5 + X-Runtime: + - '0.062895' + status: + code: 200 + message: OK + status_code: 200 + url: 
https://gitlab.com/api/v4/projects/187725/repository/commits/0028015f7fa260f5fd68f78c0deffc15183d955e +- request: + body: null + headers: + Accept: + - application/json + User-Agent: + - Default + method: GET + uri: https://gitlab.com/api/v4/users?search=steve%40stevepeak.net + response: + content: '[{"id":109479,"name":"Steve Peak","username":"stevepeak","state":"active","avatar_url":"https://secure.gravatar.com/avatar/3712e9b9aee2ce5090aae58c2495cdee?s=80\u0026d=identicon","web_url":"https://gitlab.com/stevepeak"}]' + headers: + Cache-Control: + - max-age=0, private, must-revalidate + Connection: + - close + Content-Length: + - '221' + Content-Type: + - application/json + Date: + - Fri, 27 Sep 2019 20:25:57 GMT + Etag: + - W/"741342810fb814ea719ddb0b3c927b6d" + Gitlab-Lb: + - fe-09-lb-gprd + Gitlab-Sv: + - localhost + Link: + - ; + rel="first", ; + rel="last" + Ratelimit-Limit: + - '600' + Ratelimit-Observed: + - '3' + Ratelimit-Remaining: + - '597' + Ratelimit-Reset: + - '1569616017' + Ratelimit-Resettime: + - Fri, 27 Sep 2019 20:26:57 GMT + Referrer-Policy: + - strict-origin-when-cross-origin + Server: + - nginx + Strict-Transport-Security: + - max-age=31536000 + Vary: + - Origin + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - SAMEORIGIN + X-Next-Page: + - '' + X-Page: + - '1' + X-Per-Page: + - '20' + X-Prev-Page: + - '' + X-Request-Id: + - NPfOv9UCz9 + X-Runtime: + - '0.083548' + X-Total: + - '1' + X-Total-Pages: + - '1' + status: + code: 200 + message: OK + status_code: 200 + url: https://gitlab.com/api/v4/users?search=steve%40stevepeak.net +version: 1 diff --git a/sample_app/tests/integration/test_repository_service.py b/sample_app/tests/integration/test_repository_service.py new file mode 100644 index 0000000..b2bfb60 --- /dev/null +++ b/sample_app/tests/integration/test_repository_service.py @@ -0,0 +1,103 @@ +import pytest + +from database.tests.factories import RepositoryFactory +from services.repository import ( + get_repo_provider_service, 
+) + + +class TestRepositoryServiceIntegration: + @pytest.mark.asyncio + async def test_get_repo_provider_service_github(self, dbsession, codecov_vcr): + repo = RepositoryFactory.create( + owner__unencrypted_oauth_token="testlln8sdeec57lz83oe3l8y9qq4lhqat2f1kzm", + owner__username="ThiagoCodecov", + owner__service="github", + name="example-python", + ) + dbsession.add(repo) + dbsession.flush() + service = get_repo_provider_service(repo) + expected_result = { + "author": { + "id": None, + "username": None, + "email": "jerrod@fundersclub.com", + "name": "Jerrod", + }, + "message": "Adding 'include' term if multiple sources\n\nbased on a support ticket around multiple sources\r\n\r\nhttps://codecov.freshdesk.com/a/tickets/87", + "parents": ["adb252173d2107fad86bcdcbc149884c2dd4c609"], + "commitid": "6895b64", + "timestamp": "2018-07-09T23:39:20Z", + } + + commit = await service.get_commit("6895b64") + assert commit["author"] == expected_result["author"] + assert commit == expected_result + + @pytest.mark.asyncio + async def test_get_repo_provider_service_bitbucket( + self, dbsession, mock_configuration, codecov_vcr + ): + mock_configuration.params["bitbucket"] = { + "client_id": "testzdcviyi3x7f8h0", + "client_secret": "testw35rwjj75gbaervbsmgl13vf39jd", + } + repo = RepositoryFactory.create( + owner__unencrypted_oauth_token="H6scSkq7rKZDXtDqe4:kdTf3NVM9RkUc9rAaDM853j5f32PkBGU", + owner__username="ThiagoCodecov", + owner__service="bitbucket", + name="example-python", + ) + dbsession.add(repo) + dbsession.flush() + service = get_repo_provider_service(repo) + commit = await service.get_commit("6895b64") + expected_result = { + "author": { + "id": None, + "username": None, + "email": "jerrod@fundersclub.com", + "name": "Jerrod", + }, + "message": "Adding 'include' term if multiple sources\n\nbased on a support ticket around multiple sources\r\n\r\nhttps://codecov.freshdesk.com/a/tickets/87", + "parents": ["adb252173d2107fad86bcdcbc149884c2dd4c609"], + "commitid": 
"6895b64", + "timestamp": "2018-07-09T23:39:20+00:00", + } + assert commit["author"] == expected_result["author"] + assert commit == expected_result + + @pytest.mark.asyncio + async def test_get_repo_provider_service_gitlab( + self, dbsession, mock_configuration, codecov_vcr + ): + mock_configuration.params["bitbucket"] = { + "client_id": "testzdcviyi3x7f8h0", + "client_secret": "testw35rwjj75gbaervbsmgl13vf39jd", + } + repo = RepositoryFactory.create( + owner__unencrypted_oauth_token="test10r65j3084oje16v12yzfuojw4yovzwa18y9txooo716odibjdwk8cn1p42r", + owner__username="stevepeak", + owner__service="gitlab", + name="example-python", + service_id="187725", + ) + dbsession.add(repo) + dbsession.flush() + service = get_repo_provider_service(repo) + commit = await service.get_commit("0028015f7fa260f5fd68f78c0deffc15183d955e") + expected_result = { + "author": { + "id": None, + "username": None, + "email": "steve@stevepeak.net", + "name": "stevepeak", + }, + "message": "added large file\n", + "parents": ["5716de23b27020419d1a40dd93b469c041a1eeef"], + "commitid": "0028015f7fa260f5fd68f78c0deffc15183d955e", + "timestamp": "2014-10-19T14:32:33.000Z", + } + assert commit["author"] == expected_result["author"] + assert commit == expected_result diff --git a/sample_app/tests/snapshots/results__build_message__0.txt b/sample_app/tests/snapshots/results__build_message__0.txt new file mode 100644 index 0000000..30c7457 --- /dev/null +++ b/sample_app/tests/snapshots/results__build_message__0.txt @@ -0,0 +1,18 @@ +### :x: 1 Tests Failed: +| Tests completed | Failed | Passed | Skipped | +|---|---|---|---| +| 3 | 1 | 2 | 3 | +
View the top 1 failed test(s) by shortest run time + +>
testname
+>
Stack Traces | 1s run time +> +> >
hello world
+> > [View](https://example.com/build_url) the CI Build +> +>
+ +
+ +To view more test analytics, go to the [Test Analytics Dashboard](https://app.codecov.io/gh/username/name/tests/thing%2Fthing) +📋 Got 3 mins? [Take this short survey](https://forms.gle/BpocVj23nhr2Y45G7) to help us improve Test Analytics. \ No newline at end of file diff --git a/sample_app/tests/snapshots/results__build_message_with_flake__0.txt b/sample_app/tests/snapshots/results__build_message_with_flake__0.txt new file mode 100644 index 0000000..de4b8e9 --- /dev/null +++ b/sample_app/tests/snapshots/results__build_message_with_flake__0.txt @@ -0,0 +1,19 @@ +### :x: 1 Tests Failed: +| Tests completed | Failed | Passed | Skipped | +|---|---|---|---| +| 3 | 1 | 2 | 3 | +
View the full list of 1 :snowflake: flaky tests + +>
testname
+> **Flake rate in main:** 33.33% (Passed 2 times, Failed 1 times) +>
Stack Traces | 1s run time +> +> >
hello world
+> > [View](https://example.com/build_url) the CI Build +> +>
+ +
+ +To view more test analytics, go to the [Test Analytics Dashboard](https://app.codecov.io/gh/username/name/tests/test_branch) +📋 Got 3 mins? [Take this short survey](https://forms.gle/BpocVj23nhr2Y45G7) to help us improve Test Analytics. \ No newline at end of file diff --git a/sample_app/tests/snapshots/results__generate_failure_info__0.txt b/sample_app/tests/snapshots/results__generate_failure_info__0.txt new file mode 100644 index 0000000..026afca --- /dev/null +++ b/sample_app/tests/snapshots/results__generate_failure_info__0.txt @@ -0,0 +1,2 @@ +
hello world
+[View](https://example.com/build_url) the CI Build \ No newline at end of file diff --git a/sample_app/tests/snapshots/results__specific_error_message__0.txt b/sample_app/tests/snapshots/results__specific_error_message__0.txt new file mode 100644 index 0000000..ae9b5cd --- /dev/null +++ b/sample_app/tests/snapshots/results__specific_error_message__0.txt @@ -0,0 +1,5 @@ +### :x: Unsupported file format + +> Upload processing failed due to unsupported file format. Please review the parser error message: +>
Error parsing JUnit XML in test.xml at 4:32: ParserError: No name found
+> For more help, visit our [troubleshooting guide](https://docs.codecov.com/docs/test-analytics#troubleshooting). diff --git a/sample_app/tests/snapshots/results__specific_error_message_no_error__0.txt b/sample_app/tests/snapshots/results__specific_error_message_no_error__0.txt new file mode 100644 index 0000000..082af14 --- /dev/null +++ b/sample_app/tests/snapshots/results__specific_error_message_no_error__0.txt @@ -0,0 +1 @@ +:x: We are unable to process any of the uploaded JUnit XML files. Please ensure your files are in the right format. \ No newline at end of file diff --git a/sample_app/tests/test_activation.py b/sample_app/tests/test_activation.py new file mode 100644 index 0000000..3688f35 --- /dev/null +++ b/sample_app/tests/test_activation.py @@ -0,0 +1,231 @@ +from datetime import datetime + +from database.tests.factories import OwnerFactory +from services.activation import activate_user, get_installation_plan_activated_users + + +class TestActivationServiceTestCase: + def test_activate_user_no_seats( + self, request, dbsession, mocker, with_sql_functions + ): + org = OwnerFactory.create( + plan_user_count=0, plan_activated_users=[], plan_auto_activate=True + ) + user = OwnerFactory.create_from_test_request(request) + dbsession.add(org) + dbsession.add(user) + dbsession.flush() + + was_activated = activate_user(dbsession, org.ownerid, user.ownerid) + assert was_activated is False + dbsession.commit() + assert user.ownerid not in org.plan_activated_users + + def test_activate_user_success( + self, request, dbsession, mocker, with_sql_functions + ): + org = OwnerFactory.create( + plan_user_count=1, plan_activated_users=[], plan_auto_activate=True + ) + user = OwnerFactory.create_from_test_request(request) + dbsession.add(org) + dbsession.add(user) + dbsession.flush() + + was_activated = activate_user(dbsession, org.ownerid, user.ownerid) + assert was_activated is True + dbsession.commit() + assert user.ownerid in org.plan_activated_users + + def 
test_activate_user_success_for_users_free( + self, request, dbsession, mocker, with_sql_functions + ): + org = OwnerFactory.create( + plan="users-free", + plan_user_count=1, + plan_activated_users=None, + plan_auto_activate=True, + ) + user = OwnerFactory.create_from_test_request(request) + dbsession.add(org) + dbsession.add(user) + dbsession.flush() + + was_activated = activate_user(dbsession, org.ownerid, user.ownerid) + assert was_activated is True + dbsession.commit() + assert user.ownerid in org.plan_activated_users + + def test_activate_user_success_for_enterprise_pr_billing( + self, request, dbsession, mocker, mock_configuration, with_sql_functions + ): + mocker.patch("services.license.is_enterprise", return_value=True) + mocker.patch("services.license._get_now", return_value=datetime(2020, 4, 2)) + + org = OwnerFactory.create( + service="github", + oauth_token=None, + plan_activated_users=list(range(15, 20)), + plan_auto_activate=True, + ) + dbsession.add(org) + dbsession.flush() + + encrypted_license = "wxWEJyYgIcFpi6nBSyKQZQeaQ9Eqpo3SXyUomAqQOzOFjdYB3A8fFM1rm+kOt2ehy9w95AzrQqrqfxi9HJIb2zLOMOB9tSy52OykVCzFtKPBNsXU/y5pQKOfV7iI3w9CHFh3tDwSwgjg8UsMXwQPOhrpvl2GdHpwEhFdaM2O3vY7iElFgZfk5D9E7qEnp+WysQwHKxDeKLI7jWCnBCBJLDjBJRSz0H7AfU55RQDqtTrnR+rsLDHOzJ80/VxwVYhb" + mock_configuration.params["setup"]["enterprise_license"] = encrypted_license + mock_configuration.params["setup"]["codecov_url"] = "https://codecov.mysite.com" + + user = OwnerFactory.create_from_test_request(request) + dbsession.add(org) + dbsession.add(user) + dbsession.flush() + + was_activated = activate_user(dbsession, org.ownerid, user.ownerid) + assert was_activated is True + dbsession.commit() + assert user.ownerid in org.plan_activated_users + + def test_activate_user_success_user_org_overlap( + self, request, dbsession, mock_configuration, mocker, with_sql_functions + ): + mocker.patch("services.license.is_enterprise", return_value=True) + mocker.patch("services.license._get_now", 
return_value=datetime(2020, 4, 2)) + + # Create two orgs to ensure our seat availability checking works across + # multiple organizations. + org = OwnerFactory.create( + service="github", + oauth_token=None, + plan_activated_users=list(range(1, 6)), + plan_auto_activate=True, + ) + dbsession.add(org) + dbsession.flush() + + org_second = OwnerFactory.create( + service="github", + oauth_token=None, + plan_activated_users=list(range(2, 8)), + plan_auto_activate=True, + ) + dbsession.add(org_second) + dbsession.flush() + + assert get_installation_plan_activated_users(dbsession)[0][0] == 7 + + # {'company': 'Test Company', 'expires': '2021-01-01 00:00:00', 'url': 'https://codecov.mysite.com', 'trial': False, 'users': 10, 'repos': None, 'pr_billing': True} + encrypted_license = "wxWEJyYgIcFpi6nBSyKQZQeaQ9Eqpo3SXyUomAqQOzOFjdYB3A8fFM1rm+kOt2ehy9w95AzrQqrqfxi9HJIb2zLOMOB9tSy52OykVCzFtKPBNsXU/y5pQKOfV7iI3w9CHFh3tDwSwgjg8UsMXwQPOhrpvl2GdHpwEhFdaM2O3vY7iElFgZfk5D9E7qEnp+WysQwHKxDeKLI7jWCnBCBJLDjBJRSz0H7AfU55RQDqtTrnR+rsLDHOzJ80/VxwVYhb" + mock_configuration.params["setup"]["enterprise_license"] = encrypted_license + mock_configuration.params["setup"]["codecov_url"] = "https://codecov.mysite.com" + + user = OwnerFactory.create_from_test_request(request) + dbsession.add(org_second) + dbsession.add(user) + dbsession.flush() + + was_activated = activate_user(dbsession, org_second.ownerid, user.ownerid) + assert was_activated is True + dbsession.commit() + + was_activated = activate_user(dbsession, org.ownerid, user.ownerid) + assert was_activated is True + dbsession.commit() + + assert get_installation_plan_activated_users(dbsession)[0][0] == 8 + + def test_activate_user_failure_for_enterprise_pr_billing_no_seats( + self, request, dbsession, mock_configuration, mocker, with_sql_functions + ): + mocker.patch("services.license.is_enterprise", return_value=True) + mocker.patch("services.license._get_now", return_value=datetime(2020, 4, 2)) + + # Create two orgs to ensure our seat 
availability checking works across + # multiple organizations. + org = OwnerFactory.create( + service="github", + oauth_token=None, + plan_activated_users=list(range(15, 20)), + plan_auto_activate=True, + ) + dbsession.add(org) + dbsession.flush() + + org_second = OwnerFactory.create( + service="github", + oauth_token=None, + plan_activated_users=list(range(21, 35)), + plan_auto_activate=True, + ) + dbsession.add(org_second) + dbsession.flush() + + encrypted_license = "wxWEJyYgIcFpi6nBSyKQZQeaQ9Eqpo3SXyUomAqQOzOFjdYB3A8fFM1rm+kOt2ehy9w95AzrQqrqfxi9HJIb2zLOMOB9tSy52OykVCzFtKPBNsXU/y5pQKOfV7iI3w9CHFh3tDwSwgjg8UsMXwQPOhrpvl2GdHpwEhFdaM2O3vY7iElFgZfk5D9E7qEnp+WysQwHKxDeKLI7jWCnBCBJLDjBJRSz0H7AfU55RQDqtTrnR+rsLDHOzJ80/VxwVYhb" + mock_configuration.params["setup"]["enterprise_license"] = encrypted_license + mock_configuration.params["setup"]["codecov_url"] = "https://codecov.mysite.com" + + user = OwnerFactory.create_from_test_request(request) + dbsession.add(org_second) + dbsession.add(user) + dbsession.flush() + + was_activated = activate_user(dbsession, org_second.ownerid, user.ownerid) + assert was_activated is False + dbsession.commit() + assert user.ownerid not in org.plan_activated_users + + def test_activate_user_enterprise_pr_billing_invalid_license( + self, request, dbsession, mocker, mock_configuration, with_sql_functions + ): + mocker.patch("services.license.is_enterprise", return_value=True) + + org = OwnerFactory.create( + service="github", + oauth_token=None, + plan_activated_users=list(range(15, 20)), + plan_auto_activate=True, + ) + dbsession.add(org) + dbsession.flush() + + encrypted_license = "wxWEJyYgIcFpi6nBSyKQZQeaQ9Eqpo3SXyUomAqQOzOFjdYB3A8fFM1rm+kOt2ehy9w95AzrQqrqfxi9HJIb2zLOMOB9tSy52OykVCzFtKPBNsXU/y5pQKOfV7iI3w9CHFh3tDwSwgjg8UsMXwQPOhrpvl2GdHpwEhFdaM2O3vY7iElFgZfk5D9E7qEnp+WysQwHKxDeKLI7jWCnBCBJLDjBJRSz0H7AfU55RQDqtTrnR+rsLDHOzJ80/VxwVYhb" + mock_configuration.params["setup"]["enterprise_license"] = encrypted_license + 
mock_configuration.params["setup"]["codecov_url"] = "https://codecov.mysite.com" + + user = OwnerFactory.create_from_test_request(request) + dbsession.add(org) + dbsession.add(user) + dbsession.flush() + + was_activated = activate_user(dbsession, org.ownerid, user.ownerid) + assert was_activated is False + + def test_pr_billing_enterprise_no_seats_for_auto_actiavation( + self, request, dbsession, mocker, mock_configuration, with_sql_functions + ): + mocker.patch("services.license.is_enterprise", return_value=True) + mocker.patch("services.license._get_now", return_value=datetime(2020, 4, 2)) + + user = OwnerFactory.create_from_test_request(request) + dbsession.add(user) + dbsession.flush() + + org = OwnerFactory.create( + service="github", + oauth_token=None, + plan_activated_users=[user.ownerid], + plan_auto_activate=True, + ) + dbsession.add(org) + dbsession.flush() + + encrypted_license = "AtFDCJPhzM0SEF6MdCay6SwaDEZjkIlxH64UAo+Qm2auVe7SsfwxvjgXviKYBK2t+mQSbQQIc9hluF4oI6r+8ZpVCYvOnHv/Qp7Ism747cGKHHGpePm/E3MDaFTGyRdTaGach9K0/3UdoGJh9Gcf1FhEiutHV2qmhWLKQFLdD9QJu31vFGChS63NH864XV3Hp62GEmhuV+/tyVNTVmh7UXShaNVEC8CU+714TUVYO0SWuysPDr6wv6mBskZE5Evb" + mock_configuration.params["setup"]["enterprise_license"] = encrypted_license + mock_configuration.params["setup"]["codecov_url"] = "https://codecov.mysite.com" + + # Make a new user, this would be the 11th activated user + second_user = OwnerFactory.create_from_test_request(request) + dbsession.add(second_user) + dbsession.flush() + + was_activated = activate_user(dbsession, org.ownerid, second_user.ownerid) + assert was_activated is False diff --git a/sample_app/tests/test_ai_pr_review.py b/sample_app/tests/test_ai_pr_review.py new file mode 100644 index 0000000..f307016 --- /dev/null +++ b/sample_app/tests/test_ai_pr_review.py @@ -0,0 +1,267 @@ +import json + +import pytest + +from database.tests.factories import OwnerFactory, RepositoryFactory +from services.ai_pr_review import Diff, LineInfo, perform_review +from 
shared.api_archive.archive import ArchiveService + +TEST_DIFF = """diff --git a/codecov_auth/signals.py b/codecov_auth/signals.py +index d728f92f..37f333fb 100644 +--- a/codecov_auth/signals.py ++++ b/codecov_auth/signals.py +@@ -1,10 +1,13 @@ ++import json + import logging + from datetime import datetime + ++from django.conf import settings + from django.db.models.signals import post_save + from django.dispatch import receiver ++from google.cloud import pubsub_v1 + +-from codecov_auth.models import Owner, OwnerProfile ++from codecov_auth.models import OrganizationLevelToken, Owner, OwnerProfile + + + @receiver(post_save, sender=Owner) +@@ -13,3 +16,34 @@ def create_owner_profile_when_owner_is_created( + ): + if created: + return OwnerProfile.objects.create(owner_id=instance.ownerid) ++ ++ ++_pubsub_publisher = None ++ ++ ++def _get_pubsub_publisher(): ++ global _pubsub_publisher ++ if not _pubsub_publisher: ++ _pubsub_publisher = pubsub_v1.PublisherClient() ++ return _pubsub_publisher ++ ++ ++@receiver( ++ post_save, sender=OrganizationLevelToken, dispatch_uid="shelter_sync_org_token" ++) ++def update_repository(sender, instance: OrganizationLevelToken, **kwargs): ++ pubsub_project_id = settings.SHELTER_PUBSUB_PROJECT_ID ++ topic_id = settings.SHELTER_PUBSUB_SYNC_REPO_TOPIC_ID ++ if pubsub_project_id and topic_id: ++ publisher = _get_pubsub_publisher() ++ topic_path = publisher.topic_path(pubsub_project_id, topic_id) ++ publisher.publish( ++ topic_path, ++ json.dumps( ++ { ++ "type": "org_token", ++ "sync": "one", ++ "id": instance.id, ++ } ++ ).encode("utf-8"), ++ ) +diff --git a/codecov_auth/tests/test_signals.py b/codecov_auth/tests/test_signals.py +new file mode 100644 +index 00000000..b2fb0642 +--- /dev/null ++++ b/codecov_auth/tests/test_signals.py +@@ -0,0 +1,26 @@ ++import os ++ ++import pytest ++from django.test import override_settings ++ ++from codecov_auth.tests.factories import OrganizationLevelTokenFactory ++ ++ ++@override_settings( ++ 
SHELTER_PUBSUB_PROJECT_ID="test-project-id", ++ SHELTER_PUBSUB_SYNC_REPO_TOPIC_ID="test-topic-id", ++) ++@pytest.mark.django_db ++def test_shelter_org_token_sync(mocker): ++ # this prevents the pubsub SDK from trying to load credentials ++ os.environ["PUBSUB_EMULATOR_HOST"] = "localhost" ++ ++ publish = mocker.patch("google.cloud.pubsub_v1.PublisherClient.publish") ++ ++ # this triggers the publish via Django signals ++ OrganizationLevelTokenFactory(id=91728376) ++ ++ publish.assert_called_once_with( ++ "projects/test-project-id/topics/test-topic-id", ++ b'{"type": "org_token", "sync": "one", "id": 91728376}', ++ ) +diff --git a/core/signals.py b/core/signals.py +index 77500d63..adffea32 100644 +--- a/core/signals.py ++++ b/core/signals.py +@@ -18,12 +18,19 @@ def _get_pubsub_publisher(): + + + @receiver(post_save, sender=Repository, dispatch_uid="shelter_sync_repo") +-def update_repository(sender, instance, **kwargs): ++def update_repository(sender, instance: Repository, **kwargs): + pubsub_project_id = settings.SHELTER_PUBSUB_PROJECT_ID + topic_id = settings.SHELTER_PUBSUB_SYNC_REPO_TOPIC_ID + if pubsub_project_id and topic_id: + publisher = _get_pubsub_publisher() + topic_path = publisher.topic_path(pubsub_project_id, topic_id) + publisher.publish( +- topic_path, json.dumps({"sync": instance.repoid}).encode("utf-8") ++ topic_path, ++ json.dumps( ++ { ++ "type": "repo", ++ "sync": "one", ++ "id": instance.repoid, ++ } ++ ).encode("utf-8"), + ) +diff --git a/core/tests/test_signals.py b/core/tests/test_signals.py +index b6eafc65..26a8c8e2 100644 +--- a/core/tests/test_signals.py ++++ b/core/tests/test_signals.py +@@ -21,5 +21,6 @@ def test_shelter_repo_sync(mocker): + RepositoryFactory(repoid=91728376) + + publish.assert_called_once_with( +- "projects/test-project-id/topics/test-topic-id", b'{"sync": 91728376}' ++ "projects/test-project-id/topics/test-topic-id", ++ b'{"type": "repo", "sync": "one", "id": 91728376}', + ) +""" + +config_params = { + "services": { + 
"openai": { + "api_key": "placeholder", # replace this temporarily if you need to regenerate the VCR cassettes + }, + "minio": { + "hash_key": "test-hash", + }, + }, +} + +torngit_token = { + "key": "placeholder", # replace this temporarily if you need to regenerate the VCR cassettes + "secret": None, + "username": "scott-codecov", +} + + +def test_review_index(): + diff = Diff(TEST_DIFF) + assert diff.line_info(29) == LineInfo( + file_path="codecov_auth/signals.py", position=23 + ) + assert diff.line_info(123) == LineInfo( + file_path="core/tests/test_signals.py", position=6 + ) + + +@pytest.mark.asyncio +async def test_perform_initial_review( + dbsession, codecov_vcr, mocker, mock_configuration, mock_storage +): + mock_configuration.set_params(config_params) + + bot_token = mocker.patch("shared.bots.repo_bots.get_repo_particular_bot_token") + bot_token.return_value = (torngit_token, None) + + owner = OwnerFactory.create(service="github", username="scott-codecov") + repository = RepositoryFactory.create(owner=owner, name="codecov-test") + dbsession.add(owner) + dbsession.add(repository) + dbsession.commit() + + archive = ArchiveService(repository) + + await perform_review(repository, 40) + + assert json.loads( + mock_storage.read_file( + "archive", f"ai_pr_review/{archive.storage_hash}/pull_40.json" + ) + ) == { + "commit_sha": "b607bb0e17e1b8d8699272a26e32986a933f9946", + "review_ids": [1740008775], + } + + +@pytest.mark.asyncio +async def test_perform_duplicate_review( + dbsession, codecov_vcr, mocker, mock_configuration, mock_storage +): + mock_configuration.set_params(config_params) + + bot_token = mocker.patch("shared.bots.repo_bots.get_repo_particular_bot_token") + bot_token.return_value = (torngit_token, None) + + owner = OwnerFactory(service="github", username="scott-codecov") + repository = RepositoryFactory(owner=owner, name="codecov-test") + dbsession.add(owner) + dbsession.add(repository) + dbsession.commit() + + archive = ArchiveService(repository) + 
+ mock_storage.write_file( + "archive", + f"ai_pr_review/{archive.storage_hash}/pull_40.json", + json.dumps( + { + "commit_sha": "b607bb0e17e1b8d8699272a26e32986a933f9946", + "review_ids": [1740008775], + } + ), + ) + + perform = mocker.patch("services.ai_pr_review.Review.perform") + perform.return_value = None + + await perform_review(repository, 40) + + # noop - we already made a review for this sha + assert not perform.called + + +@pytest.mark.asyncio +async def test_perform_new_commit( + dbsession, codecov_vcr, mocker, mock_configuration, mock_storage +): + mock_configuration.set_params(config_params) + + bot_token = mocker.patch("shared.bots.repo_bots.get_repo_particular_bot_token") + bot_token.return_value = (torngit_token, None) + + owner = OwnerFactory(service="github", username="scott-codecov") + repository = RepositoryFactory(owner=owner, name="codecov-test") + dbsession.add(owner) + dbsession.add(repository) + dbsession.commit() + + archive = ArchiveService(repository) + + mock_storage.write_file( + "archive", + f"ai_pr_review/{archive.storage_hash}/pull_40.json", + json.dumps( + { + "commit_sha": "b607bb0e17e1b8d8699272a26e32986a933f9946", + "review_ids": [1740008775], + } + ), + ) + + await perform_review(repository, 40) + + assert json.loads( + mock_storage.read_file( + "archive", + f"ai_pr_review/{archive.storage_hash}/pull_40.json", + ) + ) == { + "commit_sha": "5c64a5143951193dde7b14c14611eebe1025f862", + "review_ids": [1740008775, 1740017976], + } diff --git a/sample_app/tests/test_billing.py b/sample_app/tests/test_billing.py new file mode 100644 index 0000000..bd3050a --- /dev/null +++ b/sample_app/tests/test_billing.py @@ -0,0 +1,74 @@ +import pytest +from django.test import override_settings + +from database.tests.factories import OwnerFactory +from shared.plan.constants import PlanName +from shared.plan.service import PlanService +from tests.helpers import mock_all_plans_and_tiers + + +class TestBillingServiceTestCase: + """ + BillingService 
is deprecated - use PlanService instead. + """ + + @pytest.fixture(autouse=True) + def setup(self): + mock_all_plans_and_tiers() + + @pytest.mark.django_db + def test_pr_author_plan_check(self, request, dbsession, with_sql_functions): + owner = OwnerFactory.create(service="github", plan="users-pr-inappm") + dbsession.add(owner) + dbsession.flush() + plan = PlanService(owner) + assert plan.is_pr_billing_plan + + @pytest.mark.django_db + @override_settings(IS_ENTERPRISE=True) + def test_pr_author_enterprise_plan_check( + self, request, dbsession, mock_configuration, with_sql_functions + ): + owner = OwnerFactory.create(service="github") + dbsession.add(owner) + dbsession.flush() + + encrypted_license = "wxWEJyYgIcFpi6nBSyKQZQeaQ9Eqpo3SXyUomAqQOzOFjdYB3A8fFM1rm+kOt2ehy9w95AzrQqrqfxi9HJIb2zLOMOB9tSy52OykVCzFtKPBNsXU/y5pQKOfV7iI3w9CHFh3tDwSwgjg8UsMXwQPOhrpvl2GdHpwEhFdaM2O3vY7iElFgZfk5D9E7qEnp+WysQwHKxDeKLI7jWCnBCBJLDjBJRSz0H7AfU55RQDqtTrnR+rsLDHOzJ80/VxwVYhb" + mock_configuration.params["setup"]["enterprise_license"] = encrypted_license + mock_configuration.params["setup"]["codecov_dashboard_url"] = ( + "https://codecov.mysite.com" + ) + + plan = PlanService(owner) + + assert plan.is_pr_billing_plan + + @pytest.mark.django_db + def test_plan_not_pr_author(self, request, dbsession, with_sql_functions): + owner = OwnerFactory.create( + service="github", plan=PlanName.CODECOV_PRO_MONTHLY_LEGACY.value + ) + dbsession.add(owner) + dbsession.flush() + + plan = PlanService(owner) + + assert not plan.is_pr_billing_plan + + @pytest.mark.django_db + @override_settings(IS_ENTERPRISE=True) + def test_pr_author_enterprise_plan_check_non_pr_plan( + self, request, dbsession, mocker, mock_configuration, with_sql_functions + ): + owner = OwnerFactory.create(service="github") + dbsession.add(owner) + dbsession.flush() + + encrypted_license = "0dRbhbzp8TVFQp7P4e2ES9lSfyQlTo8J7LQ" + mock_configuration.params["setup"]["enterprise_license"] = encrypted_license + 
mock_configuration.params["setup"]["codecov_dashboard_url"] = ( + "https://codeov.mysite.com" + ) + plan = PlanService(owner) + + assert not plan.is_pr_billing_plan diff --git a/sample_app/tests/test_commit_status.py b/sample_app/tests/test_commit_status.py new file mode 100644 index 0000000..2a6512c --- /dev/null +++ b/sample_app/tests/test_commit_status.py @@ -0,0 +1,67 @@ +from services.commit_status import RepositoryCIFilter, _ci_providers + + +class TestCommitStatus: + def test_ci_providers_no_config(self, mock_configuration): + assert _ci_providers() == [] + + def test_ci_providers_config_list(self, mock_configuration): + mock_configuration.params["services"]["ci_providers"] = [ + "ser_1", + "la_3", + "something_4", + ] + assert _ci_providers() == ["ser_1", "la_3", "something_4"] + + def test_ci_providers_config_string(self, mock_configuration): + mock_configuration.params["services"]["ci_providers"] = ( + "ser_1, la_3, something_4" + ) + assert sorted(_ci_providers()) == sorted(["ser_1", "la_3", "something_4"]) + + +class TestRepositoryCIFilter: + def test_filter(self): + service = RepositoryCIFilter( + {"codecov": {"ci": ["simple", "!excluded", "another", "!reject"]}} + ) + assert service._filter({"url": "https://www.example.com", "context": "simple"}) + assert service._filter({"url": "https://www.another.simple", "context": "ok"}) + assert service._filter( + {"url": "http://www.another.simple", "context": "reject"} + ) + assert not service._filter( + {"url": "http://www.excluded.simple", "context": "reject"} + ) + assert not service._filter( + {"url": "http://www.another.reject", "context": "simple"} + ) + assert not service._filter( + {"url": "http://www.example.com", "context": "nothing"} + ) + assert not service._filter( + {"url": "http://www.example.com", "context": "excluded"} + ) + assert not service._filter( + {"url": "http://reject.example.com", "context": "ok"} + ) + assert not service._filter({"url": "http://www.reject.com", "context": "ok"}) + 
assert not service._filter( + {"url": "http://www.reject.com", "context": "simple"} + ) + assert not service._filter( + {"url": "http://www.ok.com", "context": "simple/reject"} + ) + assert service._filter({"url": "http://www.ok.com", "context": "jenkins build"}) + + def test_filter_jenkins_excluded(self): + service = RepositoryCIFilter( + { + "codecov": { + "ci": ["simple", "!excluded", "!jenkins", "another", "!reject"] + } + } + ) + assert not service._filter( + {"url": "http://www.ok.com", "context": "jenkins build"} + ) diff --git a/sample_app/tests/test_decoration.py b/sample_app/tests/test_decoration.py new file mode 100644 index 0000000..f0a2a10 --- /dev/null +++ b/sample_app/tests/test_decoration.py @@ -0,0 +1,995 @@ +from datetime import datetime, timedelta + +import pytest + +from database.enums import TrialStatus +from database.tests.factories import ( + CommitFactory, + OwnerFactory, + PullFactory, + ReportFactory, + RepositoryFactory, + UploadFactory, +) +from services.decoration import ( + BOT_USER_EMAILS, + Decoration, + _is_bot_account, + determine_decoration_details, + determine_uploads_used, +) +from services.repository import EnrichedPull +from shared.django_apps.codecov_auth.tests.factories import ( + OwnerFactory as DjangoOwnerFactory, +) +from shared.django_apps.core.tests.factories import CommitFactory as DjangoCommitFactory +from shared.django_apps.core.tests.factories import ( + RepositoryFactory as DjangoRepositoryFactory, +) +from shared.django_apps.reports.models import ReportSession, ReportType +from shared.django_apps.reports.tests.factories import CommitReportFactory +from shared.django_apps.reports.tests.factories import ( + UploadFactory as DjangoUploadFactory, +) +from shared.plan.constants import DEFAULT_FREE_PLAN +from shared.plan.service import PlanService +from shared.upload.utils import UploaderType, insert_coverage_measurement +from shared.utils.test_utils import mock_config_helper +from tests.helpers import 
mock_all_plans_and_tiers + + +@pytest.fixture +def enriched_pull(dbsession, request): + repository = RepositoryFactory.create( + owner__username="codecov", + owner__service="github", + owner__unencrypted_oauth_token="testtlxuu2kfef3km1fbecdlmnb2nvpikvmoadi3", + owner__plan="users-pr-inappm", + name="example-python", + image_token="abcdefghij", + private=True, + ) + dbsession.add(repository) + dbsession.flush() + base_commit = CommitFactory.create( + repository=repository, + author__username=f"base{request.node.name[-20:]}", + author__service="github", + ) + head_commit = CommitFactory.create( + repository=repository, + author__username=f"head{request.node.name[-20:]}", + author__service="github", + ) + pull = PullFactory.create( + author__service="github", + repository=repository, + base=base_commit.commitid, + head=head_commit.commitid, + state="merged", + ) + dbsession.add(base_commit) + dbsession.add(head_commit) + dbsession.add(pull) + dbsession.flush() + provider_pull = { + "author": {"id": "7123", "username": "tomcat"}, + "base": { + "branch": "master", + "commitid": "b92edba44fdd29fcc506317cc3ddeae1a723dd08", + }, + "head": { + "branch": "reason/some-testing", + "commitid": "a06aef4356ca35b34c5486269585288489e578db", + }, + "number": "1", + "id": "1", + "state": "open", + "title": "Creating new code for reasons no one knows", + } + return EnrichedPull(database_pull=pull, provider_pull=provider_pull) + + +@pytest.fixture +def gitlab_root_group(dbsession): + root_group = OwnerFactory.create( + username="root_group", + service="gitlab", + unencrypted_oauth_token="testtlxuu2kfef3km1fbecdlmnb2nvpikvmoadi3", + plan="users-pr-inappm", + plan_activated_users=[], + plan_auto_activate=False, + plan_user_count=3, + ) + dbsession.add(root_group) + dbsession.flush() + return root_group + + +@pytest.fixture +def gitlab_middle_group(dbsession, gitlab_root_group): + mid_group = OwnerFactory.create( + username="mid_group", + service="gitlab", + 
unencrypted_oauth_token="testtlxuu2kfef3km1fbecdlmnb2nvpikvmoadi4", + plan="users-pr-inappy", + plan_activated_users=[], + parent_service_id=gitlab_root_group.service_id, + plan_auto_activate=True, + ) + dbsession.add(mid_group) + dbsession.flush() + return mid_group + + +@pytest.fixture +def gitlab_enriched_pull_subgroup(dbsession, gitlab_middle_group): + subgroup = OwnerFactory.create( + username="subgroup", + service="gitlab", + unencrypted_oauth_token="testtlxuu2kfef3km1fbecdlmnb2nvpikvmoadi3", + plan=None, + parent_service_id=gitlab_middle_group.service_id, + plan_activated_users=[], + plan_auto_activate=True, + ) + dbsession.add(subgroup) + dbsession.flush() + + repository = RepositoryFactory.create( + owner=subgroup, name="example-python", image_token="abcdefghij", private=True + ) + dbsession.add(repository) + dbsession.flush() + base_commit = CommitFactory.create(repository=repository) + head_commit = CommitFactory.create(repository=repository) + pull = PullFactory.create( + repository=repository, + base=base_commit.commitid, + head=head_commit.commitid, + state="merged", + ) + dbsession.add(base_commit) + dbsession.add(head_commit) + dbsession.add(pull) + dbsession.flush() + provider_pull = { + "author": {"id": "7123", "username": "tomcat"}, + "base": { + "branch": "master", + "commitid": "b92edba44fdd29fcc506317cc3ddeae1a723dd08", + }, + "head": { + "branch": "reason/some-testing", + "commitid": "a06aef4356ca35b34c5486269585288489e578db", + }, + "number": "1", + "id": "1", + "state": "open", + "title": "Creating new code for reasons no one knows", + } + return EnrichedPull(database_pull=pull, provider_pull=provider_pull) + + +class TestDecorationServiceTestCase: + @pytest.fixture(autouse=True) + def setup(self): + mock_all_plans_and_tiers() + + @pytest.mark.django_db + def test_decoration_type_basic_plan_upload_limit( + self, enriched_pull, dbsession, mocker + ): + mocker.patch("services.license.is_enterprise", return_value=False) + pr_author = 
OwnerFactory.create( + service="github", + username=enriched_pull.provider_pull["author"]["username"], + service_id=enriched_pull.provider_pull["author"]["id"], + ) + dbsession.add(pr_author) + dbsession.flush() + + enriched_pull.database_pull.repository.owner.plan = DEFAULT_FREE_PLAN + enriched_pull.database_pull.repository.private = True + + commit = CommitFactory.create( + repository=enriched_pull.database_pull.repository, + author__service="github", + timestamp=datetime.now(), + ) + + report = ReportFactory.create( + commit=commit, report_type=ReportType.COVERAGE.value + ) + for i in range(249): + upload = UploadFactory.create(report=report, storage_path="url") + dbsession.add(upload) + insert_coverage_measurement( + owner_id=enriched_pull.database_pull.repository.owner.ownerid, + repo_id=enriched_pull.database_pull.repository.repoid, + commit_id=commit.id, + upload_id=upload.id, + uploader_used=UploaderType.LEGACY.value, + private_repo=enriched_pull.database_pull.repository.private, + report_type=report.report_type, + ) + dbsession.flush() + + decoration_details = determine_decoration_details(enriched_pull) + assert decoration_details.decoration_type != Decoration.upload_limit + assert decoration_details.reason != "Org has exceeded the upload limit" + + upload = UploadFactory.create(report=report, storage_path="url") + dbsession.add(upload) + dbsession.flush() + + insert_coverage_measurement( + owner_id=enriched_pull.database_pull.repository.owner.ownerid, + repo_id=enriched_pull.database_pull.repository.repoid, + commit_id=commit.id, + upload_id=upload.id, + uploader_used=UploaderType.LEGACY.value, + private_repo=enriched_pull.database_pull.repository.private, + report_type=report.report_type, + ) + + decoration_details = determine_decoration_details(enriched_pull) + assert decoration_details.decoration_type == Decoration.upload_limit + assert decoration_details.reason == "Org has exceeded the upload limit" + + @pytest.mark.django_db + def 
test_decoration_type_team_plan_upload_limit( + self, enriched_pull, dbsession, mocker + ): + mocker.patch("services.license.is_enterprise", return_value=False) + pr_author = OwnerFactory.create( + service="github", + username=enriched_pull.provider_pull["author"]["username"], + service_id=enriched_pull.provider_pull["author"]["id"], + ) + dbsession.add(pr_author) + dbsession.flush() + + enriched_pull.database_pull.repository.owner.plan = "users-teamm" + enriched_pull.database_pull.repository.private = True + + commit = CommitFactory.create( + repository=enriched_pull.database_pull.repository, + author__service="github", + timestamp=datetime.now(), + ) + + report = ReportFactory.create( + commit=commit, report_type=ReportType.COVERAGE.value + ) + for i in range(2499): + upload = UploadFactory.create(report=report, storage_path="url") + dbsession.add(upload) + insert_coverage_measurement( + owner_id=enriched_pull.database_pull.repository.owner.ownerid, + repo_id=enriched_pull.database_pull.repository.repoid, + commit_id=commit.id, + upload_id=upload.id, + uploader_used=UploaderType.LEGACY.value, + private_repo=enriched_pull.database_pull.repository.private, + report_type=report.report_type, + ) + dbsession.flush() + + decoration_details = determine_decoration_details(enriched_pull) + assert decoration_details.decoration_type != Decoration.upload_limit + assert decoration_details.reason != "Org has exceeded the upload limit" + + upload = UploadFactory.create(report=report, storage_path="url") + dbsession.add(upload) + dbsession.flush() + insert_coverage_measurement( + owner_id=enriched_pull.database_pull.repository.owner.ownerid, + repo_id=enriched_pull.database_pull.repository.repoid, + commit_id=commit.id, + upload_id=upload.id, + uploader_used=UploaderType.LEGACY.value, + private_repo=enriched_pull.database_pull.repository.private, + report_type=report.report_type, + ) + + decoration_details = determine_decoration_details(enriched_pull) + assert 
decoration_details.decoration_type == Decoration.upload_limit + assert decoration_details.reason == "Org has exceeded the upload limit" + + @pytest.mark.django_db + def test_decoration_type_unlimited_upload_on_enterprise( + self, enriched_pull, dbsession, mocker, mock_configuration + ): + mocker.patch("services.license.is_enterprise", return_value=True) + encrypted_license = "wxWEJyYgIcFpi6nBSyKQZQeaQ9Eqpo3SXyUomAqQOzOFjdYB3A8fFM1rm+kOt2ehy9w95AzrQqrqfxi9HJIb2zLOMOB9tSy52OykVCzFtKPBNsXU/y5pQKOfV7iI3w9CHFh3tDwSwgjg8UsMXwQPOhrpvl2GdHpwEhFdaM2O3vY7iElFgZfk5D9E7qEnp+WysQwHKxDeKLI7jWCnBCBJLDjBJRSz0H7AfU55RQDqtTrnR+rsLDHOzJ80/VxwVYhb" + mock_configuration.params["setup"]["enterprise_license"] = encrypted_license + mock_configuration.params["setup"]["codecov_dashboard_url"] = ( + "https://codecov.mysite.com" + ) + + pr_author = OwnerFactory.create( + service="github", + username=enriched_pull.provider_pull["author"]["username"], + service_id=enriched_pull.provider_pull["author"]["id"], + ) + dbsession.add(pr_author) + dbsession.flush() + + enriched_pull.database_pull.repository.owner.plan = DEFAULT_FREE_PLAN + enriched_pull.database_pull.repository.private = True + + commit = CommitFactory.create( + repository=enriched_pull.database_pull.repository, + author__service="github", + timestamp=datetime.now(), + ) + + report = ReportFactory.create(commit=commit) + for i in range(250): + upload = UploadFactory.create(report=report, storage_path="url") + dbsession.add(upload) + insert_coverage_measurement( + owner_id=enriched_pull.database_pull.repository.owner.ownerid, + repo_id=enriched_pull.database_pull.repository.repoid, + commit_id=commit.id, + upload_id=upload.id, + uploader_used=UploaderType.LEGACY.value, + private_repo=enriched_pull.database_pull.repository.private, + report_type=report.report_type, + ) + dbsession.flush() + + decoration_details = determine_decoration_details(enriched_pull) + # self-hosted should not be limited with their uploads + assert 
decoration_details.decoration_type != Decoration.upload_limit + assert decoration_details.reason != "Org has exceeded the upload limit" + + @pytest.mark.django_db + def test_uploads_used_with_expired_trial(self, mocker): + owner = DjangoOwnerFactory( + service="github", + trial_status=TrialStatus.EXPIRED.value, + trial_start_date=datetime.now() + timedelta(days=-10), + trial_end_date=datetime.now() + timedelta(days=-2), + plan=DEFAULT_FREE_PLAN, + ) + repository = DjangoRepositoryFactory( + author=owner, + private=True, + ) + commit = DjangoCommitFactory( + repository=repository, + author__service="github", + timestamp=datetime.now(), + ) + report = CommitReportFactory( + commit=commit, report_type=ReportType.COVERAGE.value + ) + + report_before_trial = DjangoUploadFactory(report=report, storage_path="url") + report_before_trial.created_at += timedelta(days=-12) + report_before_trial.save() + upload_before_trial = insert_coverage_measurement( + owner_id=owner.ownerid, + repo_id=repository.repoid, + commit_id=commit.id, + upload_id=report_before_trial.id, + uploader_used=UploaderType.LEGACY.value, + private_repo=repository.private, + report_type=report.report_type, + ) + upload_before_trial.created_at += timedelta(days=-12) + upload_before_trial.save() + + report_during_trial = DjangoUploadFactory(report=report, storage_path="url") + report_during_trial.created_at += timedelta(days=-5) + report_during_trial.save() + upload_during_trial = insert_coverage_measurement( + owner_id=owner.ownerid, + repo_id=repository.repoid, + commit_id=commit.id, + upload_id=report_during_trial.id, + uploader_used=UploaderType.LEGACY.value, + private_repo=repository.private, + report_type=report.report_type, + ) + upload_during_trial.created_at += timedelta(days=-5) + upload_during_trial.save() + + report_after_trial = DjangoUploadFactory(report=report, storage_path="url") + insert_coverage_measurement( + owner_id=owner.ownerid, + repo_id=repository.repoid, + commit_id=commit.id, + 
upload_id=report_after_trial.id, + uploader_used=UploaderType.LEGACY.value, + private_repo=repository.private, + report_type=report.report_type, + ) + + uploads_present = ReportSession.objects.all() + assert len(uploads_present) == 3 + + mock_config_helper(mocker, configs={"setup.upload_throttling_enabled": True}) + plan_service = PlanService(current_org=owner) + uploads_used = determine_uploads_used(plan_service=plan_service) + + assert uploads_used == 2 + + @pytest.mark.django_db + def test_get_decoration_type_no_pull(self, mocker): + decoration_details = determine_decoration_details(None) + + assert decoration_details.decoration_type == Decoration.standard + assert decoration_details.reason == "No pull" + assert decoration_details.should_attempt_author_auto_activation is False + + @pytest.mark.django_db + def test_get_decoration_type_no_provider_pull(self, mocker, enriched_pull): + enriched_pull.provider_pull = None + + decoration_details = determine_decoration_details(enriched_pull) + + assert decoration_details.decoration_type == Decoration.standard + assert ( + decoration_details.reason + == "Can't determine PR author - no pull info from provider" + ) + assert decoration_details.should_attempt_author_auto_activation is False + + @pytest.mark.django_db + def test_get_decoration_type_public_repo(self, dbsession, mocker, enriched_pull): + enriched_pull.database_pull.repository.private = False + dbsession.flush() + + decoration_details = determine_decoration_details(enriched_pull) + + assert decoration_details.decoration_type == Decoration.standard + assert decoration_details.reason == "Public repo" + assert decoration_details.should_attempt_author_auto_activation is False + + @pytest.mark.django_db + def test_get_decoration_type_not_pr_plan(self, dbsession, mocker, enriched_pull): + enriched_pull.database_pull.repository.owner.plan = "users-inappm" + dbsession.flush() + + decoration_details = determine_decoration_details(enriched_pull) + + assert 
decoration_details.decoration_type == Decoration.standard + assert decoration_details.reason == "Org not on PR plan" + assert decoration_details.should_attempt_author_auto_activation is False + + @pytest.mark.django_db + # what is a users plan? + def test_get_decoration_type_for_users_plan(self, dbsession): + repository = RepositoryFactory.create( + owner__username="drazisil-org", + owner__service="github", + owner__unencrypted_oauth_token="testtfasdfasdflxuu2kfer2ef23", + owner__plan=DEFAULT_FREE_PLAN, + private=True, + ) + dbsession.add(repository) + dbsession.flush() + base_commit = CommitFactory.create( + repository=repository, + author__service="github", + ) + head_commit = CommitFactory.create( + repository=repository, + author__service="github", + ) + pull = PullFactory.create( + author__service="github", + repository=repository, + base=base_commit.commitid, + head=head_commit.commitid, + state="merged", + ) + dbsession.add(base_commit) + dbsession.add(head_commit) + dbsession.add(pull) + dbsession.flush() + provider_pull = { + "author": {"id": "7123", "username": "tomcat"}, + "base": { + "branch": "master", + "commitid": "b92edba44fdd29fcc506317cc3ddeae1a723dd08", + }, + "head": { + "branch": "reason/some-testing", + "commitid": "a06aef4356ca35b34c5486269585288489e578db", + }, + "number": "1", + "id": "1", + "state": "open", + "title": "Creating new code for reasons no one knows", + } + enriched_pull_whitelisted = EnrichedPull( + database_pull=pull, provider_pull=provider_pull + ) + + pr_author = OwnerFactory.create( + service="github", + username=enriched_pull_whitelisted.provider_pull["author"]["username"], + service_id=enriched_pull_whitelisted.provider_pull["author"]["id"], + ) + dbsession.add(pr_author) + dbsession.flush() + + decoration_details = determine_decoration_details(enriched_pull_whitelisted) + dbsession.commit() + + assert decoration_details.decoration_type == Decoration.upgrade + assert decoration_details.reason == "User must be manually 
activated" + assert decoration_details.should_attempt_author_auto_activation is False + assert ( + pr_author.ownerid + not in enriched_pull_whitelisted.database_pull.repository.owner.plan_activated_users + ) + + @pytest.mark.django_db + def test_get_decoration_type_pr_author_not_in_db(self, mocker, enriched_pull): + enriched_pull.provider_pull["author"]["id"] = "190" + + decoration_details = determine_decoration_details(enriched_pull) + + assert decoration_details.decoration_type == Decoration.upgrade + assert decoration_details.reason == "PR author not found in database" + assert decoration_details.should_attempt_author_auto_activation is False + + @pytest.mark.django_db + def test_get_decoration_type_pr_author_manual_activation_required( + self, dbsession, mocker, enriched_pull, with_sql_functions + ): + enriched_pull.database_pull.repository.owner.plan_user_count = 3 + enriched_pull.database_pull.repository.owner.plan_activated_users = [] + enriched_pull.database_pull.repository.owner.plan_auto_activate = False + + pr_author = OwnerFactory.create( + service="github", + username=enriched_pull.provider_pull["author"]["username"], + service_id=enriched_pull.provider_pull["author"]["id"], + ) + dbsession.add(pr_author) + dbsession.flush() + + decoration_details = determine_decoration_details(enriched_pull) + dbsession.commit() + + assert decoration_details.decoration_type == Decoration.upgrade + assert decoration_details.reason == "User must be manually activated" + assert decoration_details.should_attempt_author_auto_activation is False + assert ( + pr_author.ownerid + not in enriched_pull.database_pull.repository.owner.plan_activated_users + ) + + @pytest.mark.django_db + @pytest.mark.parametrize( + "is_bot,param,value", + [ + (True, "email", "dependabot[bot]@users.noreply.github.com"), + (True, "email", "29139614+renovate[bot]@users.noreply.github.com"), + (True, "email", "157164994+sentry-autofix[bot]@users.noreply.github.com"), + (True, "service_id", 
"29139614"), + (True, "service_id", "157164994"), + (False, None, None), + ], + ) + def test_is_bot_account(self, is_bot, param, value): + pr_author = OwnerFactory.create( + service="github", + ) + if is_bot and param == "email": + pr_author.email = value + elif is_bot and param == "service_id": + pr_author.service_id = value + assert _is_bot_account(pr_author) == is_bot + + @pytest.mark.django_db + def test_get_decoration_type_bot(self, dbsession, mocker, enriched_pull): + pr_author = OwnerFactory.create( + service="github", + username=enriched_pull.provider_pull["author"]["username"], + email=BOT_USER_EMAILS[0], + service_id=enriched_pull.provider_pull["author"]["id"], + ) + dbsession.add(pr_author) + dbsession.flush() + + decoration_details = determine_decoration_details(enriched_pull) + dbsession.commit() + + assert decoration_details.decoration_type == Decoration.standard + assert ( + decoration_details.reason + == "Bot user detected (does not need to be activated)" + ) + assert decoration_details.should_attempt_author_auto_activation is False + + @pytest.mark.django_db + def test_get_decoration_type_pr_author_already_active( + self, dbsession, mocker, enriched_pull + ): + pr_author = OwnerFactory.create( + service="github", + username=enriched_pull.provider_pull["author"]["username"], + service_id=enriched_pull.provider_pull["author"]["id"], + ) + dbsession.add(pr_author) + dbsession.flush() + enriched_pull.database_pull.repository.owner.plan_user_count = 3 + enriched_pull.database_pull.repository.owner.plan_activated_users = [ + pr_author.ownerid + ] + enriched_pull.database_pull.repository.owner.plan_auto_activate = False + dbsession.flush() + + decoration_details = determine_decoration_details(enriched_pull) + dbsession.commit() + + assert decoration_details.decoration_type == Decoration.standard + assert decoration_details.reason == "User is currently activated" + assert decoration_details.should_attempt_author_auto_activation is False + + 
@pytest.mark.django_db + def test_get_decoration_type_should_attempt_pr_author_auto_activation( + self, dbsession, mocker, enriched_pull + ): + enriched_pull.database_pull.repository.owner.plan_user_count = 3 + enriched_pull.database_pull.repository.owner.plan_activated_users = [] + enriched_pull.database_pull.repository.owner.plan_auto_activate = True + + pr_author = OwnerFactory.create( + service="github", + username=enriched_pull.provider_pull["author"]["username"], + service_id=enriched_pull.provider_pull["author"]["id"], + ) + dbsession.add(pr_author) + dbsession.flush() + + decoration_details = determine_decoration_details(enriched_pull) + dbsession.commit() + + assert decoration_details.decoration_type == Decoration.upgrade + assert decoration_details.reason == "User must be activated" + assert decoration_details.should_attempt_author_auto_activation is True + assert ( + decoration_details.activation_org_ownerid + == enriched_pull.database_pull.repository.owner.ownerid + ) + assert decoration_details.activation_author_ownerid == pr_author.ownerid + # activation hasnt happened yet + assert ( + pr_author.ownerid + not in enriched_pull.database_pull.repository.owner.plan_activated_users + ) + + @pytest.mark.django_db + def test_get_decoration_type_should_attempt_pr_author_auto_activation_users_developer( + self, dbsession, mocker, enriched_pull + ): + enriched_pull.database_pull.repository.owner.plan = DEFAULT_FREE_PLAN + enriched_pull.database_pull.repository.owner.plan_user_count = 1 + enriched_pull.database_pull.repository.owner.plan_activated_users = [] + enriched_pull.database_pull.repository.owner.plan_auto_activate = True + + pr_author = OwnerFactory.create( + service="github", + username=enriched_pull.provider_pull["author"]["username"], + service_id=enriched_pull.provider_pull["author"]["id"], + ) + dbsession.add(pr_author) + dbsession.flush() + + decoration_details = determine_decoration_details(enriched_pull) + dbsession.commit() + + assert 
decoration_details.decoration_type == Decoration.upgrade + assert decoration_details.reason == "User must be activated" + assert decoration_details.should_attempt_author_auto_activation is True + assert ( + decoration_details.activation_org_ownerid + == enriched_pull.database_pull.repository.owner.ownerid + ) + assert decoration_details.activation_author_ownerid == pr_author.ownerid + # activation hasnt happened yet + assert ( + pr_author.ownerid + not in enriched_pull.database_pull.repository.owner.plan_activated_users + ) + + @pytest.mark.django_db + def test_get_decoration_type_passing_empty_upload( + self, dbsession, mocker, enriched_pull + ): + enriched_pull.database_pull.repository.private = False + dbsession.flush() + + decoration_details = determine_decoration_details(enriched_pull, "pass") + + assert decoration_details.decoration_type == Decoration.passing_empty_upload + assert decoration_details.reason == "Non testable files got changed." + assert decoration_details.should_attempt_author_auto_activation is False + + @pytest.mark.django_db + def test_get_decoration_type_failing_empty_upload( + self, dbsession, mocker, enriched_pull + ): + enriched_pull.database_pull.repository.private = False + dbsession.flush() + + decoration_details = determine_decoration_details(enriched_pull, "fail") + + assert decoration_details.decoration_type == Decoration.failing_empty_upload + assert decoration_details.reason == "Testable files got changed." + assert decoration_details.should_attempt_author_auto_activation is False + + @pytest.mark.django_db + def test_get_decoration_type_processing_upload( + self, dbsession, mocker, enriched_pull + ): + enriched_pull.database_pull.repository.private = False + dbsession.flush() + + decoration_details = determine_decoration_details(enriched_pull, "processing") + + assert decoration_details.decoration_type == Decoration.processing_upload + assert decoration_details.reason == "Upload is still processing." 
+ assert decoration_details.should_attempt_author_auto_activation is False + + +class TestDecorationServiceGitLabTestCase: + @pytest.fixture(autouse=True) + def setup(self): + mock_all_plans_and_tiers() + + @pytest.mark.django_db + def test_get_decoration_type_not_pr_plan_gitlab_subgroup( + self, + dbsession, + mocker, + gitlab_root_group, + gitlab_enriched_pull_subgroup, + with_sql_functions, + ): + gitlab_root_group.plan = "users-inappm" + dbsession.flush() + + decoration_details = determine_decoration_details(gitlab_enriched_pull_subgroup) + + assert decoration_details.decoration_type == Decoration.standard + assert decoration_details.reason == "Org not on PR plan" + assert decoration_details.should_attempt_author_auto_activation is False + + @pytest.mark.django_db + def test_get_decoration_type_pr_author_not_in_db_gitlab_subgroup( + self, + mocker, + gitlab_root_group, + gitlab_enriched_pull_subgroup, + with_sql_functions, + ): + gitlab_enriched_pull_subgroup.provider_pull["author"]["id"] = "190" + + decoration_details = determine_decoration_details(gitlab_enriched_pull_subgroup) + + assert decoration_details.decoration_type == Decoration.upgrade + assert decoration_details.reason == "PR author not found in database" + assert decoration_details.should_attempt_author_auto_activation is False + + @pytest.mark.django_db + def test_get_decoration_type_pr_author_manual_activation_required_gitlab_subgroup( + self, + dbsession, + mocker, + gitlab_root_group, + gitlab_enriched_pull_subgroup, + with_sql_functions, + ): + gitlab_root_group.plan_auto_activate = False + # setting on child group should not matter, uses setting from root + child_group = gitlab_enriched_pull_subgroup.database_pull.repository.owner + child_group.plan_auto_activate = True + + pr_author = OwnerFactory.create( + username=gitlab_enriched_pull_subgroup.provider_pull["author"]["username"], + service="gitlab", + service_id=gitlab_enriched_pull_subgroup.provider_pull["author"]["id"], + ) + 
dbsession.add(pr_author) + dbsession.flush() + + decoration_details = determine_decoration_details(gitlab_enriched_pull_subgroup) + dbsession.commit() + + assert decoration_details.decoration_type == Decoration.upgrade + assert decoration_details.reason == "User must be manually activated" + assert decoration_details.should_attempt_author_auto_activation is False + assert decoration_details.activation_org_ownerid is None + assert decoration_details.activation_author_ownerid is None + + # allow auto-activate on root + gitlab_root_group.plan_auto_activate = True + # setting on child group should not matter, uses setting from root + child_group.plan_auto_activate = False + decoration_details = determine_decoration_details(gitlab_enriched_pull_subgroup) + dbsession.commit() + + assert decoration_details.decoration_type == Decoration.upgrade + assert decoration_details.reason == "User must be activated" + assert decoration_details.should_attempt_author_auto_activation is True + assert decoration_details.activation_org_ownerid == gitlab_root_group.ownerid + assert decoration_details.activation_author_ownerid == pr_author.ownerid + # activation hasn't happened yet + assert pr_author.ownerid not in gitlab_root_group.plan_activated_users + + @pytest.mark.django_db + def test_get_decoration_type_pr_author_already_active_subgroup( + self, + dbsession, + mocker, + gitlab_root_group, + gitlab_enriched_pull_subgroup, + with_sql_functions, + ): + pr_author = OwnerFactory.create( + username=gitlab_enriched_pull_subgroup.provider_pull["author"]["username"], + service="gitlab", + service_id=gitlab_enriched_pull_subgroup.provider_pull["author"]["id"], + ) + dbsession.add(pr_author) + dbsession.flush() + gitlab_root_group.plan_activated_users = [pr_author.ownerid] + gitlab_root_group.plan_auto_activate = False + dbsession.flush() + + decoration_details = determine_decoration_details(gitlab_enriched_pull_subgroup) + dbsession.commit() + + assert decoration_details.decoration_type == 
Decoration.standard + assert decoration_details.reason == "User is currently activated" + assert decoration_details.should_attempt_author_auto_activation is False + assert decoration_details.activation_org_ownerid is None + assert decoration_details.activation_author_ownerid is None + + @pytest.mark.django_db + def test_get_decoration_type_should_attempt_pr_author_auto_activation( + self, + dbsession, + mocker, + gitlab_root_group, + gitlab_enriched_pull_subgroup, + with_sql_functions, + ): + pr_author = OwnerFactory.create( + username=gitlab_enriched_pull_subgroup.provider_pull["author"]["username"], + service="gitlab", + service_id=gitlab_enriched_pull_subgroup.provider_pull["author"]["id"], + ) + dbsession.add(pr_author) + dbsession.flush() + gitlab_root_group.plan_user_count = 3 + gitlab_root_group.plan_activated_users = [] + gitlab_root_group.plan_auto_activate = True + dbsession.flush() + + decoration_details = determine_decoration_details(gitlab_enriched_pull_subgroup) + dbsession.commit() + + assert decoration_details.decoration_type == Decoration.upgrade + assert decoration_details.reason == "User must be activated" + assert decoration_details.should_attempt_author_auto_activation is True + assert decoration_details.activation_org_ownerid == gitlab_root_group.ownerid + assert decoration_details.activation_author_ownerid == pr_author.ownerid + # activation hasn't happened yet + assert pr_author.ownerid not in gitlab_root_group.plan_activated_users + + @pytest.mark.django_db + def test_get_decoration_type_owner_activated_users_null( + self, dbsession, mocker, enriched_pull + ): + enriched_pull.database_pull.repository.owner.plan_user_count = 3 + enriched_pull.database_pull.repository.owner.plan_activated_users = None + enriched_pull.database_pull.repository.owner.plan_auto_activate = True + + pr_author = OwnerFactory.create( + service="github", + username=enriched_pull.provider_pull["author"]["username"], + 
service_id=enriched_pull.provider_pull["author"]["id"], + ) + dbsession.add(pr_author) + dbsession.flush() + + decoration_details = determine_decoration_details(enriched_pull) + dbsession.commit() + + assert decoration_details.decoration_type == Decoration.upgrade + assert decoration_details.reason == "User must be activated" + assert decoration_details.should_attempt_author_auto_activation is True + assert ( + decoration_details.activation_org_ownerid + == enriched_pull.database_pull.repository.owner.ownerid + ) + assert decoration_details.activation_author_ownerid == pr_author.ownerid + assert enriched_pull.database_pull.repository.owner.plan_activated_users is None + + @pytest.mark.django_db + def test_uploads_used_with_expired_trial(self, mocker, dbsession): + owner = DjangoOwnerFactory( + service="gitlab", + trial_status=TrialStatus.EXPIRED.value, + trial_start_date=datetime.now() + timedelta(days=-10), + trial_end_date=datetime.now() + timedelta(days=-2), + plan=DEFAULT_FREE_PLAN, + ) + repository = DjangoRepositoryFactory( + author=owner, + private=True, + ) + commit = DjangoCommitFactory( + repository=repository, + author__service="gitlab", + timestamp=datetime.now(), + ) + report = CommitReportFactory( + commit=commit, report_type=ReportType.COVERAGE.value + ) + DjangoUploadFactory(report=report, storage_path="url") + DjangoUploadFactory(report=report, storage_path="url") + + uploads_present = ReportSession.objects.all() + assert len(uploads_present) == 2 + + mock_config_helper(mocker, configs={"setup.upload_throttling_enabled": False}) + plan_service = PlanService(current_org=owner) + uploads_used = determine_uploads_used(plan_service=plan_service) + + assert uploads_used == 0 + + @pytest.mark.django_db + def test_author_is_activated_on_subgroup_not_root( + self, dbsession, gitlab_root_group, gitlab_enriched_pull_subgroup + ): + pr_author = OwnerFactory.create( + username=gitlab_enriched_pull_subgroup.provider_pull["author"]["username"], + 
service="gitlab", + service_id=gitlab_enriched_pull_subgroup.provider_pull["author"]["id"], + ) + dbsession.add(pr_author) + dbsession.flush() + + # user is activated on subgroup but not root group and root group does not auto activate + gitlab_root_group.plan_auto_activate = False + child_group = gitlab_enriched_pull_subgroup.database_pull.repository.owner + child_group.plan_auto_activate = False + child_group.plan_activated_users = [pr_author.ownerid] + dbsession.flush() + + decoration_details = determine_decoration_details(gitlab_enriched_pull_subgroup) + dbsession.commit() + + assert decoration_details.decoration_type == Decoration.upgrade + assert decoration_details.reason == "User must be manually activated" + assert decoration_details.should_attempt_author_auto_activation is False + assert decoration_details.activation_org_ownerid is None + assert decoration_details.activation_author_ownerid is None + + assert pr_author.ownerid not in gitlab_root_group.plan_activated_users + assert ( + pr_author.ownerid + in gitlab_enriched_pull_subgroup.database_pull.repository.owner.plan_activated_users + ) + + # allow auto-activate on root for user to get non-blocking decoration + gitlab_root_group.plan_auto_activate = True + decoration_details = determine_decoration_details(gitlab_enriched_pull_subgroup) + dbsession.commit() + + assert decoration_details.decoration_type == Decoration.upgrade + assert decoration_details.reason == "User must be activated" + assert decoration_details.should_attempt_author_auto_activation is True + assert decoration_details.activation_org_ownerid == gitlab_root_group.ownerid + assert decoration_details.activation_author_ownerid == pr_author.ownerid diff --git a/sample_app/tests/test_failure_normalizer.py b/sample_app/tests/test_failure_normalizer.py new file mode 100644 index 0000000..79b8453 --- /dev/null +++ b/sample_app/tests/test_failure_normalizer.py @@ -0,0 +1,127 @@ +import pytest + +from services.failure_normalizer import 
FailureNormalizer + +test_string = "abcdefAB-1234-1234-1234-abcdefabcdef test_string 2024-03-10 test 0x44358378 20240312T155215Z 2024-03-12T15:52:15Z 15:52:15Z 2024-03-12T08:52:15-07:00 https://api.codecov.io/commits/list :1:2 :3: :: 0xabcdef1234" + + +def test_failure_normalizer(): + user_dict = {"TEST": [r"test_string"]} + f = FailureNormalizer(user_dict) + s = f.normalize_failure_message(test_string) + + assert ( + s + == "UUID TEST DATE test HEXNUMBER DATETIME DATETIME TIME DATETIME URL LINENO LINENO :: HEXNUMBER" + ) + + +def test_failure_normalizer_ignore_predefined(): + user_dict = {"TEST": [r"test_string"]} + f = FailureNormalizer(user_dict, True) + s = f.normalize_failure_message(test_string) + + assert ( + s + == "abcdefAB-1234-1234-1234-abcdefabcdef TEST 2024-03-10 test 0x44358378 20240312T155215Z 2024-03-12T15:52:15Z 15:52:15Z 2024-03-12T08:52:15-07:00 https://api.codecov.io/commits/list :1:2 :3: :: 0xabcdef1234" + ) + + +def test_failure_normalizer_append_predefined(): + user_dict = {"UUID": ["test"]} + f = FailureNormalizer(user_dict) + s = f.normalize_failure_message(test_string) + + assert ( + s + == "UUID UUID_string DATE UUID HEXNUMBER DATETIME DATETIME TIME DATETIME URL LINENO LINENO :: HEXNUMBER" + ) + + +def test_failure_normalizer_overwrite_predefined(): + user_dict = {"UUID": ["test"]} + f = FailureNormalizer(user_dict, override_predefined=True) + s = f.normalize_failure_message(test_string) + + assert ( + s + == "HASH UUID_string DATE UUID HEXNUMBER DATETIME DATETIME TIME DATETIME URL LINENO LINENO :: HEXNUMBER" + ) + + +def test_failure_normalizer_filepath(): + thing_string = "hello/my/name/is/hello/world.js" + user_dict = {"UUID": ["test"]} + f = FailureNormalizer(user_dict, override_predefined=True) + s = f.normalize_failure_message(thing_string) + + assert s == "FILEPATH/is/hello/world.js" + + +@pytest.mark.parametrize( + "input,expected", + [ + ( + """def test_subtract(): +> assert Calculator.subtract(1, 2) == 1.0 +E assert -1 == 1.0 +E 
+ where -1 = <function Calculator.subtract at 0x7f43b21a3130>(1, 2) +E + where <function Calculator.subtract at 0x7f43b21a3130> = Calculator.subtract + +app/test_calculator.py:12: AssertionError" +""", + """def test_subtract(): +> assert Calculator.subtract(NO, NO) == NO +E assert NO == NO +E + where NO = <function Calculator.subtract at HEXNUMBER>(NO, NO) +E + where <function Calculator.subtract at HEXNUMBER> = Calculator.subtract + +app/test_calculator.pyLINENO AssertionError" +""", + ), + ( + """mocker = <pytest_mock.plugin.MockFixture object at 0x6ddc0ae62550> +mock_configuration = <shared.config.ConfigHelper object at 0x54dc9bb7c210> +chain = mocker.patch("tasks.upload.chain") +storage_path = ( + "v1/repos/testing/ed1bdd67-8fd2-4cdb-ac9e-39b99e4a3892/bundle_report.sqlite" +) +message="", +commitid="abf6d4df662c47e32460020ab14abf9303581429", +s = b'\\x592f6b514678496f4333336a54314f71774c744f7934524d4479517778715270446678487459344769777458454a584d632b61633349432f35636c52635659473330782f7a496b7a5053542b426333454d614c5635673d3d' +altchars = None, validate = False +""", + """mocker = <pytest_mock.plugin.MockFixture object at HEXNUMBER> +mock_configuration = <shared.config.ConfigHelper object at HEXNUMBER> +chain = mocker.patch("tasks.upload.chain") +storage_path = ( + "FILEPATH/testing/UUID/bundle_report.sqlite" +) +message="", +commitid="HASH", +s = b'\\xHASH' +altchars = None, validate = False +""", + ), + ], +) +def test_from_random_cases(input, expected): + test_message = input + order_to_process = [ + "UUID", + "DATETIME", + "DATE", + "TIME", + "URL", + "FILEPATH", + "LINENO", + "HASH", + "HEXNUMBER", + "NO", + ] + + normalizer_class = FailureNormalizer( + {}, override_predefined=True, key_analysis_order=order_to_process + ) + s = normalizer_class.normalize_failure_message(test_message) + assert s == expected diff --git a/sample_app/tests/test_github.py b/sample_app/tests/test_github.py new file mode 100644 index 0000000..b8333f2 --- /dev/null +++ 
b/sample_app/tests/test_github.py @@ -0,0 +1,103 @@ +from unittest.mock import MagicMock + +import pytest +from redis import RedisError + +from database.models.core import Owner +from database.tests.factories.core import ( + CommitFactory, + GithubAppInstallationFactory, + RepositoryFactory, +) +from services.github import get_github_app_for_commit, set_github_app_for_commit + + +class TestGetSetGithubAppsToCommits: + def _get_commit(self, dbsession): + commit = CommitFactory(repository__owner__service="github") + dbsession.add(commit) + dbsession.flush() + return commit + + def _get_app(self, owner: Owner, dbsession): + app = GithubAppInstallationFactory( + owner=owner, installation_id=1250, app_id=250, pem_path="some_path" + ) + dbsession.add(app) + dbsession.flush() + return app + + @pytest.fixture + def mock_redis(self, mocker): + fake_redis = MagicMock(name="fake_redis") + mock_conn = mocker.patch("services.github.get_redis_connection") + mock_conn.return_value = fake_redis + return fake_redis + + def test_set_app_for_commit_no_app(self, mock_redis, dbsession): + commit = self._get_commit(dbsession) + assert set_github_app_for_commit(None, commit) == False + mock_redis.set.assert_not_called() + + def test_set_app_for_commit_redis_success(self, mock_redis, dbsession): + commit = self._get_commit(dbsession) + app = self._get_app(commit.repository.owner, dbsession) + assert set_github_app_for_commit(app.id, commit) == True + mock_redis.set.assert_called_with( + f"app_to_use_for_commit_{commit.id}", str(app.id), ex=(60 * 60 * 2) + ) + + def test_set_app_for_commit_redis_error(self, mock_redis, dbsession): + commit = self._get_commit(dbsession) + mock_redis.set.side_effect = RedisError + assert set_github_app_for_commit("1000", commit) == False + mock_redis.set.assert_called_with( + f"app_to_use_for_commit_{commit.id}", "1000", ex=(60 * 60 * 2) + ) + + def test_get_app_for_commit(self, mock_redis, dbsession): + repo_github = 
RepositoryFactory(owner__service="github") + repo_ghe = RepositoryFactory(owner__service="github_enterprise") + repo_gitlab = RepositoryFactory(owner__service="gitlab") + redis_keys = { + "app_to_use_for_commit_12": b"1200", + "app_to_use_for_commit_10": b"1000", + } + fake_commit_12 = MagicMock( + name="fake_commit", **{"id": 12, "repository": repo_github} + ) + fake_commit_10 = MagicMock( + name="fake_commit", + **{"id": 10, "repository": repo_ghe}, + ) + fake_commit_50 = MagicMock( + name="fake_commit", **{"id": 50, "repository": repo_github} + ) + fake_commit_gitlab = MagicMock( + name="fake_commit", **{"id": 12, "repository": repo_gitlab} + ) + mock_redis.get.side_effect = lambda key: redis_keys.get(key) + assert get_github_app_for_commit(fake_commit_12) == "1200" + assert get_github_app_for_commit(fake_commit_10) == "1000" + assert get_github_app_for_commit(fake_commit_50) is None + # This feature is Github-exclusive, so we skip checking for commits that are in repos of other providers + assert get_github_app_for_commit(fake_commit_gitlab) is None + + def test_get_app_for_commit_error(self, mock_redis): + repo_github = RepositoryFactory(owner__service="github") + mock_redis.get.side_effect = RedisError + fake_commit_12 = MagicMock( + name="fake_commit", **{"id": 12, "repository": repo_github} + ) + assert get_github_app_for_commit(fake_commit_12) is None + mock_redis.get.assert_called_with("app_to_use_for_commit_12") + + @pytest.mark.integration + def test_get_and_set_app_for_commit(self, dbsession): + commit = self._get_commit(dbsession) + # String + set_github_app_for_commit("12", commit) + assert get_github_app_for_commit(commit) == "12" + # Int + set_github_app_for_commit(24, commit) + assert get_github_app_for_commit(commit) == "24" diff --git a/sample_app/tests/test_license.py b/sample_app/tests/test_license.py new file mode 100644 index 0000000..b812743 --- /dev/null +++ b/sample_app/tests/test_license.py @@ -0,0 +1,163 @@ +from datetime import 
datetime + +from database.tests.factories import OwnerFactory, RepositoryFactory +from services.license import ( + InvalidLicenseReason, + calculate_reason_for_not_being_valid, + has_valid_license, + is_properly_licensed, + requires_license, +) + + +class TestLicenseService: + def test_is_properly_licensed_doesnt_require_license(self, dbsession, mocker): + mocker.patch("services.license.requires_license", return_value=False) + mocker.patch("services.license.has_valid_license", return_value=False) + assert is_properly_licensed(dbsession) + + def test_is_properly_licensed_requires_license_doesnt_have_it( + self, dbsession, mocker + ): + mocker.patch("services.license.requires_license", return_value=True) + mocker.patch("services.license.has_valid_license", return_value=False) + assert not is_properly_licensed(dbsession) + + def test_is_properly_licensed_requires_license_has_it(self, dbsession, mocker): + mocker.patch("services.license.requires_license", return_value=True) + mocker.patch("services.license.has_valid_license", return_value=True) + assert is_properly_licensed(dbsession) + + def test_requires_license(self, mocker): + mocker.patch("services.license.is_enterprise", return_value=True) + assert requires_license() + mocker.patch("services.license.is_enterprise", return_value=False) + assert not requires_license() + + def test_has_valid_license(self, dbsession, mocker): + mocked_reason = mocker.patch( + "services.license.reason_for_not_being_valid", return_value=None + ) + assert has_valid_license(dbsession) + mocked_reason.assert_called_with(dbsession) + mocker.patch( + "services.license.reason_for_not_being_valid", return_value="something" + ) + assert not has_valid_license(dbsession) + mocked_reason.assert_called_with(dbsession) + + def test_calculate_reason_for_not_being_valid_no_license( + self, dbsession, mock_configuration + ): + assert ( + calculate_reason_for_not_being_valid(dbsession) + == InvalidLicenseReason.invalid + ) + + def 
test_calculate_reason_for_not_being_valid_bad_url( + self, dbsession, mock_configuration + ): + encrypted_license = "0dRbhbzp8TVFQp7P4e2ES9lSfyQlTo8J7LQ/N51yeAE/KcRBCnU+QsVvVMDuLL4xNGXGGk9p4ZTmIl0II3cMr0tIoPHe9Re2UjommalyFYuP8JjjnNR/Ql2DnjOzEnTzsE2Poq9xlNHcIU4F9gC2WOYPnazR6U+t4CelcvIAbEpbOMOiw34nVyd3OEmWusquMNrwkNkk/lwjwCJmj6bTXQ==" + mock_configuration.params["setup"]["enterprise_license"] = encrypted_license + mock_configuration.params["setup"]["codecov_url"] = "https://bad.site.org" + assert ( + calculate_reason_for_not_being_valid(dbsession) + == InvalidLicenseReason.url_mismatch + ) + + def test_calculate_reason_for_not_being_valid_simple_license( + self, dbsession, mock_configuration, mocker + ): + mocker.patch("services.license._get_now", return_value=datetime(2020, 4, 2)) + encrypted_license = "0dRbhbzp8TVFQp7P4e2ES9lSfyQlTo8J7LQ/N51yeAE/KcRBCnU+QsVvVMDuLL4xNGXGGk9p4ZTmIl0II3cMr0tIoPHe9Re2UjommalyFYuP8JjjnNR/Ql2DnjOzEnTzsE2Poq9xlNHcIU4F9gC2WOYPnazR6U+t4CelcvIAbEpbOMOiw34nVyd3OEmWusquMNrwkNkk/lwjwCJmj6bTXQ==" + mock_configuration.params["setup"]["enterprise_license"] = encrypted_license + mock_configuration.params["setup"]["codecov_url"] = "https://codeov.mysite.com" + assert calculate_reason_for_not_being_valid(dbsession) is None + + def test_calculate_reason_for_not_being_valid_too_many_owners( + self, dbsession, mock_configuration + ): + for i in range(11): + owner = OwnerFactory.create( + service="github", username=f"test_calculate_reason_{i}" + ) + dbsession.add(owner) + dbsession.flush() + encrypted_license = "0dRbhbzp8TVFQp7P4e2ES9lSfyQlTo8J7LQ/N51yeAE/KcRBCnU+QsVvVMDuLL4xNGXGGk9p4ZTmIl0II3cMr0tIoPHe9Re2UjommalyFYuP8JjjnNR/Ql2DnjOzEnTzsE2Poq9xlNHcIU4F9gC2WOYPnazR6U+t4CelcvIAbEpbOMOiw34nVyd3OEmWusquMNrwkNkk/lwjwCJmj6bTXQ==" + mock_configuration.params["setup"]["enterprise_license"] = encrypted_license + mock_configuration.params["setup"]["codecov_url"] = "https://codeov.mysite.com" + assert ( + calculate_reason_for_not_being_valid(dbsession) + == 
InvalidLicenseReason.users_exceeded + ) + + def test_calculate_reason_for_not_being_valid_too_many_plan_activated_users( + self, dbsession, mock_configuration + ): + org_owner = OwnerFactory.create( + service="github", oauth_token=None, plan_activated_users=list(range(1, 12)) + ) + dbsession.add(org_owner) + dbsession.flush() + encrypted_license = "wxWEJyYgIcFpi6nBSyKQZQeaQ9Eqpo3SXyUomAqQOzOFjdYB3A8fFM1rm+kOt2ehy9w95AzrQqrqfxi9HJIb2zLOMOB9tSy52OykVCzFtKPBNsXU/y5pQKOfV7iI3w9CHFh3tDwSwgjg8UsMXwQPOhrpvl2GdHpwEhFdaM2O3vY7iElFgZfk5D9E7qEnp+WysQwHKxDeKLI7jWCnBCBJLDjBJRSz0H7AfU55RQDqtTrnR+rsLDHOzJ80/VxwVYhb" + mock_configuration.params["setup"]["enterprise_license"] = encrypted_license + mock_configuration.params["setup"]["codecov_url"] = "https://codecov.mysite.com" + assert ( + calculate_reason_for_not_being_valid(dbsession) + == InvalidLicenseReason.users_exceeded + ) + + def test_calculate_reason_for_not_being_valid_repos_exceeded( + self, dbsession, mock_configuration + ): + # number of max repos is 20 + owner = OwnerFactory.create(service="github") + dbsession.add(owner) + dbsession.flush() + for i in range(21): + repo = RepositoryFactory.create(updatestamp=datetime.now(), owner=owner) + dbsession.add(repo) + dbsession.flush() + encrypted_license = "0dRbhbzp8TVFQp7P4e2ES9lSfyQlTo8J7LQ/N51yeAE/KcRBCnU+QsVvVMDuLL4xNGXGGk9p4ZTmIl0II3cMr0tIoPHe9Re2UjommalyFYuP8JjjnNR/Ql2DnjOzEnTzsE2Poq9xlNHcIU4F9gC2WOYPnazR6U+t4CelcvIAbEpbOMOiw34nVyd3OEmWusquMNrwkNkk/lwjwCJmj6bTXQ==" + mock_configuration.params["setup"]["enterprise_license"] = encrypted_license + mock_configuration.params["setup"]["codecov_url"] = "https://codeov.mysite.com" + assert ( + calculate_reason_for_not_being_valid(dbsession) + == InvalidLicenseReason.repos_exceeded + ) + + def test_calculate_reason_for_not_being_valid_repos_warning( + self, dbsession, mock_configuration, mocker + ): + mocker.patch("services.license._get_now", return_value=datetime(2020, 4, 2)) + # number of max repos is 20 + owner = 
OwnerFactory.create(service="github") + dbsession.add(owner) + dbsession.flush() + for i in range(18): + repo = RepositoryFactory.create(updatestamp=datetime.now(), owner=owner) + dbsession.add(repo) + dbsession.flush() + encrypted_license = "0dRbhbzp8TVFQp7P4e2ES9lSfyQlTo8J7LQ/N51yeAE/KcRBCnU+QsVvVMDuLL4xNGXGGk9p4ZTmIl0II3cMr0tIoPHe9Re2UjommalyFYuP8JjjnNR/Ql2DnjOzEnTzsE2Poq9xlNHcIU4F9gC2WOYPnazR6U+t4CelcvIAbEpbOMOiw34nVyd3OEmWusquMNrwkNkk/lwjwCJmj6bTXQ==" + mock_configuration.params["setup"]["enterprise_license"] = encrypted_license + mock_configuration.params["setup"]["codecov_url"] = "https://codeov.mysite.com" + assert calculate_reason_for_not_being_valid(dbsession) is None + + def test_calculate_reason_for_not_being_valid_expired( + self, dbsession, mock_configuration, mocker + ): + mocker.patch("services.license._get_now", return_value=datetime(2021, 10, 11)) + owner = OwnerFactory.create(service="github") + dbsession.add(owner) + dbsession.flush() + for i in range(18): + repo = RepositoryFactory.create(updatestamp=datetime.now(), owner=owner) + dbsession.add(repo) + dbsession.flush() + encrypted_license = "0dRbhbzp8TVFQp7P4e2ES9lSfyQlTo8J7LQ/N51yeAE/KcRBCnU+QsVvVMDuLL4xNGXGGk9p4ZTmIl0II3cMr0tIoPHe9Re2UjommalyFYuP8JjjnNR/Ql2DnjOzEnTzsE2Poq9xlNHcIU4F9gC2WOYPnazR6U+t4CelcvIAbEpbOMOiw34nVyd3OEmWusquMNrwkNkk/lwjwCJmj6bTXQ==" + mock_configuration.params["setup"]["enterprise_license"] = encrypted_license + mock_configuration.params["setup"]["codecov_url"] = "https://codeov.mysite.com" + assert ( + calculate_reason_for_not_being_valid(dbsession) + == InvalidLicenseReason.expired + ) diff --git a/sample_app/tests/test_owner_service.py b/sample_app/tests/test_owner_service.py new file mode 100644 index 0000000..8c5f184 --- /dev/null +++ b/sample_app/tests/test_owner_service.py @@ -0,0 +1,160 @@ +from database.tests.factories import GithubAppInstallationFactory, OwnerFactory +from services.owner import get_owner_provider_service +from shared.rate_limits import 
class TestOwnerServiceTestCase:
    """Tests for `get_owner_provider_service` token and data resolution."""

    @staticmethod
    def _expected_owner_data(
        owner,
        *,
        installation=None,
        fallback_installations=None,
        additional_data=None,
    ):
        """Build the `res.data` dict every scenario expects.

        Only `installation`, `fallback_installations` and `additional_data`
        vary between scenarios; the owner/repo portion is always the same.
        """
        return {
            "owner": {
                "ownerid": owner.ownerid,
                "service_id": owner.service_id,
                "username": owner.username,
            },
            "repo": {},
            "installation": installation,
            "fallback_installations": fallback_installations,
            "additional_data": {} if additional_data is None else additional_data,
        }

    def test_get_owner_provider_service(self, dbsession):
        owner = OwnerFactory.create(
            service="github",
            unencrypted_oauth_token="bcaa0dc0c66b4a8c8c65ac919a1a91aa",
            bot=None,
        )
        dbsession.add(owner)
        dbsession.flush()

        res = get_owner_provider_service(owner)

        assert res.service == "github"
        assert res.data == self._expected_owner_data(owner)
        assert res.token == {
            "key": "bcaa0dc0c66b4a8c8c65ac919a1a91aa",
            "secret": None,
            "entity_name": owner_key_name(owner.ownerid),
        }

    def test_get_owner_provider_service_with_installation(self, dbsession, mocker):
        mocker.patch(
            "shared.bots.github_apps.get_github_integration_token",
            return_value="integration_token",
        )
        owner = OwnerFactory.create(
            service="github",
            unencrypted_oauth_token="bcaa0dc0c66b4a8c8c65ac919a1a91aa",
            bot=None,
        )
        dbsession.add(owner)
        installation = GithubAppInstallationFactory(
            installation_id=1500,
            owner=owner,
        )
        dbsession.add(installation)
        dbsession.flush()

        res = get_owner_provider_service(owner)

        assert res.service == "github"
        assert res.data == self._expected_owner_data(
            owner,
            installation={
                "id": installation.id,
                "installation_id": 1500,
                "pem_path": None,
                "app_id": installation.app_id,
            },
            fallback_installations=[],
        )
        # With an installation present, the token comes from the GitHub app,
        # not the owner's oauth token.
        assert res.token == {
            "key": "integration_token",
            "username": "installation_1500",
            "entity_name": gh_app_key_name(
                installation_id=installation.installation_id,
                app_id=installation.app_id,
            ),
        }

    def test_get_owner_provider_service_other_service(self, dbsession):
        owner = OwnerFactory.create(
            service="gitlab", unencrypted_oauth_token="testenll80qbqhofao65", bot=None
        )
        dbsession.add(owner)
        dbsession.flush()

        res = get_owner_provider_service(owner)

        assert res.service == "gitlab"
        assert res.data == self._expected_owner_data(owner)
        assert res.token == {
            "key": "testenll80qbqhofao65",
            "secret": None,
            "entity_name": owner_key_name(owner.ownerid),
        }

    def test_get_owner_provider_service_different_bot(self, dbsession):
        bot_token = "bcaa0dc0c66b4a8c8c65ac919a1a91aa"
        owner = OwnerFactory.create(
            unencrypted_oauth_token="testyftq3ovzkb3zmt823u3t04lkrt9w",
            bot=OwnerFactory.create(unencrypted_oauth_token=bot_token),
        )
        dbsession.add(owner)
        dbsession.flush()

        res = get_owner_provider_service(owner, ignore_installation=True)

        expected_data = self._expected_owner_data(owner)
        assert res.data["repo"] == expected_data["repo"]
        assert res.data == expected_data
        # The bot's token (and the bot's ownerid in the entity name) wins over
        # the owner's own oauth token.
        assert res.token == {
            "key": bot_token,
            "secret": None,
            "entity_name": owner_key_name(owner.bot.ownerid),
        }

    def test_get_owner_provider_service_additional_data(self, dbsession):
        owner = OwnerFactory.create(
            service="gitlab", unencrypted_oauth_token="testenll80qbqhofao65", bot=None
        )
        dbsession.add(owner)
        dbsession.flush()
        additional_data: AdditionalData = {"upload_type": UploadType.BUNDLE_ANALYSIS}

        res = get_owner_provider_service(owner, additional_data=additional_data)

        assert res.service == "gitlab"
        assert res.data == self._expected_owner_data(
            owner, additional_data={"upload_type": UploadType.BUNDLE_ANALYSIS}
        )
        assert res.token == {
            "key": "testenll80qbqhofao65",
            "secret": None,
            "entity_name": owner_key_name(owner.ownerid),
        }


def test_single_upload():
    """A lone upload should merge and trigger postprocessing immediately."""
    state = ProcessingState(1234, uuid4().hex)
    state.mark_uploads_as_processing([1])

    state.mark_upload_as_processed(1)

    # this is the only in-progress upload, nothing more to expect
    assert should_perform_merge(state.get_upload_numbers())

    assert state.get_uploads_for_merging() == {1}
    state.mark_uploads_as_merged([1])

    assert should_trigger_postprocessing(state.get_upload_numbers())


def test_concurrent_uploads():
    """A second upload arriving mid-flight debounces merge/postprocessing."""
    state = ProcessingState(1234, uuid4().hex)
    state.mark_uploads_as_processing([1])

    state.mark_upload_as_processed(1)
    # meanwhile, another upload comes in:
    state.mark_uploads_as_processing([2])

    # not merging/postprocessing yet, as that will be debounced with the second upload
    assert not should_perform_merge(state.get_upload_numbers())

    state.mark_upload_as_processed(2)

    assert should_perform_merge(state.get_upload_numbers())

    assert state.get_uploads_for_merging() == {1, 2}
    state.mark_uploads_as_merged([1, 2])

    assert should_trigger_postprocessing(state.get_upload_numbers())


def test_batch_merging_many_uploads():
    """Merges happen in batches of MERGE_BATCH_SIZE; postprocessing waits for all."""
    state = ProcessingState(1234, uuid4().hex)

    state.mark_uploads_as_processing([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12])

    # `range` is exclusive at the top, so this processes uploads 1..11 only.
    for upload_id in range(1, 12):
        state.mark_upload_as_processed(upload_id)

    # we have only processed 11 out of 12. we want to do a batched merge
    assert should_perform_merge(state.get_upload_numbers())
    merging = state.get_uploads_for_merging()
    assert len(merging) == 10  # = MERGE_BATCH_SIZE
    state.mark_uploads_as_merged(merging)

    # but no notifications yet
    assert not should_trigger_postprocessing(state.get_upload_numbers())

    state.mark_upload_as_processed(12)

    # with the last upload being processed, we do another merge, and then trigger notifications
    assert should_perform_merge(state.get_upload_numbers())
    merging = state.get_uploads_for_merging()
    assert len(merging) == 2
    state.mark_uploads_as_merged(merging)

    assert should_trigger_postprocessing(state.get_upload_numbers())


def test_get_redis_connection(mocker):
    """`get_redis_connection` should build the client from the default URL."""
    mocked = mocker.patch("shared.helpers.redis.Redis.from_url")
    res = get_redis_connection()
    assert res is not None
    mocked.assert_called_with("redis://redis:6379")
@pytest.fixture
def sample_report():
    """A small two-file report with one uploaded and one carriedforward session."""
    report = Report()

    go_file = ReportFile("file_1.go")
    go_file.append(1, ReportLine.create(1, sessions=[[0, 1]], complexity=(10, 2)))
    go_file.append(2, ReportLine.create(0, sessions=[[0, 1]]))
    go_file.append(3, ReportLine.create(1, sessions=[[0, 1]]))
    go_file.append(5, ReportLine.create(1, sessions=[[0, 1], [1, 1]]))
    go_file.append(6, ReportLine.create(0, sessions=[[0, 1]]))
    go_file.append(8, ReportLine.create(1, sessions=[[0, 1], [1, 0]]))
    go_file.append(9, ReportLine.create(1, sessions=[[0, 1]]))
    go_file.append(10, ReportLine.create(0, sessions=[[0, 1]]))
    report.append(go_file)

    py_file = ReportFile("file_2.py")
    py_file.append(12, ReportLine.create(1, sessions=[[0, 1]]))
    py_file.append(51, ReportLine.create("1/2", type="b", sessions=[[0, 1]]))
    report.append(py_file)

    uploaded_session = Session(
        flags=["unit"],
        provider="circleci",
        session_type=SessionType.uploaded,
        build="aycaramba",
        totals=ReportTotals(2, 10),
    )
    report.add_session(uploaded_session)
    carried_session = Session(
        flags=["integration"],
        provider="travis",
        session_type=SessionType.carriedforward,
        build="poli",
    )
    report.add_session(carried_session)
    return report


def _session_entry(flags, session_type):
    """One `_report_json` session dict; only flags and session type vary."""
    return {
        "N": None,
        "a": None,
        "c": None,
        "d": None,
        "e": None,
        "f": flags,
        "j": None,
        "n": None,
        "p": None,
        "st": session_type,
        "t": None,
        "u": None,
    }


@pytest.fixture
def sample_commit_with_report_big(dbsession, mock_storage):
    """A commit with 4 uploaded sessions, 15 files, and chunks from the sample archive."""
    session_flags = [[], ["unit"], ["enterprise"], ["unit", "enterprise"]]
    sessions_dict = {
        str(i): _session_entry(flags, "uploaded")
        for i, flags in enumerate(session_flags)
    }
    # (name, totals) per file; the file index is the enumeration order.
    per_file_totals = [
        ("file_00.py", [0, 14, 12, 0, 2, "85.71429", 0, 0, 0, 0, 0, 0, 0]),
        ("file_01.py", [0, 11, 8, 0, 3, "72.72727", 0, 0, 0, 0, 0, 0, 0]),
        ("file_02.py", [0, 13, 9, 0, 4, "69.23077", 0, 0, 0, 0, 0, 0, 0]),
        ("file_03.py", [0, 16, 8, 0, 8, "50.00000", 0, 0, 0, 0, 0, 0, 0]),
        ("file_04.py", [0, 10, 6, 0, 4, "60.00000", 0, 0, 0, 0, 0, 0, 0]),
        ("file_05.py", [0, 14, 10, 0, 4, "71.42857", 0, 0, 0, 0, 0, 0, 0]),
        ("file_06.py", [0, 9, 7, 1, 1, "77.77778", 0, 0, 0, 0, 0, 0, 0]),
        ("file_07.py", [0, 11, 9, 0, 2, "81.81818", 0, 0, 0, 0, 0, 0, 0]),
        ("file_08.py", [0, 11, 6, 0, 5, "54.54545", 0, 0, 0, 0, 0, 0, 0]),
        ("file_09.py", [0, 14, 10, 1, 3, "71.42857", 0, 0, 0, 0, 0, 0, 0]),
        ("file_10.py", [0, 10, 6, 1, 3, "60.00000", 0, 0, 0, 0, 0, 0, 0]),
        ("file_11.py", [0, 23, 15, 1, 7, "65.21739", 0, 0, 0, 0, 0, 0, 0]),
        ("file_12.py", [0, 14, 8, 0, 6, "57.14286", 0, 0, 0, 0, 0, 0, 0]),
        ("file_13.py", [0, 15, 9, 0, 6, "60.00000", 0, 0, 0, 0, 0, 0, 0]),
        ("file_14.py", [0, 23, 13, 0, 10, "56.52174", 0, 0, 0, 0, 0, 0, 0]),
    ]
    file_headers = {
        name: [index, totals, None]
        for index, (name, totals) in enumerate(per_file_totals)
    }
    commit = CommitFactory.create(
        _report_json={"sessions": sessions_dict, "files": file_headers}
    )
    dbsession.add(commit)
    dbsession.flush()
    with open("tasks/tests/samples/sample_chunks_4_sessions.txt", "rb") as f:
        archive_service = ArchiveService(commit.repository)
        archive_service.write_chunks(commit.commitid, f.read())
    return commit


@pytest.fixture
def sample_commit_with_report_big_with_labels(dbsession, mock_storage):
    """A commit with a single enterprise-flagged session and labels-style chunks."""
    sessions_dict = {"0": _session_entry(["enterprise"], "uploaded")}
    file_headers = {
        "file_00.py": [
            0,
            [0, 4, 0, 4, 0, "0", 0, 0, 0, 0, 0, 0, 0],
            [[0, 4, 0, 4, 0, "0", 0, 0, 0, 0, 0, 0, 0]],
            None,
        ],
        "file_01.py": [
            1,
            [0, 32, 32, 0, 0, "100", 0, 0, 0, 0, 0, 0, 0],
            [[0, 32, 32, 0, 0, "100", 0, 0, 0, 0, 0, 0, 0]],
            None,
        ],
    }
    commit = CommitFactory.create(
        _report_json={"sessions": sessions_dict, "files": file_headers}
    )
    dbsession.add(commit)
    dbsession.flush()
    with open("tasks/tests/samples/sample_chunks_with_header.txt", "rb") as f:
        archive_service = ArchiveService(commit.repository)
        archive_service.write_chunks(commit.commitid, f.read())
    return commit


@pytest.fixture
def sample_commit_with_report_big_already_carriedforward(dbsession, mock_storage):
    """Like `sample_commit_with_report_big`, but sessions 2 and 3 are already carriedforward."""
    session_specs = [
        ([], "uploaded"),
        (["unit"], "uploaded"),
        (["enterprise"], "carriedforward"),
        (["unit", "enterprise"], "carriedforward"),
    ]
    sessions_dict = {
        str(i): _session_entry(flags, session_type)
        for i, (flags, session_type) in enumerate(session_specs)
    }
    # (name, index, totals); insertion order deliberately differs from the
    # index order, so the index is carried explicitly.
    per_file = [
        ("file_00.py", 0, [0, 14, 12, 0, 2, "85.71429", 0, 0, 0, 0, 0, 0, 0]),
        ("file_01.py", 1, [0, 11, 8, 0, 3, "72.72727", 0, 0, 0, 0, 0, 0, 0]),
        ("file_10.py", 10, [0, 10, 6, 1, 3, "60.00000", 0, 0, 0, 0, 0, 0, 0]),
        ("file_11.py", 11, [0, 23, 15, 1, 7, "65.21739", 0, 0, 0, 0, 0, 0, 0]),
        ("file_12.py", 12, [0, 14, 8, 0, 6, "57.14286", 0, 0, 0, 0, 0, 0, 0]),
        ("file_13.py", 13, [0, 15, 9, 0, 6, "60.00000", 0, 0, 0, 0, 0, 0, 0]),
        ("file_14.py", 14, [0, 23, 13, 0, 10, "56.52174", 0, 0, 0, 0, 0, 0, 0]),
        ("file_02.py", 2, [0, 13, 9, 0, 4, "69.23077", 0, 0, 0, 0, 0, 0, 0]),
        ("file_03.py", 3, [0, 16, 8, 0, 8, "50.00000", 0, 0, 0, 0, 0, 0, 0]),
        ("file_04.py", 4, [0, 10, 6, 0, 4, "60.00000", 0, 0, 0, 0, 0, 0, 0]),
        ("file_05.py", 5, [0, 14, 10, 0, 4, "71.42857", 0, 0, 0, 0, 0, 0, 0]),
        ("file_06.py", 6, [0, 9, 7, 1, 1, "77.77778", 0, 0, 0, 0, 0, 0, 0]),
        ("file_07.py", 7, [0, 11, 9, 0, 2, "81.81818", 0, 0, 0, 0, 0, 0, 0]),
        ("file_08.py", 8, [0, 11, 6, 0, 5, "54.54545", 0, 0, 0, 0, 0, 0, 0]),
        ("file_09.py", 9, [0, 14, 10, 1, 3, "71.42857", 0, 0, 0, 0, 0, 0, 0]),
    ]
    file_headers = {
        name: [index, totals, None] for name, index, totals in per_file
    }
    commit = CommitFactory.create(
        _report_json={"sessions": sessions_dict, "files": file_headers}
    )
    dbsession.add(commit)
    dbsession.flush()
    with open("tasks/tests/samples/sample_chunks_4_sessions.txt", "rb") as f:
        archive_service = ArchiveService(commit.repository)
        archive_service.write_chunks(commit.commitid, f.read())
    return commit
commit = CommitFactory.create( + repository=parent_commit.repository, + parent_commit_id=parent_commit.commitid, + _report_json=None, + ) + dbsession.add(commit) + dbsession.flush() + dbsession.add(CommitReport(commit_id=commit.id_)) + dbsession.flush() + yaml_dict = {"flags": {"enterprise": {"carryforward": True}}} + report = ReportService(UserYaml(yaml_dict)).create_new_report_for_commit(commit) + assert report is not None + assert sorted(report.files) == sorted( + [ + "file_00.py", + "file_01.py", + "file_02.py", + "file_03.py", + "file_04.py", + "file_05.py", + "file_06.py", + "file_07.py", + "file_08.py", + "file_09.py", + "file_10.py", + "file_11.py", + "file_12.py", + "file_13.py", + "file_14.py", + ] + ) + assert report.totals == ReportTotals( + files=15, + lines=188, + hits=68, + misses=26, + partials=94, + coverage="36.17021", + branches=0, + methods=0, + messages=0, + sessions=2, + complexity=0, + complexity_total=0, + diff=0, + ) + readable_report = convert_report_to_better_readable(report) + + assert readable_report == { + "archive": { + "file_00.py": [ + (1, 1, None, [[2, 1]], None, None), + (2, 1, None, [[2, 1]], None, None), + (3, "1/3", None, [[2, "1/3"]], None, None), + (4, "1/2", None, [[3, "1/2"]], None, None), + (5, 0, None, [[3, 0]], None, None), + (6, 0, None, [[2, 0]], None, None), + (7, 0, None, [[3, 0]], None, None), + (8, 0, None, [[3, 0]], None, None), + (9, "1/3", None, [[3, 0], [2, "1/3"]], None, None), + (10, 0, None, [[2, 0]], None, None), + (11, "1/2", None, [[2, "1/2"]], None, None), + (12, "2/2", None, [[2, 1], [3, "1/2"]], None, None), + (13, "2/2", None, [[3, 1], [2, "1/2"]], None, None), + (14, "1/3", None, [[3, 0], [2, "1/3"]], None, None), + ], + "file_01.py": [ + (2, "1/3", None, [[2, 0], [3, "1/3"]], None, None), + (3, "1/2", None, [[3, "1/2"]], None, None), + (4, "1/2", None, [[3, "1/2"]], None, None), + (5, "1/3", None, [[2, 0], [3, "1/3"]], None, None), + (6, "1/3", None, [[3, "1/2"], [2, "1/3"]], None, None), + (7, 
"1/3", None, [[3, "1/2"], [2, "1/3"]], None, None), + (8, 1, None, [[2, 1]], None, None), + (9, 1, None, [[2, 1]], None, None), + (10, "1/2", None, [[3, 0], [2, "1/2"]], None, None), + (11, 1, None, [[3, 0], [2, 1]], None, None), + ], + "file_02.py": [ + (1, 1, None, [[2, 1]], None, None), + (2, "1/3", None, [[3, "1/3"]], None, None), + (4, "1/2", None, [[3, 0], [2, "1/2"]], None, None), + (5, 1, None, [[3, 1]], None, None), + (6, "1/3", None, [[2, "1/3"]], None, None), + (8, 1, None, [[2, 1]], None, None), + (9, "3/3", None, [[3, 1], [2, "1/3"]], None, None), + (10, "1/3", None, [[3, 0], [2, "1/3"]], None, None), + (11, "1/2", None, [[2, "1/2"]], None, None), + (12, "2/2", None, [[2, 1], [3, "1/2"]], None, None), + (13, "1/3", None, [[3, 0], [2, "1/3"]], None, None), + ], + "file_03.py": [ + (2, 1, None, [[3, 0], [2, 1]], None, None), + (3, "1/2", None, [[3, "1/2"]], None, None), + (4, 0, None, [[3, 0]], None, None), + (5, "1/3", None, [[2, "1/3"]], None, None), + (6, "1/3", None, [[3, "1/3"]], None, None), + (7, "2/2", None, [[3, 1], [2, "1/2"]], None, None), + (8, 0, None, [[3, 0]], None, None), + (9, "1/3", None, [[3, "1/3"]], None, None), + (10, "1/3", None, [[2, "1/3"]], None, None), + (11, "1/2", None, [[2, "1/2"]], None, None), + (12, "1/2", None, [[3, "1/2"]], None, None), + (13, "1/3", None, [[2, 0], [3, "1/3"]], None, None), + (14, "1/2", None, [[3, "1/2"]], None, None), + (15, "3/3", None, [[2, 1], [3, "1/3"]], None, None), + (16, "2/2", None, [[2, 1], [3, "1/2"]], None, None), + ], + "file_04.py": [ + (1, "1/3", None, [[2, "1/3"]], None, None), + (2, 0, None, [[3, 0]], None, None), + (3, "1/2", None, [[2, "1/2"]], None, None), + (4, "1/2", None, [[2, "1/2"]], None, None), + (5, "2/2", None, [[3, 1], [2, "1/2"]], None, None), + (6, "1/2", None, [[3, "1/2"]], None, None), + (7, 1, None, [[3, 0], [2, 1]], None, None), + (8, "3/3", None, [[2, 1], [3, "1/3"]], None, None), + (9, "1/3", None, [[2, "1/3"]], None, None), + (10, "1/2", None, [[2, "1/2"]], None, 
None), + ], + "file_05.py": [ + (2, 0, None, [[2, 0]], None, None), + (3, "1/2", None, [[2, "1/2"]], None, None), + (4, 0, None, [[3, 0]], None, None), + (5, "1/3", None, [[3, "1/3"]], None, None), + (6, "3/3", None, [[3, 1], [2, "1/3"]], None, None), + (7, "1/3", None, [[3, "1/3"]], None, None), + (8, "2/2", None, [[2, 1], [3, "1/2"]], None, None), + (9, "1/3", None, [[2, "1/3"]], None, None), + (10, "1/3", None, [[2, 0], [3, "1/3"]], None, None), + (11, "3/3", None, [[2, 1], [3, "1/3"]], None, None), + (12, "1/3", None, [[2, "1/2"], [3, "1/3"]], None, None), + (13, "1/3", None, [[3, "1/2"], [2, "1/3"]], None, None), + (14, "1/2", None, [[2, 0], [3, "1/2"]], None, None), + ], + "file_06.py": [ + (3, "1/2", None, [[3, "1/2"]], None, None), + (4, 1, None, [[3, 1]], None, None), + (5, 1, None, [[3, 1]], None, None), + (6, 1, None, [[2, 1]], None, None), + (7, 1, None, [[3, 1]], None, None), + (8, "2/2", None, [[2, 1], [3, "1/2"]], None, None), + (9, "1/2", None, [[3, 0], [2, "1/2"]], None, None), + ], + "file_07.py": [ + (1, 1, None, [[3, 1]], None, None), + (2, 1, None, [[2, 0], [3, 1]], None, None), + (3, 1, None, [[2, 1]], None, None), + (4, "1/2", None, [[2, "1/2"], [3, "1/3"]], None, None), + (5, "2/2", None, [[3, 1], [2, "1/2"]], None, None), + (6, 0, None, [[2, 0]], None, None), + (7, "1/3", None, [[3, "1/3"]], None, None), + (8, "1/3", None, [[2, "1/2"], [3, "1/3"]], None, None), + (9, "1/3", None, [[2, "1/3"]], None, None), + (10, "3/3", None, [[2, 1], [3, "1/3"]], None, None), + (11, "1/2", None, [[2, 0], [3, "1/2"]], None, None), + ], + "file_08.py": [ + (1, 0, None, [[3, 0]], None, None), + (2, 0, None, [[2, 0]], None, None), + (3, 0, None, [[2, 0]], None, None), + (4, "1/3", None, [[2, "1/3"]], None, None), + (5, "1/2", None, [[3, "1/2"]], None, None), + (6, 0, None, [[2, 0]], None, None), + (7, 1, None, [[2, 0], [3, 1]], None, None), + (8, 1, None, [[3, 0], [2, 1]], None, None), + (9, "1/2", None, [[3, "1/2"]], None, None), + (10, "1/3", None, [[3, 
"1/2"], [2, "1/3"]], None, None), + (11, "1/3", None, [[2, 0], [3, "1/3"]], None, None), + ], + "file_09.py": [ + (1, 0, None, [[2, 0]], None, None), + (3, "1/3", None, [[3, "1/3"]], None, None), + (6, "3/3", None, [[2, 1], [3, "1/3"]], None, None), + (7, "1/2", None, [[2, "1/2"]], None, None), + (8, "1/2", None, [[2, "1/2"]], None, None), + (9, 1, None, [[2, 1]], None, None), + (10, 1, None, [[2, 0], [3, 1]], None, None), + (11, "1/3", None, [[2, "1/3"]], None, None), + (12, "1/3", None, [[3, "1/3"]], None, None), + (13, 1, None, [[2, 0], [3, 1]], None, None), + (14, 1, None, [[3, 0], [2, 1]], None, None), + ], + "file_10.py": [ + (2, 1, None, [[3, 1]], None, None), + (3, "1/2", None, [[2, 0], [3, "1/2"]], None, None), + (4, "1/2", None, [[2, "1/2"]], None, None), + (6, "1/2", None, [[2, 0], [3, "1/2"]], None, None), + (7, 1, None, [[3, 1]], None, None), + (8, "1/2", None, [[3, 0], [2, "1/2"]], None, None), + (9, "1/3", None, [[2, "1/2"], [3, "1/3"]], None, None), + (10, "3/3", None, [[3, 1], [2, "1/3"]], None, None), + ], + "file_11.py": [ + (1, 0, None, [[3, 0]], None, None), + (3, "1/2", None, [[2, "1/2"]], None, None), + (4, "1/2", None, [[3, "1/2"]], None, None), + (5, 0, None, [[2, 0]], None, None), + (6, 0, None, [[3, 0]], None, None), + (7, "1/3", None, [[2, "1/3"]], None, None), + (8, 1, None, [[2, 1]], None, None), + (9, "1/2", None, [[2, "1/2"]], None, None), + (10, 1, None, [[3, 1]], None, None), + (11, "2/2", None, [[2, 1], [3, "1/2"]], None, None), + (12, 1, None, [[3, 1]], None, None), + (13, "1/2", None, [[3, 0], [2, "1/2"]], None, None), + (14, "1/2", None, [[3, 0], [2, "1/2"]], None, None), + (15, 0, None, [[2, 0]], None, None), + (16, 1, None, [[2, 0], [3, 1]], None, None), + (17, "1/3", None, [[3, "1/2"], [2, "1/3"]], None, None), + (18, "1/2", None, [[2, 0], [3, "1/2"]], None, None), + (19, 0, None, [[3, 0]], None, None), + (20, 1, None, [[3, 1]], None, None), + (21, "2/2", None, [[3, 1], [2, "1/2"]], None, None), + (22, "3/3", None, [[3, 1], 
[2, "1/3"]], None, None), + (23, "1/3", None, [[2, 0], [3, "1/3"]], None, None), + ], + "file_12.py": [ + (2, "1/2", None, [[3, "1/2"]], None, None), + (3, "1/3", None, [[3, "1/3"]], None, None), + (4, 0, None, [[2, 0]], None, None), + (5, 0, None, [[3, 0]], None, None), + (7, 1, None, [[3, 1]], None, None), + (8, "1/2", None, [[3, "1/2"], [2, "1/3"]], None, None), + (9, "1/2", None, [[2, 0], [3, "1/2"]], None, None), + (10, 0, None, [[3, 0]], None, None), + (11, "1/3", None, [[3, "1/3"]], None, None), + (12, "3/3", None, [[3, 1], [2, "1/3"]], None, None), + (13, "3/3", None, [[2, 1], [3, "1/3"]], None, None), + (14, "2/2", None, [[3, 1], [2, "1/2"]], None, None), + ], + "file_13.py": [ + (2, 1, None, [[3, 1]], None, None), + (6, 1, None, [[3, 0], [2, 1]], None, None), + (7, "1/3", None, [[2, "1/3"]], None, None), + (8, "3/3", None, [[2, 1], [3, "1/3"]], None, None), + (9, 1, None, [[3, 0], [2, 1]], None, None), + (10, "1/3", None, [[2, "1/2"], [3, "1/3"]], None, None), + (11, "1/3", None, [[2, 0], [3, "1/3"]], None, None), + (12, "1/3", None, [[2, "1/2"], [3, "1/3"]], None, None), + (13, "1/2", None, [[3, "1/2"]], None, None), + (14, 1, None, [[3, 1]], None, None), + (15, "2/2", None, [[3, 1], [2, "1/2"]], None, None), + ], + "file_14.py": [ + (1, 1, None, [[2, 1]], None, None), + (2, 0, None, [[2, 0]], None, None), + (3, "1/3", None, [[3, 0], [2, "1/3"]], None, None), + (5, "2/2", None, [[2, 1], [3, "1/2"]], None, None), + (6, "1/3", None, [[3, "1/3"]], None, None), + (7, 1, None, [[2, 1]], None, None), + (8, "1/3", None, [[2, "1/3"]], None, None), + (9, "1/2", None, [[2, "1/2"]], None, None), + (10, 1, None, [[2, 1]], None, None), + (11, "3/3", None, [[3, 1], [2, "1/3"]], None, None), + (12, 1, None, [[2, 0], [3, 1]], None, None), + (13, "1/3", None, [[3, "1/3"]], None, None), + (14, "1/3", None, [[3, "1/3"]], None, None), + (15, 0, None, [[2, 0]], None, None), + (16, "1/2", None, [[2, 0], [3, "1/2"]], None, None), + (17, "1/3", None, [[3, 0], [2, "1/3"]], None, 
None), + (18, "1/3", None, [[3, 0], [2, "1/3"]], None, None), + (19, "1/2", None, [[3, 0], [2, "1/2"]], None, None), + (20, "3/3", None, [[3, 1], [2, "1/3"]], None, None), + (21, "1/3", None, [[2, "1/2"], [3, "1/3"]], None, None), + (22, "1/3", None, [[3, "1/2"], [2, "1/3"]], None, None), + (23, 1, None, [[2, 0], [3, 1]], None, None), + ], + }, + "report": { + "files": { + "file_00.py": [ + 0, + [0, 14, 4, 5, 5, "28.57143", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + "file_01.py": [ + 1, + [0, 10, 3, 0, 7, "30.00000", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + "file_02.py": [ + 2, + [0, 11, 5, 0, 6, "45.45455", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + "file_03.py": [ + 3, + [0, 15, 4, 2, 9, "26.66667", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + "file_04.py": [ + 4, + [0, 10, 3, 1, 6, "30.00000", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + "file_05.py": [ + 5, + [0, 13, 3, 2, 8, "23.07692", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + "file_06.py": [ + 6, + [0, 7, 5, 0, 2, "71.42857", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + "file_07.py": [ + 7, + [0, 11, 5, 1, 5, "45.45455", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + "file_08.py": [ + 8, + [0, 11, 2, 4, 5, "18.18182", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + "file_09.py": [ + 9, + [0, 11, 5, 1, 5, "45.45455", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + "file_10.py": [ + 10, + [0, 8, 3, 0, 5, "37.50000", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + "file_11.py": [ + 11, + [0, 22, 8, 5, 9, "36.36364", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + "file_12.py": [ + 12, + [0, 12, 4, 3, 5, "33.33333", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + "file_13.py": [ + 13, + [0, 11, 6, 0, 5, "54.54545", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + "file_14.py": [ + 14, + [0, 22, 8, 2, 12, "36.36364", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + }, + "sessions": { + "2": { + "a": None, + "c": None, + "d": None, + "e": None, + "f": ["enterprise"], + "j": None, + "N": "Carriedforward", + "n": None, + "p": 
None, + "se": {"carriedforward_from": parent_commit.commitid}, + "st": "carriedforward", + "t": None, + "u": None, + }, + "3": { + "a": None, + "c": None, + "d": None, + "e": None, + "f": ["unit", "enterprise"], + "j": None, + "N": "Carriedforward", + "n": None, + "p": None, + "se": {"carriedforward_from": parent_commit.commitid}, + "st": "carriedforward", + "t": None, + "u": None, + }, + }, + }, + "totals": { + "b": 0, + "c": "36.17021", + "C": 0, + "d": 0, + "diff": None, + "f": 15, + "h": 68, + "M": 0, + "m": 26, + "N": 0, + "n": 188, + "p": 94, + "s": 2, + }, + } + + @pytest.mark.django_db(databases={"default", "timeseries"}) + def test_create_new_report_for_commit_with_labels( + self, dbsession, sample_commit_with_report_big_with_labels + ): + parent_commit = sample_commit_with_report_big_with_labels + commit = CommitFactory.create( + repository=parent_commit.repository, + parent_commit_id=parent_commit.commitid, + _report_json=None, + ) + dbsession.add(commit) + dbsession.flush() + dbsession.add(CommitReport(commit_id=commit.id_)) + dbsession.flush() + yaml_dict = {"flags": {"enterprise": {"carryforward": True}}} + report = ReportService(UserYaml(yaml_dict)).create_new_report_for_commit(commit) + assert report is not None + assert sorted(report.files) == ["file_00.py", "file_01.py"] + + assert report.totals == ReportTotals( + files=2, + lines=36, + hits=32, + misses=4, + partials=0, + coverage="88.88889", + branches=0, + methods=0, + messages=0, + sessions=1, + complexity=0, + complexity_total=0, + diff=0, + ) + readable_report = convert_report_to_better_readable(report) + assert readable_report == { + "archive": { + "file_00.py": [ + (1, 0, None, [[0, 0]], None, None), + (3, 0, None, [[0, 0]], None, None), + (4, 0, None, [[0, 0]], None, None), + (5, 0, None, [[0, 0]], None, None), + ], + "file_01.py": [ + (1, 1, None, [[0, 1]], None, None), + (2, 1, None, [[0, 1]], None, None), + (5, 1, None, [[0, 1]], None, None), + (6, 1, None, [[0, 1]], None, None), + (7, 
1, None, [[0, 1]], None, None), + (8, 1, None, [[0, 1]], None, None), + (9, 1, None, [[0, 1]], None, None), + (12, 1, None, [[0, 1]], None, None), + (13, 1, None, [[0, 1]], None, None), + (14, 1, None, [[0, 1]], None, None), + (16, 1, None, [[0, 1]], None, None), + (17, 1, None, [[0, 1]], None, None), + (18, 1, None, [[0, 1]], None, None), + (19, 1, None, [[0, 1]], None, None), + (21, 1, None, [[0, 1]], None, None), + (22, 1, None, [[0, 1]], None, None), + (23, 1, None, [[0, 1]], None, None), + (25, 1, None, [[0, 1]], None, None), + (26, 1, None, [[0, 1]], None, None), + (27, 1, None, [[0, 1]], None, None), + (29, 1, None, [[0, 1]], None, None), + (30, 1, None, [[0, 1]], None, None), + (31, 1, None, [[0, 1]], None, None), + (33, 1, None, [[0, 1]], None, None), + (34, 1, None, [[0, 1]], None, None), + (36, 1, None, [[0, 1]], None, None), + (37, 1, None, [[0, 1]], None, None), + (38, 1, None, [[0, 1]], None, None), + (39, 1, None, [[0, 1]], None, None), + (41, 1, None, [[0, 1]], None, None), + (43, 1, None, [[0, 1]], None, None), + (44, 0, None, [[0, 0]], None, None), + ], + }, + "report": { + "files": { + "file_00.py": [ + 0, + [0, 4, 0, 4, 0, "0", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + "file_01.py": [ + 1, + [0, 32, 32, 0, 0, "100", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + }, + "sessions": { + "0": { + "N": "Carriedforward", + "a": None, + "c": None, + "d": None, + "e": None, + "f": ["enterprise"], + "j": None, + "n": None, + "p": None, + "se": {"carriedforward_from": parent_commit.commitid}, + "st": "carriedforward", + "t": None, + "u": None, + } + }, + }, + "totals": { + "C": 0, + "M": 0, + "N": 0, + "b": 0, + "c": "88.88889", + "d": 0, + "diff": None, + "f": 2, + "h": 32, + "m": 4, + "n": 36, + "p": 0, + "s": 1, + }, + } + + @pytest.mark.django_db(databases={"default", "timeseries"}) + def test_build_report_from_commit_carriedforward_add_sessions( + self, dbsession, sample_commit_with_report_big, mocker + ): + parent_commit = 
sample_commit_with_report_big + commit = CommitFactory.create( + repository=parent_commit.repository, + parent_commit_id=parent_commit.commitid, + _report_json=None, + ) + dbsession.add(commit) + dbsession.flush() + dbsession.add(CommitReport(commit_id=commit.id_)) + dbsession.flush() + yaml = UserYaml({"flags": {"enterprise": {"carryforward": True}}}) + + def fake_possibly_shift(report, base, head): + return report + + mock_possibly_shift = mocker.patch.object( + ReportService, + "_possibly_shift_carryforward_report", + side_effect=fake_possibly_shift, + ) + report = ReportService(yaml).create_new_report_for_commit(commit) + assert report is not None + assert len(report.files) == 15 + mock_possibly_shift.assert_called() + to_merge_session = Session(flags=["enterprise"]) + report.add_session(to_merge_session) + assert sorted(report.sessions.keys()) == [2, 3, 4] + assert clear_carryforward_sessions(report, ["enterprise"], yaml) == {2, 3} + assert sorted(report.sessions.keys()) == [4] + readable_report = convert_report_to_better_readable(report) + assert readable_report == { + "archive": {}, + "report": { + "files": {}, + "sessions": { + "4": { + "N": None, + "a": None, + "c": None, + "d": None, + "e": None, + "f": ["enterprise"], + "j": None, + "n": None, + "p": None, + "st": "uploaded", + "se": {}, + "t": None, + "u": None, + } + }, + }, + "totals": { + "C": 0, + "M": 0, + "N": 0, + "b": 0, + "c": None, + "d": 0, + "diff": None, + "f": 0, + "h": 0, + "m": 0, + "n": 0, + "p": 0, + "s": 1, + }, + } + + def test_get_existing_report_for_commit_already_carriedforward_add_sessions( + self, dbsession, sample_commit_with_report_big_already_carriedforward + ): + commit = sample_commit_with_report_big_already_carriedforward + dbsession.add(commit) + dbsession.flush() + yaml = UserYaml({"flags": {"enterprise": {"carryforward": True}}}) + report = ReportService(yaml).get_existing_report_for_commit(commit) + assert report is not None + assert len(report.files) == 15 + assert 
sorted(report.sessions.keys()) == [0, 1, 2, 3] + first_to_merge_session = Session(flags=["enterprise"]) + report.add_session(first_to_merge_session) + assert sorted(report.sessions.keys()) == [0, 1, 2, 3, 4] + assert clear_carryforward_sessions(report, {"enterprise"}, yaml) == {2, 3} + assert sorted(report.sessions.keys()) == [0, 1, 4] + readable_report = convert_report_to_better_readable(report) + expected_sessions_dict = { + "0": { + "N": None, + "a": None, + "c": None, + "d": None, + "e": None, + "f": None, + "j": None, + "n": None, + "p": None, + "st": "uploaded", + "se": {}, + "t": None, + "u": None, + }, + "1": { + "N": None, + "a": None, + "c": None, + "d": None, + "e": None, + "f": ["unit"], + "j": None, + "n": None, + "p": None, + "st": "uploaded", + "se": {}, + "t": None, + "u": None, + }, + "4": { + "N": None, + "a": None, + "c": None, + "d": None, + "e": None, + "f": ["enterprise"], + "j": None, + "n": None, + "p": None, + "st": "uploaded", + "se": {}, + "t": None, + "u": None, + }, + } + assert readable_report["report"]["sessions"] == expected_sessions_dict + + newly_added_session = { + "N": None, + "a": None, + "c": None, + "d": None, + "e": None, + "f": ["unit"], + "j": None, + "n": None, + "p": None, + "st": "uploaded", + "se": {}, + "t": None, + "u": None, + } + second_to_merge_session = Session(flags=["unit"]) + report.add_session(second_to_merge_session) + assert sorted(report.sessions.keys()) == [0, 1, 3, 4] + assert clear_carryforward_sessions(report, {"unit"}, yaml) == set() + assert sorted(report.sessions.keys()) == [0, 1, 3, 4] + new_readable_report = convert_report_to_better_readable(report) + assert len(new_readable_report["report"]["sessions"]) == 4 + assert ( + new_readable_report["report"]["sessions"]["0"] + == expected_sessions_dict["0"] + ) + assert ( + new_readable_report["report"]["sessions"]["1"] + == expected_sessions_dict["1"] + ) + assert ( + new_readable_report["report"]["sessions"]["4"] + == expected_sessions_dict["4"] + ) + 
assert new_readable_report["report"]["sessions"]["3"] == newly_added_session + + @pytest.mark.django_db(databases={"default", "timeseries"}) + def test_create_new_report_for_commit_with_path_filters( + self, dbsession, sample_commit_with_report_big, mocker + ): + parent_commit = sample_commit_with_report_big + commit = CommitFactory.create( + repository=parent_commit.repository, + parent_commit_id=parent_commit.commitid, + _report_json=None, + ) + dbsession.add(commit) + dbsession.flush() + dbsession.add(CommitReport(commit_id=commit.id_)) + dbsession.flush() + yaml_dict = { + "flags": { + "enterprise": {"carryforward": True, "paths": ["file_1.*"]}, + "special_flag": {"paths": ["file_0.*"]}, + } + } + + def fake_possibly_shift(report, base, head): + return report + + mock_possibly_shift = mocker.patch.object( + ReportService, + "_possibly_shift_carryforward_report", + side_effect=fake_possibly_shift, + ) + report = ReportService(UserYaml(yaml_dict)).create_new_report_for_commit(commit) + assert report is not None + assert sorted(report.files) == sorted( + ["file_10.py", "file_11.py", "file_12.py", "file_13.py", "file_14.py"] + ) + mock_possibly_shift.assert_called() + assert report.totals == ReportTotals( + files=5, + lines=75, + hits=29, + misses=10, + partials=36, + coverage="38.66667", + branches=0, + methods=0, + messages=0, + sessions=2, + complexity=0, + complexity_total=0, + diff=0, + ) + readable_report = convert_report_to_better_readable(report) + + assert readable_report == { + "archive": { + "file_10.py": [ + (2, 1, None, [[3, 1]], None, None), + (3, "1/2", None, [[2, 0], [3, "1/2"]], None, None), + (4, "1/2", None, [[2, "1/2"]], None, None), + (6, "1/2", None, [[2, 0], [3, "1/2"]], None, None), + (7, 1, None, [[3, 1]], None, None), + (8, "1/2", None, [[3, 0], [2, "1/2"]], None, None), + (9, "1/3", None, [[2, "1/2"], [3, "1/3"]], None, None), + (10, "3/3", None, [[3, 1], [2, "1/3"]], None, None), + ], + "file_11.py": [ + (1, 0, None, [[3, 0]], None, 
None), + (3, "1/2", None, [[2, "1/2"]], None, None), + (4, "1/2", None, [[3, "1/2"]], None, None), + (5, 0, None, [[2, 0]], None, None), + (6, 0, None, [[3, 0]], None, None), + (7, "1/3", None, [[2, "1/3"]], None, None), + (8, 1, None, [[2, 1]], None, None), + (9, "1/2", None, [[2, "1/2"]], None, None), + (10, 1, None, [[3, 1]], None, None), + (11, "2/2", None, [[2, 1], [3, "1/2"]], None, None), + (12, 1, None, [[3, 1]], None, None), + (13, "1/2", None, [[3, 0], [2, "1/2"]], None, None), + (14, "1/2", None, [[3, 0], [2, "1/2"]], None, None), + (15, 0, None, [[2, 0]], None, None), + (16, 1, None, [[2, 0], [3, 1]], None, None), + (17, "1/3", None, [[3, "1/2"], [2, "1/3"]], None, None), + (18, "1/2", None, [[2, 0], [3, "1/2"]], None, None), + (19, 0, None, [[3, 0]], None, None), + (20, 1, None, [[3, 1]], None, None), + (21, "2/2", None, [[3, 1], [2, "1/2"]], None, None), + (22, "3/3", None, [[3, 1], [2, "1/3"]], None, None), + (23, "1/3", None, [[2, 0], [3, "1/3"]], None, None), + ], + "file_12.py": [ + (2, "1/2", None, [[3, "1/2"]], None, None), + (3, "1/3", None, [[3, "1/3"]], None, None), + (4, 0, None, [[2, 0]], None, None), + (5, 0, None, [[3, 0]], None, None), + (7, 1, None, [[3, 1]], None, None), + (8, "1/2", None, [[3, "1/2"], [2, "1/3"]], None, None), + (9, "1/2", None, [[2, 0], [3, "1/2"]], None, None), + (10, 0, None, [[3, 0]], None, None), + (11, "1/3", None, [[3, "1/3"]], None, None), + (12, "3/3", None, [[3, 1], [2, "1/3"]], None, None), + (13, "3/3", None, [[2, 1], [3, "1/3"]], None, None), + (14, "2/2", None, [[3, 1], [2, "1/2"]], None, None), + ], + "file_13.py": [ + (2, 1, None, [[3, 1]], None, None), + (6, 1, None, [[3, 0], [2, 1]], None, None), + (7, "1/3", None, [[2, "1/3"]], None, None), + (8, "3/3", None, [[2, 1], [3, "1/3"]], None, None), + (9, 1, None, [[3, 0], [2, 1]], None, None), + (10, "1/3", None, [[2, "1/2"], [3, "1/3"]], None, None), + (11, "1/3", None, [[2, 0], [3, "1/3"]], None, None), + (12, "1/3", None, [[2, "1/2"], [3, "1/3"]], 
None, None), + (13, "1/2", None, [[3, "1/2"]], None, None), + (14, 1, None, [[3, 1]], None, None), + (15, "2/2", None, [[3, 1], [2, "1/2"]], None, None), + ], + "file_14.py": [ + (1, 1, None, [[2, 1]], None, None), + (2, 0, None, [[2, 0]], None, None), + (3, "1/3", None, [[3, 0], [2, "1/3"]], None, None), + (5, "2/2", None, [[2, 1], [3, "1/2"]], None, None), + (6, "1/3", None, [[3, "1/3"]], None, None), + (7, 1, None, [[2, 1]], None, None), + (8, "1/3", None, [[2, "1/3"]], None, None), + (9, "1/2", None, [[2, "1/2"]], None, None), + (10, 1, None, [[2, 1]], None, None), + (11, "3/3", None, [[3, 1], [2, "1/3"]], None, None), + (12, 1, None, [[2, 0], [3, 1]], None, None), + (13, "1/3", None, [[3, "1/3"]], None, None), + (14, "1/3", None, [[3, "1/3"]], None, None), + (15, 0, None, [[2, 0]], None, None), + (16, "1/2", None, [[2, 0], [3, "1/2"]], None, None), + (17, "1/3", None, [[3, 0], [2, "1/3"]], None, None), + (18, "1/3", None, [[3, 0], [2, "1/3"]], None, None), + (19, "1/2", None, [[3, 0], [2, "1/2"]], None, None), + (20, "3/3", None, [[3, 1], [2, "1/3"]], None, None), + (21, "1/3", None, [[2, "1/2"], [3, "1/3"]], None, None), + (22, "1/3", None, [[3, "1/2"], [2, "1/3"]], None, None), + (23, 1, None, [[2, 0], [3, 1]], None, None), + ], + }, + "report": { + "files": { + "file_10.py": [ + 0, + [0, 8, 3, 0, 5, "37.50000", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + "file_11.py": [ + 1, + [0, 22, 8, 5, 9, "36.36364", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + "file_12.py": [ + 2, + [0, 12, 4, 3, 5, "33.33333", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + "file_13.py": [ + 3, + [0, 11, 6, 0, 5, "54.54545", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + "file_14.py": [ + 4, + [0, 22, 8, 2, 12, "36.36364", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + }, + "sessions": { + "2": { + "a": None, + "c": None, + "d": None, + "e": None, + "f": ["enterprise"], + "j": None, + "N": "Carriedforward", + "n": None, + "p": None, + "se": {"carriedforward_from": parent_commit.commitid}, 
+ "st": "carriedforward", + "t": None, + "u": None, + }, + "3": { + "a": None, + "c": None, + "d": None, + "e": None, + "f": ["unit", "enterprise"], + "j": None, + "N": "Carriedforward", + "n": None, + "p": None, + "se": {"carriedforward_from": parent_commit.commitid}, + "st": "carriedforward", + "t": None, + "u": None, + }, + }, + }, + "totals": { + "b": 0, + "c": "38.66667", + "C": 0, + "d": 0, + "diff": None, + "f": 5, + "h": 29, + "M": 0, + "m": 10, + "N": 0, + "n": 75, + "p": 36, + "s": 2, + }, + } + + def test_create_new_report_for_commit_no_flags( + self, dbsession, sample_commit_with_report_big, mocker + ): + parent_commit = sample_commit_with_report_big + commit = CommitFactory.create( + repository=parent_commit.repository, + parent_commit_id=parent_commit.commitid, + _report_json=None, + ) + dbsession.add(commit) + dbsession.flush() + yaml_dict = { + "flags": { + "enterprise": {"paths": ["file_1.*"]}, + "special_flag": {"paths": ["file_0.*"]}, + } + } + mock_possibly_shift = mocker.patch.object( + ReportService, "_possibly_shift_carryforward_report" + ) + report = ReportService(UserYaml(yaml_dict)).create_new_report_for_commit(commit) + assert report is not None + assert sorted(report.files) == [] + mock_possibly_shift.assert_not_called() + assert report.totals == ReportTotals( + files=0, + lines=0, + hits=0, + misses=0, + partials=0, + coverage=None, + branches=0, + methods=0, + messages=0, + sessions=0, + complexity=0, + complexity_total=0, + diff=0, + ) + readable_report = convert_report_to_better_readable(report) + assert readable_report == { + "archive": {}, + "report": {"files": {}, "sessions": {}}, + "totals": { + "C": 0, + "M": 0, + "N": 0, + "b": 0, + "c": None, + "d": 0, + "diff": None, + "f": 0, + "h": 0, + "m": 0, + "n": 0, + "p": 0, + "s": 0, + }, + } + + @pytest.mark.django_db(databases={"default", "timeseries"}) + def test_create_new_report_for_commit_no_parent( + self, dbsession, sample_commit_with_report_big, mocker + ): + parent_commit = 
sample_commit_with_report_big + commit = CommitFactory.create( + repository=parent_commit.repository, + parent_commit_id=None, + _report_json=None, + ) + dbsession.add(commit) + dbsession.flush() + yaml_dict = {"flags": {"enterprise": {"carryforward": True}}} + mock_possibly_shift = mocker.patch.object( + ReportService, "_possibly_shift_carryforward_report" + ) + report = ReportService(UserYaml(yaml_dict)).create_new_report_for_commit(commit) + assert report is not None + assert sorted(report.files) == [] + mock_possibly_shift.assert_not_called() + assert report.totals == ReportTotals( + files=0, + lines=0, + hits=0, + misses=0, + partials=0, + coverage=None, + branches=0, + methods=0, + messages=0, + sessions=0, + complexity=0, + complexity_total=0, + diff=0, + ) + readable_report = convert_report_to_better_readable(report) + assert readable_report == { + "archive": {}, + "report": {"files": {}, "sessions": {}}, + "totals": { + "C": 0, + "M": 0, + "N": 0, + "b": 0, + "c": None, + "d": 0, + "diff": None, + "f": 0, + "h": 0, + "m": 0, + "n": 0, + "p": 0, + "s": 0, + }, + } + + @pytest.mark.django_db(databases={"default", "timeseries"}) + def test_create_new_report_for_commit_parent_not_ready( + self, dbsession, sample_commit_with_report_big, mocker + ): + grandparent_commit = sample_commit_with_report_big + parent_commit = CommitFactory.create( + repository=grandparent_commit.repository, + parent_commit_id=grandparent_commit.commitid, + _report_json=None, + state="pending", + ) + commit = CommitFactory.create( + repository=grandparent_commit.repository, + parent_commit_id=parent_commit.commitid, + _report_json=None, + ) + dbsession.add(parent_commit) + dbsession.add(commit) + dbsession.flush() + dbsession.add(CommitReport(commit_id=commit.id_)) + dbsession.flush() + yaml_dict = {"flags": {"enterprise": {"carryforward": True}}} + mock_possibly_shift = mocker.patch.object( + ReportService, "_possibly_shift_carryforward_report" + ) + report = 
ReportService(UserYaml(yaml_dict)).create_new_report_for_commit(commit) + assert report is not None + mock_possibly_shift.assert_called() + assert sorted(report.files) == [ + "file_00.py", + "file_01.py", + "file_02.py", + "file_03.py", + "file_04.py", + "file_05.py", + "file_06.py", + "file_07.py", + "file_08.py", + "file_09.py", + "file_10.py", + "file_11.py", + "file_12.py", + "file_13.py", + "file_14.py", + ] + assert report.totals == ReportTotals( + files=15, + lines=188, + hits=68, + misses=26, + partials=94, + coverage="36.17021", + branches=0, + methods=0, + messages=0, + sessions=2, + complexity=0, + complexity_total=0, + diff=0, + ) + readable_report = convert_report_to_better_readable(report) + assert readable_report["report"] == { + "files": { + "file_00.py": [ + 0, + [0, 14, 4, 5, 5, "28.57143", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + "file_01.py": [ + 1, + [0, 10, 3, 0, 7, "30.00000", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + "file_02.py": [ + 2, + [0, 11, 5, 0, 6, "45.45455", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + "file_03.py": [ + 3, + [0, 15, 4, 2, 9, "26.66667", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + "file_04.py": [ + 4, + [0, 10, 3, 1, 6, "30.00000", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + "file_05.py": [ + 5, + [0, 13, 3, 2, 8, "23.07692", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + "file_06.py": [ + 6, + [0, 7, 5, 0, 2, "71.42857", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + "file_07.py": [ + 7, + [0, 11, 5, 1, 5, "45.45455", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + "file_08.py": [ + 8, + [0, 11, 2, 4, 5, "18.18182", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + "file_09.py": [ + 9, + [0, 11, 5, 1, 5, "45.45455", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + "file_10.py": [ + 10, + [0, 8, 3, 0, 5, "37.50000", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + "file_11.py": [ + 11, + [0, 22, 8, 5, 9, "36.36364", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + "file_12.py": [ + 12, + [0, 12, 4, 3, 5, "33.33333", 0, 0, 0, 0, 0, 
0, 0], + None, + None, + ], + "file_13.py": [ + 13, + [0, 11, 6, 0, 5, "54.54545", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + "file_14.py": [ + 14, + [0, 22, 8, 2, 12, "36.36364", 0, 0, 0, 0, 0, 0, 0], + None, + None, + ], + }, + "sessions": { + "2": { + "a": None, + "c": None, + "d": None, + "e": None, + "f": ["enterprise"], + "j": None, + "N": "Carriedforward", + "n": None, + "p": None, + "se": {"carriedforward_from": grandparent_commit.commitid}, + "st": "carriedforward", + "t": None, + "u": None, + }, + "3": { + "a": None, + "c": None, + "d": None, + "e": None, + "f": ["unit", "enterprise"], + "j": None, + "N": "Carriedforward", + "n": None, + "p": None, + "se": {"carriedforward_from": grandparent_commit.commitid}, + "st": "carriedforward", + "t": None, + "u": None, + }, + }, + } + + @pytest.mark.django_db(databases={"default", "timeseries"}) + def test_create_new_report_for_commit_parent_not_ready_but_skipped( + self, dbsession, sample_commit_with_report_big, mocker + ): + parent_commit = sample_commit_with_report_big + parent_commit.state = "skipped" + dbsession.flush() + commit = CommitFactory.create( + repository=parent_commit.repository, + parent_commit_id=parent_commit.commitid, + _report_json=None, + ) + dbsession.add(parent_commit) + dbsession.add(commit) + dbsession.flush() + dbsession.add(CommitReport(commit_id=commit.id_)) + dbsession.flush() + yaml_dict = {"flags": {"enterprise": {"carryforward": True}}} + mock_possibly_shift = mocker.patch.object( + ReportService, "_possibly_shift_carryforward_report" + ) + report = ReportService(UserYaml(yaml_dict)).create_new_report_for_commit(commit) + assert report is not None + mock_possibly_shift.assert_called() + assert sorted(report.files) == sorted( + [ + "file_00.py", + "file_01.py", + "file_02.py", + "file_03.py", + "file_04.py", + "file_05.py", + "file_06.py", + "file_07.py", + "file_08.py", + "file_09.py", + "file_10.py", + "file_11.py", + "file_12.py", + "file_13.py", + "file_14.py", + ] + ) + 
assert report.totals == ReportTotals( + files=15, + lines=188, + hits=68, + misses=26, + partials=94, + coverage="36.17021", + branches=0, + methods=0, + messages=0, + sessions=2, + complexity=0, + complexity_total=0, + diff=0, + ) + readable_report = convert_report_to_better_readable(report) + expected_results_report = { + "sessions": { + "2": { + "N": "Carriedforward", + "a": None, + "c": None, + "d": readable_report["report"]["sessions"]["2"]["d"], + "e": None, + "f": ["enterprise"], + "j": None, + "n": None, + "p": None, + "st": "carriedforward", + "se": {"carriedforward_from": parent_commit.commitid}, + "t": None, + "u": None, + }, + "3": { + "N": "Carriedforward", + "a": None, + "c": None, + "d": readable_report["report"]["sessions"]["3"]["d"], + "e": None, + "f": ["unit", "enterprise"], + "j": None, + "n": None, + "p": None, + "st": "carriedforward", + "se": {"carriedforward_from": parent_commit.commitid}, + "t": None, + "u": None, + }, + } + } + assert ( + expected_results_report["sessions"]["2"] + == readable_report["report"]["sessions"]["2"] + ) + assert ( + expected_results_report["sessions"]["3"] + == readable_report["report"]["sessions"]["3"] + ) + assert ( + expected_results_report["sessions"] == readable_report["report"]["sessions"] + ) + + @pytest.mark.django_db(databases={"default", "timeseries"}) + def test_create_new_report_for_commit_too_many_ancestors_not_ready( + self, dbsession, sample_commit_with_report_big, mocker + ): + grandparent_commit = sample_commit_with_report_big + current_commit = grandparent_commit + for i in range(10): + current_commit = CommitFactory.create( + repository=grandparent_commit.repository, + parent_commit_id=current_commit.commitid, + _report_json=None, + state="pending", + ) + dbsession.add(current_commit) + commit = CommitFactory.create( + repository=grandparent_commit.repository, + parent_commit_id=current_commit.commitid, + _report_json=None, + ) + dbsession.add(commit) + dbsession.flush() + yaml_dict = {"flags": 
{"enterprise": {"carryforward": True}}} + mock_possibly_shift = mocker.patch.object( + ReportService, "_possibly_shift_carryforward_report" + ) + report = ReportService(UserYaml(yaml_dict)).create_new_report_for_commit(commit) + assert report is not None + mock_possibly_shift.assert_not_called() + assert sorted(report.files) == [] + readable_report = convert_report_to_better_readable(report) + + assert readable_report["report"] == {"files": {}, "sessions": {}} + + @pytest.mark.django_db(databases={"default", "timeseries"}) + def test_create_new_report_parent_had_no_parent_and_pending(self, dbsession): + current_commit = CommitFactory.create(parent_commit_id=None, state="pending") + dbsession.add(current_commit) + for i in range(5): + current_commit = CommitFactory.create( + repository=current_commit.repository, + parent_commit_id=current_commit.commitid, + _report_json=None, + state="pending", + ) + dbsession.add(current_commit) + commit = CommitFactory.create( + repository=current_commit.repository, + parent_commit_id=current_commit.commitid, + _report_json=None, + ) + dbsession.add(commit) + dbsession.flush() + yaml_dict = {"flags": {"enterprise": {"carryforward": True}}} + with pytest.raises(NotReadyToBuildReportYetError): + ReportService(UserYaml(yaml_dict)).create_new_report_for_commit(commit) + + @pytest.mark.django_db(databases={"default", "timeseries"}) + def test_create_new_report_for_commit_potential_cf_but_not_real_cf( + self, dbsession, sample_commit_with_report_big + ): + parent_commit = sample_commit_with_report_big + dbsession.flush() + commit = CommitFactory.create( + repository=parent_commit.repository, + parent_commit_id=parent_commit.commitid, + _report_json=None, + ) + dbsession.add(parent_commit) + dbsession.add(commit) + dbsession.flush() + dbsession.add(CommitReport(commit_id=commit.id_)) + dbsession.flush() + yaml_dict = { + "flag_management": { + "default_rules": {"carryforward": False}, + "individual_flags": [{"name": "banana", 
"carryforward": True}], + } + } + report = ReportService(UserYaml(yaml_dict)).create_new_report_for_commit(commit) + assert report.is_empty() + + @pytest.mark.django_db(databases={"default", "timeseries"}) + def test_create_new_report_for_commit_parent_has_no_report( + self, mock_storage, dbsession + ): + parent = CommitFactory.create() + dbsession.add(parent) + dbsession.flush() + commit = CommitFactory.create( + parent_commit_id=parent.commitid, repository=parent.repository + ) + dbsession.add(commit) + dbsession.flush() + report_service = ReportService( + UserYaml({"flags": {"enterprise": {"carryforward": True}}}) + ) + r = report_service.create_new_report_for_commit(commit) + assert r.files == [] + + def test_save_full_report( + self, dbsession, mock_storage, sample_report, mock_configuration + ): + mock_configuration.set_params( + { + "setup": { + "save_report_data_in_storage": { + "only_codecov": False, + }, + } + } + ) + commit = CommitFactory.create() + dbsession.add(commit) + dbsession.flush() + current_report_row = CommitReport(commit_id=commit.id_) + dbsession.add(current_report_row) + dbsession.flush() + sample_report.sessions[0].archive = "path/to/upload/location" + sample_report.sessions[ + 0 + ].name = "this name contains more than 100 chars 1111111111111111111111111111111111111111111111111111111111111this is more than 100" + report_service = ReportService({}) + res = report_service.save_full_report(commit, sample_report) + storage_hash = ArchiveService(commit.repository).storage_hash + assert res == { + "url": f"v4/repos/{storage_hash}/commits/{commit.commitid}/chunks.txt" + } + assert len(current_report_row.uploads) == 2 + first_upload = dbsession.query(Upload).filter_by( + report_id=current_report_row.id_, provider="circleci" + )[0] + second_upload = dbsession.query(Upload).filter_by( + report_id=current_report_row.id_, provider="travis" + )[0] + dbsession.refresh(second_upload) + dbsession.refresh(first_upload) + assert first_upload.build_code == 
"aycaramba" + assert first_upload.build_url is None + assert first_upload.env is None + assert first_upload.job_code is None + assert ( + first_upload.name + == "this name contains more than 100 chars 1111111111111111111111111111111111111111111111111111111111111" + ) + assert first_upload.provider == "circleci" + assert first_upload.report_id == current_report_row.id_ + assert first_upload.state == "complete" + assert first_upload.storage_path == "path/to/upload/location" + assert first_upload.order_number == 0 + assert len(first_upload.flags) == 1 + assert first_upload.flags[0].repository == commit.repository + assert first_upload.flags[0].flag_name == "unit" + assert first_upload.totals is not None + assert first_upload.totals.branches == 0 + assert first_upload.totals.coverage == Decimal("0.0") + assert first_upload.totals.hits == 0 + assert first_upload.totals.lines == 10 + assert first_upload.totals.methods == 0 + assert first_upload.totals.misses == 0 + assert first_upload.totals.partials == 0 + assert first_upload.totals.files == 2 + assert first_upload.upload_extras == {} + assert first_upload.upload_type == "uploaded" + assert second_upload.build_code == "poli" + assert second_upload.build_url is None + assert second_upload.env is None + assert second_upload.job_code is None + assert second_upload.name is None + assert second_upload.provider == "travis" + assert second_upload.report_id == current_report_row.id_ + assert second_upload.state == "complete" + assert second_upload.storage_path == "" + assert second_upload.order_number == 1 + assert len(second_upload.flags) == 1 + assert second_upload.flags[0].repository == commit.repository + assert second_upload.flags[0].flag_name == "integration" + assert second_upload.totals is None + assert second_upload.upload_extras == {} + assert second_upload.upload_type == "carriedforward" + + def test_save_report_empty_report(self, dbsession, mock_storage): + report = Report() + commit = CommitFactory.create() + 
dbsession.add(commit) + dbsession.flush() + current_report_row = CommitReport(commit_id=commit.id_) + dbsession.add(current_report_row) + dbsession.flush() + report_service = ReportService({}) + res = report_service.save_report(commit, report) + storage_hash = ArchiveService(commit.repository).storage_hash + assert res == { + "url": f"v4/repos/{storage_hash}/commits/{commit.commitid}/chunks.txt" + } + assert commit.totals == { + "f": 0, + "n": 0, + "h": 0, + "m": 0, + "p": 0, + "c": 0, + "b": 0, + "d": 0, + "M": 0, + "s": 0, + "C": 0, + "N": 0, + "diff": None, + } + assert commit.report_json == { + "files": {}, + "sessions": {}, + "totals": [0, 0, 0, 0, 0, None, 0, 0, 0, 0, 0, 0, None], + } + assert res["url"] in mock_storage.storage["archive"] + assert mock_storage.storage["archive"][res["url"]] == b"" + + def test_save_report(self, dbsession, mock_storage, sample_report): + commit = CommitFactory.create() + dbsession.add(commit) + dbsession.flush() + current_report_row = CommitReport(commit_id=commit.id_) + dbsession.add(current_report_row) + dbsession.flush() + report_service = ReportService({}) + res = report_service.save_report(commit, sample_report) + storage_hash = ArchiveService(commit.repository).storage_hash + + assert res == { + "url": f"v4/repos/{storage_hash}/commits/{commit.commitid}/chunks.txt" + } + assert len(current_report_row.uploads) == 0 + assert commit.report_json == { + "files": { + "file_1.go": [ + 0, + [0, 8, 5, 3, 0, "62.50000", 0, 0, 0, 0, 10, 2, 0], + None, + None, + ], + "file_2.py": [ + 1, + [0, 2, 1, 0, 1, "50.00000", 1, 0, 0, 0, 0, 0, 0], + None, + None, + ], + }, + "sessions": { + "0": { + "t": [2, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], + "d": None, + "a": None, + "f": ["unit"], + "c": "circleci", + "n": "aycaramba", + "N": None, + "j": None, + "u": None, + "p": None, + "e": None, + "st": "uploaded", + "se": {}, + }, + "1": { + "t": None, + "d": None, + "a": None, + "f": ["integration"], + "c": "travis", + "n": "poli", + "N": None, + 
"j": None, + "u": None, + "p": None, + "e": None, + "st": "carriedforward", + "se": {}, + }, + }, + "totals": [2, 10, 6, 3, 1, "60.00000", 1, 0, 0, 2, 10, 2, None], + } + assert res["url"] in mock_storage.storage["archive"] + expected_content = "\n".join( + [ + '{"present_sessions":[0,1]}', + "[1,null,[[0,1]],null,[10,2]]", + "[0,null,[[0,1]]]", + "[1,null,[[0,1]]]", + "", + "[1,null,[[0,1],[1,1]]]", + "[0,null,[[0,1]]]", + "", + "[1,null,[[0,1],[1,0]]]", + "[1,null,[[0,1]]]", + "[0,null,[[0,1]]]", + "<<<<< end_of_chunk >>>>>", + '{"present_sessions":[0]}', + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "[1,null,[[0,1]]]", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "", + '["1/2","b",[[0,1]]]', + ] + ) + assert mock_storage.storage["archive"][res["url"]].decode() == expected_content + + def test_initialize_and_save_report_brand_new(self, dbsession, mock_storage): + commit = CommitFactory.create() + dbsession.add(commit) + dbsession.flush() + report_service = ReportService({}) + r = report_service.initialize_and_save_report(commit) + assert r is not None + assert len(mock_storage.storage["archive"]) == 0 + + def test_initialize_and_save_report_report_but_no_details( + self, dbsession, mock_storage + ): + commit = CommitFactory.create() + dbsession.add(commit) + dbsession.flush() + report_row = CommitReport(commit_id=commit.id_) + dbsession.add(report_row) + dbsession.flush() + report_service = ReportService({}) + r = report_service.initialize_and_save_report(commit) + dbsession.refresh(report_row) + assert r is not None + assert len(mock_storage.storage["archive"]) == 0 + + @pytest.mark.django_db + def test_initialize_and_save_report_carryforward_needed( + self, dbsession, sample_commit_with_report_big, mock_storage + ): + parent_commit = sample_commit_with_report_big 
+ commit = CommitFactory.create( + _report_json=None, + parent_commit_id=parent_commit.commitid, + repository=parent_commit.repository, + ) + dbsession.add(commit) + dbsession.flush() + yaml_dict = {"flags": {"enterprise": {"carryforward": True}}} + report_service = ReportService(UserYaml(yaml_dict)) + r = report_service.initialize_and_save_report(commit) + assert len(r.uploads) == 2 + first_upload = dbsession.query(Upload).filter_by( + report_id=r.id_, order_number=2 + )[0] + second_upload = dbsession.query(Upload).filter_by( + report_id=r.id_, order_number=3 + )[0] + assert first_upload.build_code is None + assert first_upload.build_url is None + assert first_upload.env is None + assert first_upload.job_code is None + assert first_upload.name == "Carriedforward" + assert first_upload.provider is None + assert first_upload.report_id == r.id_ + assert first_upload.state == "complete" + assert first_upload.storage_path == "" + assert first_upload.order_number == 2 + assert len(first_upload.flags) == 1 + assert first_upload.flags[0].repository == commit.repository + assert first_upload.flags[0].flag_name == "enterprise" + assert first_upload.totals is None + assert first_upload.upload_extras == { + "carriedforward_from": parent_commit.commitid + } + assert first_upload.upload_type == "carriedforward" + assert second_upload.build_code is None + assert second_upload.build_url is None + assert second_upload.env is None + assert second_upload.job_code is None + assert second_upload.name == "Carriedforward" + assert second_upload.provider is None + assert second_upload.report_id == r.id_ + assert second_upload.state == "complete" + assert second_upload.storage_path == "" + assert second_upload.order_number == 3 + assert len(second_upload.flags) == 2 + assert sorted([f.flag_name for f in second_upload.flags]) == [ + "enterprise", + "unit", + ] + assert second_upload.totals is None + assert second_upload.upload_extras == { + "carriedforward_from": parent_commit.commitid + } 
+ assert second_upload.upload_type == "carriedforward" + + @pytest.mark.django_db + def test_initialize_and_save_report_report_but_no_details_carryforward_needed( + self, dbsession, sample_commit_with_report_big, mock_storage + ): + parent_commit = sample_commit_with_report_big + commit = CommitFactory.create( + _report_json=None, + parent_commit_id=parent_commit.commitid, + repository=parent_commit.repository, + ) + dbsession.add(commit) + dbsession.flush() + report_row = CommitReport(commit_id=commit.id_) + dbsession.add(report_row) + dbsession.flush() + yaml_dict = {"flags": {"enterprise": {"carryforward": True}}} + report_service = ReportService(UserYaml(yaml_dict)) + r = report_service.initialize_and_save_report(commit) + assert len(r.uploads) == 2 + first_upload = dbsession.query(Upload).filter_by( + report_id=r.id_, order_number=2 + )[0] + second_upload = dbsession.query(Upload).filter_by( + report_id=r.id_, order_number=3 + )[0] + assert first_upload.build_code is None + assert first_upload.build_url is None + assert first_upload.env is None + assert first_upload.job_code is None + assert first_upload.name == "Carriedforward" + assert first_upload.provider is None + assert first_upload.report_id == r.id_ + assert first_upload.state == "complete" + assert first_upload.storage_path == "" + assert first_upload.order_number == 2 + assert len(first_upload.flags) == 1 + assert first_upload.flags[0].repository == commit.repository + assert first_upload.flags[0].flag_name == "enterprise" + assert first_upload.totals is None + assert first_upload.upload_extras == { + "carriedforward_from": parent_commit.commitid + } + assert first_upload.upload_type == "carriedforward" + assert second_upload.build_code is None + assert second_upload.build_url is None + assert second_upload.env is None + assert second_upload.job_code is None + assert second_upload.name == "Carriedforward" + assert second_upload.provider is None + assert second_upload.report_id == r.id_ + assert 
second_upload.state == "complete" + assert second_upload.storage_path == "" + assert second_upload.order_number == 3 + assert len(second_upload.flags) == 2 + assert sorted([f.flag_name for f in second_upload.flags]) == [ + "enterprise", + "unit", + ] + assert second_upload.totals is None + assert second_upload.upload_extras == { + "carriedforward_from": parent_commit.commitid + } + assert second_upload.upload_type == "carriedforward" + + def test_initialize_and_save_report_needs_backporting( + self, dbsession, sample_commit_with_report_big, mock_storage, mocker + ): + commit = sample_commit_with_report_big + report_service = ReportService({}) + r = report_service.initialize_and_save_report(commit) + assert r is not None + assert len(r.uploads) == 4 + first_upload = dbsession.query(Upload).filter_by(order_number=0).first() + assert sorted([f.flag_name for f in first_upload.flags]) == [] + second_upload = dbsession.query(Upload).filter_by(order_number=1).first() + assert sorted([f.flag_name for f in second_upload.flags]) == ["unit"] + third_upload = dbsession.query(Upload).filter_by(order_number=2).first() + assert sorted([f.flag_name for f in third_upload.flags]) == ["enterprise"] + fourth_upload = dbsession.query(Upload).filter_by(order_number=3).first() + assert sorted([f.flag_name for f in fourth_upload.flags]) == [ + "enterprise", + "unit", + ] + assert ( + dbsession.query(RepositoryFlag) + .filter_by(repository_id=commit.repoid) + .count() + == 2 + ) + storage_keys = mock_storage.storage["archive"].keys() + assert any(key.endswith("chunks.txt") for key in storage_keys) + + def test_initialize_and_save_report_existing_report( + self, mock_storage, sample_report, dbsession, mocker + ): + mocker_save_full_report = mocker.patch.object(ReportService, "save_full_report") + commit = CommitFactory.create() + dbsession.add(commit) + dbsession.flush() + current_report_row = CommitReport(commit_id=commit.id_) + dbsession.add(current_report_row) + dbsession.flush() + 
report_service = ReportService({}) + report_service.save_report(commit, sample_report) + res = report_service.initialize_and_save_report(commit) + assert res == current_report_row + assert not mocker_save_full_report.called + + @pytest.mark.django_db + def test_create_report_upload(self, dbsession): + arguments = { + "branch": "master", + "build": "646048900", + "build_url": "http://github.com/greenlantern/reponame/actions/runs/646048900", + "cmd_args": "n,F,Q,C", + "commit": "1280bf4b8d596f41b101ac425758226c021876da", + "job": "thisjob", + "flags": ["unittest"], + "name": "this name contains more than 100 chars 1111111111111111111111111111111111111111111111111111111111111this is more than 100", + "owner": "greenlantern", + "package": "github-action-20210309-2b87ace", + "pr": "33", + "repo": "reponame", + "reportid": "6e2b6449-4e60-43f8-80ae-2c03a5c03d92", + "service": "github-actions", + "slug": "greenlantern/reponame", + "url": "v4/raw/2021-03-12/C00AE6C87E34AF41A6D38D154C609782/1280bf4b8d596f41b101ac425758226c021876da/6e2b6449-4e60-43f8-80ae-2c03a5c03d92.txt", + "using_global_token": "false", + "version": "v4", + } + commit = CommitFactory.create() + dbsession.add(commit) + dbsession.flush() + current_report_row = CommitReport(commit_id=commit.id_) + dbsession.add(current_report_row) + dbsession.flush() + report_service = ReportService({}) + res = report_service.create_report_upload(arguments, current_report_row) + dbsession.flush() + assert res.build_code == "646048900" + assert ( + res.build_url + == "http://github.com/greenlantern/reponame/actions/runs/646048900" + ) + assert res.env is None + assert res.job_code == "thisjob" + assert ( + res.name + == "this name contains more than 100 chars 1111111111111111111111111111111111111111111111111111111111111" + ) + assert res.provider == "github-actions" + assert res.report_id == current_report_row.id_ + assert res.state == "started" + assert ( + res.storage_path + == 
"v4/raw/2021-03-12/C00AE6C87E34AF41A6D38D154C609782/1280bf4b8d596f41b101ac425758226c021876da/6e2b6449-4e60-43f8-80ae-2c03a5c03d92.txt" + ) + assert res.order_number is None + assert res.totals is None + assert res.upload_extras == {} + assert res.upload_type == "uploaded" + + def test_shift_carryforward_report( + self, dbsession, sample_report, mocker, mock_repo_provider + ): + parent_commit = CommitFactory() + commit = CommitFactory(parent_commit_id=parent_commit.commitid) + dbsession.add(parent_commit) + dbsession.add(commit) + dbsession.flush() + fake_diff = { + "diff": { + "files": { + "file_1.go": { + "type": "modified", + "before": None, + "segments": [ + { + "header": [3, 3, 3, 4], + "lines": [ + " some go code in line 3", + "-this line was removed", + "+this line was added", + "+this line was also added", + " ", + ], + }, + { + "header": [9, 1, 10, 5], + "lines": [ + " some go code in line 9", + "+add", + "+add", + "+add", + "+add", + ], + }, + ], + } + } + } + } + + def fake_get_compare(base, head): + assert base == parent_commit.commitid + assert head == commit.commitid + return fake_diff + + mock_repo_provider.get_compare = mock.AsyncMock(side_effect=fake_get_compare) + result = ReportService({})._possibly_shift_carryforward_report( + sample_report, parent_commit, commit + ) + readable_report = convert_report_to_better_readable(result) + assert readable_report["archive"] == { + "file_1.go": [ + (1, 1, None, [[0, 1]], None, (10, 2)), + (2, 0, None, [[0, 1]], None, None), + (3, 1, None, [[0, 1]], None, None), + (6, 1, None, [[0, 1], [1, 1]], None, None), + (7, 0, None, [[0, 1]], None, None), + (9, 1, None, [[0, 1], [1, 0]], None, None), + (10, 1, None, [[0, 1]], None, None), + (15, 0, None, [[0, 1]], None, None), + ], + "file_2.py": [ + (12, 1, None, [[0, 1]], None, None), + (51, "1/2", "b", [[0, 1]], None, None), + ], + } + + @pytest.mark.django_db(databases={"default", "timeseries"}) + def test_create_new_report_for_commit_and_shift( + self, dbsession, 
sample_report, mocker, mock_repo_provider, mock_storage + ): + parent_commit = CommitFactory() + parent_commit_report = CommitReport(commit_id=parent_commit.id_) + dbsession.add(parent_commit) + dbsession.add(parent_commit_report) + dbsession.flush() + + commit = CommitFactory.create( + repository=parent_commit.repository, + parent_commit_id=parent_commit.commitid, + _report_json=None, + ) + dbsession.add(commit) + dbsession.flush() + dbsession.add(CommitReport(commit_id=commit.id_)) + dbsession.flush() + yaml_dict = { + "flags": { + "integration": {"carryforward": True}, + "unit": {"carryforward": True}, + } + } + + fake_diff = { + "diff": { + "files": { + "file_1.go": { + "type": "modified", + "before": None, + "segments": [ + { + "header": [3, 3, 3, 4], + "lines": [ + " some go code in line 3", + "-this line was removed", + "+this line was added", + "+this line was also added", + " ", + ], + }, + { + "header": [9, 1, 10, 5], + "lines": [ + " some go code in line 9", + "+add", + "+add", + "+add", + "+add", + ], + }, + ], + } + } + } + } + + def fake_get_compare(base, head): + assert base == parent_commit.commitid + assert head == commit.commitid + return fake_diff + + mock_repo_provider.get_compare = mock.AsyncMock(side_effect=fake_get_compare) + + mock_get_report = mocker.patch.object( + ReportService, "get_existing_report_for_commit", return_value=sample_report + ) + + result = ReportService(UserYaml(yaml_dict)).create_new_report_for_commit(commit) + assert mock_get_report.call_count == 1 + readable_report = convert_report_to_better_readable(result) + assert readable_report["archive"] == { + "file_1.go": [ + (1, 1, None, [[0, 1]], None, (10, 2)), + (2, 0, None, [[0, 1]], None, None), + (3, 1, None, [[0, 1]], None, None), + (6, 1, None, [[0, 1], [1, 1]], None, None), + (7, 0, None, [[0, 1]], None, None), + (9, 1, None, [[0, 1], [1, 0]], None, None), + (10, 1, None, [[0, 1]], None, None), + (15, 0, None, [[0, 1]], None, None), + ], + "file_2.py": [ + (12, 1, 
None, [[0, 1]], None, None), + (51, "1/2", "b", [[0, 1]], None, None), + ], + } + + def test_possibly_shift_carryforward_report_cant_get_diff( + self, dbsession, sample_report, mocker + ): + parent_commit = CommitFactory() + commit = CommitFactory(parent_commit_id=parent_commit.commitid) + dbsession.add(parent_commit) + dbsession.add(commit) + dbsession.flush() + mock_log_error = mocker.patch.object(report_log, "error") + + def raise_error(*args, **kwargs): + raise TorngitRateLimitError(response_data="", message="error", reset=None) + + fake_provider = mocker.Mock() + fake_provider.get_compare = raise_error + mock_provider_service = mocker.patch( + "services.report.get_repo_provider_service", return_value=fake_provider + ) + result = ReportService({})._possibly_shift_carryforward_report( + sample_report, parent_commit, commit + ) + assert result == sample_report + mock_provider_service.assert_called() + mock_log_error.assert_called_with( + "Failed to shift carryforward report lines.", + extra={ + "reason": "Can't get diff", + "commit": commit.commitid, + "error": str( + TorngitRateLimitError(response_data="", message="error", reset=None) + ), + "error_type": type( + TorngitRateLimitError(response_data="", message="error", reset=None) + ), + }, + ) + + def test_possibly_shift_carryforward_report_bot_error( + self, dbsession, sample_report, mocker + ): + parent_commit = CommitFactory() + commit = CommitFactory(parent_commit_id=parent_commit.commitid) + dbsession.add(parent_commit) + dbsession.add(commit) + dbsession.flush() + mock_log_error = mocker.patch.object(report_log, "error") + + def raise_error(*args, **kwargs): + raise RepositoryWithoutValidBotError() + + mock_provider_service = mocker.patch( + "services.report.get_repo_provider_service", side_effect=raise_error + ) + result = ReportService({})._possibly_shift_carryforward_report( + sample_report, parent_commit, commit + ) + assert result == sample_report + mock_provider_service.assert_called() + 
mock_log_error.assert_called_with( + "Failed to shift carryforward report lines", + extra={ + "reason": "Can't get provider_service", + "commit": commit.commitid, + "error": str(RepositoryWithoutValidBotError()), + }, + ) + + def test_possibly_shift_carryforward_report_random_processing_error( + self, dbsession, mocker, mock_repo_provider + ): + parent_commit = CommitFactory() + commit = CommitFactory(parent_commit_id=parent_commit.commitid) + dbsession.add(parent_commit) + dbsession.add(commit) + dbsession.flush() + mock_log_error = mocker.patch.object(report_log, "error") + + def raise_error(*args, **kwargs): + raise Exception("Very random and hard to get exception") + + mock_repo_provider.get_compare = mock.AsyncMock( + side_effect=lambda *args, **kwargs: {"diff": {}} + ) + mock_report = mocker.Mock() + mock_report.shift_lines_by_diff = raise_error + result = ReportService({})._possibly_shift_carryforward_report( + mock_report, parent_commit, commit + ) + assert result == mock_report + mock_log_error.assert_called_with( + "Failed to shift carryforward report lines.", + exc_info=True, + extra={ + "reason": "Unknown", + "commit": commit.commitid, + }, + ) + + def test_possibly_shift_carryforward_report_softtimelimit_reraised( + self, dbsession, mocker, mock_repo_provider + ): + parent_commit = CommitFactory() + commit = CommitFactory(parent_commit_id=parent_commit.commitid) + dbsession.add(parent_commit) + dbsession.add(commit) + dbsession.flush() + + def raise_error(*args, **kwargs): + raise SoftTimeLimitExceeded() + + mock_report = mocker.Mock() + mock_report.shift_lines_by_diff = raise_error + with pytest.raises(SoftTimeLimitExceeded): + ReportService({})._possibly_shift_carryforward_report( + mock_report, parent_commit, commit + ) diff --git a/sample_app/tests/test_repository_service.py b/sample_app/tests/test_repository_service.py new file mode 100644 index 0000000..8666dca --- /dev/null +++ b/sample_app/tests/test_repository_service.py @@ -0,0 +1,2149 @@ 
+import inspect +from datetime import datetime +from unittest import mock +from unittest.mock import MagicMock, patch + +import pytest +from freezegun import freeze_time + +from database.models import Owner +from database.models.core import ( + Pull, + Repository, +) +from database.tests.factories import ( + CommitFactory, + GithubAppInstallationFactory, + OwnerFactory, + PullFactory, + RepositoryFactory, +) +from services.repository import ( + _pick_best_base_comparedto_pair, + fetch_and_update_pull_request_information, + fetch_and_update_pull_request_information_from_commit, + fetch_appropriate_parent_for_commit, + fetch_commit_yaml_and_possibly_store, + get_repo_provider_service, + get_repo_provider_service_by_id, + update_commit_from_provider_info, + upsert_author, +) +from shared.encryption.oauth import get_encryptor_from_configuration +from shared.rate_limits import gh_app_key_name, owner_key_name +from shared.reports.types import UploadType +from shared.torngit.base import TorngitBaseAdapter +from shared.torngit.exceptions import ( + TorngitClientError, + TorngitObjectNotFoundError, + TorngitServerUnreachableError, +) +from shared.typings.torngit import ( + AdditionalData, + GithubInstallationInfo, + OwnerInfo, + RepoInfo, + TorngitInstanceData, +) +from tasks.notify import get_repo_provider_service_for_specific_commit + + +@pytest.fixture +def repo(dbsession) -> Repository: + repo = RepositoryFactory.create( + owner__unencrypted_oauth_token="testyftq3ovzkb3zmt823u3t04lkrt9w", + owner__service="github", + name="example-python", + ) + dbsession.add(repo) + dbsession.flush() + return repo + + +@pytest.fixture +def pull(dbsession, repo) -> Pull: + pull = PullFactory.create(repository=repo, author=None) + dbsession.add(pull) + dbsession.flush() + return pull + + +def test_get_repo_provider_service_github(dbsession, repo): + res = get_repo_provider_service(repo) + expected_data = { + "owner": { + "ownerid": repo.owner.ownerid, + "service_id": 
repo.owner.service_id, + "username": repo.owner.username, + }, + "repo": { + "name": "example-python", + "using_integration": False, + "service_id": repo.service_id, + "repoid": repo.repoid, + }, + "installation": None, + "fallback_installations": None, + "additional_data": {}, + } + assert res.data == expected_data + assert repo.owner.service == "github" + assert res._on_token_refresh is not None + assert inspect.isawaitable(res._on_token_refresh(None)) + assert res.token == { + "username": repo.owner.username, + "key": "testyftq3ovzkb3zmt823u3t04lkrt9w", + "secret": None, + "entity_name": owner_key_name(repo.owner.ownerid), + } + + +def test_get_repo_provider_service_additional_data(dbsession, repo): + additional_data: AdditionalData = {"upload_type": UploadType.TEST_RESULTS} + res = get_repo_provider_service(repo, additional_data=additional_data) + expected_data = { + "owner": { + "ownerid": repo.owner.ownerid, + "service_id": repo.owner.service_id, + "username": repo.owner.username, + }, + "repo": { + "name": "example-python", + "using_integration": False, + "service_id": repo.service_id, + "repoid": repo.repoid, + }, + "installation": None, + "fallback_installations": None, + "additional_data": {"upload_type": UploadType.TEST_RESULTS}, + } + assert res.data == expected_data + assert repo.owner.service == "github" + assert res._on_token_refresh is not None + assert inspect.isawaitable(res._on_token_refresh(None)) + assert res.token == { + "username": repo.owner.username, + "key": "testyftq3ovzkb3zmt823u3t04lkrt9w", + "secret": None, + "entity_name": owner_key_name(repo.owner.ownerid), + } + + +def test_get_repo_provider_service_github_with_installations(dbsession, mocker, repo): + mocker.patch( + "shared.bots.github_apps.get_github_integration_token", + return_value="installation_token", + ) + installation_0 = GithubAppInstallationFactory( + installation_id=1200, + app_id=200, + owner=repo.owner, + ) + installation_1 = GithubAppInstallationFactory( + 
name="my_app", + installation_id=1300, + app_id=300, + pem_path="path", + owner=repo.owner, + ) + repo.owner.github_app_installations = [installation_0, installation_1] + dbsession.add_all([repo, installation_0, installation_1]) + dbsession.flush() + res = get_repo_provider_service(repo, installation_name_to_use="my_app") + expected_data = { + "owner": { + "ownerid": repo.owner.ownerid, + "service_id": repo.owner.service_id, + "username": repo.owner.username, + }, + "repo": { + "name": "example-python", + "using_integration": True, + "service_id": repo.service_id, + "repoid": repo.repoid, + }, + "installation": { + "id": installation_1.id, + "installation_id": 1300, + "app_id": 300, + "pem_path": "path", + }, + "fallback_installations": [ + { + "id": installation_0.id, + "app_id": 200, + "installation_id": 1200, + "pem_path": None, + } + ], + "additional_data": {}, + } + assert res.data == expected_data + assert repo.owner.service == "github" + assert res._on_token_refresh is None + assert res.token == { + "key": "installation_token", + "username": "installation_1300", + "entity_name": gh_app_key_name( + installation_id=installation_1.installation_id, + app_id=installation_1.app_id, + ), + } + + +def test_get_repo_provider_service_bitbucket(dbsession): + repo = RepositoryFactory.create( + owner__unencrypted_oauth_token="testyftq3ovzkb3zmt823u3t04lkrt9w", + owner__service="bitbucket", + name="example-python", + ) + dbsession.add(repo) + dbsession.flush() + res = get_repo_provider_service(repo) + expected_data = { + "owner": { + "ownerid": repo.owner.ownerid, + "service_id": repo.owner.service_id, + "username": repo.owner.username, + }, + "repo": { + "name": "example-python", + "using_integration": False, + "service_id": repo.service_id, + "repoid": repo.repoid, + }, + "installation": None, + "fallback_installations": None, + "additional_data": {}, + } + assert res.data == expected_data + assert repo.owner.service == "bitbucket" + assert res._on_token_refresh is None 
+ assert res.token == { + "username": repo.owner.username, + "key": "testyftq3ovzkb3zmt823u3t04lkrt9w", + "secret": None, + "entity_name": owner_key_name(repo.owner.ownerid), + } + + +def test_get_repo_provider_service_with_token_refresh_callback(dbsession): + repo = RepositoryFactory.create( + owner__unencrypted_oauth_token="testyftq3ovzkb3zmt823u3t04lkrt9w", + owner__service="gitlab", + name="example-python", + ) + dbsession.add(repo) + dbsession.flush() + res = get_repo_provider_service(repo) + expected_data = { + "owner": { + "ownerid": repo.owner.ownerid, + "service_id": repo.owner.service_id, + "username": repo.owner.username, + }, + "repo": { + "name": "example-python", + "using_integration": False, + "service_id": repo.service_id, + "repoid": repo.repoid, + }, + "installation": None, + "fallback_installations": None, + "additional_data": {}, + } + assert res.data == expected_data + assert res._on_token_refresh is not None + assert inspect.isawaitable(res._on_token_refresh(None)) + assert res.token == { + "username": repo.owner.username, + "key": "testyftq3ovzkb3zmt823u3t04lkrt9w", + "secret": None, + "entity_name": owner_key_name(repo.owner.ownerid), + } + + +def test_get_repo_provider_service_repo_bot(dbsession, mock_configuration): + repo = RepositoryFactory.create( + owner__unencrypted_oauth_token="testyftq3ovzkb3zmt823u3t04lkrt9w", + owner__service="gitlab", + name="example-python", + private=False, + ) + dbsession.add(repo) + dbsession.flush() + res = get_repo_provider_service(repo) + expected_data = { + "owner": { + "ownerid": repo.owner.ownerid, + "service_id": repo.owner.service_id, + "username": repo.owner.username, + }, + "repo": { + "name": "example-python", + "using_integration": False, + "service_id": repo.service_id, + "repoid": repo.repoid, + }, + "installation": None, + "fallback_installations": None, + "additional_data": {}, + } + assert res.data == expected_data + assert res.token == { + "username": repo.owner.username, + "key": 
"testyftq3ovzkb3zmt823u3t04lkrt9w", + "secret": None, + "entity_name": owner_key_name(repo.owner.ownerid), + } + assert res._on_token_refresh is not None + + +@pytest.mark.asyncio +async def test_token_refresh_callback(dbsession): + repo = RepositoryFactory.create( + owner__unencrypted_oauth_token="testyftq3ovzkb3zmt823u3t04lkrt9w", + owner__service="gitlab", + name="example-python", + ) + dbsession.add(repo) + dbsession.flush() + res = get_repo_provider_service(repo) + new_token = {"key": "new_access_token", "refresh_token": "new_refresh_token"} + await res._on_token_refresh(new_token) + owner = dbsession.query(Owner).filter_by(ownerid=repo.owner.ownerid).first() + encryptor = get_encryptor_from_configuration() + saved_token = encryptor.decrypt_token(owner.oauth_token) + assert saved_token["key"] == "new_access_token" + assert saved_token["refresh_token"] == "new_refresh_token" + + +def test_get_repo_provider_service_different_bot(dbsession): + bot_token = "bcaa0dc0c66b4a8c8c65ac919a1a91aa" + bot = OwnerFactory.create(unencrypted_oauth_token=bot_token) + repo = RepositoryFactory.create( + owner__unencrypted_oauth_token="testyftq3ovzkb3zmt823u3t04lkrt9w", + bot=bot, + name="example-python", + ) + dbsession.add(repo) + dbsession.add(bot) + dbsession.flush() + res = get_repo_provider_service(repo) + expected_data = { + "owner": { + "ownerid": repo.owner.ownerid, + "service_id": repo.owner.service_id, + "username": repo.owner.username, + }, + "repo": { + "name": "example-python", + "using_integration": False, + "service_id": repo.service_id, + "repoid": repo.repoid, + }, + "installation": None, + "fallback_installations": None, + "additional_data": {}, + } + assert res.data["repo"] == expected_data["repo"] + assert res.data == expected_data + assert res.token == { + "username": repo.bot.username, + "key": bot_token, + "secret": None, + "entity_name": owner_key_name(repo.bot.ownerid), + } + + +def test_get_repo_provider_service_no_bot(dbsession): + bot_token = 
"bcaa0dc0c66b4a8c8c65ac919a1a91aa" + owner_bot = OwnerFactory.create(unencrypted_oauth_token=bot_token) + repo = RepositoryFactory.create( + owner__unencrypted_oauth_token="testyftq3ovzkb3zmt823u3t04lkrt9w", + owner__bot=owner_bot, + bot=None, + name="example-python", + ) + dbsession.add(repo) + dbsession.add(owner_bot) + dbsession.flush() + res = get_repo_provider_service(repo) + expected_data = { + "owner": { + "ownerid": repo.owner.ownerid, + "service_id": repo.owner.service_id, + "username": repo.owner.username, + }, + "repo": { + "name": "example-python", + "using_integration": False, + "service_id": repo.service_id, + "repoid": repo.repoid, + }, + "installation": None, + "fallback_installations": None, + "additional_data": {}, + } + assert res.data == expected_data + assert res.token == { + "username": repo.owner.bot.username, + "key": bot_token, + "secret": None, + "entity_name": owner_key_name(repo.owner.bot.ownerid), + } + + +@pytest.mark.asyncio +async def test_fetch_appropriate_parent_for_commit_grandparent( + dbsession, mock_repo_provider +): + grandparent_commit_id = "8aa5aa054aaa21cf5a664acd504a1af6f5caafaa" + parent_commit_id = "a" * 32 + repository = RepositoryFactory.create() + parent_commit = CommitFactory.create( + commitid=grandparent_commit_id, repository=repository + ) + commit = CommitFactory.create(parent_commit_id=None, repository=repository) + f = { + "commitid": commit.commitid, + "parents": [ + { + "commitid": parent_commit_id, + "parents": [{"commitid": grandparent_commit_id, "parents": []}], + } + ], + } + dbsession.add(parent_commit) + dbsession.add(commit) + dbsession.flush() + git_commit = {"parents": [parent_commit_id]} + mock_repo_provider.get_ancestors_tree.return_value = f + result = await fetch_appropriate_parent_for_commit( + mock_repo_provider, commit, git_commit + ) + assert grandparent_commit_id == result + + +@pytest.mark.asyncio +async def test_fetch_appropriate_parent_for_commit_parent_has_no_message( + dbsession, 
mock_repo_provider +): + grandparent_commit_id = "8aa5aa054aaa21cf5a664acd504a1af6f5caafaa" + parent_commit_id = "a" * 32 + repository = RepositoryFactory.create() + parent_with_no_message = CommitFactory.create( + commitid=parent_commit_id, + repository=repository, + message=None, + parent_commit_id=None, + ) + parent_commit = CommitFactory.create( + commitid=grandparent_commit_id, repository=repository + ) + commit = CommitFactory.create(parent_commit_id=None, repository=repository) + f = { + "commitid": commit.commitid, + "parents": [ + { + "commitid": parent_commit_id, + "parents": [{"commitid": grandparent_commit_id, "parents": []}], + } + ], + } + dbsession.add(parent_commit) + dbsession.add(commit) + dbsession.add(parent_with_no_message) + dbsession.flush() + git_commit = {"parents": [parent_commit_id]} + mock_repo_provider.get_ancestors_tree.return_value = f + result = await fetch_appropriate_parent_for_commit( + mock_repo_provider, commit, git_commit + ) + assert grandparent_commit_id == result + + +@pytest.mark.asyncio +async def test_fetch_appropriate_parent_for_commit_parent_is_deleted( + dbsession, mock_repo_provider +): + grandparent_commit_id = "8aa5aa054aaa21cf5a664acd504a1af6f5caafaa" + parent_commit_id = "a" * 32 + repository = RepositoryFactory.create() + parent_with_no_message = CommitFactory.create( + commitid=parent_commit_id, + repository=repository, + message="message", + parent_commit_id=None, + deleted=True, + ) + parent_commit = CommitFactory.create( + commitid=grandparent_commit_id, repository=repository + ) + commit = CommitFactory.create(parent_commit_id=None, repository=repository) + f = { + "commitid": commit.commitid, + "parents": [ + { + "commitid": parent_commit_id, + "parents": [{"commitid": grandparent_commit_id, "parents": []}], + } + ], + } + dbsession.add(parent_commit) + dbsession.add(commit) + dbsession.add(parent_with_no_message) + dbsession.flush() + git_commit = {"parents": [parent_commit_id]} + 
mock_repo_provider.get_ancestors_tree.return_value = f + result = await fetch_appropriate_parent_for_commit( + mock_repo_provider, commit, git_commit + ) + assert grandparent_commit_id == result + + +@pytest.mark.asyncio +async def test_fetch_appropriate_parent_for_commit_parent_has_no_message_but_nothing_better( + dbsession, mock_repo_provider +): + grandparent_commit_id = "8aa5aa054aaa21cf5a664acd504a1af6f5caafaa" + parent_commit_id = "a" * 32 + repository = RepositoryFactory.create() + parent_with_no_message = CommitFactory.create( + commitid=parent_commit_id, + repository=repository, + message=None, + parent_commit_id=None, + ) + commit = CommitFactory.create(parent_commit_id=None, repository=repository) + f = { + "commitid": commit.commitid, + "parents": [ + { + "commitid": parent_commit_id, + "parents": [{"commitid": grandparent_commit_id, "parents": []}], + } + ], + } + dbsession.add(commit) + dbsession.add(parent_with_no_message) + dbsession.flush() + git_commit = {"parents": [parent_commit_id]} + mock_repo_provider.get_ancestors_tree.return_value = f + result = await fetch_appropriate_parent_for_commit( + mock_repo_provider, commit, git_commit + ) + assert parent_commit_id == result + + +@pytest.mark.asyncio +async def test_fetch_appropriate_parent_for_multiple_commit_parent_has_no_message_but_nothing_better( + dbsession, mock_repo_provider +): + grandparent_commit_id = "8aa5aa054aaa21cf5a664acd504a1af6f5caafaa" + parent_commit_id = "a" * 32 + sec_parent_commit_id = "b" * 32 + repository = RepositoryFactory.create() + parent_with_no_message = CommitFactory.create( + commitid=parent_commit_id, + repository=repository, + message=None, + parent_commit_id=None, + ) + sec_parent_with_no_message = CommitFactory.create( + commitid=sec_parent_commit_id, + repository=repository, + message=None, + parent_commit_id=None, + branch="bbb", + ) + commit = CommitFactory.create( + parent_commit_id=None, repository=repository, branch="bbb" + ) + f = { + "commitid": 
commit.commitid, + "parents": [ + { + "commitid": parent_commit_id, + "parents": [{"commitid": grandparent_commit_id, "parents": []}], + }, + { + "commitid": sec_parent_commit_id, + "parents": [{"commitid": grandparent_commit_id, "parents": []}], + }, + ], + } + dbsession.add(commit) + dbsession.add(parent_with_no_message) + dbsession.add(sec_parent_with_no_message) + dbsession.flush() + git_commit = {"parents": [parent_commit_id, sec_parent_commit_id]} + mock_repo_provider.get_ancestors_tree.return_value = f + result = await fetch_appropriate_parent_for_commit( + mock_repo_provider, commit, git_commit + ) + assert sec_parent_commit_id == result + + +@pytest.mark.asyncio +async def test_fetch_appropriate_parent_for_commit_grandparent_wrong_repo_with_same( + dbsession, mock_repo_provider +): + grandparent_commit_id = "8aa5aa054aaa21cf5a664acd504a1af6f5caafaa" + parent_commit_id = "39594a6cd3213e4a606de77486f16bbf22c4f42e" + repository = RepositoryFactory.create() + second_repository = RepositoryFactory.create() + parent_commit = CommitFactory.create( + commitid=grandparent_commit_id, repository=repository + ) + commit = CommitFactory.create(parent_commit_id=None, repository=repository) + deceiving_parent_commit = CommitFactory.create( + commitid=parent_commit_id, repository=second_repository + ) + f = { + "commitid": commit.commitid, + "parents": [ + { + "commitid": parent_commit_id, + "parents": [{"commitid": grandparent_commit_id, "parents": []}], + } + ], + } + dbsession.add(parent_commit) + dbsession.add(commit) + dbsession.add(deceiving_parent_commit) + dbsession.flush() + git_commit = {"parents": [parent_commit_id]} + mock_repo_provider.get_ancestors_tree.return_value = f + result = await fetch_appropriate_parent_for_commit( + mock_repo_provider, commit, git_commit + ) + assert grandparent_commit_id == result + + +@pytest.mark.asyncio +async def test_fetch_appropriate_parent_for_commit_grandparents_wrong_repo( + dbsession, mock_repo_provider +): + 
grandparent_commit_id = "8aa5aa054aaa21cf5a664acd504a1af6f5caafaa" + parent_commit_id = "39594a6cd3213e4a606de77486f16bbf22c4f42e" + second_parent_commit_id = "aaaaaa6cd3213e4a606de77486f16bbf22c4f422" + repository = RepositoryFactory.create() + second_repository = RepositoryFactory.create() + parent_commit = CommitFactory.create( + commitid=grandparent_commit_id, repository=repository, branch="aaa" + ) + second_parent_commit = CommitFactory.create( + commitid=second_parent_commit_id, repository=repository, branch="bbb" + ) + commit = CommitFactory.create( + parent_commit_id=None, repository=repository, branch="bbb" + ) + deceiving_parent_commit = CommitFactory.create( + commitid=parent_commit_id, repository=second_repository + ) + f = { + "commitid": commit.commitid, + "parents": [ + { + "commitid": parent_commit_id, + "parents": [ + {"commitid": grandparent_commit_id, "parents": []}, + {"commitid": second_parent_commit_id, "parents": []}, + ], + }, + ], + } + dbsession.add(second_parent_commit) + dbsession.add(parent_commit) + dbsession.add(commit) + dbsession.add(deceiving_parent_commit) + dbsession.flush() + git_commit = {"parents": [parent_commit_id]} + mock_repo_provider.get_ancestors_tree.return_value = f + result = await fetch_appropriate_parent_for_commit( + mock_repo_provider, commit, git_commit + ) + assert second_parent_commit_id == result + + +@pytest.mark.asyncio +async def test_fetch_appropriate_parent_for_commit_direct_parent( + dbsession, mock_repo_provider +): + parent_commit_id = "8aa5be054aeb21cf5a664ecd504a1af6f5ceafba" + repository = RepositoryFactory.create() + parent_commit = CommitFactory.create( + commitid=parent_commit_id, repository=repository + ) + commit = CommitFactory.create(parent_commit_id=None, repository=repository) + dbsession.add(parent_commit) + dbsession.add(commit) + dbsession.flush() + git_commit = {"parents": [parent_commit_id]} + expected_result = parent_commit_id + result = await fetch_appropriate_parent_for_commit( + 
mock_repo_provider, commit, git_commit + ) + assert expected_result == result + + +@pytest.mark.asyncio +async def test_fetch_appropriate_parent_for_commit_multiple_parents( + dbsession, mock_repo_provider +): + first_parent_commit_id = "8aa5be054aeb21cf5a664ecd504a1af6f5ceafba" + second_parent_commit_id = "a" * 32 + repository = RepositoryFactory.create() + second_parent_commit = CommitFactory.create( + commitid=second_parent_commit_id, repository=repository, branch="2ndBranch" + ) + first_parent_commit = CommitFactory.create( + commitid=first_parent_commit_id, repository=repository, branch="1stBranch" + ) + commit = CommitFactory.create( + parent_commit_id=None, repository=repository, branch="1stBranch" + ) + dbsession.add(second_parent_commit) + dbsession.add(first_parent_commit) + dbsession.add(commit) + dbsession.flush() + git_commit = {"parents": [first_parent_commit_id, second_parent_commit_id]} + expected_result = first_parent_commit_id + result = await fetch_appropriate_parent_for_commit( + mock_repo_provider, commit, git_commit + ) + assert expected_result == result + + +@freeze_time("2024-03-28T00:00:00") +def test_upsert_author_doesnt_exist(dbsession): + service = "github" + author_id = "123" + username = "username" + email = "email" + name = "name" + author = upsert_author(dbsession, service, author_id, username, email, name) + dbsession.flush() + assert author.free == 0 + assert author is not None + assert author.service == "github" + assert author.service_id == "123" + assert author.name == "name" + assert author.email == "email" + assert author.username == "username" + assert author.plan_activated_users is None + assert author.admins is None + assert author.permission is None + assert author.integration_id is None + assert author.yaml is None + assert author.oauth_token is None + assert author.bot_id is None + assert author.createstamp.isoformat() == "2024-03-28T00:00:00" + + +def test_upsert_author_already_exists(dbsession): + username = "username" 
+ email = "email@email.com" + service = "bitbucket" + service_id = "975" + owner = OwnerFactory.create( + service=service, + service_id=service_id, + email=email, + username=username, + yaml={"a": ["12", "3"]}, + ) + dbsession.add(owner) + dbsession.flush() + + author = upsert_author(dbsession, service, service_id, username, None, None) + dbsession.flush() + assert author.ownerid == owner.ownerid + assert author.free == 0 + assert author is not None + assert author.service == service + assert author.service_id == service_id + assert author.name == owner.name + assert author.email == email + assert author.username == username + assert author.plan_activated_users == [] + assert author.admins == [] + assert author.permission == [] + assert author.integration_id is None + assert author.yaml == {"a": ["12", "3"]} + assert author.oauth_token == owner.oauth_token + assert author.bot_id == owner.bot_id + + +def test_upsert_author_needs_update(dbsession): + username = "username" + email = "email@email.com" + service = "bitbucket" + service_id = "975" + owner = OwnerFactory.create( + service=service, + service_id=service_id, + email=email, + username=username, + yaml={"a": ["12", "3"]}, + ) + dbsession.add(owner) + dbsession.flush() + + new_name = "Newt Namenheim" + new_username = "new_username" + new_email = "new_email@email.com" + author = upsert_author( + dbsession, service, service_id, new_username, new_email, new_name + ) + dbsession.flush() + + assert author is not None + assert author.ownerid == owner.ownerid + assert author.free == 0 + assert author.service == service + assert author.service_id == service_id + assert author.name == new_name + assert author.email == new_email + assert author.username == new_username + assert author.plan_activated_users == [] + assert author.admins == [] + assert author.permission == [] + assert author.integration_id is None + assert author.yaml == {"a": ["12", "3"]} + assert author.oauth_token == owner.oauth_token + assert 
author.bot_id == owner.bot_id + + +@pytest.mark.asyncio +async def test_update_commit_from_provider_info_no_author_id( + dbsession, mocker, mock_storage +): + possible_parent_commit = CommitFactory.create( + message="possible_parent_commit", pullid=None + ) + commit = CommitFactory.create( + message="", + author=None, + pullid=1, + totals=None, + _report_json=None, + repository=possible_parent_commit.repository, + ) + dbsession.add(possible_parent_commit) + dbsession.add(commit) + dbsession.flush() + dbsession.refresh(commit) + f = { + "author": { + "id": None, + "username": None, + "email": "email@email.com", + "name": "Mario", + }, + "message": "This message is brought to you by", + "parents": [possible_parent_commit.commitid], + "timestamp": "2018-07-09T23:39:20Z", + } + get_pull_request_result = { + "head": {"branch": "newbranchyeah"}, + "base": {"branch": "main"}, + } + repository_service = mocker.MagicMock( + get_commit=mock.AsyncMock(return_value=f), + get_pull_request=mock.AsyncMock(return_value=get_pull_request_result), + ) + await update_commit_from_provider_info(repository_service, commit) + dbsession.flush() + dbsession.refresh(commit) + assert commit.author is None + assert commit.message == "This message is brought to you by" + assert commit.pullid == 1 + assert commit.totals is None + assert commit.report_json == {} + assert commit.branch == "newbranchyeah" + assert commit.merged is False + assert commit.timestamp == datetime(2018, 7, 9, 23, 39, 20) + assert commit.parent_commit_id == possible_parent_commit.commitid + assert commit.state == "complete" + + +@pytest.mark.asyncio +async def test_update_commit_from_provider_info_no_pullid_on_defaultbranch( + dbsession, mocker, mock_repo_provider, mock_storage +): + repository = RepositoryFactory.create(branch="superbranch") + dbsession.add(repository) + dbsession.flush() + possible_parent_commit = CommitFactory.create( + message="possible_parent_commit", pullid=None, repository=repository + ) + commit = 
CommitFactory.create( + message="", + author=None, + pullid=None, + totals=None, + branch="papapa", + _report_json=None, + repository=repository, + ) + dbsession.add(possible_parent_commit) + dbsession.add(commit) + dbsession.flush() + dbsession.refresh(commit) + mock_repo_provider.find_pull_request.return_value = None + mock_repo_provider.get_best_effort_branches.return_value = [ + "superbranch", + "else", + "pokemon", + ] + mock_repo_provider.get_commit.return_value = { + "author": { + "id": None, + "username": None, + "email": "email@email.com", + "name": "Mario", + }, + "message": "This message is brought to you by", + "parents": [possible_parent_commit.commitid], + "timestamp": "2018-07-09T23:39:20Z", + } + await update_commit_from_provider_info(mock_repo_provider, commit) + dbsession.flush() + dbsession.refresh(commit) + assert commit.author is None + assert commit.message == "This message is brought to you by" + assert commit.pullid is None + assert commit.totals is None + assert commit.report_json == {} + assert commit.branch == "superbranch" + assert commit.merged is True + assert commit.timestamp == datetime(2018, 7, 9, 23, 39, 20) + assert commit.parent_commit_id == possible_parent_commit.commitid + assert commit.state == "complete" + + +@pytest.mark.asyncio +async def test_update_commit_from_provider_info_no_pullid_not_on_defaultbranch( + dbsession, mocker, mock_repo_provider, mock_storage +): + repository = RepositoryFactory.create(branch="superbranch") + dbsession.add(repository) + dbsession.flush() + possible_parent_commit = CommitFactory.create( + message="possible_parent_commit", pullid=None, repository=repository + ) + commit = CommitFactory.create( + message="", + author=None, + pullid=None, + branch="papapa", + totals=None, + _report_json=None, + repository=repository, + ) + dbsession.add(possible_parent_commit) + dbsession.add(commit) + dbsession.flush() + dbsession.refresh(commit) + mock_repo_provider.find_pull_request.return_value = None + 
mock_repo_provider.get_best_effort_branches.return_value = ["else", "pokemon"] + mock_repo_provider.get_commit.return_value = { + "author": { + "id": None, + "username": None, + "email": "email@email.com", + "name": "Mario", + }, + "message": "This message is brought to you by", + "parents": [possible_parent_commit.commitid], + "timestamp": "2018-07-09T23:39:20Z", + } + await update_commit_from_provider_info(mock_repo_provider, commit) + dbsession.flush() + dbsession.refresh(commit) + assert commit.author is None + assert commit.message == "This message is brought to you by" + assert commit.pullid is None + assert commit.totals is None + assert commit.report_json == {} + assert commit.branch == "papapa" + assert commit.merged is False + assert commit.timestamp == datetime(2018, 7, 9, 23, 39, 20) + assert commit.parent_commit_id == possible_parent_commit.commitid + assert commit.state == "complete" + + +@pytest.mark.asyncio +async def test_update_commit_from_provider_info_with_author_id( + dbsession, mocker, mock_storage +): + possible_parent_commit = CommitFactory.create( + message="possible_parent_commit", pullid=None + ) + commit = CommitFactory.create( + message="", + author=None, + pullid=1, + totals=None, + _report_json=None, + repository=possible_parent_commit.repository, + ) + dbsession.add(possible_parent_commit) + dbsession.add(commit) + dbsession.flush() + dbsession.refresh(commit) + f = { + "author": { + "id": "author_id", + "username": "author_username", + "email": "email@email.com", + "name": "Mario", + }, + "message": "This message is brought to you by", + "parents": [possible_parent_commit.commitid], + "timestamp": "2018-07-09T23:39:20Z", + } + get_pull_request_result = { + "head": {"branch": "newbranchyeah"}, + "base": {"branch": "main"}, + } + repository_service = mocker.MagicMock( + get_commit=mock.AsyncMock(return_value=f), + get_pull_request=mock.AsyncMock(return_value=get_pull_request_result), + ) + await 
update_commit_from_provider_info(repository_service, commit) + dbsession.flush() + dbsession.refresh(commit) + assert commit.message == "This message is brought to you by" + assert commit.pullid == 1 + assert commit.totals is None + assert commit.report_json == {} + assert commit.branch == "newbranchyeah" + assert commit.parent_commit_id == possible_parent_commit.commitid + assert commit.state == "complete" + assert commit.author is not None + assert commit.timestamp == datetime(2018, 7, 9, 23, 39, 20) + assert commit.author.username == "author_username" + + +@pytest.mark.asyncio +async def test_update_commit_from_provider_info_pull_from_fork( + dbsession, mocker, mock_storage +): + possible_parent_commit = CommitFactory.create( + message="possible_parent_commit", pullid=None + ) + commit = CommitFactory.create( + message="", + author=None, + pullid=1, + totals=None, + _report_json=None, + repository=possible_parent_commit.repository, + ) + dbsession.add(possible_parent_commit) + dbsession.add(commit) + dbsession.flush() + dbsession.refresh(commit) + f = { + "author": { + "id": "author_id", + "username": "author_username", + "email": "email@email.com", + "name": "Mario", + }, + "message": "This message is brought to you by", + "parents": [possible_parent_commit.commitid], + "timestamp": "2018-07-09T23:39:20Z", + } + get_pull_request_result = { + "head": {"branch": "main", "slug": f"some-guy/{commit.repository.name}"}, + "base": { + "branch": "main", + "slug": f"{commit.repository.owner.username}/{commit.repository.name}", + }, + } + repository_service = mocker.MagicMock( + get_commit=mock.AsyncMock(return_value=f), + get_pull_request=mock.AsyncMock(return_value=get_pull_request_result), + ) + await update_commit_from_provider_info(repository_service, commit) + dbsession.flush() + dbsession.refresh(commit) + assert commit.message == "This message is brought to you by" + assert commit.pullid == 1 + assert commit.totals is None + assert commit.report_json == {} + 
assert commit.branch == f"some-guy/{commit.repository.name}:main" + assert commit.parent_commit_id == possible_parent_commit.commitid + assert commit.state == "complete" + assert commit.author is not None + assert commit.timestamp == datetime(2018, 7, 9, 23, 39, 20) + assert commit.author.username == "author_username" + + +@pytest.mark.asyncio +async def test_update_commit_from_provider_info_bitbucket_merge( + dbsession, mocker, mock_storage +): + possible_parent_commit = CommitFactory.create( + message="possible_parent_commit", + pullid=None, + repository__owner__service="bitbucket", + ) + commit = CommitFactory.create( + message="", + author=None, + pullid=1, + totals=None, + _report_json=None, + repository=possible_parent_commit.repository, + ) + dbsession.add(possible_parent_commit) + dbsession.add(commit) + dbsession.flush() + dbsession.refresh(commit) + f = { + "author": { + "id": "author_id", + "username": "author_username", + "email": "email@email.com", + "name": "Mario", + }, + "message": "Merged in aaaa/coverage.py (pull request #99) Fix #123: crash", + "parents": [possible_parent_commit.commitid], + "timestamp": "2018-07-09T23:39:20Z", + } + get_pull_request_result = { + "head": {"branch": "newbranchyeah"}, + "base": {"branch": "thebasebranch"}, + } + repository_service = mocker.MagicMock( + get_commit=mock.AsyncMock(return_value=f), + get_pull_request=mock.AsyncMock(return_value=get_pull_request_result), + ) + await update_commit_from_provider_info(repository_service, commit) + dbsession.flush() + dbsession.refresh(commit) + assert ( + commit.message + == "Merged in aaaa/coverage.py (pull request #99) Fix #123: crash" + ) + assert commit.pullid == 1 + assert commit.totals is None + assert commit.report_json == {} + assert commit.branch == "thebasebranch" + assert commit.parent_commit_id == possible_parent_commit.commitid + assert commit.state == "complete" + assert commit.author is not None + assert commit.timestamp == datetime(2018, 7, 9, 23, 39, 20) + 
@pytest.mark.asyncio
async def test_get_repo_gh_no_integration(dbsession, mocker):
    """A private GitHub repo without an app integration resolves to a
    provider service whose token is the owner's own OAuth token."""
    owner = OwnerFactory.create(
        service="github",
        username="1nf1n1t3l00p",
        service_id="45343385",
        unencrypted_oauth_token="bcaa0dc0c66b4a8c8c65ac919a1a91aa",
    )
    dbsession.add(owner)

    repo = RepositoryFactory.create(
        private=True,
        name="pytest",
        using_integration=False,
        service_id="123456",
        owner=owner,
    )
    dbsession.add(repo)
    dbsession.flush()

    res = get_repo_provider_service_by_id(dbsession, repo.repoid)

    # No installation info expected since using_integration is False.
    expected_data = {
        "owner": {
            "ownerid": owner.ownerid,
            "service_id": owner.service_id,
            "username": owner.username,
        },
        "repo": {
            "name": "pytest",
            "using_integration": False,
            "service_id": "123456",
            "repoid": repo.repoid,
        },
        "installation": None,
        "fallback_installations": None,
        "additional_data": {},
    }
    assert res.data["repo"] == expected_data["repo"]
    assert res.data == expected_data
    assert res.token == {
        "username": "1nf1n1t3l00p",
        "key": "bcaa0dc0c66b4a8c8c65ac919a1a91aa",
        "secret": None,
        "entity_name": owner_key_name(repo.owner.ownerid),
    }
"some_name") + assert response == "the TorngitAdapter" + mock_get_repo_provider_service.assert_called_with( + commit.repository, "some_name" + ) + + @patch("tasks.notify._possibly_pin_commit_to_github_app") + def test_get_repo_provider_service_for_specific_commit_no_specific_app_for_commit( + self, mock_pin, dbsession, mock_get_repo_provider_service, mock_redis + ): + commit = CommitFactory(repository__owner__service="github") + assert commit.id not in [10000, 15000] + redis_keys = { + "app_to_use_for_commit_15000": b"1200", + "app_to_use_for_commit_10000": b"1000", + } + mock_redis.get.side_effect = lambda key: redis_keys.get(key) + + mock_get_repo_provider_service.return_value = "the TorngitAdapter" + + response = get_repo_provider_service_for_specific_commit(commit, "some_name") + assert response == "the TorngitAdapter" + mock_get_repo_provider_service.assert_called_with( + commit.repository, "some_name" + ) + + @patch("tasks.notify.get_github_app_token", return_value=("the app token", None)) + @patch( + "tasks.notify._get_repo_provider_service_instance", + return_value="the TorngitAdapter", + ) + def test_get_repo_provider_service_for_specific_commit( + self, + mock_get_instance, + mock_get_app_token, + dbsession, + mock_get_repo_provider_service, + mock_redis, + ): + commit = CommitFactory(repository__owner__service="github") + app = GithubAppInstallationFactory( + owner=commit.repository.owner, app_id=12, installation_id=1200 + ) + dbsession.add_all([commit, app]) + dbsession.flush() + assert commit.repository.owner.github_app_installations == [app] + redis_keys = { + f"app_to_use_for_commit_{commit.id}": str(app.id).encode(), + } + mock_redis.get.side_effect = lambda key: redis_keys.get(key) + response = get_repo_provider_service_for_specific_commit(commit, "some_name") + assert response == "the TorngitAdapter" + mock_get_instance.assert_called_once() + + data = TorngitInstanceData( + repo=RepoInfo( + name=commit.repository.name, + using_integration=True, + 
service_id=commit.repository.service_id, + repoid=commit.repository.repoid, + ), + owner=OwnerInfo( + service_id=commit.repository.owner.service_id, + ownerid=commit.repository.ownerid, + username=commit.repository.owner.username, + ), + installation=GithubInstallationInfo( + id=app.id, app_id=12, installation_id=1200, pem_path=None + ), + fallback_installations=None, + ) + mock_get_instance.assert_called_with( + "github", + dict( + **data, + token="the app token", + token_type_mapping=None, + on_token_refresh=None, + ), + ) + + @pytest.mark.asyncio + async def test_fetch_and_update_pull_request_information_from_commit_new_pull_commits_in_place( + self, dbsession, mocker + ): + now = datetime.utcnow() + commit = CommitFactory.create(message="", totals=None, _report_json=None) + base_commit = CommitFactory.create(repository=commit.repository) + dbsession.add(commit) + dbsession.add(base_commit) + dbsession.flush() + current_yaml = {} + get_pull_request_result = { + "base": {"branch": "master", "commitid": base_commit.commitid}, + "head": {"branch": "reason/some-testing", "commitid": commit.commitid}, + "number": "1", + "id": "1", + "state": "open", + "title": "Creating new code for reasons no one knows", + "author": {"id": "123", "username": "pr_author_username"}, + } + repository_service = mocker.MagicMock( + service="github", + get_pull_request=mock.AsyncMock(return_value=get_pull_request_result), + ) + + # Setting the pullid for the commit without flushing. 
This ensures that we don't try to build the pull object, + # so that it can go through the path that creates/updates the pull object from `get_pull_request_result` + commit.pullid = 1 + enriched_pull = await fetch_and_update_pull_request_information_from_commit( + repository_service, commit, current_yaml + ) + res = enriched_pull.database_pull + dbsession.flush() + dbsession.refresh(res) + assert res is not None + assert res.repoid == commit.repoid + assert res.pullid == 1 + assert res.issueid == 1 + assert res.updatestamp > now + assert res.state == "open" + assert res.title == "Creating new code for reasons no one knows" + assert res.base == base_commit.commitid + assert res.compared_to == base_commit.commitid + assert res.head == commit.commitid + assert res.commentid is None + assert res.diff is None + assert res._flare is None + assert res._flare_storage_path is None + assert ( + res.author + == dbsession.query(Owner) + .filter( + Owner.service == "github", + Owner.service_id == get_pull_request_result["author"]["id"], + Owner.username == get_pull_request_result["author"]["username"], + ) + .first() + ) + + @pytest.mark.asyncio + async def test_fetch_and_update_pull_request_information_from_commit_existing_pull_commits_in_place( + self, dbsession, mocker, repo, pull + ): + now = datetime.utcnow() + commit = CommitFactory.create( + message="", + pullid=pull.pullid, + totals=None, + _report_json=None, + repository=repo, + ) + base_commit = CommitFactory.create(repository=repo, branch="master") + dbsession.add(pull) + dbsession.add(commit) + dbsession.add(base_commit) + dbsession.flush() + current_yaml = {} + f = { + "author": { + "id": "author_id", + "username": "author_username", + "email": "email@email.com", + "name": "Mario", + }, + "message": "Merged in aaaa/coverage.py (pull request #99) Fix #123: crash", + "timestamp": datetime(2019, 10, 10), + "parents": [], + } + get_pull_request_result = { + "base": {"branch": "master", "commitid": 
base_commit.commitid}, + "head": {"branch": "reason/some-testing", "commitid": commit.commitid}, + "number": str(pull.pullid), + "id": str(pull.pullid), + "state": "open", + "title": "Creating new code for reasons no one knows", + "author": {"id": "123", "username": "pr_author_username"}, + } + repository_service = mocker.MagicMock( + service="github", + get_commit=mock.AsyncMock(return_value=f), + get_pull_request=mock.AsyncMock(return_value=get_pull_request_result), + ) + enriched_pull = await fetch_and_update_pull_request_information_from_commit( + repository_service, commit, current_yaml + ) + res = enriched_pull.database_pull + dbsession.flush() + dbsession.refresh(res) + assert res is not None + assert res == pull + assert res.repoid == commit.repoid + assert res.pullid == pull.pullid + assert res.issueid == pull.pullid + assert res.updatestamp > now + assert res.state == "open" + assert res.title == "Creating new code for reasons no one knows" + assert res.base == base_commit.commitid + assert res.compared_to == base_commit.commitid + assert res.head == commit.commitid + assert res.commentid is None + assert res.diff is None + assert res._flare is None + assert res._flare_storage_path is None + assert ( + res.author + == dbsession.query(Owner) + .filter( + Owner.service == "github", + Owner.service_id == get_pull_request_result["author"]["id"], + Owner.username == get_pull_request_result["author"]["username"], + ) + .first() + ) + + @pytest.mark.asyncio + async def test_fetch_and_update_pull_request_multiple_pulls_same_repo( + self, dbsession, mocker, repo, pull + ): + now = datetime.utcnow() + pull.title = "purposelly bad title" + second_pull = PullFactory.create(repository=repo) + commit = CommitFactory.create( + message="", + pullid=pull.pullid, + totals=None, + _report_json=None, + repository=repo, + ) + base_commit = CommitFactory.create(repository=repo, branch="master") + dbsession.add(pull) + dbsession.add(second_pull) + dbsession.add(commit) + 
dbsession.add(base_commit) + dbsession.flush() + current_yaml = {} + f = { + "author": { + "id": "author_id", + "username": "author_username", + "email": "email@email.com", + "name": "Mario", + }, + "message": "Merged in aaaa/coverage.py (pull request #99) Fix #123: crash", + "timestamp": datetime(2019, 10, 10), + "parents": [], + } + get_pull_request_result = { + "base": {"branch": "master", "commitid": base_commit.commitid}, + "head": {"branch": "reason/some-testing", "commitid": commit.commitid}, + "number": str(pull.pullid), + "id": str(pull.pullid), + "state": "open", + "title": "Creating new code for reasons no one knows", + "author": {"id": "123", "username": "pr_author_username"}, + } + + repository_service = mocker.MagicMock( + service="github", + get_commit=mock.AsyncMock(return_value=f), + get_pull_request=mock.AsyncMock(return_value=get_pull_request_result), + ) + enriched_pull = await fetch_and_update_pull_request_information_from_commit( + repository_service, commit, current_yaml + ) + res = enriched_pull.database_pull + dbsession.flush() + dbsession.refresh(res) + assert res is not None + assert res == pull + assert res != second_pull + assert res.repoid == commit.repoid + assert res.pullid == pull.pullid + assert res.issueid == pull.pullid + assert res.updatestamp > now + assert res.state == "open" + assert res.title == "Creating new code for reasons no one knows" + assert res.base == base_commit.commitid + assert res.compared_to == base_commit.commitid + assert res.head == commit.commitid + assert res.commentid is None + assert res.diff is None + assert res._flare is None + assert res._flare_storage_path is None + assert ( + res.author + == dbsession.query(Owner) + .filter( + Owner.service == "github", + Owner.service_id == get_pull_request_result["author"]["id"], + Owner.username == get_pull_request_result["author"]["username"], + ) + .first() + ) + + @pytest.mark.asyncio + async def 
test_fetch_and_update_pull_request_information_from_commit_different_compared_to( + self, + dbsession, + mocker, + repo, + pull, + ): + now = datetime.utcnow() + commit = CommitFactory.create( + message="", + pullid=pull.pullid, + totals=None, + _report_json=None, + repository=repo, + ) + second_comparedto_commit = CommitFactory.create( + repository=repo, + branch="master", + merged=True, + timestamp=datetime(2019, 5, 6), + ) + compared_to_commit = CommitFactory.create( + repository=repo, + branch="master", + merged=True, + timestamp=datetime(2019, 7, 15), + ) + dbsession.add(commit) + dbsession.add(second_comparedto_commit) + dbsession.add(compared_to_commit) + dbsession.flush() + current_yaml = {} + f = { + "author": { + "id": "author_id", + "username": "author_username", + "email": "email@email.com", + "name": "Mario", + }, + "message": "Merged in aaaa/coverage.py (pull request #99) Fix #123: crash", + "parents": [], + "timestamp": datetime(2019, 10, 10), + } + get_pull_request_result = { + "base": {"branch": "master", "commitid": "somecommitid"}, + "head": {"branch": "reason/some-testing", "commitid": commit.commitid}, + "number": str(pull.pullid), + "id": str(pull.pullid), + "state": "open", + "title": "Creating new code for reasons no one knows", + "author": {"id": "123", "username": "pr_author_username"}, + } + repository_service = mocker.MagicMock( + service="github", + get_commit=mock.AsyncMock(return_value=f), + get_pull_request=mock.AsyncMock(return_value=get_pull_request_result), + ) + enriched_pull = await fetch_and_update_pull_request_information_from_commit( + repository_service, commit, current_yaml + ) + res = enriched_pull.database_pull + dbsession.flush() + dbsession.refresh(res) + assert res is not None + assert res == pull + assert res.repoid == commit.repoid + assert res.pullid == pull.pullid + assert res.issueid == pull.pullid + assert res.updatestamp > now + assert res.state == "open" + assert res.title == "Creating new code for reasons no 
one knows" + assert res.base == "somecommitid" + assert res.compared_to == compared_to_commit.commitid + assert res.head == commit.commitid + assert res.commentid is None + assert res.diff is None + assert res._flare is None + assert res._flare_storage_path is None + assert ( + res.author + == dbsession.query(Owner) + .filter( + Owner.service == "github", + Owner.service_id == get_pull_request_result["author"]["id"], + Owner.username == get_pull_request_result["author"]["username"], + ) + .first() + ) + + @pytest.mark.asyncio + async def test_fetch_and_update_pull_request_information_no_compared_to( + self, dbsession, mocker, repo, pull + ): + now = datetime.utcnow() + compared_to_commit = CommitFactory.create( + repository=repo, branch="master", merged=True + ) + commit = CommitFactory.create( + message="", + pullid=pull.pullid, + totals=None, + _report_json=None, + repository=repo, + ) + dbsession.add(pull) + dbsession.add(commit) + dbsession.add(compared_to_commit) + dbsession.flush() + current_yaml = {} + get_pull_request_result = { + "base": {"branch": "master", "commitid": "somecommitid"}, + "head": {"branch": "reason/some-testing", "commitid": commit.commitid}, + "number": str(pull.pullid), + "id": str(pull.pullid), + "state": "open", + "title": "Creating new code for reasons no one knows", + "author": {"id": "123", "username": "pr_author_username"}, + } + repository_service = mocker.MagicMock( + service="github", + get_commit=mock.AsyncMock( + side_effect=TorngitObjectNotFoundError("response", "message") + ), + get_pull_request=mock.AsyncMock(return_value=get_pull_request_result), + ) + enriched_pull = await fetch_and_update_pull_request_information( + repository_service, dbsession, pull.repoid, pull.pullid, current_yaml + ) + res = enriched_pull.database_pull + dbsession.flush() + dbsession.refresh(res) + assert res is not None + assert res == pull + assert res.repoid == commit.repoid + assert res.pullid == pull.pullid + assert res.issueid == pull.pullid + 
assert res.updatestamp > now + assert res.state == "open" + assert res.title == "Creating new code for reasons no one knows" + assert res.base == "somecommitid" + assert res.compared_to is None + assert res.head is None + assert res.commentid is None + assert res.diff is None + assert res._flare is None + assert res._flare_storage_path is None + assert ( + res.author + == dbsession.query(Owner) + .filter( + Owner.service == "github", + Owner.service_id == get_pull_request_result["author"]["id"], + Owner.username == get_pull_request_result["author"]["username"], + ) + .first() + ) + + @pytest.mark.asyncio + async def test_fetch_and_update_pull_request_information_torngitexception( + self, dbsession, mocker, repo + ): + commit = CommitFactory.create( + message="", + pullid=None, + totals=None, + _report_json=None, + repository=repo, + ) + compared_to_commit = CommitFactory.create( + repository=repo, branch="master", merged=True + ) + dbsession.add(commit) + dbsession.add(compared_to_commit) + dbsession.flush() + current_yaml = {} + repository_service = mocker.MagicMock( + find_pull_request=mock.AsyncMock( + side_effect=TorngitClientError(422, "response", "message") + ) + ) + res = await fetch_and_update_pull_request_information_from_commit( + repository_service, commit, current_yaml + ) + assert res is None + + @pytest.mark.asyncio + async def test_fetch_and_update_pull_request_information_torngitexception_getting_pull( + self, dbsession, mocker, repo + ): + commit = CommitFactory.create( + message="", + totals=None, + _report_json=None, + repository=repo, + ) + compared_to_commit = CommitFactory.create( + repository=repo, branch="master", merged=True + ) + dbsession.add(commit) + dbsession.add(compared_to_commit) + dbsession.flush() + + commit.pullid = "123" + current_yaml = {} + repository_service = mocker.MagicMock( + get_pull_request=mock.AsyncMock( + side_effect=TorngitObjectNotFoundError("response", "message") + ) + ) + res = await 
fetch_and_update_pull_request_information_from_commit( + repository_service, commit, current_yaml + ) + assert res.database_pull is None + assert res.provider_pull is None + + @pytest.mark.asyncio + async def test_fetch_and_update_pull_request_information_torngitserverexception_getting_pull( + self, dbsession, mocker, repo, pull + ): + current_yaml = {} + repository_service = mocker.MagicMock( + get_pull_request=mock.AsyncMock(side_effect=TorngitServerUnreachableError()) + ) + res = await fetch_and_update_pull_request_information( + repository_service, dbsession, pull.repoid, pull.pullid, current_yaml + ) + assert res.database_pull == pull + assert res.provider_pull is None + + @pytest.mark.asyncio + async def test_fetch_and_update_pull_request_information_notfound_pull_already_exists( + self, dbsession, mocker, repo, pull + ): + commit = CommitFactory.create( + message="", + pullid=pull.pullid, + totals=None, + _report_json=None, + repository=repo, + ) + compared_to_commit = CommitFactory.create( + repository=repo, branch="master", merged=True + ) + dbsession.add(commit) + dbsession.add(compared_to_commit) + dbsession.flush() + current_yaml = {} + repository_service = mocker.MagicMock( + get_pull_request=mock.AsyncMock( + side_effect=TorngitObjectNotFoundError("response", "message") + ) + ) + res = await fetch_and_update_pull_request_information_from_commit( + repository_service, commit, current_yaml + ) + assert res.database_pull == pull + + @pytest.mark.asyncio + async def test_pick_best_base_comparedto_pair_no_user_provided_base_no_candidate( + self, mocker, dbsession, repo, pull + ): + async def get_commit_mocked(commit_sha): + return {"timestamp": datetime(2021, 3, 10).isoformat()} + + dbsession.flush() + repository_service = mocker.Mock( + TorngitBaseAdapter, get_commit=get_commit_mocked + ) + current_yaml = mocker.MagicMock() + pull_information = { + "base": {"commitid": "abcqwert" * 5, "branch": "basebranch"} + } + res = await 
_pick_best_base_comparedto_pair( + repository_service, pull, current_yaml, pull_information + ) + assert res == ("abcqwertabcqwertabcqwertabcqwertabcqwert", None) + + @pytest.mark.asyncio + async def test_pick_best_base_comparedto_pair_yes_user_provided_base_no_candidate( + self, mocker, dbsession, repo, pull + ): + async def get_commit_mocked(commit_sha): + return {"timestamp": datetime(2021, 3, 10).isoformat()} + + pull.user_provided_base_sha = "lkjhgfdslkjhgfdslkjhgfdslkjhgfdslkjhgfds" + dbsession.add(pull) + dbsession.flush() + repository_service = mocker.Mock( + TorngitBaseAdapter, get_commit=get_commit_mocked + ) + current_yaml = mocker.MagicMock() + pull_information = { + "base": {"commitid": "abcqwert" * 5, "branch": "basebranch"} + } + res = await _pick_best_base_comparedto_pair( + repository_service, pull, current_yaml, pull_information + ) + assert res == ("lkjhgfdslkjhgfdslkjhgfdslkjhgfdslkjhgfds", None) + + @pytest.mark.asyncio + async def test_pick_best_base_comparedto_pair_yes_user_provided_base_exact_match( + self, mocker, dbsession, repo, pull + ): + async def get_commit_mocked(commit_sha): + return {"timestamp": datetime(2021, 3, 10).isoformat()} + + pull.user_provided_base_sha = "1007cbfb857592b9e7cbe3ecb25748870e2c07fc" + dbsession.add(pull) + dbsession.flush() + commit = CommitFactory.create( + repository=repo, commitid="1007cbfb857592b9e7cbe3ecb25748870e2c07fc" + ) + dbsession.add(commit) + dbsession.flush() + repository_service = mocker.Mock( + TorngitBaseAdapter, get_commit=get_commit_mocked + ) + current_yaml = mocker.MagicMock() + pull_information = { + "base": {"commitid": "abcqwert" * 5, "branch": "basebranch"} + } + res = await _pick_best_base_comparedto_pair( + repository_service, pull, current_yaml, pull_information + ) + assert res == ( + "1007cbfb857592b9e7cbe3ecb25748870e2c07fc", + "1007cbfb857592b9e7cbe3ecb25748870e2c07fc", + ) + + @pytest.mark.asyncio + async def 
test_pick_best_base_comparedto_pair_yes_user_given_no_base_exact_match( + self, mocker, dbsession, repo, pull + ): + async def get_commit_mocked(commit_sha): + return {"timestamp": datetime(2021, 3, 10).isoformat()} + + pull.user_provided_base_sha = "1007cbfb857592b9e7cbe3ecb25748870e2c07fc" + dbsession.add(pull) + dbsession.flush() + commit = CommitFactory.create( + repository=repo, commitid="1007cbfb857592b9e7cbe3ecb25748870e2c07fc" + ) + dbsession.add(commit) + dbsession.flush() + repository_service = mocker.Mock( + TorngitBaseAdapter, get_commit=get_commit_mocked + ) + current_yaml = mocker.MagicMock() + pull_information = { + "base": {"commitid": "abcqwert" * 5, "branch": "basebranch"} + } + res = await _pick_best_base_comparedto_pair( + repository_service, pull, current_yaml, pull_information + ) + assert res == ( + "1007cbfb857592b9e7cbe3ecb25748870e2c07fc", + "1007cbfb857592b9e7cbe3ecb25748870e2c07fc", + ) + + @pytest.mark.asyncio + async def test_pick_best_base_comparedto_pair_yes_user_given_no_base_no_match( + self, mocker, dbsession, repo, pull + ): + async def get_commit_mocked(commit_sha): + return {"timestamp": datetime(2021, 3, 10).isoformat()} + + pull.user_provided_base_sha = "1007cbfb857592b9e7cbe3ecb25748870e2c07fc" + dbsession.add(pull) + dbsession.flush() + commit = CommitFactory.create( + repository=repo, + commitid="e9868516aafd365aeab2957d3745353b532d3a37", + branch="basebranch", + timestamp=datetime(2021, 3, 9), + pullid=None, + ) + other_commit = CommitFactory.create( + repository=repo, + commitid="2c07d7804dd9ff61ca5a1d6ee01de108af8cc7e0", + branch="basebranch", + timestamp=datetime(2021, 3, 11), + pullid=None, + ) + dbsession.add(commit) + dbsession.add(other_commit) + dbsession.flush() + repository_service = mocker.Mock( + TorngitBaseAdapter, get_commit=get_commit_mocked + ) + current_yaml = mocker.MagicMock() + pull_information = { + "base": {"commitid": "abcqwert" * 5, "branch": "basebranch"} + } + res = await 
_pick_best_base_comparedto_pair( + repository_service, pull, current_yaml, pull_information + ) + assert res == ( + "1007cbfb857592b9e7cbe3ecb25748870e2c07fc", + "e9868516aafd365aeab2957d3745353b532d3a37", + ) + + @pytest.mark.asyncio + async def test_pick_best_base_comparedto_pair_yes_user_given_not_found( + self, + mocker, + dbsession, + repo, + pull, + ): + async def get_commit_mocked(commit_sha): + if commit_sha == "1007cbfb857592b9e7cbe3ecb25748870e2c07fc": + raise TorngitObjectNotFoundError("response", "message") + return {"timestamp": datetime(2021, 3, 10).isoformat()} + + pull.user_provided_base_sha = "1007cbfb857592b9e7cbe3ecb25748870e2c07fc" + dbsession.add(pull) + dbsession.flush() + commit = CommitFactory.create( + repository=repo, + commitid="e9868516aafd365aeab2957d3745353b532d3a37", + branch="basebranch", + timestamp=datetime(2021, 3, 9), + pullid=None, + ) + other_commit = CommitFactory.create( + repository=repo, + commitid="2c07d7804dd9ff61ca5a1d6ee01de108af8cc7e0", + branch="basebranch", + timestamp=datetime(2021, 3, 11), + pullid=None, + ) + dbsession.add(commit) + dbsession.add(other_commit) + dbsession.flush() + repository_service = mocker.Mock( + TorngitBaseAdapter, get_commit=get_commit_mocked + ) + current_yaml = mocker.MagicMock() + pull_information = { + "base": {"commitid": "abcqwert" * 5, "branch": "basebranch"} + } + res = await _pick_best_base_comparedto_pair( + repository_service, pull, current_yaml, pull_information + ) + assert res == ( + "abcqwertabcqwertabcqwertabcqwertabcqwert", + "e9868516aafd365aeab2957d3745353b532d3a37", + ) + + @pytest.mark.asyncio + async def test_pick_best_base_comparedto_pair_no_user_given( + self, mocker, dbsession, repo, pull + ): + async def get_commit_mocked(commit_sha): + return {"timestamp": datetime(2021, 3, 10).isoformat()} + + commit = CommitFactory.create( + repository=repo, + commitid="e9868516aafd365aeab2957d3745353b532d3a37", + branch="basebranch", + timestamp=datetime(2021, 3, 9), + 
def test_fetch_commit_yaml_and_possibly_store_only_commit_yaml(
    dbsession, mocker, mock_configuration
):
    """The commit-level `codecov.yaml` found in the repo's top-level files is
    fetched, parsed, and returned when no site/owner/repo yaml applies."""
    commit = CommitFactory.create()
    get_source_result = {
        "content": "\n".join(["codecov:", " notify:", " require_ci_to_pass: yes"])
    }
    # `codecov.yaml` is the recognized yaml file among these entries;
    # bare `codecov` (no extension) must be ignored.
    list_top_level_files_result = [
        {"name": ".gitignore", "path": ".gitignore", "type": "file"},
        {"name": ".travis.yml", "path": ".travis.yml", "type": "file"},
        {"name": "README.rst", "path": "README.rst", "type": "file"},
        {"name": "awesome", "path": "awesome", "type": "folder"},
        {"name": "codecov", "path": "codecov", "type": "file"},
        {"name": "codecov.yaml", "path": "codecov.yaml", "type": "file"},
        {"name": "tests", "path": "tests", "type": "folder"},
    ]
    repository_service = mocker.MagicMock(
        list_top_level_files=mock.AsyncMock(return_value=list_top_level_files_result),
        get_source=mock.AsyncMock(return_value=get_source_result),
    )

    result = fetch_commit_yaml_and_possibly_store(commit, repository_service)
    expected_result = {"codecov": {"notify": {}, "require_ci_to_pass": True}}
    assert result.to_dict() == expected_result
    repository_service.get_source.assert_called_with("codecov.yaml", commit.commitid)
    repository_service.list_top_level_files.assert_called_with(commit.commitid)
+def test_fetch_commit_yaml_and_possibly_store_commit_yaml_and_base_yaml( + dbsession, mock_configuration, mocker +): + mock_configuration.set_params({"site": {"coverage": {"precision": 14}}}) + commit = CommitFactory.create() + get_source_result = { + "content": "\n".join(["codecov:", " notify:", " require_ci_to_pass: yes"]) + } + list_top_level_files_result = [ + {"name": ".travis.yml", "path": ".travis.yml", "type": "file"}, + {"name": "awesome", "path": "awesome", "type": "folder"}, + {"name": ".codecov.yaml", "path": ".codecov.yaml", "type": "file"}, + ] + repository_service = mocker.MagicMock( + list_top_level_files=mock.AsyncMock(return_value=list_top_level_files_result), + get_source=mock.AsyncMock(return_value=get_source_result), + ) + + result = fetch_commit_yaml_and_possibly_store(commit, repository_service) + expected_result = { + "codecov": {"notify": {}, "require_ci_to_pass": True}, + "coverage": {"precision": 14}, + } + assert result.to_dict() == expected_result + repository_service.get_source.assert_called_with(".codecov.yaml", commit.commitid) + repository_service.list_top_level_files.assert_called_with(commit.commitid) + + +def test_fetch_commit_yaml_and_possibly_store_commit_yaml_and_repo_yaml( + dbsession, mock_configuration, mocker +): + mock_configuration.set_params({"site": {"coverage": {"precision": 14}}}) + commit = CommitFactory.create( + repository__yaml={"codecov": {"max_report_age": "1y ago"}}, + repository__branch="supeduperbranch", + branch="supeduperbranch", + ) + get_source_result = { + "content": "\n".join(["codecov:", " notify:", " require_ci_to_pass: yes"]) + } + list_top_level_files_result = [ + {"name": ".gitignore", "path": ".gitignore", "type": "file"}, + {"name": ".codecov.yaml", "path": ".codecov.yaml", "type": "file"}, + {"name": "tests", "path": "tests", "type": "folder"}, + ] + repository_service = mocker.MagicMock( + list_top_level_files=mock.AsyncMock(return_value=list_top_level_files_result), + 
get_source=mock.AsyncMock(return_value=get_source_result), + ) + + result = fetch_commit_yaml_and_possibly_store(commit, repository_service) + expected_result = { + "codecov": {"notify": {}, "require_ci_to_pass": True}, + "coverage": {"precision": 14}, + } + assert result.to_dict() == expected_result + assert commit.repository.yaml == { + "codecov": {"notify": {}, "require_ci_to_pass": True} + } + repository_service.get_source.assert_called_with(".codecov.yaml", commit.commitid) + repository_service.list_top_level_files.assert_called_with(commit.commitid) + + +def test_fetch_commit_yaml_and_possibly_store_commit_yaml_no_commit_yaml( + dbsession, mock_configuration, mocker +): + mock_configuration.set_params({"site": {"coverage": {"round": "up"}}}) + commit = CommitFactory.create( + repository__owner__yaml={"coverage": {"precision": 2}}, + repository__yaml={"codecov": {"max_report_age": "1y ago"}}, + repository__branch="supeduperbranch", + branch="supeduperbranch", + ) + repository_service = mocker.MagicMock( + list_top_level_files=mock.AsyncMock( + side_effect=TorngitClientError(404, "fake_response", "message") + ) + ) + + result = fetch_commit_yaml_and_possibly_store(commit, repository_service) + expected_result = { + "coverage": {"precision": 2, "round": "up"}, + "codecov": {"max_report_age": "1y ago"}, + } + assert result.to_dict() == expected_result + assert commit.repository.yaml == {"codecov": {"max_report_age": "1y ago"}} + + +def test_fetch_commit_yaml_and_possibly_store_commit_yaml_invalid_commit_yaml( + dbsession, mock_configuration, mocker +): + mock_configuration.set_params({"site": {"comment": {"behavior": "new"}}}) + commit = CommitFactory.create( + repository__owner__yaml={"coverage": {"precision": 2}}, + # User needs to be less than PATCH_CENTRIC_DEFAULT_TIME_START + repository__owner__createstamp=datetime.fromisoformat( + "2024-03-30 00:00:00.000+00:00" + ), + repository__yaml={"codecov": {"max_report_age": "1y ago"}}, + 
repository__branch="supeduperbranch", + branch="supeduperbranch", + ) + dbsession.add(commit) + get_source_result = { + "content": "\n".join(["bad_key:", " notify:", " require_ci_to_pass: yes"]) + } + list_top_level_files_result = [ + {"name": ".gitignore", "path": ".gitignore", "type": "file"}, + {"name": ".codecov.yaml", "path": ".codecov.yaml", "type": "file"}, + {"name": "tests", "path": "tests", "type": "folder"}, + ] + repository_service = mocker.MagicMock( + list_top_level_files=mock.AsyncMock(return_value=list_top_level_files_result), + get_source=mock.AsyncMock(return_value=get_source_result), + ) + + result = fetch_commit_yaml_and_possibly_store(commit, repository_service) + expected_result = { + "coverage": {"precision": 2}, + "codecov": {"max_report_age": "1y ago"}, + "comment": {"behavior": "new"}, + } + assert result.to_dict() == expected_result + assert commit.repository.yaml == {"codecov": {"max_report_age": "1y ago"}} diff --git a/sample_app/tests/test_seats.py b/sample_app/tests/test_seats.py new file mode 100644 index 0000000..f5cb770 --- /dev/null +++ b/sample_app/tests/test_seats.py @@ -0,0 +1,167 @@ +import pytest + +from database.tests.factories import OwnerFactory, PullFactory +from services.repository import EnrichedPull +from services.seats import ShouldActivateSeat, determine_seat_activation +from shared.plan.constants import PlanName +from tests.helpers import mock_all_plans_and_tiers + + +def test_seat_provider_none(dbsession): + pull = PullFactory() + dbsession.add(pull) + dbsession.flush() + + pull.repository.private = True + dbsession.flush() + + enriched_pull = EnrichedPull( + database_pull=pull, + provider_pull=None, + ) + activate_seat_info = determine_seat_activation(enriched_pull) + + assert activate_seat_info.should_activate_seat == ShouldActivateSeat.NO_ACTIVATE + assert activate_seat_info.reason == "no_provider_pull" + + +def test_seat_repo_public(dbsession): + pull = PullFactory() + dbsession.add(pull) + dbsession.flush() + 
+ pull.repository.private = False + dbsession.flush() + + enriched_pull = EnrichedPull( + database_pull=pull, + provider_pull={"author": {"id": "100", "username": "test_username"}}, + ) + activate_seat_info = determine_seat_activation(enriched_pull) + + assert activate_seat_info.should_activate_seat == ShouldActivateSeat.NO_ACTIVATE + assert activate_seat_info.reason == "public_repo" + + +@pytest.mark.django_db +def test_seat_billing_plan(dbsession): + mock_all_plans_and_tiers() + pull = PullFactory() + dbsession.add(pull) + dbsession.flush() + + pull.repository.private = True + pull.repository.owner.plan = PlanName.CODECOV_PRO_MONTHLY_LEGACY.value + dbsession.flush() + + enriched_pull = EnrichedPull( + database_pull=pull, + provider_pull={"author": {"id": "100", "username": "test_username"}}, + ) + activate_seat_info = determine_seat_activation(enriched_pull) + + assert activate_seat_info.should_activate_seat == ShouldActivateSeat.NO_ACTIVATE + assert activate_seat_info.reason == "no_pr_billing_plan" + + +@pytest.mark.django_db +def test_seat_no_author(dbsession): + mock_all_plans_and_tiers() + pull = PullFactory() + dbsession.add(pull) + dbsession.flush() + + pull.repository.private = True + pull.repository.owner.plan = PlanName.CODECOV_PRO_MONTHLY.value + dbsession.flush() + + enriched_pull = EnrichedPull( + database_pull=pull, + provider_pull={"author": {"id": "100", "username": "test_username"}}, + ) + activate_seat_info = determine_seat_activation(enriched_pull) + + assert activate_seat_info.should_activate_seat == ShouldActivateSeat.NO_ACTIVATE + assert activate_seat_info.reason == "no_pr_author" + + +@pytest.mark.django_db +def test_seat_author_in_org(dbsession): + mock_all_plans_and_tiers() + pull = PullFactory() + dbsession.add(pull) + dbsession.flush() + + pull.repository.private = True + pull.repository.owner.plan = PlanName.CODECOV_PRO_MONTHLY.value + pull.repository.owner.service = "github" + dbsession.flush() + + author = 
OwnerFactory(service="github", service_id=100) + dbsession.add(author) + dbsession.flush() + + pull.repository.owner.plan_activated_users = [author.ownerid] + dbsession.flush() + + enriched_pull = EnrichedPull( + database_pull=pull, + provider_pull={"author": {"id": "100", "username": "test_username"}}, + ) + activate_seat_info = determine_seat_activation(enriched_pull) + + assert activate_seat_info.should_activate_seat == ShouldActivateSeat.NO_ACTIVATE + assert activate_seat_info.reason == "author_in_plan_activated_users" + + +@pytest.mark.django_db +def test_seat_author_not_in_org(dbsession): + mock_all_plans_and_tiers() + pull = PullFactory() + dbsession.add(pull) + dbsession.flush() + + pull.repository.private = True + pull.repository.owner.plan = PlanName.CODECOV_PRO_MONTHLY.value + pull.repository.owner.service = "github" + dbsession.flush() + + author = OwnerFactory(service="github", service_id=100) + dbsession.add(author) + dbsession.flush() + + enriched_pull = EnrichedPull( + database_pull=pull, + provider_pull={"author": {"id": "100", "username": "test_username"}}, + ) + activate_seat_info = determine_seat_activation(enriched_pull) + + assert activate_seat_info.should_activate_seat == ShouldActivateSeat.MANUAL_ACTIVATE + assert activate_seat_info.reason == "manual_activate" + + +@pytest.mark.django_db +def test_seat_author_auto_activate(dbsession): + mock_all_plans_and_tiers() + pull = PullFactory() + dbsession.add(pull) + dbsession.flush() + + pull.repository.private = True + pull.repository.owner.plan = PlanName.CODECOV_PRO_MONTHLY.value + pull.repository.owner.plan_auto_activate = True + pull.repository.owner.service = "github" + dbsession.flush() + + author = OwnerFactory(service="github", service_id=100) + dbsession.add(author) + dbsession.flush() + + enriched_pull = EnrichedPull( + database_pull=pull, + provider_pull={"author": {"id": "100", "username": "test_username"}}, + ) + activate_seat_info = determine_seat_activation(enriched_pull) + + assert 
activate_seat_info.should_activate_seat == ShouldActivateSeat.AUTO_ACTIVATE + assert activate_seat_info.reason == "auto_activate" diff --git a/sample_app/tests/test_smtp.py b/sample_app/tests/test_smtp.py new file mode 100644 index 0000000..c789034 --- /dev/null +++ b/sample_app/tests/test_smtp.py @@ -0,0 +1,314 @@ +import logging +from smtplib import ( + SMTPAuthenticationError, + SMTPConnectError, + SMTPDataError, + SMTPNotSupportedError, + SMTPRecipientsRefused, + SMTPResponseException, + SMTPSenderRefused, + SMTPServerDisconnected, +) +from unittest.mock import MagicMock, call + +import pytest + +import services.smtp +from helpers.email import Email +from services.smtp import SMTPService, SMTPServiceError + +LOGGER = logging.getLogger(__name__) + +to_addr = "test_to@codecov.io" +from_addr = "test_from@codecov.io" +test_email = Email( + from_addr=from_addr, + subject="Test subject", + text="Hello world", + to_addr=to_addr, +) + + +@pytest.fixture +def set_username_and_password(mock_configuration): + mock_configuration._params["services"]["smtp"]["username"] = "test_username" + mock_configuration._params["services"]["smtp"]["password"] = "test_password" + + +@pytest.fixture +def reset_connection_at_start(): + services.smtp.SMTPService.connection = None + + +class TestSMTP: + def test_correct_init( + self, + mocker, + mock_configuration, + set_username_and_password, + reset_connection_at_start, + ): + mocker.patch("smtplib.SMTP") + + m = mocker.patch("ssl.create_default_context", return_value=MagicMock()) + service = SMTPService() + service.connection.starttls.assert_called_with(context=m.return_value) + service.connection.login.assert_called_with("test_username", "test_password") + + def test_idempotentconnectionection(self, mocker, mock_configuration): + first = SMTPService() + firstconnection = first.connection + second = SMTPService() + secondconnection = second.connection + assert id(firstconnection) == id(secondconnection) + + def test_empty_config(self, 
mocker, mock_configuration, reset_connection_at_start): + del mock_configuration._params["services"]["smtp"] + service = SMTPService() + assert service.connection is None + + def test_send(self, mocker, mock_configuration): + mocker.patch("smtplib.SMTP") + email = Email( + to_addr="test_to@codecov.io", + from_addr="test_from@codecov.io", + subject="Test subject", + text="test text", + html="test html", + ) + + smtp = SMTPService() + smtp.send(email=email) + + smtp.connection.send_message.assert_called_with(email.message) + + def test_send_email_recipients_refused( + self, mocker, mock_configuration, dbsession, reset_connection_at_start + ): + m = MagicMock() + m.configure_mock(**{"send_message.side_effect": SMTPRecipientsRefused(to_addr)}) + mocker.patch( + "smtplib.SMTP", + return_value=m, + ) + + smtp = SMTPService() + + with pytest.raises(SMTPServiceError, match="All recipients were refused"): + smtp.send(email=test_email) + + def test_send_email_sender_refused( + self, mocker, mock_configuration, dbsession, reset_connection_at_start + ): + m = MagicMock() + m.configure_mock( + **{"send_message.side_effect": SMTPSenderRefused(123, "", to_addr)} + ) + mocker.patch( + "smtplib.SMTP", + return_value=m, + ) + + smtp = SMTPService() + + with pytest.raises(SMTPServiceError, match="Sender was refused"): + smtp.send(email=test_email) + + def test_send_email_data_error( + self, mocker, mock_configuration, dbsession, reset_connection_at_start + ): + m = MagicMock() + m.configure_mock(**{"send_message.side_effect": SMTPDataError(123, "")}) + mocker.patch( + "smtplib.SMTP", + return_value=m, + ) + + smtp = SMTPService() + + with pytest.raises( + SMTPServiceError, match="The SMTP server did not accept the data" + ): + smtp.send(email=test_email) + + def test_send_email_sends_errs( + self, mocker, mock_configuration, dbsession, reset_connection_at_start + ): + m = MagicMock() + m.configure_mock(**{"send_message.return_value": [(123, "abc"), (456, "def")]}) + mocker.patch( + 
"smtplib.SMTP", + return_value=m, + ) + + smtp = SMTPService() + + with pytest.raises(SMTPServiceError, match="123 abc 456 def"): + smtp.send(email=test_email) + + def test_smtp_active(self, mocker, mock_configuration, dbsession): + smtp = SMTPService() + assert smtp.active() == True + SMTPService.connection = None + assert smtp.active() == False + + def test_smtp_disconnected( + self, + mocker, + mock_configuration, + dbsession, + set_username_and_password, + reset_connection_at_start, + ): + m = MagicMock() + m.configure_mock(**{"noop.side_effect": SMTPServerDisconnected()}) + mocker.patch( + "smtplib.SMTP", + return_value=m, + ) + email = Email( + to_addr="test_to@codecov.io", + from_addr="test_from@codecov.io", + subject="Test subject", + text="test text", + html="test html", + ) + + smtp = SMTPService() + + smtp.send(email) + + smtp.connection.connect.assert_has_calls([call("mailhog", 1025)]) + smtp.connection.starttls.assert_has_calls( + [call(context=smtp.ssl_context), call(context=smtp.ssl_context)] + ) + smtp.connection.login.assert_has_calls( + [ + call("test_username", "test_password"), + call("test_username", "test_password"), + ] + ) + smtp.connection.noop.assert_has_calls([call()]) + smtp.connection.send_message(call(email.message)) + + def test_smtp_init_connect_fail( + self, mocker, mock_configuration, dbsession, reset_connection_at_start + ): + m = MagicMock() + mocker.patch("smtplib.SMTP", side_effect=SMTPConnectError(123, "abc")) + email = Email( + to_addr="test_to@codecov.io", + from_addr="test_from@codecov.io", + subject="Test subject", + text="test text", + html="test html", + ) + + with pytest.raises( + SMTPServiceError, match="Error starting connection for SMTPService" + ): + smtp = SMTPService() + + def test_smtp_disconnected_fail( + self, mocker, mock_configuration, dbsession, reset_connection_at_start + ): + m = MagicMock() + m.configure_mock( + **{ + "noop.side_effect": SMTPServerDisconnected(), + "connect.side_effect": 
SMTPConnectError(123, "abc"), + } + ) + mocker.patch( + "smtplib.SMTP", + return_value=m, + ) + email = Email( + to_addr="test_to@codecov.io", + from_addr="test_from@codecov.io", + subject="Test subject", + text="test text", + html="test html", + ) + + with pytest.raises( + SMTPServiceError, match="Error starting connection for SMTPService" + ): + smtp = SMTPService() + smtp.send(email) + + @pytest.mark.parametrize( + "fn, err_msg, side_effect", + [ + ( + "starttls", + "Error doing STARTTLS command on SMTP", + SMTPResponseException(123, "abc"), + ), + ( + "login", + "SMTP server did not accept username/password combination", + SMTPAuthenticationError(123, "abc"), + ), + ], + ) + def test_smtp_tls_not_supported( + self, + caplog, + mocker, + mock_configuration, + dbsession, + reset_connection_at_start, + set_username_and_password, + fn, + err_msg, + side_effect, + ): + m = MagicMock() + m.configure_mock(**{f"{fn}.side_effect": side_effect}) + mocker.patch( + "smtplib.SMTP", + return_value=m, + ) + + with caplog.at_level(logging.WARNING): + with pytest.raises(SMTPServiceError, match=err_msg): + smtp = SMTPService() + + assert err_msg in caplog.text + + @pytest.mark.parametrize( + "fn, err_msg", + [ + ( + "starttls", + "Server does not support TLS, continuing initialization of SMTP connection", + ), + ( + "login", + "Server does not support AUTH, continuing initialization of SMTP connection", + ), + ], + ) + def test_smtp_not_supported( + self, + caplog, + mocker, + mock_configuration, + dbsession, + reset_connection_at_start, + set_username_and_password, + fn, + err_msg, + ): + m = MagicMock() + m.configure_mock(**{f"{fn}.side_effect": SMTPNotSupportedError()}) + mocker.patch( + "smtplib.SMTP", + return_value=m, + ) + + with caplog.at_level(logging.WARNING): + smtp = SMTPService() + + assert err_msg in caplog.text diff --git a/sample_app/tests/test_template.py b/sample_app/tests/test_template.py new file mode 100644 index 0000000..54651c7 --- /dev/null +++ 
b/sample_app/tests/test_template.py @@ -0,0 +1,48 @@ +import pytest +from jinja2.exceptions import TemplateNotFound, UndefinedError + +from services.template import TemplateService + + +class TestTemplate: + def test_get_template(self): + ts = TemplateService() + template = ts.get_template("test.txt") + populated_template = template.render(**{"username": "test_username"}) + assert populated_template == "Test template test_username" + + def test_get_template_html(self): + ts = TemplateService() + template = ts.get_template("test.html") + populated_template = template.render(**{"username": "test_username"}) + expected_result = """ + + + + + + Document + + + +

+ test template test_username +

+ + +""" + for expected_line, actual_line in zip( + expected_result.splitlines(), populated_template.splitlines() + ): + assert expected_line == actual_line + + def test_get_template_no_kwargs(self): + ts = TemplateService() + template = ts.get_template("test.txt") + with pytest.raises(UndefinedError): + template.render(not_username="") + + def test_get_template_non_existing(self): + ts = TemplateService() + with pytest.raises(TemplateNotFound): + ts.get_template("nonexistent") diff --git a/sample_app/tests/test_test_results.py b/sample_app/tests/test_test_results.py new file mode 100644 index 0000000..6ba2de2 --- /dev/null +++ b/sample_app/tests/test_test_results.py @@ -0,0 +1,253 @@ +from unittest import mock + +import pytest + +from database.tests.factories import ( + CommitFactory, + OwnerFactory, + RepositoryFactory, +) +from helpers.notifier import NotifierResult +from services.test_results import ( + ErrorPayload, + FlakeInfo, + TACommentInDepthInfo, + TestResultsNotificationFailure, + TestResultsNotificationPayload, + TestResultsNotifier, + generate_failure_info, + generate_flags_hash, + generate_test_id, + should_do_flaky_detection, +) +from services.yaml import UserYaml +from shared.plan.constants import DEFAULT_FREE_PLAN +from shared.torngit.exceptions import TorngitClientError +from tests.helpers import mock_all_plans_and_tiers + + +def mock_repo_service(): + repo_service = mock.Mock( + post_comment=mock.AsyncMock(), + edit_comment=mock.AsyncMock(), + ) + return repo_service + + +def test_send_to_provider(): + tn = TestResultsNotifier(CommitFactory(), None) + tn._pull = mock.Mock() + tn._pull.database_pull.commentid = None + tn._repo_service = mock_repo_service() + m = {"id": 1} + tn._repo_service.post_comment.return_value = m + + res = tn.send_to_provider(tn._pull, "hello world") + + assert res == True + + tn._repo_service.post_comment.assert_called_with( + tn._pull.database_pull.pullid, "hello world" + ) + assert tn._pull.database_pull.commentid == 1 
+ + +def test_send_to_provider_edit(): + tn = TestResultsNotifier(CommitFactory(), None) + tn._pull = mock.Mock() + tn._pull.database_pull.commentid = 1 + tn._repo_service = mock_repo_service() + m = {"id": 1} + tn._repo_service.edit_comment.return_value = m + + res = tn.send_to_provider(tn._pull, "hello world") + + assert res == True + tn._repo_service.edit_comment.assert_called_with( + tn._pull.database_pull.pullid, 1, "hello world" + ) + + +def test_send_to_provider_fail(): + tn = TestResultsNotifier(CommitFactory(), None) + tn._pull = mock.Mock() + tn._pull.database_pull.commentid = 1 + tn._repo_service = mock_repo_service() + tn._repo_service.edit_comment.side_effect = TorngitClientError + + res = tn.send_to_provider(tn._pull, "hello world") + + assert res == False + + +def test_generate_failure_info(snapshot): + flags_hash = generate_flags_hash([]) + test_id = generate_test_id(1, "testsuite", "testname", flags_hash) + fail = TestResultsNotificationFailure( + "hello world", + "testname", + [], + test_id, + 1.0, + "https://example.com/build_url", + ) + + res = generate_failure_info(fail) + + assert snapshot("txt") == res + + +def test_build_message(snapshot): + flags_hash = generate_flags_hash([]) + test_id = generate_test_id(1, "testsuite", "testname", flags_hash) + fail = TestResultsNotificationFailure( + "hello world", + "testname", + [], + test_id, + 1.0, + "https://example.com/build_url", + ) + info = TACommentInDepthInfo(failures=[fail], flaky_tests={}) + payload = TestResultsNotificationPayload(1, 2, 3, info) + commit = CommitFactory( + branch="thing/thing", + repository__owner__username="username", + repository__owner__service="github", + repository__name="name", + ) + tn = TestResultsNotifier(commit, None, None, None, payload) + res = tn.build_message() + + assert snapshot("txt") == res + + +def test_build_message_with_flake(snapshot): + flags_hash = generate_flags_hash([]) + test_id = generate_test_id(1, "testsuite", "testname", flags_hash) + fail = 
TestResultsNotificationFailure( + "hello world", + "testname", + [], + test_id, + 1.0, + "https://example.com/build_url", + ) + flaky_test = FlakeInfo(1, 3) + info = TACommentInDepthInfo(failures=[fail], flaky_tests={test_id: flaky_test}) + payload = TestResultsNotificationPayload(1, 2, 3, info) + commit = CommitFactory( + branch="test_branch", + repository__owner__username="username", + repository__owner__service="github", + repository__name="name", + ) + tn = TestResultsNotifier(commit, None, None, None, payload) + res = tn.build_message() + + assert snapshot("txt") == res + + +def test_notify(mocker): + mocker.patch("helpers.notifier.get_repo_provider_service", return_value=mock.Mock()) + mocker.patch( + "helpers.notifier.fetch_and_update_pull_request_information_from_commit", + return_value=mock.Mock(), + ) + tn = TestResultsNotifier(CommitFactory(), None, _pull=mock.Mock()) + tn.build_message = mock.Mock() + tn.send_to_provider = mock.Mock() + + notification_result = tn.notify() + + assert notification_result == NotifierResult.COMMENT_POSTED + + +def test_notify_fail_torngit_error( + mocker, +): + mocker.patch("helpers.notifier.get_repo_provider_service", return_value=mock.Mock()) + mocker.patch( + "helpers.notifier.fetch_and_update_pull_request_information_from_commit", + return_value=mock.Mock(), + ) + tn = TestResultsNotifier(CommitFactory(), None, _pull=mock.Mock()) + tn.build_message = mock.Mock() + tn.send_to_provider = mock.Mock(return_value=False) + + notification_result = tn.notify() + + assert notification_result == NotifierResult.TORNGIT_ERROR + + +@pytest.mark.django_db +@pytest.mark.parametrize( + "config,private,plan,ex_result", + [ + (False, False, "users-inappm", False), + (True, True, DEFAULT_FREE_PLAN, False), + (True, False, DEFAULT_FREE_PLAN, True), + (True, False, "users-inappm", True), + (True, True, "users-inappm", True), + ], +) +def test_should_do_flake_detection(dbsession, mocker, config, private, plan, ex_result): + 
mock_all_plans_and_tiers() + owner = OwnerFactory(plan=plan) + repo = RepositoryFactory(private=private, owner=owner) + dbsession.add(repo) + dbsession.flush() + + yaml = {"test_analytics": {"flake_detection": config}} + + result = should_do_flaky_detection(repo, UserYaml.from_dict(yaml)) + + assert result == ex_result + + +def test_specific_error_message(mocker, snapshot): + mock_repo_service = mock.AsyncMock() + mocker.patch( + "helpers.notifier.get_repo_provider_service", return_value=mock_repo_service + ) + mocker.patch( + "helpers.notifier.fetch_and_update_pull_request_information_from_commit", + return_value=mock.AsyncMock(), + ) + + error = ErrorPayload( + "unsupported_file_format", + "Error parsing JUnit XML in test.xml at 4:32: ParserError: No name found", + ) + tn = TestResultsNotifier(CommitFactory(), None, error=error) + result = tn.error_comment() + + assert result == (True, "comment_posted") + + args = mock_repo_service.edit_comment.call_args[0] + db_pull = tn._pull.database_pull + assert args[0] == db_pull.pullid + assert args[1] == db_pull.commentid + assert snapshot("txt") == args[2] + + +def test_specific_error_message_no_error(mocker, snapshot): + mock_repo_service = mock.AsyncMock() + mocker.patch( + "helpers.notifier.get_repo_provider_service", return_value=mock_repo_service + ) + mocker.patch( + "helpers.notifier.fetch_and_update_pull_request_information_from_commit", + return_value=mock.AsyncMock(), + ) + + tn = TestResultsNotifier(CommitFactory(), None) + result = tn.error_comment() + + assert result == (True, "comment_posted") + + args = mock_repo_service.edit_comment.call_args[0] + db_pull = tn._pull.database_pull + assert args[0] == db_pull.pullid + assert args[1] == db_pull.commentid + assert snapshot("txt") == args[2] diff --git a/sample_app/tests/test_timeseries.py b/sample_app/tests/test_timeseries.py new file mode 100644 index 0000000..d4cc8bf --- /dev/null +++ b/sample_app/tests/test_timeseries.py @@ -0,0 +1,1153 @@ +from datetime 
import UTC, datetime + +import pytest +from celery import group + +from database.models.timeseries import Dataset, Measurement, MeasurementName +from database.tests.factories import CommitFactory, RepositoryFactory +from database.tests.factories.reports import RepositoryFlagFactory +from database.tests.factories.timeseries import DatasetFactory, MeasurementFactory +from services.timeseries import ( + backfill_batch_size, + delete_repository_data, + delete_repository_measurements, + repository_commits_query, + repository_datasets_query, +) +from shared.reports.readonly import ReadOnlyReport +from shared.reports.reportfile import ReportFile +from shared.reports.resources import Report +from shared.reports.types import ReportLine +from shared.utils.sessions import Session +from shared.yaml import UserYaml +from tasks.save_commit_measurements import save_commit_measurements + + +@pytest.fixture +def sample_report(): + report = Report() + first_file = ReportFile("file_1.go") + first_file.append(1, ReportLine.create(1, sessions=[[0, 1]], complexity=(10, 2))) + first_file.append(2, ReportLine.create(0, sessions=[[0, 1]])) + first_file.append(3, ReportLine.create(1, sessions=[[0, 1]])) + first_file.append(5, ReportLine.create(1, sessions=[[0, 1]])) + first_file.append(6, ReportLine.create(0, sessions=[[0, 1]])) + first_file.append(8, ReportLine.create(1, sessions=[[0, 1]])) + first_file.append(9, ReportLine.create(1, sessions=[[0, 1]])) + first_file.append(10, ReportLine.create(0, sessions=[[0, 1]])) + second_file = ReportFile("file_2.py") + second_file.append(12, ReportLine.create(1, sessions=[[0, 1]])) + second_file.append(51, ReportLine.create("1/2", type="b", sessions=[[0, 1]])) + report.append(first_file) + report.append(second_file) + report.add_session(Session(flags=["flag1", "flag2"])) + return report + + +@pytest.fixture +def sample_report_for_components(): + report = Report() + first_file = ReportFile("poker.py") + first_file.append(1, ReportLine.create(1, 
sessions=[[0, 1]])) + first_file.append(2, ReportLine.create(1, sessions=[[0, 1]])) + second_file = ReportFile("folder/poker2.py") + second_file.append(3, ReportLine.create(0, sessions=[[0, 0]])) + second_file.append(4, ReportLine.create(1, sessions=[[0, 1]])) + third_file = ReportFile("random.go") + third_file.append(5, ReportLine.create(0, sessions=[[0, 0]])) + third_file.append(6, ReportLine.create(0, sessions=[[0, 0]])) + third_file.append(8, ReportLine.create(0, sessions=[[0, 1]])) + third_file.append(7, ReportLine.create(1, sessions=[[0, 0]])) + report.append(first_file) + report.append(second_file) + report.append(third_file) + report.add_session( + Session(flags=["test-flag-123", "test-flag-456", "random-flago-987"]) + ) + return report + + +def _create_repository(dbsession): + repository = RepositoryFactory.create() + dbsession.add(repository) + dbsession.flush() + + coverage_dataset = DatasetFactory.create( + repository_id=repository.repoid, + name=MeasurementName.coverage.value, + backfilled=True, + ) + dbsession.add(coverage_dataset) + flag_coverage_dataset = DatasetFactory.create( + repository_id=repository.repoid, + name=MeasurementName.flag_coverage.value, + backfilled=False, + ) + dbsession.add(flag_coverage_dataset) + component_coverage_dataset = DatasetFactory.create( + repository_id=repository.repoid, + name=MeasurementName.component_coverage.value, + backfilled=False, + ) + dbsession.add(component_coverage_dataset) + dbsession.flush() + + return repository + + +@pytest.fixture +def repository(dbsession): + return _create_repository(dbsession) + + +@pytest.fixture +def dataset_names(): + return [ + MeasurementName.coverage.value, + MeasurementName.flag_coverage.value, + MeasurementName.component_coverage.value, + ] + + +class TestTimeseriesService: + def test_insert_commit_measurement( + self, dbsession, sample_report, repository, dataset_names, mocker + ): + mocker.patch( + "services.report.ReportService.get_existing_report_for_commit", + 
return_value=ReadOnlyReport.create_from_report(sample_report), + ) + + commit = CommitFactory.create(branch="foo", repository=repository) + dbsession.add(commit) + dbsession.flush() + + save_commit_measurements(commit, dataset_names=dataset_names) + + measurement = ( + dbsession.query(Measurement) + .filter_by( + name=MeasurementName.coverage.value, + commit_sha=commit.commitid, + timestamp=commit.timestamp, + ) + .one_or_none() + ) + + assert measurement + assert measurement.name == MeasurementName.coverage.value + assert measurement.owner_id == commit.repository.ownerid + assert measurement.repo_id == commit.repoid + assert measurement.measurable_id == f"{commit.repoid}" + assert measurement.commit_sha == commit.commitid + assert measurement.timestamp.replace(tzinfo=UTC) == commit.timestamp.replace( + tzinfo=UTC + ) + assert measurement.branch == "foo" + assert measurement.value == 60.0 + + def test_save_commit_measurements_no_report( + self, dbsession, repository, dataset_names, mocker + ): + mocker.patch( + "services.report.ReportService.get_existing_report_for_commit", + return_value=None, + ) + + commit = CommitFactory.create(branch="foo", repository=repository) + dbsession.add(commit) + dbsession.flush() + + save_commit_measurements(commit, dataset_names=dataset_names) + + measurement = ( + dbsession.query(Measurement) + .filter_by( + name=MeasurementName.coverage.value, + commit_sha=commit.commitid, + timestamp=commit.timestamp, + ) + .one_or_none() + ) + + assert measurement is None + + def test_update_commit_measurement( + self, dbsession, sample_report, repository, dataset_names, mocker + ): + mocker.patch( + "services.report.ReportService.get_existing_report_for_commit", + return_value=ReadOnlyReport.create_from_report(sample_report), + ) + + commit = CommitFactory.create(branch="foo", repository=repository) + dbsession.add(commit) + dbsession.flush() + + measurement = MeasurementFactory.create( + name=MeasurementName.coverage.value, + 
owner_id=commit.repository.ownerid, + repo_id=commit.repoid, + measurable_id=commit.repoid, + commit_sha=commit.commitid, + timestamp=commit.timestamp, + branch="testing", + value=0, + ) + dbsession.add(measurement) + dbsession.flush() + + save_commit_measurements(commit, dataset_names=dataset_names) + + measurements = ( + dbsession.query(Measurement) + .filter_by( + name=MeasurementName.coverage.value, + commit_sha=commit.commitid, + timestamp=commit.timestamp, + ) + .all() + ) + + assert len(measurements) == 1 + measurement = measurements[0] + assert measurement.name == MeasurementName.coverage.value + assert measurement.owner_id == commit.repository.ownerid + assert measurement.repo_id == commit.repoid + assert measurement.measurable_id == f"{commit.repoid}" + assert measurement.commit_sha == commit.commitid + assert measurement.timestamp.replace(tzinfo=UTC) == commit.timestamp.replace( + tzinfo=UTC + ) + assert measurement.branch == "foo" + assert measurement.value == 60.0 + + def test_commit_measurement_insert_flags( + self, dbsession, sample_report, repository, dataset_names, mocker + ): + mocker.patch( + "services.report.ReportService.get_existing_report_for_commit", + return_value=ReadOnlyReport.create_from_report(sample_report), + ) + + commit = CommitFactory.create(branch="foo", repository=repository) + dbsession.add(commit) + dbsession.flush() + + repository_flag1 = RepositoryFlagFactory( + repository=commit.repository, flag_name="flag1" + ) + dbsession.add(repository_flag1) + dbsession.flush() + + repository_flag2 = RepositoryFlagFactory( + repository=commit.repository, flag_name="flag2" + ) + dbsession.add(repository_flag2) + dbsession.flush() + + save_commit_measurements(commit, dataset_names=dataset_names) + + measurement = ( + dbsession.query(Measurement) + .filter_by( + name=MeasurementName.flag_coverage.value, + commit_sha=commit.commitid, + timestamp=commit.timestamp, + measurable_id=f"{repository_flag1.id}", + ) + .one_or_none() + ) + + assert 
measurement + assert measurement.name == MeasurementName.flag_coverage.value + assert measurement.owner_id == commit.repository.ownerid + assert measurement.repo_id == commit.repoid + assert measurement.measurable_id == f"{repository_flag1.id}" + assert measurement.commit_sha == commit.commitid + assert measurement.timestamp.replace(tzinfo=UTC) == commit.timestamp.replace( + tzinfo=UTC + ) + assert measurement.branch == "foo" + assert measurement.value == 100.0 + + measurement = ( + dbsession.query(Measurement) + .filter_by( + name=MeasurementName.flag_coverage.value, + commit_sha=commit.commitid, + timestamp=commit.timestamp, + measurable_id=f"{repository_flag2.id}", + ) + .one_or_none() + ) + + assert measurement + assert measurement.name == MeasurementName.flag_coverage.value + assert measurement.owner_id == commit.repository.ownerid + assert measurement.repo_id == commit.repoid + assert measurement.measurable_id == f"{repository_flag2.id}" + assert measurement.commit_sha == commit.commitid + assert measurement.timestamp.replace(tzinfo=UTC) == commit.timestamp.replace( + tzinfo=UTC + ) + assert measurement.branch == "foo" + assert measurement.value == 100.0 + + def test_commit_measurement_update_flags( + self, dbsession, sample_report, repository, dataset_names, mocker + ): + mocker.patch( + "services.report.ReportService.get_existing_report_for_commit", + return_value=ReadOnlyReport.create_from_report(sample_report), + ) + + commit = CommitFactory.create(branch="foo", repository=repository) + dbsession.add(commit) + dbsession.flush() + + repository_flag1 = RepositoryFlagFactory( + repository=commit.repository, flag_name="flag1" + ) + dbsession.add(repository_flag1) + dbsession.flush() + + repository_flag2 = RepositoryFlagFactory( + repository=commit.repository, flag_name="flag2" + ) + dbsession.add(repository_flag2) + dbsession.flush() + + measurement1 = MeasurementFactory.create( + name=MeasurementName.flag_coverage.value, + owner_id=commit.repository.ownerid, 
+ repo_id=commit.repoid, + measurable_id=repository_flag1.id, + commit_sha=commit.commitid, + timestamp=commit.timestamp, + branch="testing", + value=0, + ) + dbsession.add(measurement1) + dbsession.flush() + + measurement2 = MeasurementFactory.create( + name=MeasurementName.flag_coverage.value, + owner_id=commit.repository.ownerid, + repo_id=commit.repoid, + measurable_id=repository_flag2.id, + commit_sha=commit.commitid, + timestamp=commit.timestamp, + branch="testing", + value=0, + ) + dbsession.add(measurement2) + dbsession.flush() + + save_commit_measurements(commit, dataset_names=dataset_names) + + measurement = ( + dbsession.query(Measurement) + .filter_by( + name=MeasurementName.flag_coverage.value, + commit_sha=commit.commitid, + timestamp=commit.timestamp, + measurable_id=f"{repository_flag1.id}", + ) + .one_or_none() + ) + + assert measurement + assert measurement.name == MeasurementName.flag_coverage.value + assert measurement.owner_id == commit.repository.ownerid + assert measurement.repo_id == commit.repoid + assert measurement.measurable_id == f"{repository_flag1.id}" + assert measurement.commit_sha == commit.commitid + assert measurement.timestamp.replace(tzinfo=UTC) == commit.timestamp.replace( + tzinfo=UTC + ) + assert measurement.branch == "foo" + assert measurement.value == 100.0 + + measurement = ( + dbsession.query(Measurement) + .filter_by( + name=MeasurementName.flag_coverage.value, + commit_sha=commit.commitid, + timestamp=commit.timestamp, + measurable_id=f"{repository_flag2.id}", + ) + .one_or_none() + ) + + assert measurement + assert measurement.name == MeasurementName.flag_coverage.value + assert measurement.owner_id == commit.repository.ownerid + assert measurement.repo_id == commit.repoid + assert measurement.measurable_id == f"{repository_flag2.id}" + assert measurement.commit_sha == commit.commitid + assert measurement.timestamp.replace(tzinfo=UTC) == commit.timestamp.replace( + tzinfo=UTC + ) + assert measurement.branch == "foo" + 
assert measurement.value == 100.0 + + def test_commit_measurement_insert_components( + self, dbsession, sample_report_for_components, repository, dataset_names, mocker + ): + mocker.patch( + "tasks.save_commit_measurements.PARALLEL_COMPONENT_COMPARISON.check_value", + return_value=False, + ) + mocker.patch( + "services.report.ReportService.get_existing_report_for_commit", + return_value=ReadOnlyReport.create_from_report( + sample_report_for_components + ), + ) + + commit = CommitFactory.create(branch="foo", repository=repository) + dbsession.add(commit) + dbsession.flush() + + get_repo_yaml = mocker.patch("tasks.save_commit_measurements.get_repo_yaml") + yaml_dict = { + "component_management": { + "default_rules": { + "paths": [r".*\.go"], + "flag_regexes": [r"test-flag-*"], + }, + "individual_components": [ + {"component_id": "python_files", "paths": [r".*\.py"]}, + {"component_id": "rules_from_default"}, + { + "component_id": "i_have_flags", + "flag_regexes": [r"random-.*"], + }, + { + "component_id": "all_settings", + "name": "all settings", + "flag_regexes": [], + "paths": [r"folder/*"], + }, + { # testing duplicate component on purpose this was causing crashes + "component_id": "all_settings", + "name": "all settings", + "flag_regexes": [], + "paths": [r"folder/*"], + }, + { + "component_id": "path_not_found", + "name": "no expected covarage", + "flag_regexes": [], + "paths": ["asdfasdf"], + }, + { + "component_id": "empty_path", + "name": "no expected covarage", + "flag_regexes": [], + "paths": [], + }, + ], + } + } + get_repo_yaml.return_value = UserYaml(yaml_dict) + save_commit_measurements(commit, dataset_names=dataset_names) + + # 1 for coverage, 3 for flags, 4 for valid components + assert len(dbsession.query(Measurement).all()) == 8 + + python_file_measurement = ( + dbsession.query(Measurement) + .filter_by( + name=MeasurementName.component_coverage.value, + commit_sha=commit.commitid, + timestamp=commit.timestamp, + measurable_id="python_files", + ) + 
.one_or_none() + ) + assert python_file_measurement + assert python_file_measurement.name == MeasurementName.component_coverage.value + assert python_file_measurement.owner_id == commit.repository.ownerid + assert python_file_measurement.repo_id == commit.repoid + assert python_file_measurement.measurable_id == "python_files" + assert python_file_measurement.commit_sha == commit.commitid + assert python_file_measurement.timestamp.replace( + tzinfo=UTC + ) == commit.timestamp.replace(tzinfo=UTC) + assert python_file_measurement.branch == "foo" + assert python_file_measurement.value == 75.0 + + default_component_settings_measurement = ( + dbsession.query(Measurement) + .filter_by( + name=MeasurementName.component_coverage.value, + commit_sha=commit.commitid, + timestamp=commit.timestamp, + measurable_id="rules_from_default", + ) + .one_or_none() + ) + assert default_component_settings_measurement + assert ( + default_component_settings_measurement.name + == MeasurementName.component_coverage.value + ) + assert ( + default_component_settings_measurement.owner_id == commit.repository.ownerid + ) + assert default_component_settings_measurement.repo_id == commit.repoid + assert ( + default_component_settings_measurement.measurable_id == "rules_from_default" + ) + assert default_component_settings_measurement.commit_sha == commit.commitid + assert default_component_settings_measurement.timestamp.replace( + tzinfo=UTC + ) == commit.timestamp.replace(tzinfo=UTC) + assert default_component_settings_measurement.branch == "foo" + assert default_component_settings_measurement.value == 25.0 + + manual_flags_measurements = ( + dbsession.query(Measurement) + .filter_by( + name=MeasurementName.component_coverage.value, + commit_sha=commit.commitid, + timestamp=commit.timestamp, + measurable_id="i_have_flags", + ) + .one_or_none() + ) + assert manual_flags_measurements + assert ( + manual_flags_measurements.name == MeasurementName.component_coverage.value + ) + assert 
manual_flags_measurements.owner_id == commit.repository.ownerid + assert manual_flags_measurements.repo_id == commit.repoid + assert manual_flags_measurements.measurable_id == "i_have_flags" + assert manual_flags_measurements.commit_sha == commit.commitid + assert manual_flags_measurements.timestamp.replace( + tzinfo=UTC + ) == commit.timestamp.replace(tzinfo=UTC) + assert manual_flags_measurements.branch == "foo" + assert manual_flags_measurements.value == 25.0 + + all_settings_measurements = ( + dbsession.query(Measurement) + .filter_by( + name=MeasurementName.component_coverage.value, + commit_sha=commit.commitid, + timestamp=commit.timestamp, + measurable_id="all_settings", + ) + .one_or_none() + ) + assert all_settings_measurements + assert ( + all_settings_measurements.name == MeasurementName.component_coverage.value + ) + assert all_settings_measurements.owner_id == commit.repository.ownerid + assert all_settings_measurements.repo_id == commit.repoid + assert all_settings_measurements.measurable_id == "all_settings" + assert all_settings_measurements.commit_sha == commit.commitid + assert all_settings_measurements.timestamp.replace( + tzinfo=UTC + ) == commit.timestamp.replace(tzinfo=UTC) + assert all_settings_measurements.branch == "foo" + assert all_settings_measurements.value == 50.0 + + path_not_found_measurements = ( + dbsession.query(Measurement) + .filter_by( + name=MeasurementName.component_coverage.value, + commit_sha=commit.commitid, + timestamp=commit.timestamp, + measurable_id="path_not_found", + ) + .one_or_none() + ) + assert path_not_found_measurements is None + + empty_path_measurements = ( + dbsession.query(Measurement) + .filter_by( + name=MeasurementName.component_coverage.value, + commit_sha=commit.commitid, + timestamp=commit.timestamp, + measurable_id="empty_path", + ) + .one_or_none() + ) + assert empty_path_measurements is None + + def test_commit_measurement_update_component_parallel( + self, + sample_report_for_components, + 
repository, + dataset_names, + mocker, + mock_repo_provider, + ): + dbsession = repository.get_db_session() + mocker.patch.object(dbsession, "close") + mocker.patch("tasks.base.get_db_session", return_value=dbsession) + mocker.patch.object(group, "apply_async", group.apply) + + mocker.patch( + "tasks.save_commit_measurements.PARALLEL_COMPONENT_COMPARISON.check_value", + return_value=True, + ) + + mocker.patch( + "services.report.ReportService.get_existing_report_for_commit", + return_value=ReadOnlyReport.create_from_report( + sample_report_for_components + ), + ) + + commit = CommitFactory.create(branch="foo", repository=repository) + dbsession.add(commit) + dbsession.flush() + + get_repo_yaml = mocker.patch("tasks.save_commit_measurements.get_repo_yaml") + get_current_yaml = mocker.patch("tasks.upsert_component.get_repo_yaml") + yaml_dict = { + "component_management": { + "individual_components": [ + { + "component_id": "test-component-123", + "name": "test component", + "flag_regexes": ["random-flago-987"], + "paths": [r"folder/*"], + }, + ], + } + } + get_repo_yaml.return_value = UserYaml(yaml_dict) + get_current_yaml.return_value = UserYaml(yaml_dict) + + save_commit_measurements(commit, dataset_names=dataset_names) + + # Want to commit here to have the results persisted properly. + # Otherwise the results aren't going to be reflected in the select below. 
+ # dbsession.commit() + + measurements = ( + dbsession.query(Measurement) + .filter_by(name=MeasurementName.component_coverage.value) + .all() + ) + + assert len(measurements) == 1 + dbsession.add(commit) + assert measurements[0].name == MeasurementName.component_coverage.value + assert measurements[0].owner_id == commit.repository.ownerid + assert measurements[0].repo_id == commit.repoid + assert measurements[0].measurable_id == "test-component-123" + assert measurements[0].commit_sha == commit.commitid + assert measurements[0].timestamp.replace( + tzinfo=UTC + ) == commit.timestamp.replace(tzinfo=UTC) + + def test_commit_measurement_update_component( + self, dbsession, sample_report_for_components, repository, dataset_names, mocker + ): + mocker.patch( + "tasks.save_commit_measurements.PARALLEL_COMPONENT_COMPARISON.check_value", + return_value=False, + ) + mocker.patch( + "services.report.ReportService.get_existing_report_for_commit", + return_value=ReadOnlyReport.create_from_report( + sample_report_for_components + ), + ) + + commit = CommitFactory.create(branch="foo", repository=repository) + dbsession.add(commit) + dbsession.flush() + + get_repo_yaml = mocker.patch("tasks.save_commit_measurements.get_repo_yaml") + yaml_dict = { + "component_management": { + "individual_components": [ + { + "component_id": "test-component-123", + "name": "test component", + "flag_regexes": ["random-flago-987"], + "paths": [r"folder/*"], + }, + ], + } + } + get_repo_yaml.return_value = UserYaml(yaml_dict) + + measurement = MeasurementFactory.create( + name=MeasurementName.component_coverage.value, + owner_id=commit.repository.ownerid, + repo_id=commit.repoid, + measurable_id="test-component-123", + commit_sha=commit.commitid, + timestamp=commit.timestamp, + branch="testing", + value=0, + ) + dbsession.add(measurement) + dbsession.flush() + + save_commit_measurements(commit, dataset_names=dataset_names) + + # Want to commit here to have the results persisted properly. 
+ # Otherwise the results aren't going to be reflected in the select below. + dbsession.commit() + + measurement = ( + dbsession.query(Measurement) + .filter_by( + name=MeasurementName.component_coverage.value, + commit_sha=commit.commitid, + timestamp=commit.timestamp, + measurable_id="test-component-123", + ) + .one_or_none() + ) + + assert measurement + assert measurement.name == MeasurementName.component_coverage.value + assert measurement.owner_id == commit.repository.ownerid + assert measurement.repo_id == commit.repoid + assert measurement.measurable_id == "test-component-123" + assert measurement.commit_sha == commit.commitid + assert measurement.timestamp.replace(tzinfo=UTC) == commit.timestamp.replace( + tzinfo=UTC + ) + assert measurement.branch == "foo" + assert measurement.value == 50.0 + + def test_commit_measurement_no_datasets( + self, mock_storage, dbsession, dataset_names, mocker + ): + mocker.patch( + "tasks.save_commit_measurements.PARALLEL_COMPONENT_COMPARISON.check_value", + return_value=False, + ) + + repository = RepositoryFactory.create() + dbsession.add(repository) + dbsession.flush() + + commit = CommitFactory.create(branch="foo", repository=repository) + dbsession.add(commit) + dbsession.flush() + + save_commit_measurements(commit, dataset_names=[]) + + assert dbsession.query(Measurement).count() == 0 + + def test_repository_commits_query(self, dbsession, repository, mocker): + commit1 = CommitFactory.create( + repository=repository, + timestamp=datetime(2022, 6, 1, 0, 0, 0).replace(tzinfo=UTC), + ) + dbsession.add(commit1) + commit2 = CommitFactory.create( + repository=repository, + timestamp=datetime(2022, 6, 10, 0, 0, 0).replace(tzinfo=UTC), + ) + dbsession.add(commit2) + commit3 = CommitFactory.create( + repository=repository, + timestamp=datetime(2022, 6, 17, 0, 0, 0).replace(tzinfo=UTC), + ) + dbsession.add(commit3) + commit4 = CommitFactory.create( + timestamp=datetime(2022, 6, 10, 0, 0, 0).replace(tzinfo=UTC) + ) + 
dbsession.add(commit4) + dbsession.flush() + + commits = repository_commits_query( + repository, + start_date=datetime(2022, 6, 1, 0, 0, 0).replace(tzinfo=UTC), + end_date=datetime(2022, 6, 15, 0, 0, 0).replace(tzinfo=UTC), + ) + + assert len(list(commits)) == 2 + assert commits[0].id_ == commit2.id_ + assert commits[1].id_ == commit1.id_ + + def test_repository_datasets_query(self, repository): + datasets = repository_datasets_query(repository) + assert [dataset.name for dataset in datasets] == [ + MeasurementName.coverage.value, + MeasurementName.flag_coverage.value, + MeasurementName.component_coverage.value, + ] + + datasets = repository_datasets_query(repository, backfilled=True) + assert [dataset.name for dataset in datasets] == [ + MeasurementName.coverage.value, + ] + + datasets = repository_datasets_query(repository, backfilled=False) + assert [dataset.name for dataset in datasets] == [ + MeasurementName.flag_coverage.value, + MeasurementName.component_coverage.value, + ] + + def test_backfill_batch_size(self, repository, mocker): + mocker.patch( + "tasks.save_commit_measurements.PARALLEL_COMPONENT_COMPARISON.check_value", + return_value=False, + ) + dbsession = repository.get_db_session() + coverage_dataset = ( + dbsession.query(Dataset.name) + .filter_by( + repository_id=repository.repoid, name=MeasurementName.coverage.value + ) + .first() + ) + flag_coverage_dataset = ( + dbsession.query(Dataset.name) + .filter_by( + repository_id=repository.repoid, + name=MeasurementName.flag_coverage.value, + ) + .first() + ) + component_coverage_dataset = ( + dbsession.query(Dataset.name) + .filter_by( + repository_id=repository.repoid, + name=MeasurementName.component_coverage.value, + ) + .first() + ) + + # Initially batch size is 500 for all measurement names + batch_size = backfill_batch_size(repository, coverage_dataset) + assert batch_size == 500 + batch_size = backfill_batch_size(repository, flag_coverage_dataset) + assert batch_size == 500 + batch_size = 
backfill_batch_size(repository, component_coverage_dataset) + assert batch_size == 500 + + dbsession = repository.get_db_session() + flag1 = RepositoryFlagFactory(repository=repository, flag_name="flag1") + flag2 = RepositoryFlagFactory(repository=repository, flag_name="flag2") + dbsession.add(flag1) + dbsession.add(flag2) + dbsession.flush() + + # Adding flags should only affect flag coverage measurement + batch_size = backfill_batch_size(repository, coverage_dataset) + assert batch_size == 500 + batch_size = backfill_batch_size(repository, flag_coverage_dataset) + assert batch_size == 250 + batch_size = backfill_batch_size(repository, component_coverage_dataset) + assert batch_size == 500 + + get_repo_yaml = mocker.patch("services.timeseries.get_repo_yaml") + yaml_dict = { + "component_management": { + "default_rules": { + "paths": [r".*\.go"], + "flag_regexes": [r"test-flag-*"], + }, + "individual_components": [ + {"component_id": "component_1"}, + {"component_id": "component_2"}, + {"component_id": "component_3"}, + {"component_id": "component_4"}, + {"component_id": "component_5"}, + ], + } + } + get_repo_yaml.return_value = UserYaml(yaml_dict) + + # Adding componets should only affect component coverage measurement + batch_size = backfill_batch_size(repository, coverage_dataset) + assert batch_size == 500 + batch_size = backfill_batch_size(repository, flag_coverage_dataset) + assert batch_size == 250 + batch_size = backfill_batch_size(repository, component_coverage_dataset) + assert batch_size == 100 + + def test_delete_repository_data( + self, dbsession, sample_report, repository, dataset_names, mocker + ): + mocker.patch( + "services.report.ReportService.get_existing_report_for_commit", + return_value=ReadOnlyReport.create_from_report(sample_report), + ) + + commit = CommitFactory.create(branch="foo", repository=repository) + dbsession.add(commit) + dbsession.flush() + save_commit_measurements(commit, dataset_names=dataset_names) + commit = 
CommitFactory.create(branch="bar", repository=repository) + dbsession.add(commit) + dbsession.flush() + save_commit_measurements(commit, dataset_names=dataset_names) + + assert ( + dbsession.query(Dataset).filter_by(repository_id=repository.repoid).count() + == 3 + ) + # repo coverage + 2x flag coverage for each commit + assert ( + dbsession.query(Measurement).filter_by(repo_id=repository.repoid).count() + == 6 + ) + + delete_repository_data(repository) + + assert ( + dbsession.query(Dataset).filter_by(repository_id=repository.repoid).count() + == 0 + ) + assert ( + dbsession.query(Measurement).filter_by(repo_id=repository.repoid).count() + == 0 + ) + + def test_delete_repository_data_side_effects( + self, dbsession, sample_report, repository, dataset_names, mocker + ): + mocker.patch( + "tasks.save_commit_measurements.PARALLEL_COMPONENT_COMPARISON.check_value", + return_value=False, + ) + mocker.patch( + "services.report.ReportService.get_existing_report_for_commit", + return_value=ReadOnlyReport.create_from_report(sample_report), + ) + + commit = CommitFactory.create(branch="foo", repository=repository) + dbsession.add(commit) + dbsession.flush() + save_commit_measurements(commit, dataset_names=dataset_names) + commit = CommitFactory.create(branch="bar", repository=repository) + dbsession.add(commit) + dbsession.flush() + save_commit_measurements(commit, dataset_names=dataset_names) + + # Another unrelated repository, make sure that this one isn't deleted as a side effect + other_repository = _create_repository(dbsession) + other_commit = CommitFactory.create(branch="foo", repository=other_repository) + dbsession.add(other_commit) + dbsession.flush() + save_commit_measurements(other_commit, dataset_names=dataset_names) + other_commit = CommitFactory.create(branch="bar", repository=other_repository) + dbsession.add(other_commit) + dbsession.flush() + save_commit_measurements(other_commit, dataset_names=dataset_names) + + assert ( + dbsession.query(Dataset) + 
.filter_by(repository_id=other_repository.repoid) + .count() + != 0 + ) + assert ( + dbsession.query(Measurement) + .filter_by(repo_id=other_repository.repoid) + .count() + != 0 + ) + + delete_repository_data(repository) + + # Intended repo data/measurement is deleted + assert ( + dbsession.query(Dataset).filter_by(repository_id=repository.repoid).count() + == 0 + ) + assert ( + dbsession.query(Measurement).filter_by(repo_id=repository.repoid).count() + == 0 + ) + + # Other repo data/measurement is not deleted + assert ( + dbsession.query(Dataset) + .filter_by(repository_id=other_repository.repoid) + .count() + != 0 + ) + assert ( + dbsession.query(Measurement) + .filter_by(repo_id=other_repository.repoid) + .count() + != 0 + ) + + def test_delete_repository_data_measurements_only( + self, + dbsession, + sample_report_for_components, + repository, + dataset_names, + mocker, + mock_repo_provider, + ): + def validate_invariants(repository, other_repository): + assert ( + dbsession.query(Dataset) + .filter_by(repository_id=repository.repoid) + .count() + == 3 + ) + assert ( + dbsession.query(Dataset) + .filter_by(repository_id=other_repository.repoid) + .count() + == 3 + ) + # 2x(1 coverage, 3 flag coverage, 4 component coverage) + assert ( + dbsession.query(Measurement) + .filter_by(repo_id=other_repository.repoid) + .count() + == 16 + ) + + mocker.patch( + "tasks.save_commit_measurements.PARALLEL_COMPONENT_COMPARISON.check_value", + return_value=True, + ) + dbsession = repository.get_db_session() + mocker.patch.object(dbsession, "close") + mocker.patch("tasks.base.get_db_session", return_value=dbsession) + mocker.patch.object(group, "apply_async", group.apply) + + mocker.patch( + "services.report.ReportService.get_existing_report_for_commit", + return_value=ReadOnlyReport.create_from_report( + sample_report_for_components + ), + ) + + get_repo_yaml = mocker.patch("tasks.save_commit_measurements.get_repo_yaml") + get_current_yaml = 
mocker.patch("tasks.upsert_component.get_repo_yaml") + yaml_dict = { + "component_management": { + "default_rules": { + "paths": [r".*\.go"], + "flag_regexes": [r"test-flag-*"], + }, + "individual_components": [ + {"component_id": "python_files", "paths": [r".*\.py"]}, + {"component_id": "rules_from_default"}, + { + "component_id": "i_have_flags", + "flag_regexes": [r"random-.*"], + }, + { + "component_id": "all_settings", + "name": "all settings", + "flag_regexes": [], + "paths": [r"folder/*"], + }, + ], + } + } + get_repo_yaml.return_value = UserYaml(yaml_dict) + get_current_yaml.return_value = UserYaml(yaml_dict) + + commit = CommitFactory.create(branch="foo", repository=repository) + dbsession.add(commit) + dbsession.flush() + save_commit_measurements(commit, dataset_names=dataset_names) + commit = CommitFactory.create(branch="bar", repository=repository) + dbsession.add(commit) + dbsession.flush() + save_commit_measurements(commit, dataset_names=dataset_names) + + # Another unrelated repository, make sure that this one isn't deleted as a side effect + other_repository = _create_repository(dbsession) + other_commit = CommitFactory.create(branch="foo", repository=other_repository) + dbsession.add(other_commit) + dbsession.flush() + save_commit_measurements(other_commit, dataset_names=dataset_names) + other_commit = CommitFactory.create(branch="bar", repository=other_repository) + dbsession.add(other_commit) + dbsession.flush() + save_commit_measurements(other_commit, dataset_names=dataset_names) + + flag_ids = { + flag.measurable_id + for flag in ( + dbsession.query(Measurement).filter_by( + repo_id=repository.repoid, + name=MeasurementName.flag_coverage.value, + ) + ) + } + + m = dbsession.query(Measurement).filter_by(repo_id=repository.repoid).all() + + # 2x(1 coverage, 3 flag coverage, 4 component coverage) = 16 + assert ( + dbsession.query(Measurement).filter_by(repo_id=repository.repoid).count() + == 16 + ) + validate_invariants(repository, 
other_repository) + + # Delete the coverage type + delete_repository_measurements( + repository, MeasurementName.coverage.value, f"{repository.repoid}" + ) + + # 2x(0 coverage, 3 flag coverage, 4 component coverage) = 14 + assert ( + dbsession.query(Measurement).filter_by(repo_id=repository.repoid).count() + == 14 + ) + validate_invariants(repository, other_repository) + + # Delete the flag coverages + expected_measurement_count = 14 + for flag_id in flag_ids: + assert ( + dbsession.query(Measurement) + .filter_by(repo_id=repository.repoid) + .count() + == expected_measurement_count + ) + validate_invariants(repository, other_repository) + delete_repository_measurements( + repository, MeasurementName.flag_coverage.value, f"{flag_id}" + ) + # Lose a flag coverage measurement from each commit (ie total should be 2 less) + expected_measurement_count -= 2 + + # 2x(0 coverage, 0 flag coverage, 4 component coverage) = 8 + assert ( + dbsession.query(Measurement).filter_by(repo_id=repository.repoid).count() + == expected_measurement_count + ) + validate_invariants(repository, other_repository) + + for component in yaml_dict["component_management"]["individual_components"]: + assert ( + dbsession.query(Measurement) + .filter_by(repo_id=repository.repoid) + .count() + == expected_measurement_count + ) + validate_invariants(repository, other_repository) + component_id = component["component_id"] + delete_repository_measurements( + repository, MeasurementName.component_coverage.value, component_id + ) + # Lose a component coverage measurement from each commit (ie total should be 2 less) + expected_measurement_count -= 2 + + # 2x(0 coverage, 0 flag coverage, 0 component coverage) = 0 + assert ( + dbsession.query(Measurement).filter_by(repo_id=repository.repoid).count() + == expected_measurement_count + ) + validate_invariants(repository, other_repository) diff --git a/sample_app/tests/test_urls.py b/sample_app/tests/test_urls.py new file mode 100644 index 0000000..135d6fe --- 
/dev/null +++ b/sample_app/tests/test_urls.py @@ -0,0 +1,89 @@ +from database.tests.factories import OwnerFactory, PullFactory, RepositoryFactory +from services.urls import append_tracking_params_to_urls, get_members_url, get_plan_url + + +def test_append_tracking_params_to_urls(): + message = [ + "[This link](https://stage.codecov.io/gh/test_repo/pull/pull123?src=pr&el=h1) should be changed", + "And [this one](https://codecov.io/bb/test_repo/pull) too, plus also [this one](codecov.io)", + "However, [this one](https://www.xkcd.com/) should not be changed since it does not link to Codecov", + "(Also should not replace this parenthetical non-link reference to codecov.io)", + "Also should recognize that these are two separate URLs: [banana](https://codecov.io/pokemon)and[banana](https://codecov.io/pokemon)", + ] + + service = "github" + notification_type = "comment" + org_name = "Acme Corporation" + + expected_result = [ + "[This link](https://stage.codecov.io/gh/test_repo/pull/pull123?src=pr&el=h1&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Acme+Corporation) should be changed", + "And [this one](https://codecov.io/bb/test_repo/pull?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Acme+Corporation) too, plus also [this one](codecov.io?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Acme+Corporation)", + "However, [this one](https://www.xkcd.com/) should not be changed since it does not link to Codecov", + "(Also should not replace this parenthetical non-link reference to codecov.io)", + "Also should recognize that these are two separate URLs: [banana](https://codecov.io/pokemon?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Acme+Corporation)and[banana](https://codecov.io/pokemon?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=Acme+Corporation)", + 
] + result = [ + append_tracking_params_to_urls( + m, service=service, notification_type=notification_type, org_name=org_name + ) + for m in message + ] + + assert result == expected_result + + +class TestURLs: + def test_gitlab_url_username_swap(self, dbsession): + base_for_member_url = "https://app.codecov.io/members/" + base_for_plan_url = "https://app.codecov.io/plan/" + + github_org = OwnerFactory.create( + service="github", + username="gh", + ) + dbsession.add(github_org) + r = RepositoryFactory.create(owner=github_org) + dbsession.add(r) + gh_pull = PullFactory.create(repository=r) + dbsession.add(gh_pull) + dbsession.flush() + member_url = get_members_url(gh_pull) + assert member_url == base_for_member_url + "gh/gh" + + gitlab_root_org = OwnerFactory.create(service="gitlab", username="gl_root") + dbsession.add(gitlab_root_org) + r = RepositoryFactory.create(owner=gitlab_root_org) + dbsession.add(r) + gl_root_pull = PullFactory.create(repository=r) + dbsession.add(gl_root_pull) + dbsession.flush() + plan_url = get_plan_url(gl_root_pull) + assert plan_url == base_for_plan_url + "gl/gl_root" + + gitlab_mid_org = OwnerFactory.create( + service="gitlab", + username="gl_mid", + parent_service_id=gitlab_root_org.service_id, + ) + dbsession.add(gitlab_mid_org) + r = RepositoryFactory.create(owner=gitlab_mid_org) + dbsession.add(r) + gl_mid_pull = PullFactory.create(repository=r) + dbsession.add(gl_mid_pull) + dbsession.flush() + member_url = get_members_url(gl_mid_pull) + assert member_url == base_for_member_url + "gl/gl_root" + + gitlab_sub_org = OwnerFactory.create( + service="gitlab", + username="gl_child", + parent_service_id=gitlab_mid_org.service_id, + ) + dbsession.add(gitlab_sub_org) + r = RepositoryFactory.create(owner=gitlab_sub_org) + dbsession.add(r) + gl_child_pull = PullFactory.create(repository=r) + dbsession.add(gl_child_pull) + dbsession.flush() + plan_url = get_plan_url(gl_child_pull) + assert plan_url == base_for_plan_url + "gl/gl_root" diff 
--git a/sample_app/tests/unit/test_archive_service.py b/sample_app/tests/unit/test_archive_service.py new file mode 100644 index 0000000..f569a28 --- /dev/null +++ b/sample_app/tests/unit/test_archive_service.py @@ -0,0 +1,105 @@ +import json + +from database.tests.factories import RepositoryFactory +from shared.api_archive.archive import ArchiveService +from shared.storage.memory import MemoryStorageService + + +class TestArchiveService: + def test_read_file_hard_to_decode(self, mocker, mock_storage): + mock_read_file = mocker.patch.object(MemoryStorageService, "read_file") + mock_read_file.return_value = b"\x80abc" + repo = RepositoryFactory.create() + service = ArchiveService(repo) + expected_result = b"\x80abc" + path = "path/to/file" + result = service.read_file(path) + assert expected_result == result + + +class TestWriteJsonData: + def test_write_report_details_to_storage(self, mocker, dbsession, mock_storage): + repo = RepositoryFactory() + dbsession.add(repo) + dbsession.flush() + mock_write_file = mocker.patch.object(MemoryStorageService, "write_file") + + data = [ + { + "filename": "file_1.go", + "file_index": 0, + "file_totals": [0, 8, 5, 3, 0, "62.50000", 0, 0, 0, 0, 10, 2, 0], + "diff_totals": None, + }, + { + "filename": "file_2.py", + "file_index": 1, + "file_totals": [0, 2, 1, 0, 1, "50.00000", 1, 0, 0, 0, 0, 0, 0], + "diff_totals": None, + }, + ] + archive_service = ArchiveService(repository=repo) + commitid = "some-commit-sha" + external_id = "some-uuid4-id" + path = archive_service.write_json_data_to_storage( + commit_id=commitid, + table="reports_reportdetails", + field="files_array", + external_id=external_id, + data=data, + ) + assert ( + path + == f"v4/repos/{archive_service.storage_hash}/commits/{commitid}/json_data/reports_reportdetails/files_array/{external_id}.json" + ) + mock_write_file.assert_called_with( + archive_service.root, + path, + json.dumps(data), + is_already_gzipped=False, + reduced_redundancy=False, + ) + + def 
test_write_report_details_to_storage_no_commitid( + self, mocker, dbsession, mock_storage + ): + repo = RepositoryFactory() + dbsession.add(repo) + dbsession.flush() + mock_write_file = mocker.patch.object(MemoryStorageService, "write_file") + + data = [ + { + "filename": "file_1.go", + "file_index": 0, + "file_totals": [0, 8, 5, 3, 0, "62.50000", 0, 0, 0, 0, 10, 2, 0], + "diff_totals": None, + }, + { + "filename": "file_2.py", + "file_index": 1, + "file_totals": [0, 2, 1, 0, 1, "50.00000", 1, 0, 0, 0, 0, 0, 0], + "diff_totals": None, + }, + ] + archive_service = ArchiveService(repository=repo) + commitid = None + external_id = "some-uuid4-id" + path = archive_service.write_json_data_to_storage( + commit_id=commitid, + table="reports_reportdetails", + field="files_array", + external_id=external_id, + data=data, + ) + assert ( + path + == f"v4/repos/{archive_service.storage_hash}/json_data/reports_reportdetails/files_array/{external_id}.json" + ) + mock_write_file.assert_called_with( + archive_service.root, + path, + json.dumps(data), + is_already_gzipped=False, + reduced_redundancy=False, + ) diff --git a/sample_app/tests/unit/test_bots.py b/sample_app/tests/unit/test_bots.py new file mode 100644 index 0000000..a87e097 --- /dev/null +++ b/sample_app/tests/unit/test_bots.py @@ -0,0 +1,654 @@ +import datetime +from unittest.mock import patch + +import pytest + +from database.models.core import ( + GithubAppInstallation, +) +from database.tests.factories.core import ( + GithubAppInstallationFactory, + OwnerFactory, + RepositoryFactory, +) +from shared.bots import get_adapter_auth_information +from shared.bots.types import AdapterAuthInformation +from shared.rate_limits import gh_app_key_name, owner_key_name +from shared.torngit.base import TokenType +from shared.typings.oauth_token_types import Token +from shared.typings.torngit import GithubInstallationInfo +from shared.utils.test_utils import mock_config_helper + + +def get_github_integration_token_side_effect( + 
service: str, + installation_id: int = None, + app_id: str | None = None, + pem_path: str | None = None, +): + return f"installation_token_{installation_id}_{app_id}" + + +# The tests for this fn also exist on shared. These, however, are testing the sqlalchemy implementation of them +class TestGettingAdapterAuthInformation: + class TestGitHubOwnerNoRepoInfo: + def _generate_test_owner( + self, + dbsession, + *, + with_bot: bool, + integration_id: int | None = None, + ghapp_installations: list[GithubAppInstallation] = None, + ): + if ghapp_installations is None: + ghapp_installations = [] + owner = OwnerFactory( + service="github", + bot=None, + unencrypted_oauth_token="owner_token: :refresh_token", + integration_id=integration_id, + ) + if with_bot: + owner.bot = OwnerFactory( + service="github", + unencrypted_oauth_token="bot_token: :bot_refresh_token", + ) + dbsession.add(owner) + dbsession.flush() + + if ghapp_installations: + for app in ghapp_installations: + app.owner = owner + dbsession.add(app) + + dbsession.flush() + + assert bool(owner.bot) == with_bot + assert owner.github_app_installations == ghapp_installations + + return owner + + def test_select_owner_info(self, dbsession): + owner = self._generate_test_owner(dbsession, with_bot=False) + expected = AdapterAuthInformation( + token=Token( + key="owner_token", + refresh_token="refresh_token", + secret=None, + entity_name=owner_key_name(owner.ownerid), + ), + token_owner=owner, + selected_installation_info=None, + fallback_installations=None, + token_type_mapping=None, + ) + assert get_adapter_auth_information(owner) == expected + + def test_select_owner_bot_info(self, dbsession): + owner = self._generate_test_owner(dbsession, with_bot=True) + expected = AdapterAuthInformation( + token=Token( + key="bot_token", + refresh_token="bot_refresh_token", + secret=None, + entity_name=owner_key_name(owner.bot.ownerid), + ), + token_owner=owner.bot, + selected_installation_info=None, + fallback_installations=None, 
+ token_type_mapping=None, + ) + assert get_adapter_auth_information(owner) == expected + + @patch( + "shared.bots.github_apps.get_github_integration_token", + side_effect=get_github_integration_token_side_effect, + ) + def test_select_owner_single_installation(self, dbsession): + installations = [ + GithubAppInstallationFactory( + repository_service_ids=None, + installation_id=1200, + app_id=200, + pem_path="pem_path", + created_at=datetime.datetime.now(datetime.UTC), + ) + ] + owner = self._generate_test_owner( + dbsession, with_bot=False, ghapp_installations=installations + ) + expected = AdapterAuthInformation( + token=Token( + key="installation_token_1200_200", + entity_name="200_1200", + username="installation_1200", + ), + token_owner=None, + selected_installation_info=GithubInstallationInfo( + id=installations[0].id, + installation_id=1200, + app_id=200, + pem_path="pem_path", + ), + fallback_installations=[], + token_type_mapping=None, + ) + assert get_adapter_auth_information(owner) == expected + + @patch( + "shared.bots.github_apps.get_github_integration_token", + side_effect=get_github_integration_token_side_effect, + ) + def test_select_owner_single_installation_ignoring_installations( + self, dbsession + ): + installations = [ + GithubAppInstallationFactory( + repository_service_ids=None, + installation_id=1200, + app_id=200, + pem_path="pem_path", + created_at=datetime.datetime.now(datetime.UTC), + ) + ] + owner = self._generate_test_owner( + dbsession, with_bot=False, ghapp_installations=installations + ) + expected = AdapterAuthInformation( + token=Token( + key="owner_token", + refresh_token="refresh_token", + secret=None, + entity_name=owner_key_name(owner.ownerid), + ), + token_owner=owner, + selected_installation_info=None, + fallback_installations=None, + token_type_mapping=None, + ) + assert ( + get_adapter_auth_information(owner, ignore_installations=True) + == expected + ) + + @patch( + "shared.bots.github_apps.get_github_integration_token", 
+ side_effect=get_github_integration_token_side_effect, + ) + def test_select_owner_deprecated_using_integration(self, dbsession): + owner = self._generate_test_owner( + dbsession, with_bot=False, integration_id=1500 + ) + owner.oauth_token = None + # Owner has no GithubApp, no token, and no bot configured + # The integration_id is selected + expected = AdapterAuthInformation( + token=Token( + key="installation_token_1500_None", + entity_name=gh_app_key_name( + installation_id=owner.integration_id, app_id=None + ), + username="installation_1500", + ), + token_owner=None, + selected_installation_info=GithubInstallationInfo(installation_id=1500), + fallback_installations=[], + token_type_mapping=None, + ) + assert get_adapter_auth_information(owner) == expected + + @patch( + "shared.bots.github_apps.get_github_integration_token", + side_effect=get_github_integration_token_side_effect, + ) + def test_select_owner_multiple_installations_default_name(self, dbsession): + installations = [ + GithubAppInstallationFactory( + installation_id=1200, + app_id=200, + pem_path="pem_path", + created_at=datetime.datetime.now(datetime.UTC), + ), + # This should be ignored in the selection because of the name + GithubAppInstallationFactory( + installation_id=1300, + name="my_dedicated_app", + app_id=300, + pem_path="pem_path", + created_at=datetime.datetime.now(datetime.UTC), + ), + ] + owner = self._generate_test_owner( + dbsession, with_bot=False, ghapp_installations=installations + ) + expected = AdapterAuthInformation( + token=Token( + key="installation_token_1200_200", + entity_name="200_1200", + username="installation_1200", + ), + token_owner=None, + selected_installation_info=GithubInstallationInfo( + id=installations[0].id, + installation_id=1200, + app_id=200, + pem_path="pem_path", + ), + fallback_installations=[], + token_type_mapping=None, + ) + assert get_adapter_auth_information(owner) == expected + + @patch( + "shared.bots.github_apps.get_github_integration_token", + 
side_effect=get_github_integration_token_side_effect, + ) + def test_select_owner_multiple_installations_custom_name(self, dbsession): + installations = [ + GithubAppInstallationFactory( + installation_id=1200, + app_id=200, + pem_path="pem_path", + created_at=datetime.datetime.now(datetime.UTC), + ), + # This should be selected first + GithubAppInstallationFactory( + installation_id=1300, + name="my_dedicated_app", + app_id=300, + pem_path="pem_path", + created_at=datetime.datetime.now(datetime.UTC), + ), + ] + owner = self._generate_test_owner( + dbsession, with_bot=False, ghapp_installations=installations + ) + expected = AdapterAuthInformation( + token=Token( + key="installation_token_1300_300", + entity_name="300_1300", + username="installation_1300", + ), + token_owner=None, + selected_installation_info=GithubInstallationInfo( + id=installations[1].id, + installation_id=1300, + app_id=300, + pem_path="pem_path", + ), + fallback_installations=[ + GithubInstallationInfo( + id=installations[0].id, + installation_id=1200, + app_id=200, + pem_path="pem_path", + ) + ], + token_type_mapping=None, + ) + assert ( + get_adapter_auth_information( + owner, installation_name_to_use="my_dedicated_app" + ) + == expected + ) + + class TestGitHubOwnerWithRepoInfo: + def _generate_test_repo( + self, + dbsession, + *, + with_bot: bool, + with_owner_bot: bool, + integration_id: int | None = None, + ghapp_installations: list[GithubAppInstallation] = None, + ): + if ghapp_installations is None: + ghapp_installations = [] + owner = OwnerFactory( + service="github", + bot=None, + unencrypted_oauth_token="owner_token: :refresh_token", + integration_id=integration_id, + ) + if with_owner_bot: + owner.bot = OwnerFactory( + service="github", + unencrypted_oauth_token="bot_token: :bot_refresh_token", + ) + dbsession.add(owner) + dbsession.flush() + + if ghapp_installations: + for app in ghapp_installations: + app.owner = owner + dbsession.add(app) + + dbsession.flush() + + repo = 
RepositoryFactory( + owner=owner, using_integration=(integration_id is not None) + ) + if with_bot: + repo.bot = OwnerFactory( + service="github", + unencrypted_oauth_token="repo_bot_token: :repo_bot_refresh_token", + ) + + dbsession.add(repo) + dbsession.flush() + + assert bool(owner.bot) == with_owner_bot + assert bool(repo.bot) == with_bot + assert owner.github_app_installations == ghapp_installations + + return repo + + def test_select_repo_info_fallback_to_owner(self, dbsession): + repo = self._generate_test_repo( + dbsession, with_bot=False, with_owner_bot=False + ) + expected = AdapterAuthInformation( + token=Token( + key="owner_token", + refresh_token="refresh_token", + secret=None, + username=repo.owner.username, + entity_name=owner_key_name(repo.owner.ownerid), + ), + token_owner=repo.owner, + selected_installation_info=None, + fallback_installations=None, + token_type_mapping=None, + ) + assert get_adapter_auth_information(repo.owner, repo) == expected + + def test_select_owner_bot_info(self, dbsession): + repo = self._generate_test_repo( + dbsession, with_owner_bot=True, with_bot=False + ) + expected = AdapterAuthInformation( + token=Token( + key="bot_token", + refresh_token="bot_refresh_token", + secret=None, + username=repo.owner.bot.username, + entity_name=owner_key_name(repo.owner.bot.ownerid), + ), + token_owner=repo.owner.bot, + selected_installation_info=None, + fallback_installations=None, + token_type_mapping=None, + ) + assert get_adapter_auth_information(repo.owner, repo) == expected + + def test_select_repo_bot_info(self, dbsession): + repo = self._generate_test_repo( + dbsession, with_owner_bot=True, with_bot=True + ) + expected = AdapterAuthInformation( + token=Token( + key="repo_bot_token", + refresh_token="repo_bot_refresh_token", + secret=None, + username=repo.bot.username, + entity_name=owner_key_name(repo.bot.ownerid), + ), + token_owner=repo.bot, + selected_installation_info=None, + fallback_installations=None, + 
token_type_mapping=None, + ) + assert get_adapter_auth_information(repo.owner, repo) == expected + + def test_select_repo_bot_info_public_repo(self, dbsession, mock_configuration): + repo = self._generate_test_repo( + dbsession, with_owner_bot=True, with_bot=True + ) + mock_configuration.set_params( + { + "github": { + "bot": {"key": "some_key"}, + "bots": { + "read": {"key": "read_bot_key"}, + "status": {"key": "status_bot_key"}, + "comment": {"key": "commenter_bot_key"}, + }, + } + } + ) + repo.private = False + + repo_bot_token = Token( + key="repo_bot_token", + refresh_token="repo_bot_refresh_token", + secret=None, + username=repo.bot.username, + entity_name=owner_key_name(repo.bot.ownerid), + ) + expected = AdapterAuthInformation( + token=repo_bot_token, + token_owner=repo.bot, + selected_installation_info=None, + fallback_installations=None, + token_type_mapping={ + TokenType.comment: Token(key="commenter_bot_key"), + TokenType.read: repo_bot_token, + TokenType.admin: repo_bot_token, + TokenType.status: repo_bot_token, + TokenType.tokenless: repo_bot_token, + TokenType.pull: repo_bot_token, + TokenType.commit: repo_bot_token, + }, + ) + assert get_adapter_auth_information(repo.owner, repo) == expected + + @patch( + "shared.bots.github_apps.get_github_integration_token", + side_effect=get_github_integration_token_side_effect, + ) + def test_select_repo_single_installation(self, dbsession): + installations = [ + GithubAppInstallationFactory( + installation_id=1200, + app_id=200, + pem_path="pem_path", + created_at=datetime.datetime.now(datetime.UTC), + ) + ] + repo = self._generate_test_repo( + dbsession, + with_bot=False, + with_owner_bot=False, + ghapp_installations=installations, + ) + expected = AdapterAuthInformation( + token=Token( + key="installation_token_1200_200", + entity_name="200_1200", + username="installation_1200", + ), + token_owner=None, + selected_installation_info=GithubInstallationInfo( + id=installations[0].id, + installation_id=1200, + 
app_id=200, + pem_path="pem_path", + ), + fallback_installations=[], + token_type_mapping=None, + ) + assert get_adapter_auth_information(repo.owner, repo) == expected + + @patch( + "shared.bots.github_apps.get_github_integration_token", + side_effect=get_github_integration_token_side_effect, + ) + def test_select_repo_deprecated_using_integration(self, dbsession): + repo = self._generate_test_repo( + dbsession, with_bot=False, integration_id=1500, with_owner_bot=False + ) + repo.owner.oauth_token = None + # Repo's owner has no GithubApp, no oauth token, and no bot configured + # The repo does not have a bot configured either + # We fall back to the deprecated integration_id and mint an installation token + expected = AdapterAuthInformation( + token=Token( + key="installation_token_1500_None", + username="installation_1500", + entity_name=gh_app_key_name( + installation_id=repo.owner.integration_id, app_id=None + ), + ), + token_owner=None, + selected_installation_info=GithubInstallationInfo(installation_id=1500), + fallback_installations=[], + token_type_mapping=None, + ) + assert get_adapter_auth_information(repo.owner, repo) == expected + + @patch( + "shared.bots.github_apps.get_github_integration_token", + side_effect=get_github_integration_token_side_effect, + ) + def test_select_repo_multiple_installations_default_name(self, dbsession): + installations = [ + GithubAppInstallationFactory( + installation_id=1200, + app_id=200, + pem_path="pem_path", + created_at=datetime.datetime.now(datetime.UTC), + ), + # This should be ignored in the selection because of the name + GithubAppInstallationFactory( + installation_id=1300, + name="my_dedicated_app", + app_id=300, + pem_path="pem_path", + created_at=datetime.datetime.now(datetime.UTC), + ), + ] + repo = self._generate_test_repo( + dbsession, + with_bot=False, + with_owner_bot=False, + ghapp_installations=installations, + ) + expected = AdapterAuthInformation( + token=Token( + key="installation_token_1200_200", + entity_name="200_1200", +
username="installation_1200", + ), + token_owner=None, + selected_installation_info=GithubInstallationInfo( + id=installations[0].id, + installation_id=1200, + app_id=200, + pem_path="pem_path", + ), + fallback_installations=[], + token_type_mapping=None, + ) + assert get_adapter_auth_information(repo.owner, repo) == expected + + @patch( + "shared.bots.github_apps.get_github_integration_token", + side_effect=get_github_integration_token_side_effect, + ) + def test_select_repo_multiple_installations_custom_name(self, dbsession): + installations = [ + GithubAppInstallationFactory( + installation_id=1200, + app_id=200, + pem_path="pem_path", + created_at=datetime.datetime.now(datetime.UTC), + ), + # This should be selected first + GithubAppInstallationFactory( + installation_id=1300, + name="my_dedicated_app", + app_id=300, + pem_path="pem_path", + created_at=datetime.datetime.now(datetime.UTC), + ), + ] + repo = self._generate_test_repo( + dbsession, + with_bot=False, + with_owner_bot=False, + ghapp_installations=installations, + ) + expected = AdapterAuthInformation( + token=Token( + key="installation_token_1300_300", + entity_name="300_1300", + username="installation_1300", + ), + token_owner=None, + selected_installation_info=GithubInstallationInfo( + id=installations[1].id, + installation_id=1300, + app_id=300, + pem_path="pem_path", + ), + fallback_installations=[ + GithubInstallationInfo( + id=installations[0].id, + installation_id=1200, + app_id=200, + pem_path="pem_path", + ) + ], + token_type_mapping=None, + ) + assert ( + get_adapter_auth_information( + repo.owner, repo, installation_name_to_use="my_dedicated_app" + ) + == expected + ) + + @pytest.mark.parametrize("service", ["github", "gitlab"]) + def test_select_repo_public_with_no_token_no_admin_token_configured( + self, service, dbsession, mocker + ): + repo = RepositoryFactory(owner__service=service, private=False) + repo.owner.oauth_token = None + dbsession.add(repo) + dbsession.flush() + 
mock_config_helper( + mocker, + configs={ + f"{service}.bots.tokenless": {"key": "tokenless_bot_token"}, + f"{service}.bots.comment": {"key": "commenter_bot_token"}, + f"{service}.bots.read": {"key": "reader_bot_token"}, + f"{service}.bots.status": {"key": "status_bot_token"}, + }, + ) + expected = AdapterAuthInformation( + token=Token( + key="tokenless_bot_token", + entity_name="tokenless", + ), + token_owner=None, + selected_installation_info=None, + fallback_installations=None, + token_type_mapping={ + TokenType.comment: Token(key="commenter_bot_token"), + TokenType.read: Token( + key="reader_bot_token", + entity_name="read", + ), + TokenType.admin: None, + TokenType.status: Token(key="status_bot_token"), + TokenType.tokenless: Token( + key="tokenless_bot_token", + entity_name="tokenless", + ), + TokenType.pull: None, + TokenType.commit: None, + }, + ) + assert get_adapter_auth_information(repo.owner, repo) == expected