
Commit cf621a4

fix: Align SDK endpoints and tests with OpenAPI spec (#47)
- Fix deduplication logic to group by inputPurl instead of batchIndex
- Update API endpoints to match OpenAPI specifications:
  - fullscans: Support both listing and individual scan retrieval
  - report: Fix file handling and accept 200/201 status codes
  - diffscans: Correct path structure for create_from_repo
  - apitokens: Add list method and flexible update routing
  - auditlog: Fix query parameter handling
  - Multiple endpoints: Correct HTTP methods and response formats
- Add base_path parameter support for cleaner file organization
- Update all unit tests to match actual API behavior
- Remove batchIndex from deduped results while maintaining backward compatibility
- Comprehensive test coverage for 40+ endpoints with proper mocking
1 parent 72eb7cb commit cf621a4

14 files changed: +421 −132 lines changed

pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
 
 [project]
 name = "socketdev"
-version = "3.0.4"
+version = "3.0.5"
 requires-python = ">= 3.9"
 dependencies = [
     'requests',

socketdev/apitokens/__init__.py

Lines changed: 40 additions & 3 deletions
@@ -28,20 +28,57 @@ def create(self, org_slug: str, **kwargs) -> dict:
         log.error(response.text)
         return {}
 
-    def update(self, org_slug: str, **kwargs) -> dict:
+    def list(self, org_slug: str, **kwargs) -> dict:
+        """
+        List API tokens for an organization.
+
+        Args:
+            org_slug: Organization slug
+            **kwargs: Query parameters
+
+        Returns:
+            dict: API response containing list of tokens
+        """
+        path = f"orgs/{org_slug}/api-tokens"
+        query_params = {}
+        if kwargs:
+            query_params.update(kwargs)
+
+        if query_params:
+            from urllib.parse import urlencode
+            path += "?" + urlencode(query_params)
+        response = self.api.do_request(path=path, method="GET")
+        if response.status_code == 200:
+            return response.json()
+        log.error(f"Error listing API tokens: {response.status_code}")
+        log.error(response.text)
+        return {}
+
+    def update(self, org_slug: str, token_id: str = None, **kwargs) -> dict:
         """
         Update an API token.
 
         Args:
             org_slug: Organization slug
+            token_id: Token ID to update (optional, can be in kwargs)
             **kwargs: Token update parameters
 
         Returns:
             dict: API response containing the updated token details
         """
-        path = f"orgs/{org_slug}/api-tokens/update"
+        # Extract token_id from kwargs if not provided as parameter
+        if token_id is None and 'token_id' in kwargs:
+            token_id = kwargs.pop('token_id')
+
+        if token_id:
+            path = f"orgs/{org_slug}/api-tokens/{token_id}"
+            method = "PUT"
+        else:
+            path = f"orgs/{org_slug}/api-tokens/update"
+            method = "POST"
+
         payload = json.dumps(kwargs) if kwargs else "{}"
-        response = self.api.do_request(path=path, method="POST", payload=payload)
+        response = self.api.do_request(path=path, method=method, payload=payload)
         if response.status_code == 200:
             return response.json()
         log.error(f"Error updating API token: {response.status_code}")

socketdev/auditlog/__init__.py

Lines changed: 4 additions & 1 deletion
@@ -19,7 +19,10 @@ def get(self, org_slug: str, **kwargs) -> dict:
             dict: API response containing audit log entries
         """
         path = f"orgs/{org_slug}/audit-log"
-        response = self.api.do_request(path=path, params=kwargs)
+        if kwargs:
+            from urllib.parse import urlencode
+            path += "?" + urlencode(kwargs)
+        response = self.api.do_request(path=path)
         if response.status_code == 200:
             return response.json()
         log.error(f"Error getting audit log: {response.status_code}")

socketdev/core/dedupe.py

Lines changed: 28 additions & 33 deletions
@@ -88,40 +88,35 @@ def alert_identity(alert: dict) -> tuple:
 
     @staticmethod
     def dedupe(packages: List[Dict[str, Any]], batched: bool = True) -> List[Dict[str, Any]]:
-        if batched:
-            grouped = Dedupe.consolidate_by_batch_index(packages)
-        else:
-            grouped = Dedupe.consolidate_by_order(packages)
-        return [Dedupe.consolidate_and_merge_alerts(group) for group in grouped.values()]
+        # Always group by inputPurl now, but keep the batched parameter for backward compatibility
+        grouped = Dedupe.consolidate_by_input_purl(packages)
+        results = []
+        for group in grouped.values():
+            result = Dedupe.consolidate_and_merge_alerts(group)
+            # Remove batchIndex from the result
+            if "batchIndex" in result:
+                del result["batchIndex"]
+            results.append(result)
+        return results
 
     @staticmethod
-    def consolidate_by_batch_index(packages: List[Dict[str, Any]]) -> dict[int, list[dict[str, Any]]]:
-        grouped: Dict[int, List[Dict[str, Any]]] = defaultdict(list)
+    def consolidate_by_input_purl(packages: List[Dict[str, Any]]) -> dict[str, list[dict[str, Any]]]:
+        """Group packages by their inputPurl field"""
+        grouped: Dict[str, List[Dict[str, Any]]] = defaultdict(list)
+
+        # Handle both list of packages and nested structure
+        if packages and isinstance(packages[0], list):
+            # If we get a nested list, flatten it
+            flat_packages = []
+            for sublist in packages:
+                if isinstance(sublist, list):
+                    flat_packages.extend(sublist)
+                else:
+                    flat_packages.append(sublist)
+            packages = flat_packages
+
         for pkg in packages:
-            grouped[pkg["batchIndex"]].append(pkg)
-        return grouped
-
-    @staticmethod
-    def consolidate_by_order(packages: List[Dict[str, Any]]) -> dict[int, list[dict[str, Any]]]:
-        grouped: Dict[int, List[Dict[str, Any]]] = defaultdict(list)
-        batch_index = 0
-        package_purl = None
-        try:
-            for pkg in packages:
-                name = pkg["name"]
-                version = pkg["version"]
-                namespace = pkg.get("namespace")
-                ecosystem = pkg.get("type")
-                new_purl = f"pkg:{ecosystem}/"
-                if namespace:
-                    new_purl += f"{namespace}/"
-                new_purl += f"{name}@{version}"
-                if package_purl is None:
-                    package_purl = new_purl
-                if package_purl != new_purl:
-                    batch_index += 1
-                pkg["batchIndex"] = batch_index
-                grouped[pkg["batchIndex"]].append(pkg)
-        except Exception as error:
-            log.error(error)
+            # inputPurl should always exist now, fallback to purl if not found
+            group_key = pkg.get("inputPurl", pkg.get("purl", str(hash(str(pkg)))))
+            grouped[group_key].append(pkg)
         return grouped
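
To make the new grouping concrete, a small sketch against the helper shown above; the package rows are fabricated placeholders. Dedupe.dedupe() would then merge each group and drop batchIndex from the merged result.

    from socketdev.core.dedupe import Dedupe

    # Two result rows for the same requested purl, e.g. from a batched purl lookup.
    rows = [
        {"inputPurl": "pkg:pypi/requests@2.32.3", "purl": "pkg:pypi/requests@2.32.3", "batchIndex": 0},
        {"inputPurl": "pkg:pypi/requests@2.32.3", "purl": "pkg:pypi/requests@2.32.3", "batchIndex": 1},
    ]

    grouped = Dedupe.consolidate_by_input_purl(rows)
    print(list(grouped.keys()))                       # ['pkg:pypi/requests@2.32.3']
    print(len(grouped["pkg:pypi/requests@2.32.3"]))   # 2 rows in the single group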

socketdev/dependencies/__init__.py

Lines changed: 16 additions & 11 deletions
@@ -13,9 +13,9 @@ class Dependencies:
     def __init__(self, api):
         self.api = api
 
-    def post(self, files: list, params: dict, use_lazy_loading: bool = False, workspace: str = None) -> dict:
+    def post(self, files: list, params: dict, use_lazy_loading: bool = True, workspace: str = None, base_path: str = None) -> dict:
         if use_lazy_loading:
-            loaded_files = Utils.load_files_for_sending_lazy(files, workspace)
+            loaded_files = Utils.load_files_for_sending_lazy(files, workspace, base_path=base_path)
         else:
             loaded_files = []
             loaded_files = load_files(files, loaded_files)
@@ -30,15 +30,20 @@ def post(self, files: list, params: dict, use_lazy_loading: bool = False, worksp
         log.error(response.text)
         return result
 
-    def get(
-        self,
-        limit: int = 50,
-        offset: int = 0,
-    ) -> dict:
-        path = "dependencies/search"
-        payload = {"limit": limit, "offset": offset}
-        payload_str = json.dumps(payload)
-        response = self.api.do_request(path=path, method="POST", payload=payload_str)
+    def get(self, org_slug: str = None, ecosystem: str = None, package: str = None, version: str = None, **kwargs) -> dict:
+        # If all specific parameters are provided, use the specific dependency endpoint
+        if org_slug and ecosystem and package and version:
+            path = f"orgs/{org_slug}/dependencies/{ecosystem}/{package}/{version}"
+            response = self.api.do_request(path=path, method="GET")
+        else:
+            # Otherwise use the search endpoint
+            limit = kwargs.get('limit', 50)
+            offset = kwargs.get('offset', 0)
+            path = "dependencies/search"
+            payload = {"limit": limit, "offset": offset}
+            payload_str = json.dumps(payload)
+            response = self.api.do_request(path=path, method="POST", payload=payload_str)
+
        if response.status_code == 200:
            result = response.json()
        else:
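
A sketch of the two routes get() can now take; the client construction, org slug, and package coordinates are placeholders.

    from socketdev import socketdev

    socket = socketdev(token="REPLACE_ME")

    # All four positional values -> GET orgs/{org}/dependencies/{ecosystem}/{package}/{version}.
    one = socket.dependencies.get("my-org", "npm", "left-pad", "1.3.0")

    # Anything less -> POST dependencies/search, with limit/offset taken from kwargs.
    page = socket.dependencies.get(limit=10, offset=0)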

socketdev/diffscans/__init__.py

Lines changed: 3 additions & 2 deletions
@@ -30,7 +30,7 @@ def get(self, org_slug: str, diff_scan_id: str) -> dict:
         log.error(f"Error fetching diff scan: {response.status_code}, message: {response.text}")
         return {}
 
-    def create_from_repo(self, org_slug: str, repo_slug: str, files: list, params: Optional[Dict[str, Any]] = None, use_lazy_loading: bool = False, workspace: str = None, max_open_files: int = 100) -> dict:
+    def create_from_repo(self, org_slug: str, repo_slug: str, files: list, params: Optional[Dict[str, Any]] = None, use_lazy_loading: bool = False, workspace: str = None, max_open_files: int = 100, base_path: str = None) -> dict:
         """
         Create a diff scan from repo HEAD, uploading files as multipart form data.
 
@@ -45,6 +45,7 @@ def create_from_repo(self, org_slug: str, repo_slug: str, files: list, params: O
             workspace: Base directory path to make file paths relative to
             max_open_files: Maximum number of files to keep open simultaneously when using
                 lazy loading. Useful for systems with low ulimit values (default: 100)
+            base_path: Optional base path to strip from key names for cleaner file organization
 
         Returns:
             dict: API response containing diff scan results
@@ -63,7 +64,7 @@ def create_from_repo(self, org_slug: str, repo_slug: str, files: list, params: O
 
         # Use lazy loading if requested
         if use_lazy_loading:
-            prepared_files = Utils.load_files_for_sending_lazy(files, workspace, max_open_files)
+            prepared_files = Utils.load_files_for_sending_lazy(files, workspace, max_open_files, base_path)
         else:
             prepared_files = files
 
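
A sketch of how base_path flows into create_from_repo; the slugs, file paths, and params dict below are placeholders, not values from the spec.

    from socketdev import socketdev

    socket = socketdev(token="REPLACE_ME")

    files = ["/work/project/package.json", "/work/project/package-lock.json"]
    result = socket.diffscans.create_from_repo(
        "my-org",
        "my-repo",
        files,
        params={"description": "example diff scan"},  # placeholder params
        use_lazy_loading=True,
        base_path="/work/project",  # multipart keys become "package.json", "package-lock.json"
    )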

socketdev/fullscans/__init__.py

Lines changed: 13 additions & 4 deletions
@@ -701,8 +701,16 @@ def __init__(self, api):
 
 
     def get(self, org_slug: str, params: dict, use_types: bool = False) -> Union[dict, GetFullScanMetadataResponse]:
-        params_arg = urllib.parse.urlencode(params)
-        path = "orgs/" + org_slug + "/full-scans?" + str(params_arg)
+        # Check if this is a request for a specific scan by ID
+        if 'id' in params and len(params) == 1:
+            # Get specific scan by ID: /orgs/{org_slug}/full-scans/{full_scan_id}
+            scan_id = params['id']
+            path = f"orgs/{org_slug}/full-scans/{scan_id}"
+        else:
+            # List scans with query parameters: /orgs/{org_slug}/full-scans?params
+            params_arg = urllib.parse.urlencode(params)
+            path = "orgs/" + org_slug + "/full-scans?" + str(params_arg)
+
         response = self.api.do_request(path=path)
 
         if response.status_code == 200:
@@ -720,7 +728,7 @@ def get(self, org_slug: str, params: dict, use_types: bool = False) -> Union[dic
             )
         return {}
 
-    def post(self, files: list, params: FullScanParams, use_types: bool = False, use_lazy_loading: bool = False, workspace: str = None, max_open_files: int = 100) -> Union[dict, CreateFullScanResponse]:
+    def post(self, files: list, params: FullScanParams, use_types: bool = False, use_lazy_loading: bool = False, workspace: str = None, max_open_files: int = 100, base_path: str = None) -> Union[dict, CreateFullScanResponse]:
         """
         Create a new full scan by uploading manifest files.
 
@@ -734,6 +742,7 @@ def post(self, files: list, params: FullScanParams, use_types: bool = False, use
             workspace: Base directory path to make file paths relative to
             max_open_files: Maximum number of files to keep open simultaneously when using
                 lazy loading. Useful for systems with low ulimit values (default: 100)
+            base_path: Optional base path to strip from key names for cleaner file organization
 
         Returns:
             dict or CreateFullScanResponse: API response containing scan results
@@ -754,7 +763,7 @@ def post(self, files: list, params: FullScanParams, use_types: bool = False, use
 
         # Use lazy loading if requested
         if use_lazy_loading:
-            prepared_files = Utils.load_files_for_sending_lazy(files, workspace, max_open_files)
+            prepared_files = Utils.load_files_for_sending_lazy(files, workspace, max_open_files, base_path)
         else:
             prepared_files = files
 
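
A sketch of the new routing in get(): a params dict containing only "id" fetches a single scan, anything else lists scans. The scan ID and list parameter names below are placeholders.

    from socketdev import socketdev

    socket = socketdev(token="REPLACE_ME")

    # Routed to GET orgs/{org_slug}/full-scans/{full_scan_id}.
    single = socket.fullscans.get("my-org", {"id": "aaaa-bbbb-cccc"})

    # Any other params dict is urlencoded onto orgs/{org_slug}/full-scans for listing.
    listing = socket.fullscans.get("my-org", {"repo": "my-repo", "per_page": 10})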

socketdev/purl/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -33,7 +33,7 @@ def post(self, license: str = "false", components: list = None, **kwargs) -> lis
                     purl.append(item)
                 except json.JSONDecodeError:
                     continue
-            purl_deduped = Dedupe.dedupe(purl)
+            purl_deduped = Dedupe.dedupe(purl, batched=True)
             return purl_deduped
 
         log.error(f"Error posting {components} to the Purl API: {response.status_code}")

socketdev/report/__init__.py

Lines changed: 19 additions & 4 deletions
@@ -58,14 +58,29 @@ def supported(self) -> dict:
         return {}
 
     def create(self, files: list) -> dict:
+        # Handle both file path strings and file tuples
         open_files = []
-        for name, path in files:
-            file_info = (name, (name, open(path, "rb"), "text/plain"))
-            open_files.append(file_info)
+        for file_entry in files:
+            if isinstance(file_entry, tuple) and len(file_entry) == 2:
+                name, file_data = file_entry
+                if isinstance(file_data, tuple) and len(file_data) == 2:
+                    # Format: [("field_name", ("filename", file_obj))]
+                    filename, file_obj = file_data
+                    file_info = (name, (filename, file_obj, "text/plain"))
+                    open_files.append(file_info)
+                else:
+                    # Format: [("field_name", "file_path")]
+                    file_info = (name, (name, open(file_data, "rb"), "text/plain"))
+                    open_files.append(file_info)
+            else:
+                # Handle other formats if needed
+                log.error(f"Unexpected file format: {file_entry}")
+                return {}
+
         path = "report/upload"
         payload = {}
         response = self.api.do_request(path=path, method="PUT", files=open_files, payload=payload)
-        if response.status_code == 200:
+        if response.status_code in (200, 201):
             return response.json()
         log.error(f"Error creating report: {response.status_code}")
         log.error(response.text)
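
A sketch of the two input shapes create() now accepts; the multipart field name and filenames are placeholders.

    from socketdev import socketdev

    socket = socketdev(token="REPLACE_ME")

    # Path form: the SDK opens the file itself.
    socket.report.create([("file", "requirements.txt")])

    # Pre-opened form: the caller supplies ("filename", file_obj).
    with open("requirements.txt", "rb") as fh:
        socket.report.create([("file", ("requirements.txt", fh))])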

socketdev/utils/__init__.py

Lines changed: 30 additions & 8 deletions
@@ -233,7 +233,7 @@ def validate_integration_type(integration_type: str) -> IntegrationType:
         return integration_type  # type: ignore
 
     @staticmethod
-    def load_files_for_sending_lazy(files: List[str], workspace: str = None, max_open_files: int = 100) -> List[Tuple[str, Tuple[str, LazyFileLoader]]]:
+    def load_files_for_sending_lazy(files: List[str], workspace: str = None, max_open_files: int = 100, base_path: str = None) -> List[Tuple[str, Tuple[str, LazyFileLoader]]]:
         """
         Prepares files for sending to the Socket API using lazy loading.
 
@@ -246,6 +246,7 @@ def load_files_for_sending_lazy(files: List[str], workspace: str = None, max_ope
             files: List of file paths from find_files()
             workspace: Base directory path to make paths relative to
             max_open_files: Maximum number of files to keep open simultaneously (default: 100)
+            base_path: Optional base path to strip from key names for cleaner file organization
 
         Returns:
             List of tuples formatted for requests multipart upload:
@@ -257,6 +258,8 @@ def load_files_for_sending_lazy(files: List[str], workspace: str = None, max_ope
         send_files = []
         if workspace and "\\" in workspace:
             workspace = workspace.replace("\\", "/")
+        if base_path and "\\" in base_path:
+            base_path = base_path.replace("\\", "/")
 
         for file_path in files:
             # Normalize file path
@@ -265,14 +268,33 @@ def load_files_for_sending_lazy(files: List[str], workspace: str = None, max_ope
 
             _, name = file_path.rsplit("/", 1)
 
-            # Calculate the key (relative path from workspace)
-            if workspace and file_path.startswith(workspace):
+            # Calculate the key name for the form data
+            key = file_path
+
+            # If base_path is provided, strip it from the file path to create the key
+            if base_path:
+                # Normalize base_path to ensure consistent handling of trailing slashes
+                normalized_base_path = base_path.rstrip("/") + "/" if not base_path.endswith("/") else base_path
+                if key.startswith(normalized_base_path):
+                    key = key[len(normalized_base_path):]
+                elif key.startswith(base_path.rstrip("/")):
+                    # Handle case where base_path matches exactly without trailing slash
+                    stripped_base = base_path.rstrip("/")
+                    if key.startswith(stripped_base + "/") or key == stripped_base:
+                        key = key[len(stripped_base):]
+                key = key.lstrip("/")
+
+            # If workspace is provided and base_path wasn't used, fall back to workspace logic
+            elif workspace and file_path.startswith(workspace):
                 key = file_path[len(workspace):]
-            else:
-                key = file_path
-
-            key = key.lstrip("/")
-            key = key.lstrip("./")
+                key = key.lstrip("/")
+                key = key.lstrip("./")
+
+            # If neither base_path nor workspace matched, clean up the key
+            if key == file_path:
+                # No base_path or workspace stripping occurred, clean up leading parts
+                key = key.lstrip("/")
+                key = key.lstrip("./")
 
             # Create lazy file loader instead of opening file immediately
             # Use the relative path (key) as filename instead of truncated basename
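
The key-derivation rule above can be illustrated in isolation. The helper below is a hypothetical, simplified re-implementation for two sample paths; it is not part of the SDK.

    # strip_key is a hypothetical helper mirroring the base_path/workspace logic above.
    def strip_key(file_path: str, base_path: str = None, workspace: str = None) -> str:
        key = file_path.replace("\\", "/")
        if base_path:
            # base_path wins when given: strip it (with a normalized trailing slash).
            base = base_path.replace("\\", "/").rstrip("/") + "/"
            if key.startswith(base):
                key = key[len(base):]
            key = key.lstrip("/")
        elif workspace and key.startswith(workspace.replace("\\", "/")):
            # Otherwise fall back to the workspace prefix.
            key = key[len(workspace.replace("\\", "/")):].lstrip("/").lstrip("./")
        if key == file_path:
            # Neither prefix matched: just clean up leading separators.
            key = key.lstrip("/").lstrip("./")
        return key

    print(strip_key("/work/project/src/app/package.json", base_path="/work/project"))
    # -> "src/app/package.json"
    print(strip_key("/work/project/package.json", workspace="/work/project/"))
    # -> "package.json"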
