Skip to content

Commit abfdf19

Browse files
authored
Merge pull request #22 from ScrapingAnt/feature/add-custom-headers-support
feature/add-custom-headers-support: done
2 parents a5accc5 + f975b07 commit abfdf19

File tree

5 files changed

+55
-2
lines changed

5 files changed

+55
-2
lines changed

README.md

Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -50,6 +50,7 @@ https://docs.scrapingant.com/request-response-format#available-parameters
5050
| --- | --- | --- |
5151
| url | <code>string</code> | |
5252
| cookies | <code>List[Cookie]</code> | None |
53+
| headers | <code>List[Dict[str, str]]</code> | None |
5354
| js_snippet | <code>string</code> | None |
5455
| proxy_type | <code>ProxyType</code> | datacenter |
5556
| proxy_country | <code>str</code> | None |
@@ -171,6 +172,29 @@ else:
171172
print(f'Successfully parsed data: {parsed_data}')
172173
```
173174

175+
### Sending custom headers
176+
177+
```python3
178+
from scrapingant_client import ScrapingAntClient
179+
180+
client = ScrapingAntClient(token='<YOUR-SCRAPINGANT-API-TOKEN>')
181+
182+
result = client.general_request(
183+
'https://httpbin.org/headers',
184+
headers={
185+
'test-header': 'test-value'
186+
}
187+
)
188+
print(result.content)
189+
190+
# HTTP basic auth example
191+
result = client.general_request(
192+
'https://jigsaw.w3.org/HTTP/Basic/',
193+
headers={'Authorization': 'Basic Z3Vlc3Q6Z3Vlc3Q='}
194+
)
195+
print(result.content)
196+
```
197+
174198

175199
## Useful links
176200
- [Scrapingant API documentation](https://docs.scrapingant.com)

scrapingant_client/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
__version__ = "0.3.7"
1+
__version__ = "0.3.8"
22

33
from scrapingant_client.client import ScrapingAntClient
44
from scrapingant_client.cookie import Cookie

scrapingant_client/client.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import platform
22
import sys
3-
from typing import List, Optional
3+
from typing import List, Optional, Dict
44

55
import requests
66

@@ -14,6 +14,7 @@
1414
ScrapingantSiteNotReachableException,
1515
ScrapingantDetectedException,
1616
)
17+
from scrapingant_client.headers import convert_headers
1718
from scrapingant_client.proxy_type import ProxyType
1819
from scrapingant_client.response import Response
1920
from scrapingant_client.utils import base64_encode_string
@@ -34,6 +35,7 @@ def general_request(
3435
self,
3536
url: str,
3637
cookies: Optional[List[Cookie]] = None,
38+
headers: Optional[Dict[str, str]] = None,
3739
js_snippet: Optional[str] = None,
3840
proxy_type: ProxyType = ProxyType.datacenter,
3941
proxy_country: Optional[str] = None,
@@ -58,6 +60,7 @@ def general_request(
5860
response = self.requests_session.post(
5961
SCRAPINGANT_API_BASE_URL + '/general',
6062
json=request_data,
63+
headers=convert_headers(headers),
6164
)
6265
if response.status_code == 403:
6366
raise ScrapingantInvalidTokenException()

scrapingant_client/headers.py

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,10 @@
1+
from typing import Dict, Optional
2+
3+
4+
def convert_headers(headers: Optional[Dict[str, str]]):
    """Prefix every header name with 'ant-' for the ScrapingAnt API.

    The API distinguishes headers meant for the target site from headers
    meant for the API itself by the 'ant-' prefix.

    :param headers: mapping of header name to value, or None
    :return: a new dict with each key prefixed by 'ant-', or None when
        no headers were given
    """
    if headers is None:
        return None
    converted = {}
    for name, value in headers.items():
        converted['ant-' + name] = value
    return converted

tests/test_headers.py

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,16 @@
1+
from scrapingant_client.headers import convert_headers
2+
3+
4+
def test_convert_empty_headers():
    # No headers supplied must pass through as None (no empty dict).
    result = convert_headers(None)
    assert result is None
6+
7+
8+
def test_convert_headers():
    # Every header name gets the 'ant-' prefix; values are untouched.
    original = {
        'User-Agent': 'test',
        'Accept-Language': 'en-US',
    }
    expected = {
        'ant-User-Agent': 'test',
        'ant-Accept-Language': 'en-US',
    }
    assert convert_headers(original) == expected

0 commit comments

Comments
 (0)