Skip to content

Commit 41c0f59

Browse files
authored
Merge pull request #3 from flashnuke/feat/accept_resolved
Feat/accept resolved
2 parents 52be066 + 7ea11fb commit 41c0f59

File tree

3 files changed

+29
-19
lines changed

3 files changed

+29
-19
lines changed

WebRecon.py

Lines changed: 20 additions & 10 deletions
Original file line numberDiff line numberDiff line change
import ipaddress
import pprint
import urllib.parse

import pkg_resources

from sys import platform
from typing import Tuple, Type, Union
@@ -68,6 +69,7 @@ def __init__(self,
6869
self._scans = self._parse_scan_list(scans) # only the ones we call using `_do_scan()`
6970

7071
self.scheme, self.subdomain, self.target_hostname = self._parse_target_url(target_url)
72+
self.host_is_resolved = self.subdomain is None
7173
self._default_general_scanner_args = {
7274
"scheme": self.scheme,
7375
"target_hostname": self.target_hostname,
@@ -111,14 +113,19 @@ def _parse_scan_list(self, scan_list: List[str]) -> List[Type[Scanner]]:
111113
scans.append(scanner)
112114
return scans
113115

114-
def _parse_target_url(self, target_url: str) -> Tuple[str, str, str]:
115-
parsed_target = urllib.parse.urlparse(target_url)
116-
scheme = parsed_target.scheme
117-
netloc = parsed_target.netloc
118-
sub = netloc.split(".")[0] if self._contains_subdomain(target_url) else ScannerDefaultParams.DefaultSubdomain
119-
hostname = netloc.split(".", 1)[-1] if self._contains_subdomain(target_url) else netloc
120-
121-
return scheme, sub, hostname
116+
def _parse_target_url(self, target_url: str) -> Tuple[str, Union[str, None], str]:
117+
try:
118+
scheme, ip_hostname = target_url.split('://')
119+
ip_test = ipaddress.ip_address(ip_hostname) # check for valid ip address
120+
return scheme, None, ip_hostname
121+
except Exception as exc: # not an IP address
122+
parsed_target = urllib.parse.urlparse(target_url)
123+
scheme = parsed_target.scheme
124+
netloc = parsed_target.netloc
125+
sub = netloc.split(".")[0] if self._contains_subdomain(
126+
target_url) else ScannerDefaultParams.DefaultSubdomain
127+
hostname = netloc.split(".", 1)[-1] if self._contains_subdomain(target_url) else netloc
128+
return scheme, sub, hostname
122129

123130
def _start_scans_for_target(self, target: str) -> List[threading.Thread]:
124131
scanner_threads = list()
@@ -162,6 +169,9 @@ def _setup_targets(self) -> queue.Queue:
162169
domains = queue.Queue()
163170
domains.put(self.target_url)
164171
if self.dns_recursion:
172+
if self.host_is_resolved:
173+
self._log_progress("skipping dns scan, host is resolved...")
174+
return domains
165175
subdomain_scanner.DNSScanner(target_url=self.target_hostname, domains_queue=domains,
166176
**self._generate_scanner_args(DNSScanner.SCAN_NICKNAME)).start_scanner()
167177
return domains
@@ -201,8 +211,8 @@ def _get_scanner_name(self, *args, **kwargs) -> str:
201211

202212

203213
if __name__ == "__main__":
204-
if "linux" not in platform:
205-
raise UnsupportedOS(platform)
214+
# if "linux" not in platform:
215+
# raise UnsupportedOS(platform)
206216
with open("requirements.txt", "r") as reqs:
207217
pkg_resources.require(reqs.readlines())
208218

scanners/base_scanner.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -195,7 +195,8 @@ def _clear_cache_file(self):
195195

196196
@lru_cache(maxsize=5)  # NOTE(review): lru_cache on a method keys on `self` and keeps the instance alive (B019) — kept for behavior parity
def generate_url_base_path(self, dnsname: Union[str, None]) -> str:
    """Build the base URL for *dnsname*.

    :param dnsname: subdomain to prefix, or ``None`` for an
                    already-resolved host (e.g. an IP target), in which
                    case no subdomain prefix is added.
    :return: e.g. ``"https://www.example.com"`` or ``"https://1.2.3.4"``
    """
    if dnsname is None:
        return f"{self.scheme}://{self.target_hostname}"
    return f"{self.scheme}://{dnsname}.{self.target_hostname}"
199200

200201
@lru_cache(maxsize=5)
201202
def _format_name_for_path(self, name: str) -> str:
@@ -309,7 +310,8 @@ def _make_request(self, method: str, url: str, headers=None, **kwargs):
309310
headers = dict()
310311
headers.update(self._default_headers)
311312

312-
res = self._session.request(method=method, url=url, headers=headers, timeout=self.request_timeout, **kwargs)
313+
res = self._session.request(method=method, url=url, headers=headers, timeout=self.request_timeout,
314+
verify=False, **kwargs)
313315

314316
if res.status_code == ScannerDefaultParams.LimitRateSCode:
315317
self._log_exception("too many requests", abort=False)

scanners/content_scanner.py

Lines changed: 5 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -47,18 +47,16 @@ def single_bruter(self):
4747
attempt = self.words_queue.get().strip("/")
4848
found_any = False
4949

50-
# check if there is a file extension, if not then it's a directory we're bruting
51-
if "." not in attempt:
52-
attempt_list.append(f"/{attempt}/")
53-
else:
54-
attempt_list.append(f"/{attempt}")
55-
50+
attempt_list.append(f"/{attempt}")
51+
if "." in attempt: # check if there is a file extension
5652
if ScannerDefaultParams.FileExtensions:
5753
for extension in ScannerDefaultParams.FileExtensions:
5854
attempt_post = "." + attempt.split(".")[-1]
5955

6056
if attempt_post != extension:
6157
attempt_list.append(f"/{attempt.replace(attempt_post, extension)}")
58+
else:
59+
attempt_list.append(f"/{attempt}/")
6260

6361
for brute in attempt_list:
6462
path = urllib.parse.quote(brute)
@@ -92,11 +90,11 @@ def single_bruter(self):
9290
except Exception as exc:
9391
self.abort_scan(reason=f"target {url}, exception - {exc}")
9492
finally:
95-
attempt_list.clear()
9693
if found_any:
9794
self._save_results()
9895
time.sleep(self.request_cooldown)
9996

97+
attempt_list.clear()
10098
self._update_count(attempt, found_any)
10199

102100
def _start_scanner(self):

0 commit comments

Comments (0)