*fix windows unicodeEncodeError
thisisshubhamkumar committed May 12, 2024
1 parent d0982f1 commit 02cdb85
Showing 8 changed files with 40 additions and 40 deletions.
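
Nearly every hunk makes the same swap: the prompt prefix built from U+2514 (└) and U+27A4 (➤) becomes the plain ASCII marker [+]. The legacy Windows console often runs a narrow code page such as cp1252 that cannot encode those characters, so print() raises UnicodeEncodeError. The sketch below is illustrative only and not part of the repository; safe_print is a hypothetical helper, and the stdout reconfiguration assumes Python 3.7 or newer.

import sys

# A cp1252 console cannot encode '\u2514\u27A4'; printing it raises
# UnicodeEncodeError, which this commit works around by printing '[+]'.

# Optional alternative (an assumption, not what the repo does): ask stdout to
# emit UTF-8 and replace anything the console still cannot render (3.7+).
if sys.stdout.encoding and sys.stdout.encoding.lower() not in ('utf-8', 'utf8'):
    try:
        sys.stdout.reconfigure(encoding='utf-8', errors='replace')
    except AttributeError:
        pass  # wrapped or older stream without reconfigure(); keep ASCII markers

def safe_print(text, ascii_prefix='[+] '):
    """Hypothetical helper: print text, falling back to an ASCII prefix plus
    the ASCII-only remainder if the console cannot encode it."""
    try:
        print(text)
    except UnicodeEncodeError:
        print(ascii_prefix + text.encode('ascii', 'ignore').decode('ascii').lstrip())

safe_print('\u2514\u27A4 The website is hosted by: example-host')
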
8 changes: 4 additions & 4 deletions admin_finder.py
@@ -73,15 +73,15 @@ def generate_admin_panel_link(domain):
hosting_location = domain_info.org

if hosting_location:
-result.append(f'{G}\u2514\u27A4{G} The website is hosted by: {hosting_location}')
+result.append(f'{G}[+] {C} The website is hosted by: {hosting_location}')
else:
#result.append(f'{G}\u2514\u27A4{R} Hosting location information not available.')
potential_admin_panel_link = generate_admin_panel_link(domain)
-result.append(f'{G}\u2514\u27A4{Y} You can try the potential admin panel link:{W} {potential_admin_panel_link}')
+result.append(f'{G}[+] {Y} You can try the potential admin panel link:{W} {potential_admin_panel_link}')
potential_admin_panel_link = generate_admin_panel_link(domain_ip)
-result.append(f'\n{G}\u2514\u27A4{Y} You can try the potential admin panel link:{W} {potential_admin_panel_link}')
+result.append(f'\n{G}[+] {Y} You can try the potential admin panel link:{W} {potential_admin_panel_link}')

except Exception as e:
-result.append(f'{G}\u2514\u27A4{R} Failed to determine hosting location and generate a potential admin panel link. Error: {e}')
+result.append(f'{G}[+] {R} Failed to determine hosting location and generate a potential admin panel link. Error: {e}')

return result
4 changes: 2 additions & 2 deletions dmarc_record.py
@@ -25,10 +25,10 @@ def fetch_dmarc_links(domain)

return links
except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer):
print(f"{G}\u2514\u27A4 {R}No DMARC record found for {domain}")
print(f"{G}[+] {R}No DMARC record found for {domain}")
return []
except dns.exception.DNSException as e:
print(f"{G}\u2514\u27A4 {R}An error occurred: {e}")
print(f"{G}[+] {R}An error occurred: {e}")
return []

if __name__ == "__main__":
6 changes: 3 additions & 3 deletions dns_enumeration.py
@@ -10,7 +10,7 @@
def get_domain_ip(domain):
try:
ip_address = socket.gethostbyname(domain)
print(f"{G}\u2514\u27A4 {C}IP Address:{W} {ip_address}")
print(f"{G}[+] {C}IP Address:{W} {ip_address}")
return ip_address
except socket.gaierror:
print(f"{R}[-] {C}Could not resolve IP address for the domain.{W}")
@@ -35,7 +35,7 @@ def dnsrec(domain):
try:
response = resolver.query(domain, record_type)
for answer in response:
-print(f'{G}\u2514\u27A4 {C}{record_type}:{W} {answer}')
+print(f'{G}[+] {C}{record_type}:{W} {answer}')
result['dns'].append(f'{record_type}: {answer}')
except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer, dns.resolver.Timeout):
pass
@@ -44,7 +44,7 @@ def dnsrec(domain):
try:
dmarc_response = resolver.query(dmarc_target, 'TXT')
for answer in dmarc_response:
-print(f'{G}\u2514\u27A4 {C}DMARC:{W} {answer}')
+print(f'{G}[+] {C}DMARC:{W} {answer}')
result['dmarc'].append(f'DMARC: {answer}')
except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer, dns.resolver.Timeout):
pass
2 changes: 1 addition & 1 deletion header.py
@@ -15,7 +15,7 @@ def fetch_headers(url):
print(f'\n{Y}[~] Headers :{W}\n')
for key, value in headers.items():
result[key] = value
-print(f'{G}\u2514\u27A4 {C}{key}:{W} {value}')
+print(f'{G}[+] {C}{key}:{W} {value}')
except requests.exceptions.RequestException as e:
print(f'\n{R}[-] {C}Exception :{W} {e}\n')
result['Exception'] = str(e)
14 changes: 7 additions & 7 deletions omnisci3nt.py
@@ -294,25 +294,25 @@ def write(self, obj):
programming_languages, technologies, javascript_libraries, web_server = analyze_website(website_url)

if programming_languages:
print(f"{G}\u2514\u27A4 {C}Detected programming languages:{W}", f", ".join(programming_languages))
print(f"{G}[+] {C}Detected programming languages:{W}", f", ".join(programming_languages))
else:
print(f"{R}No programming language detected or an error occurred.")

if technologies:
print(f"\n{G}\u2514\u27A4 {C}Website technologies:")
print(f"\n{G}[+] {C}Website technologies:")
for tech, details in technologies.items():
print(f"{W}{tech}: {details}")
else:
print(f"{R}An error occurred while fetching technologies.")

if javascript_libraries:
print(f"\n{G}\u2514\u27A4 {C}JavaScript libraries:")
print(f"\n{G}[+] {C}JavaScript libraries:")
for library in javascript_libraries:
print(f"{W}- " + library)
else:
print(f"{R}No JavaScript libraries detected.")

print(f"\n{G}\u2514\u27A4 {C}Web server:", f"{W}{web_server}")
print(f"\n{G}[+] {C}Web server:", f"{W}{web_server}")

except Exception as e:
print(e)
@@ -419,8 +419,8 @@ def write(self, obj):
'''
print(f"{G}{banner3}")
#print(f"\n{R}Recon completed\n")
print(f"{G}\u2514\u27A4 {C}Date: {W}{formatted_date}")
print(f"{G}\u2514\u27A4 {C}Time taken: {W}{elapsed_time:.2f} seconds")
print(f"{G}[+] {C}Date: {W}{formatted_date}")
print(f"{G}[+] {C}Time taken: {W}{elapsed_time:.2f} seconds")
sys.stdout = sys.__stdout__ # Restore standard output

print(f"{G}\u2514\u27A4 {C}Output saved to '{output_filename}'")
print(f"\n{G}\u2514\u27A4 {C}Output saved to '{output_filename}'")
6 changes: 3 additions & 3 deletions sdomain.py
@@ -45,9 +45,9 @@ def check_subdomain(subdomain):

end_time = time.time()
elapsed_time = end_time - start_time
print(f"{G}\u2514\u27A4 {C}Total Subdomains Scanned:{W} 781")
print(f"{G}\u2514\u27A4 {C}Total Subdomains Found:{W} {len(subdomains_found)}")
print(f"{G}\u2514\u27A4 {C}Time taken:{W} {elapsed_time:.2f} seconds")
print(f"{G}[+] {C}Total Subdomains Scanned:{W} 781")
print(f"{G}[+] {C}Total Subdomains Found:{W} {len(subdomains_found)}")
print(f"{G}[+] {C}Time taken:{W} {elapsed_time:.2f} seconds")
print("\nSubdomains Found Links:\n")
for link in subdomains_found:
print(link)
10 changes: 5 additions & 5 deletions sl.py
@@ -20,11 +20,11 @@ def get_certificate_info(hostname, port=443, timeout=10):

def print_certificate_info(certificate):
print(f'\n{Y}[~] SSL Certificate Information :{W}\n')
#print(f"{G}\u2514\u27A4 {C}Certificate Information:")
print(f"{G}\u2514\u27A4 {C}Subject:{W}", dict(x[0] for x in certificate['subject']))
print(f"{G}\u2514\u27A4 {C}Issuer:{W}", dict(x[0] for x in certificate['issuer']))
print(f"{G}\u2514\u27A4 {C}Valid From:{W}", certificate['notBefore'])
print(f"{G}\u2514\u27A4 {C}Valid Until:{W}", certificate['notAfter'])
#print(f"{G}[+] {C}Certificate Information:")
print(f"{G}[+] {C}Subject:{W}", dict(x[0] for x in certificate['subject']))
print(f"{G}[+] {C}Issuer:{W}", dict(x[0] for x in certificate['issuer']))
print(f"{G}[+] {C}Valid From:{W}", certificate['notBefore'])
print(f"{G}[+] {C}Valid Until:{W}", certificate['notAfter'])


if __name__ == "__main__":
30 changes: 15 additions & 15 deletions web_carwl.py
@@ -65,31 +65,31 @@ def perform_web_recon(website_url):
external_links.add(full_url)

# Print the URLs
print(f"{G}\u2514\u27A4 {C}JS Files:{W}")
print(f"{G}[+] {C}JS Files:{W}")
for js_file in js_files:
print(js_file)

print(f"\n{G}\u2514\u27A4 {C}CSS Files:{W}")
print(f"\n{G}[+]{C}CSS Files:{W}")
for css_file in css_files:
print(css_file)

print(f"\n{G}\u2514\u27A4 {C}HTML Files:{W}")
print(f"\n{G}[+] {C}HTML Files:{W}")
for html_file in html_files:
print(html_file)

print(f"\n{G}\u2514\u27A4 {C}PHP Files:{W}")
print(f"\n{G}[+] {C}PHP Files:{W}")
for php_file in php_files:
print(php_file)

print(f"\n{G}\u2514\u27A4 {C}Image Files:{W}")
print(f"\n{G}[+] {C}Image Files:{W}")
for image_file in image_files:
print(image_file)

print(f"\n{G}\u2514\u27A4 {C}Internal Links:{W}")
print(f"\n{G}[+] {C}Internal Links:{W}")
for internal_link in internal_links:
print(internal_link)

print(f"\n{G}\u2514\u27A4 {C}External Links:{W}")
print(f"\n{G}[+] {C}External Links:{W}")
for external_link in external_links:
print(external_link)

@@ -107,21 +107,21 @@ def perform_web_recon(website_url):
full_url = urljoin(directory_search, href)
directory_links.append(full_url)

print("\nDirectory Links:")
print("\n{G}[+] {C}Directory Links:")
for directory_link in directory_links:
print(directory_link)

else:
print("\nFailed to fetch directory. Status code:", directory_response.status_code)

# Print the counts
print(f"\n{G}\u2514\u27A4 {C}Total JS Files:{W}", len(js_files))
print(f"{G}\u2514\u27A4 {C}Total CSS Files:{W}", len(css_files))
print(f"{G}\u2514\u27A4 {C}Total HTML Files:{W}", len(html_files))
print(f"{G}\u2514\u27A4 {C}Total PHP Files:{W}", len(php_files))
print(f"{G}\u2514\u27A4 {C}Total Image Files:{W}", len(image_files))
print(f"{G}\u2514\u27A4 {C}Total Internal Links:{W}", len(internal_links))
print(f"{G}\u2514\u27A4 {C}Total External Links:{W}", len(external_links))
print(f"\n{G}[+] {C}Total JS Files:{W}", len(js_files))
print(f"{G}[+] {C}Total CSS Files:{W}", len(css_files))
print(f"{G}[+] {C}Total HTML Files:{W}", len(html_files))
print(f"{G}[+] {C}Total PHP Files:{W}", len(php_files))
print(f"{G}[+] {C}Total Image Files:{W}", len(image_files))
print(f"{G}[+] {C}Total Internal Links:{W}", len(internal_links))
print(f"{G}[+] {C}Total External Links:{W}", len(external_links))

else:
print(f"{R}Failed to fetch the website. Status code:", response.status_code)
