-
Notifications
You must be signed in to change notification settings - Fork 3
Expand file tree
/
Copy pathreqhead
More file actions
executable file
·131 lines (107 loc) · 4.5 KB
/
reqhead
File metadata and controls
executable file
·131 lines (107 loc) · 4.5 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
#!/usr/bin/env python3
""" Status Checker to detect response codes and server information """
# full source: https://github.com/angela-d/request-headers-checker
# license: GPLv3
# author: angela-d
import pprint
from json import dumps, loads
import sys
from ssl import create_default_context
import socket
from argparse import ArgumentParser
from requests import get, exceptions
# font colors
GREEN = '\033[32m'
YELLOW = '\033[33m'
CLR_COLOR = '\033[m' # reset color to default
def startup(entered_url):
    """ Prompt for a URL if none was supplied, then run the header check.

    entered_url -- URL string including the http(s):// scheme, or None to
                   prompt the user interactively.
    Exits with status 0 if the user cancels (Ctrl-C / Ctrl-D).
    """
    try:
        if entered_url is None:
            print(
                GREEN
                + "\t\tEnter the url you wish to investigate, including the http(s)://"
                + CLR_COLOR
            )
            entered_url = input("URL: ")
        get_http_info(entered_url)
    # if a user chooses to cancel their session before processing a url;
    # both interrupts get identical handling, so catch them together
    except (KeyboardInterrupt, EOFError):
        print('\nExiting..')
        sys.exit(0)
def get_http_info(check_url):
    """ Fetch a URL and report its response code and headers, SSL
    certificate details (for https), and any redirects encountered.

    check_url -- full URL including the http(s):// scheme
    Exits with status 1 on connection, timeout, or redirect-loop errors;
    re-prompts via startup() on schema errors or after showing a redirect.
    """
    # try & circumvent user agent blocking for python requests
    spoof_agent = {
        'Accept-Language': 'en-US,en;q=0.5',
        'User-Agent' : 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/66.0', # pylint: disable=C0301
        'Accept' : 'text/html,application/xhtml+xml,application/xml;q =0.9,*/*;q=0.8',
        # fix: the real HTTP header is spelled 'Referer' (RFC 9110);
        # 'Referrer' is not recognized by servers, so the spoof was inert
        'Referer' : 'https://www.startpage.com'
    }
    try:
        # fix: without a timeout, requests waits forever and the
        # exceptions.Timeout handler below could never trigger
        access_url = get(check_url, headers=spoof_agent, timeout=30)
        if not access_url.history:
            # normal request (no redirects were followed)
            print(GREEN + '\n\t\t----- Headers for', check_url, ' -----' + CLR_COLOR)
            print("\nHTTP Response: " + str(access_url.status_code))
            request_header = dumps(dict(access_url.headers))
            format_headers = pprint.PrettyPrinter(indent=2)
            format_headers.pprint(loads(request_header))
            # obtain ssl issuer information (if https)
            if 'https:' in check_url:
                print(GREEN + '\n\t\t----- SSL Headers for', check_url, ' -----' + CLR_COLOR)
                # strip scheme, path, and query string to get the bare hostname
                hostname = check_url.split("//")[-1].split("/")[0].split('?')[0]
                context = create_default_context()
                # fix: close the TLS socket when done (it was leaked before)
                with context.wrap_socket(socket.socket(), server_hostname=hostname) as server: #pylint: disable=C0326
                    server.connect((hostname, 443))
                    cert = server.getpeercert()
                issued = dict(extract[0] for extract in cert['subject'])
                issuer = dict(extract[0] for extract in cert['issuer'])
                print('SSL cert issued to', issued['commonName'], 'by', issuer['commonName'],\
                    'on', cert['notBefore'], 'expires', cert['notAfter'])
        else:
            # redirects detected; re-request without following them so the
            # first hop's status code and target are visible
            print(YELLOW + '\n\t\t----- Redirect(s) detected for', check_url, ' -----' + CLR_COLOR)
            # fix: send the same spoof headers (the retry was unspoofed) and
            # apply the same timeout
            redirect_status = get(check_url, headers=spoof_agent, allow_redirects=False, timeout=30)
            # fix: a malformed redirect may lack a Location header; don't KeyError
            location = redirect_status.headers.get('Location', '(no Location header)')
            print(redirect_status.status_code, ' detected from', check_url, \
                'to', location, \
                '\n\t\x1B[3mRun a check on ' + location \
                + ' instead.\x1B[23m\n\n')
            startup(entered_url=None)
    # some errors encountered
    except exceptions.InvalidSchema as show_error:
        print(show_error)
        startup(entered_url=None)
    except exceptions.MissingSchema as show_error:
        print(show_error)
        startup(entered_url=None)
    except exceptions.ConnectionError:
        print("Took too long to connect; ensure the URL is valid and try again.")
        sys.exit(1)
    except exceptions.Timeout:
        print("Timeout. Server might be offline or not responding.")
        sys.exit(1)
    except exceptions.TooManyRedirects:
        print("Infinite Redirects. Site is poorly configured.")
        sys.exit(1)
if __name__ == '__main__':
    # fix: the original backslash-continued string literal embedded the
    # source file's indentation whitespace inside the help text; implicit
    # string concatenation produces a clean description instead
    PARSE = ArgumentParser(
        description='A Python tool to get header information from a website.\n'
                    'http:// or https:// are required.'
    )
    PARSE.add_argument(
        '--version', '-v',
        action='version',
        version='%(prog)s 2.0.0'
    )
    # accepted for backward compatibility; the URL itself arrives as a
    # loose (unrecognized) argument, not as this flag's value
    PARSE.add_argument(
        '--url', '-url',
        action='store_true'
    )
    # parse_known_args() returns (namespace, leftover_args); a single
    # leftover argument is treated as the URL to check
    NAMESPACE = PARSE.parse_known_args()
    if len(NAMESPACE[1]) == 1:
        startup(NAMESPACE[1][0])
    else:
        startup(entered_url=None)