Skip to content

Commit b596c3b

Browse files
committed
Added performance efficiency and module output upliftment
1 parent fa5bdb8 commit b596c3b

File tree

15 files changed

+1476
-286
lines changed

15 files changed

+1476
-286
lines changed

modules/0x01-OSINT+Footprinting/0x02-ActiveReconnaissance/commentssrc.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -46,7 +46,7 @@ def commentssrc(web):
4646
req = requests.get(w).content
4747
print(O+' [!] Setting parse parameters...')
4848
comments = re.findall('<!--(.*)-->',req)
49-
print(G+" [+] Comments on page: "+O+web+'\n')
49+
print(G+" [+] Searching for comments on page: "+O+web+'\n')
5050
for comment in comments:
5151
print(C+' '+comment)
5252
time.sleep(0.03)
@@ -68,7 +68,7 @@ def commentssrc(web):
6868

6969
try:
7070
for uurl in urls:
71-
print(G+"\n [+] Comments on page: "+O+uurl+'\n')
71+
print(G+"\n [+] Searching for comments on page: "+O+uurl+'\n')
7272
req = requests.get(uurl)
7373
comments = re.findall('<!--(.*)-->',req.text)
7474
for comment in comments:

modules/0x01-OSINT+Footprinting/0x02-ActiveReconnaissance/serverdetect.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -68,7 +68,7 @@ def serverdetect(web):
6868
else:
6969
print(B+' [+] Server : ' +C+header)
7070
try:
71-
print(O+' [+] Powered By : ' +G+ r.headers['X-Powered-By'])
71+
print(O+' [+] Running On : ' +G+ r.headers['X-Powered-By'])
7272
except:
7373
pass
7474
except:

modules/0x01-OSINT+Footprinting/0x02-ActiveReconnaissance/sharedns.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -27,9 +27,9 @@ def sharedns(web):
2727
system('dig +nocmd '+web+' ns +noall +answer')
2828
h = raw_input(O+'\n [*] Enter any DNS Server from above :> ')
2929
time.sleep(0.4)
30-
print('' + GR + ' [!] Discovering hosts on same DNS Server...')
30+
print(GR + ' [!] Discovering hosts on same DNS Server...')
3131
time.sleep(0.4)
32-
print(""+ GR +" [~] Result: \n"+ color.END)
32+
print(GR +" [~] Result: \n"+ color.END)
3333
domains = [h]
3434
for dom in domains:
3535
text = requests.get('http://api.hackertarget.com/findshareddns/?q=' + dom).text

modules/0x01-OSINT+Footprinting/0x03-InformationDisclosure/credit.py

Lines changed: 82 additions & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -12,93 +12,137 @@
1212
from __future__ import print_function
1313
import re
1414
import sys
15-
sys.path.append('files/')
15+
sys.path.append('files/signature-db/')
16+
import lxml
1617
import time
1718
import requests
1819
from colors import *
20+
urls = []
21+
links = []
22+
found = 0x00
23+
from bs4 import BeautifulSoup
24+
from infodisc_signatures import EXPRESS_CARD_SIGNATURE
25+
from infodisc_signatures import VISA_MASTERCARD_SIGNATURE
26+
from infodisc_signatures import MASTERCARD_SIGNATURE, DISCOVER_CARD_SIGNATURE
27+
from infodisc_signatures import VISA_SIGNATURE, AMEX_CARD_SIGNATURE
28+
from requests.packages.urllib3.exceptions import InsecureRequestWarning
29+
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
1930

2031
def credit0x00(url):
2132

22-
print(R+'\n ========================')
23-
print(R+' CREDIT CARD DISCLOSURE')
24-
print(R+' ========================\n')
33+
print(G+' [+] Importing credit card signatures...')
34+
time.sleep(0.5)
35+
links = [url]
36+
po = url.split('//')[1]
37+
for w in links:
38+
print(GR+' [*] Scraping Page: '+O+url)
39+
req = requests.get(w).text
40+
check0x00(req)
41+
42+
soup = BeautifulSoup(req,'lxml')
43+
for line in soup.find_all('a', href=True):
44+
newline = line['href']
45+
try:
46+
if newline[:4] == "http":
47+
if po in newline:
48+
urls.append(str(newline))
49+
elif newline[:1] == "/":
50+
combline = url+newline
51+
urls.append(str(combline))
52+
except:
53+
print(R+' [-] Unhandled Exception Occurred!')
54+
55+
try:
56+
for uurl in urls:
57+
print(G+"\n [+] Scraping Page: "+O+uurl)
58+
req = requests.get(uurl).text
59+
check0x00(req)
60+
61+
except requests.exceptions:
62+
print(R+' [-] Outbound Query Exception...')
63+
64+
if found == 0x00:
65+
print(R+' [-] No Credit Cards found disclosed in plaintext in source code!')
66+
67+
print(G+' [+] Scraping Done!')
2568

26-
print(O+' [*] Making the request...')
27-
req = requests.get(url, verify=False)
28-
req_read = str(req).split()
29-
print(GR+' [*] Reading response...')
30-
time.sleep(1)
31-
append_name = str("".join(req_read))
32-
AMEX = re.match(r"^3[47][0-9]{13}$", append_name)
33-
VISA = re.match(r"^4[0-9]{12}(?:[0-9]{3})?$", append_name)
34-
MASTERCARD = re.match(r'^(?:5[1-5][0-9]{2}|222[1-9]|22[3-9][0-9]|2[3-6][0-9]{2}|27[01][0-9]|2720)[0-9]{12}$', append_name)
35-
DISCOVER = re.match(r"^6(?:011|5[0-9]{2})[0-9]{12}$", append_name)
36-
EXPRESS = re.match(r'^[34|37][0-9]{14}$', append_name)
37-
VISA_MASTERCARD = re.match(r'^(?:4[0-9]{12}(?:[0-9]{3})?|5[1-5][0-9]{14})$', append_name)
69+
70+
def check0x00(req):
71+
72+
try:
73+
append_name = ' '.join(req.encode('utf-8')).strip()
74+
except UnicodeDecodeError:
75+
append_name = ' '.join(req.decode('utf-8')).strip()
76+
print(O+' [!] Reading response...')
77+
print(GR+' [*] Searching for credit cards...')
78+
AMEX = re.match(AMEX_CARD_SIGNATURE, append_name)
79+
VISA = re.match(VISA_SIGNATURE, append_name)
80+
MASTERCARD = re.match(MASTERCARD_SIGNATURE, append_name)
81+
DISCOVER = re.match(DISCOVER_CARD_SIGNATURE, append_name)
82+
EXPRESS = re.match(EXPRESS_CARD_SIGNATURE, append_name)
83+
VISA_MASTERCARD = re.match(VISA_MASTERCARD_SIGNATURE, append_name)
84+
print(O+' [!] Matching signatures...')
3885

3986
try:
40-
print(GR+' [*] Trying to find out existing American Express Cards...')
41-
time.sleep(1)
4287
if EXPRESS.group():
4388
print(G+" [+] Website has American Express Cards!")
4489
print(O+' [!] Card : ' + GR+EXPRESS.group())
90+
found = 0x01
4591

4692
except:
47-
print(R+" [-] No American Express Cards found!")
93+
pass
4894

4995
try:
50-
print(GR+' [*] Trying to find out existing Visa-Master Cards...')
51-
time.sleep(1)
5296
if VISA_MASTERCARD.group():
5397
print(G+" [+] Website has a Visa-Master Card!")
5498
print(O+' [!] Card : ' + GR+VISA_MASTERCARD.group())
99+
found = 0x01
55100

56101
except:
57-
print(R+" [-] No Visa-MasterCard found!")
102+
pass
58103

59104
try:
60-
print(GR+' [*] Trying to find out existing MasterCards...')
61-
time.sleep(1)
62105
if MASTERCARD.group():
63106
print(G+" [+] Website has a Master Card!")
64107
print(O+' [!] Card : ' + GR+MASTERCARD.group())
108+
found = 0x01
65109

66110
except:
67-
print(R+" [-] No MasterCard found!")
111+
pass
68112

69113
try:
70-
print(GR+' [*] Trying to find out existing VISA credit cards...')
71-
time.sleep(1)
72114
if VISA.group():
73115
print(G+" [+] Website has a VISA card!")
74116
print(O+' [!] Card : ' + GR+VISA.group())
117+
found = 0x01
75118

76119
except:
77-
print(R+" [-] No VISA Cards found!")
120+
pass
78121

79122
try:
80-
print(GR+' [*] Trying to find out existing AMEX Cards...')
81-
time.sleep(1)
82123
if AMEX.group():
83124
print(G+" [+] Website has a AMEX card!")
84125
print(O+' [!] Card : ' + GR+AMEX.group())
126+
found = 0x01
85127

86128
except:
87-
print(R+" [-] No Amex Cards found!")
129+
pass
88130

89131
try:
90-
print(GR+' [*] Trying to find out existing Discover Cards...')
91-
time.sleep(1)
92132
if DISCOVER.group():
93133
print(G+" [+] Website has a DISCOVER card!")
94134
print(O+' [!] Card : ' + GR+DISCOVER.group())
135+
found = 0x01
95136

96137
except:
97-
print(R+" [-] No Discover Cards found!")
138+
pass
98139

99140
def credit(web):
100141

101-
print(GR+' [*] Initiating module...')
102-
time.sleep(0.5)
103-
credit0x00(web)
104-
142+
print(GR+' [*] Initiating module...')
143+
time.sleep(0.5)
144+
print(R+'\n ========================')
145+
print(R+' CREDIT CARD DISCLOSURE')
146+
print(R+' ========================\n')
147+
credit0x00(web)
148+
Lines changed: 55 additions & 32 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
#!/usr/bin/env python2
2-
# coding: utf-8
2+
# -*- coding: utf-8 -*-
33
#
44
#-:-:-:-:-:-:-:-:-:-:-:-:#
55
# TIDoS Framework #
@@ -12,48 +12,71 @@
1212
from __future__ import print_function
1313
import re
1414
import sys
15-
sys.path.append('files/')
15+
sys.path.append('files/signature-db/')
1616
import time
1717
import requests
1818
from colors import *
19+
links = []
20+
urls = []
21+
found = 0x00
22+
from bs4 import BeautifulSoup
23+
from infodisc_signatures import EMAIL_HARVESTER_SIGNATURE as signature
24+
from requests.packages.urllib3.exceptions import InsecureRequestWarning
25+
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
1926

2027
def mail0x00(url):
2128

2229
print(R+'\n ======================')
2330
print(R+' EMAIL INFO HARVESTER')
2431
print(R+' ======================\n')
2532
time.sleep(0.5)
26-
print(GR+' [*] Making the request...')
27-
req = requests.get(url, allow_redirects=False)
28-
print(O+' [*] Harvesting emails...')
29-
time.sleep(1)
30-
text = req.text
31-
patron = re.compile("[-a-zA-Z0-9._]+@[-a-zA-Z0-9_]+.[a-zA-Z0-9_.]+")
32-
mails = re.findall(patron,text)
33-
for mail in mails:
34-
print(G+' [+] Found : '+O+mail)
35-
url0 = url+'/contact'
36-
q = requests.get(url0, allow_redirects=False)
37-
if str(q.status_code) == '200':
38-
print(O+' [*] Harvesting emails from '+url0+'...')
39-
text = q.text
40-
patron = re.compile("[-a-zA-Z0-9._]+@[-a-zA-Z0-9_]+.[a-zA-Z0-9_.]+")
41-
mails = re.findall(patron,text)
42-
for mail in mails:
43-
print(G+' [+] Found : '+O+mail)
44-
else:
45-
pass
33+
links = [url]
34+
po = url.split('//')[1]
35+
for w in links:
36+
print(GR+' [*] Scraping Page: '+O+url)
37+
req = requests.get(w).text
38+
check0x00(req)
4639

47-
def emailext(web):
40+
soup = BeautifulSoup(req,'lxml')
41+
for line in soup.find_all('a', href=True):
42+
newline = line['href']
43+
try:
44+
if newline[:4] == "http":
45+
if po in newline:
46+
urls.append(str(newline))
47+
elif newline[:1] == "/":
48+
combline = url+newline
49+
urls.append(str(combline))
50+
except:
51+
print(R+' [-] Unhandled Exception Occurred!')
4852

4953
try:
50-
print(GR+' [*] Loading module...')
51-
time.sleep(0.6)
52-
mail0x00(web)
53-
print(G+'\n [+] Done!\n')
54-
55-
except Exception as e:
56-
print(R+' [-] Exception Encountered!')
57-
print(R+' [-] Error: '+str(e))
58-
pass
54+
for uurl in urls:
55+
print(G+"\n [+] Scraping Page: "+O+uurl)
56+
req = requests.get(uurl).text
57+
check0x00(req)
58+
59+
except requests.exceptions:
60+
print(R+' [-] Outbound Query Exception...')
61+
62+
if found == 0x00:
63+
print(R+'\n [-] No Emails found disclosed in plaintext in source code!\n')
64+
65+
print(G+' [+] Scraping Done!')
66+
67+
def check0x00(req):
68+
comments = re.findall(signature,req)
69+
print(GR+" [*] Searching for Emails...")
70+
if comments:
71+
print(G+'\n [+] Found Email(s):')
72+
for comment in comments:
73+
print(C+' - '+comment)
74+
time.sleep(0.03)
75+
found = 0x01
76+
77+
def emailext(web):
78+
79+
print(GR+' [*] Loading module...')
80+
time.sleep(0.6)
81+
mail0x00(web)
5982

modules/0x01-OSINT+Footprinting/0x03-InformationDisclosure/error_patterns.py

Lines changed: 0 additions & 65 deletions
This file was deleted.

0 commit comments

Comments
 (0)