enum_wayback.rb
##
# $Id: enum_wayback.rb 10394 2010-09-20 08:06:27Z jduck $
##
##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# Framework web site for more information on licensing and terms of use.
# http://metasploit.com/framework/
##

require 'msf/core'
require 'net/http'

class Metasploit3 < Msf::Auxiliary

  include Msf::Auxiliary::Report

  def initialize(info = {})
    super(update_info(info,
      'Name'        => 'Pull Archive.org stored URLs for a domain',
      'Description' => %q{
        This module pulls and parses the URLs stored by Archive.org for the
        purpose of replaying them during a web assessment, locating unlinked
        and old pages.
      },
      'Author'      => [ 'Rob Fuller <mubix [at] hak5.org>' ],
      'License'     => MSF_LICENSE,
      'Version'     => '$Revision: 10394 $'
    ))

    register_options(
      [
        OptString.new('DOMAIN',  [ true,  "Domain to request URLs for" ]),
        OptString.new('OUTFILE', [ false, "Where to output the list for use" ])
      ], self.class)

    register_advanced_options(
      [
        OptString.new('PROXY',      [ false, "Proxy server to route connection. <host>:<port>", nil ]),
        OptString.new('PROXY_USER', [ false, "Proxy Server User", nil ]),
        OptString.new('PROXY_PASS', [ false, "Proxy Server Password", nil ])
      ], self.class)
  end
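
  # Illustrative msfconsole session; the module path is an assumption based
  # on where this file typically lives in the framework tree:
  #
  #   msf > use auxiliary/scanner/http/enum_wayback
  #   msf auxiliary(enum_wayback) > set DOMAIN example.com
  #   msf auxiliary(enum_wayback) > run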

  def pull_urls(targetdom)
    response = ""
    pages = []
    header = { 'User-Agent' => "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/525.13 (KHTML, like Gecko) Chrome/4.0.221.6 Safari/525.13" }
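
    # Net::HTTP::Proxy returns an HTTP class that routes requests through the
    # given proxy; when the proxy host is nil it connects directly.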
    clnt = Net::HTTP::Proxy(@proxysrv, @proxyport, @proxyuser, @proxypass).new("web.archive.org")
    resp, data = clnt.get2("/web/*sr_1nr_1300/http://" + targetdom + "/*", header)
    response << data
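
    # Each result line in the returned listing looks roughly like
    #   <a href="/web/...">example.com/page</a><br>
    # gsub! keeps only the link text (capture group 2) and returns nil for
    # lines that do not match, which are pruned below.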
    response.each_line do |line|
      pages << line.gsub!(/(.+>)(.+)(<\/a><br>)\n/, '\2')
    end

    pages.delete_if { |x| x.nil? }
    pages.uniq!
    pages.sort!
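
    # Prefix each result with a scheme so the list can be fed straight to
    # replay tools.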
    pages.map! { |page| "http://" + page.to_s }

    return pages
  end

  def write_output(data)
    print_status("Writing URLs list to #{datastore['OUTFILE']}...")
    file_name = datastore['OUTFILE']

    if FileTest::exist?(file_name)
      print_status("OUTFILE already existed, appending...")
    else
      print_status("OUTFILE did not exist, creating...")
    end

    File.open(file_name, 'ab') do |fd|
      fd.write(data)
    end
  end

  def run
    if datastore['PROXY']
      @proxysrv, @proxyport = datastore['PROXY'].split(":")
      @proxyuser = datastore['PROXY_USER']
      @proxypass = datastore['PROXY_PASS']
    else
      @proxysrv, @proxyport = nil, nil
    end

    target = datastore['DOMAIN']
    urls = []

    print_status("Pulling URLs from Archive.org")
    urls = pull_urls(target)
    print_status("Located #{urls.count} addresses for #{target}")

    if datastore['OUTFILE']
      write_output(urls.join("\n") + "\n")
    else
      urls.each do |i|
        puts(i)
      end
    end
  end

end
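
# A minimal standalone sketch of the same enumeration against Archive.org's
# CDX API (one capture per line); the endpoint and parameters below are
# assumptions based on the public CDX interface and are not part of this
# module:
#
#   require 'net/http'
#   require 'uri'
#
#   def wayback_urls(domain)
#     uri = URI("http://web.archive.org/cdx/search/cdx?url=#{domain}/*&fl=original&collapse=urlkey")
#     Net::HTTP.get(uri).each_line.map(&:strip).reject(&:empty?).uniq
#   end
#
#   puts wayback_urls("example.com")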