Thursday, August 16, 2018

Enumerate and find active websites

It has been a while since I posted anything. Yesterday I made a thing: a Python script that checks whether websites are up based on their URLs. The script below uses argparse to define its options. Provide an individual URL with "-u" or "--url", or, to enumerate several URLs, dump them into a file (one per line) and read the file with the "-f" or "--file" option.

#!/usr/bin/python3
import argparse
import requests

# This script was created by Steve Johnson. Feel free to share and improve it
# as long as you follow the rule below:
# Sharing is Caring

# Define the command-line options
parser = argparse.ArgumentParser()
parser.add_argument("-u", "--url", help="Website address")
parser.add_argument("-f", "--file", help="Read websites from a file")
args = parser.parse_args()


def main():
    print("//****WebServer Enumeration Script by Steve Johnson.****//\n"
          "//****Provide individual URL or use a File for multiple URLs****//\n"
          "//****Output contains status code of active websites****//\n")
    print("Connecting to Webserver: ")
    if args.file:
        # Read one URL per line from the supplied file
        with open(args.file) as f:
            url_list = f.read().splitlines()
        # Print every URL that responds with something other than 404
        for url in url_list:
            r = requests.get(url)
            if r.status_code != 404:
                print(url + " : " + str(r.status_code))
    if args.url:
        # Check a single URL supplied on the command line
        url = args.url
        r = requests.get(url)
        if r.status_code != 404:
            print(url + " : " + str(r.status_code))


if __name__ == "__main__":
    main()
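
For example, assuming the script is saved as webcheck.py (the filename is just an illustration), you could check a single site or a whole list like this:

python3 webcheck.py -u https://example.com
python3 webcheck.py -f urls.txt

where urls.txt contains one full URL per line, including the scheme, since requests.get expects it:

https://example.com
http://192.168.1.10:8080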
