BigSnarf blog

Infosec FTW

PySpark analysis of an Apache Access Log

# read the access log from HDFS into an RDD and cache it in memory
logs = sc.textFile('hdfs:///big-access-log').cache()

# create filters, one per HTTP status code
# (caveat: a bare substring match also hits lines where the code
#  appears in the URL or byte count; see the sketch below)
errors500 = logs.filter(lambda logline: "500" in logline)
errors404 = logs.filter(lambda logline: "404" in logline)
errors200 = logs.filter(lambda logline: "200" in logline)
# grab counts
e500_count = errors500.count()
e404_count = errors404.count()
e200_count = errors200.count()
# bring the matching lines back to the driver as local lists
# (collect() returns everything; no need to count first and take())
local_500 = errors500.collect()
local_404 = errors404.collect()
local_200 = errors200.collect()

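One caveat with the substring filters above: "500" in logline also matches lines where 500 shows up in the byte count or the URL, not just the status code. A tighter sketch, assuming the log is in Apache common/combined format (the status code is the ninth whitespace-separated field) and using a hypothetical has_status helper:

# status code is whitespace-separated field index 8 in Apache
# common/combined log format (an assumption; adjust for your format)
def has_status(logline, code):
    try:
        return logline.split()[8] == code
    except IndexError:
        return False  # skip malformed lines

errors500 = logs.filter(lambda logline: has_status(logline, "500"))
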
def make_ip_list(iterable):
    """Pull the client IP (first whitespace-separated field) from each log line."""
    return [line.split()[0] for line in iterable]

def list_count(iterable):
    """Tally occurrences of each item into a dict of item -> count."""
    d = {}
    for i in iterable:
        d[i] = d.get(i, 0) + 1
    return d
# per-IP request counts for 500, 404, and 200 responses across the dataset
ip_addresses_making_500_requests = list_count(make_ip_list(local_500))
ip_addresses_making_404_requests = list_count(make_ip_list(local_404))
ip_addresses_making_200_requests = list_count(make_ip_list(local_200))
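
Worth noting: collect() ships every matching line back to the driver, which is fine for this dataset but won't scale to a big log. The same per-IP tallies can be computed on the cluster so that only the (ip, count) pairs come home. A minimal sketch against the errors500 RDD from above:

# map each line to (ip, 1) and sum the ones per IP on the cluster;
# only the final (ip, count) pairs return to the driver
ip_500_counts = (errors500
                 .map(lambda logline: (logline.split()[0], 1))
                 .reduceByKey(lambda a, b: a + b)
                 .collect())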
