/*
SimpleApp.scala
Simple Spark job that scans an auth.log and counts "Invalid user" login attempts and "Failed password" entries
./bin/spark-submit --class "SimpleApp" --master local[4] target/scala-2.10/simple-project_2.10-1.0.jar
*/
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.SparkConf
object SimpleApp {
  def main(args: Array[String]) {
    val logFile = "/Users/antigen/Downloads/sanitized_log/auth.log"
    val conf = new SparkConf().setAppName("SimpleApacheLogProcessing Application")
    val sc = new SparkContext(conf)

    // Read the log into an RDD with 2 partitions and cache it, since it is scanned twice below.
    val logData = sc.textFile(logFile, 2).cache()

    // Count the lines recording invalid-user login attempts and failed passwords.
    val numInvalidUser = logData.filter(line => line.contains("Invalid user")).count()
    val numFailedPassword = logData.filter(line => line.contains("Failed password")).count()

    println("Lines with INVALID USER: %s, Lines with FAILED PASSWORD: %s"
      .format(numInvalidUser, numFailedPassword))

    sc.stop()
  }
}
Code, folder structure, simple.sbt, and the packaged jar files are here:
https://github.com/bigsnarfdude/SimpleApp

A related Spark log-analysis reference application from Databricks:
http://databricks.gitbooks.io/databricks-spark-reference-applications/content/logs_analyzer/app/README.html
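
For reference, a minimal simple.sbt for building this jar might look like the sketch below. The spark-core version is an assumption; the actual build file lives in the repository linked above.

name := "Simple Project"

version := "1.0"

scalaVersion := "2.10.4"

// Assumed Spark version; match whatever your cluster or local install uses.
libraryDependencies += "org.apache.spark" %% "spark-core" % "1.2.0"

With this file, running sbt package produces target/scala-2.10/simple-project_2.10-1.0.jar, the artifact referenced in the spark-submit command at the top of SimpleApp.scala.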