In this article, we explore bidding auction data analysis. The dataset comes from eBay-like online auctions. Even with a small dataset, we can still extract valuable insights. The analysis uses Spark RDDs in Databricks.
In this activity, we will load data into Apache Spark and inspect it using Spark transformations and actions. We use the SparkContext method textFile to load the data into a Resilient Distributed Dataset (RDD).
Our dataset is a .csv file of online auction data. Each auction has an auction ID associated with it and can have multiple bids; each row represents a single bid.

Attribute information (dataset details): of the fields in each row, the two used in the code below are the auction ID (column 0) and the item type (column 7).

We load this data into Spark using RDDs.
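As a minimal sketch of this loading step (the class name LoadAuctionData and the file name auctiondata.csv are illustrative assumptions; the full program below takes the path as a command-line argument):

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

public class LoadAuctionData {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("LoadAuctionData").setMaster("local");
        JavaSparkContext sc = new JavaSparkContext(conf);
        // Each element of the RDD is one line of the CSV, i.e. one bid
        JavaRDD<String> inputRDD = sc.textFile("auctiondata.csv");
        System.out.println("Loaded " + inputRDD.count() + " bids");
        sc.close();
    }
}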
Objectives
• Load data into Spark
• Use transformations and actions to inspect the data
What transformations and actions would you use in each case? See the sketch after this list.
- How do you see the first element of the input RDD?
- What do you use to see the first 5 elements of the RDD?
- What is the total number of bids?
- What is the total number of distinct items that were auctioned?
- What is the total number of item types that were auctioned?
- What is the total number of bids per item type?
- Across all auctioned items, what is the minimum number of bids?
- Across all auctioned items, what is the maximum number of bids?
- What is the average number of bids?
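Before the full program, here is one possible mapping from these questions to RDD operations (a sketch; the names QuestionSketch, inputRDD, auctionIds, and itemTypes are illustrative, with auctionIds and itemTypes standing for the RDDs of column 0 and column 7 extracted in the program below):

import java.util.List;

import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;

import scala.Tuple2;

public class QuestionSketch {
    // inputRDD: one CSV line per bid; auctionIds: column 0; itemTypes: column 7
    static void answerQuestions(JavaRDD<String> inputRDD,
                                JavaRDD<String> auctionIds,
                                JavaRDD<String> itemTypes) {
        String first = inputRDD.first();                     // first element of the RDD
        List<String> firstFive = inputRDD.take(5);           // first 5 elements
        long totalBids = inputRDD.count();                   // total bids (one per line)
        long distinctItems = auctionIds.distinct().count();  // distinct auctioned items
        long itemTypeCount = itemTypes.distinct().count();   // distinct item types
        JavaPairRDD<String, Integer> bidsPerType =
                itemTypes.mapToPair(t -> new Tuple2<>(t, 1))
                         .reduceByKey((a, b) -> a + b);      // bids per item type
        // Min, max, and average per-auction bid counts are derived in the full
        // program below by keying on the auction ID and reducing.
    }
}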
Input file (contains 10,654 records)

The Spark code is written in Java:

package BiddingDataAnalysis;

import java.util.Arrays;
import java.util.Iterator;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;

import scala.Tuple2;

public class Bidding
{
    public static void main(String[] args) throws Exception
    {
        if (args.length < 1)
        {
            System.err.println("Usage: Bidding <file>");
            System.exit(1);
        }

        SparkConf conf = new SparkConf().setAppName("AuctionBid").setMaster("local");
        JavaSparkContext sc = new JavaSparkContext(conf);

        JavaRDD<String> diskfile = sc.textFile(args[0]);

        /* How do you see the first element of the input RDD? */
        System.out.println("First element is = " + diskfile.first());

        /* What do you use to see the first 5 elements of the RDD? */
        System.out.println("First five elements are = " + diskfile.take(5));

        /* What is the total number of bids? Each line is one bid. */
        System.out.println("Total number of bids = " + diskfile.count());

        /* What is the total number of distinct items that were auctioned?
           Column 0 is the auction ID, one per auctioned item. */
        JavaRDD<String> words = diskfile.flatMap(new FlatMapFunction<String, String>() {
            public Iterator<String> call(String s) {
                return Arrays.asList(s.split(",")[0]).iterator();
            }
        });

        System.out.println("Total number of distinct items that were auctioned = " + words.distinct().count());

        /* What is the total number of item types that were auctioned?
           Column 7 is the item type. */
        JavaRDD<String> items = diskfile.flatMap(new FlatMapFunction<String, String>() {
            public Iterator<String> call(String s) {
                return Arrays.asList(s.split(",")[7]).iterator();
            }
        });

        System.out.println("Total number of item types that were auctioned = " + items.distinct().count());

        /* What is the total number of bids per item type? */
        JavaPairRDD<String, Integer> pairs = items.mapToPair(new PairFunction<String, String, Integer>() {
            public Tuple2<String, Integer> call(String t) throws Exception {
                // Emit (itemType, 1) for every bid
                return new Tuple2<>(t, 1);
            }
        });

        JavaPairRDD<String, Integer> counts = pairs.reduceByKey(new Function2<Integer, Integer, Integer>() {
            public Integer call(Integer v1, Integer v2) throws Exception {
                // Sum the ones to get the bid count per item type
                return v1 + v2;
            }
        });

        System.out.println("Total number of bids per item type = " + counts.collect());

        /* Count the bids per auction: key each line by its auction ID (column 0). */
        JavaPairRDD<String, Integer> auctionidkey = diskfile.mapToPair(s -> new Tuple2<>(s.split(",")[0], 1));

        JavaPairRDD<String, Integer> countgroup = auctionidkey.reduceByKey((a, b) -> a + b);

        JavaRDD<Integer> onlycount = countgroup.values();

        /* Sort the per-auction bid counts in ascending order. */
        JavaRDD<Integer> sortedcount = onlycount.sortBy(new Function<Integer, Integer>() {
            public Integer call(Integer v1) throws Exception {
                return v1; // sort by the count itself
            }
        }, true, 1);

        /* Across all auctioned items, what is the minimum number of bids?
           With an ascending sort, it is the first element. */
        System.out.println("Across all auctioned items, the minimum number of bids = " + sortedcount.first());

        /* Across all auctioned items, what is the maximum number of bids? */
        Integer max = sortedcount.reduce(new Function2<Integer, Integer, Integer>() {
            public Integer call(Integer v1, Integer v2) throws Exception {
                return (v1 > v2) ? v1 : v2;
            }
        });

        System.out.println("Across all auctioned items, the maximum number of bids = " + max);

        /* What is the average number of bids? Sum of all per-auction bid
           counts divided by the number of auctions. */
        Integer summationcount = sortedcount.reduce(new Function2<Integer, Integer, Integer>() {
            public Integer call(Integer v1, Integer v2) throws Exception {
                return v1 + v2;
            }
        });

        int totalcount = (int) sortedcount.count();
        float average = (float) summationcount / totalcount;

        System.out.println("Across all auctioned items, the average number of bids = " + average);

        sc.close();
    }
}
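As a design note, the minimum, maximum, and average could also be computed in one pass with Spark's built-in numeric statistics, instead of sorting and reducing. A sketch, assuming onlycount is the JavaRDD<Integer> of per-auction bid counts from the program above (it would also need import org.apache.spark.util.StatCounter;):

// One-pass summary of the per-auction bid counts
StatCounter stats = onlycount.mapToDouble(Integer::doubleValue).stats();
System.out.println("min = " + stats.min()
        + ", max = " + stats.max()
        + ", average = " + stats.mean());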
Output