import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
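/*
 * One-step matrix multiplication C = A x B.
 *
 * The input format is an assumption (the original listing did not pin one
 * down): one matrix element per line, tagged with its matrix name,
 *     A,i,j,value    and    B,j,k,value
 * The mapper replicates each element to every output cell it contributes to,
 * keyed by "i,k"; the reducer joins the A and B entries on the shared index j
 * and sums the products.
 */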
public class MatrixMultiplication {

    public static class MatrixMultiplicationMapper extends Mapper<Object, Text, Text, Text> {

        @Override
        public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
            Configuration conf = context.getConfiguration();
            int m = Integer.parseInt(conf.get("m")); // rows of A
            int p = Integer.parseInt(conf.get("p")); // columns of B
            // Assumed line format: "A,i,j,value" or "B,j,k,value".
            String[] tokens = value.toString().trim().split(",");
            if (tokens[0].equals("A")) {
                // A[i][j] contributes to C[i][k] for every column k of B.
                for (int k = 0; k < p; k++) {
                    context.write(new Text(tokens[1] + "," + k),
                            new Text("A," + tokens[2] + "," + tokens[3]));
                }
            } else if (tokens[0].equals("B")) {
                // B[j][k] contributes to C[i][k] for every row i of A.
                for (int i = 0; i < m; i++) {
                    context.write(new Text(i + "," + tokens[2]),
                            new Text("B," + tokens[1] + "," + tokens[3]));
                }
            }
        }
    }
    public static class MatrixMultiplicationReducer extends Reducer<Text, Text, Text, IntWritable> {

        @Override
        public void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            // Index the A and B entries for this output cell by the shared
            // dimension j, then compute the sum over j of A[i][j] * B[j][k].
            Map<Integer, Integer> aValues = new HashMap<>();
            Map<Integer, Integer> bValues = new HashMap<>();
            for (Text value : values) {
                String[] parts = value.toString().split(",");
                int j = Integer.parseInt(parts[1]);
                int v = Integer.parseInt(parts[2]);
                if (parts[0].equals("A")) {
                    aValues.put(j, v);
                } else {
                    bValues.put(j, v);
                }
            }
            int sum = 0;
            for (Map.Entry<Integer, Integer> entry : aValues.entrySet()) {
                Integer b = bValues.get(entry.getKey());
                if (b != null) {
                    sum += entry.getValue() * b;
                }
            }
            context.write(key, new IntWritable(sum));
        }
    }
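    /*
     * Example: if A[0][0] = 1, A[0][1] = 2, B[0][0] = 5, and B[1][0] = 7,
     * the reducer for key "0,0" receives the values
     *     A,0,1   A,1,2   B,0,5   B,1,7
     * and writes 0,0 -> 1*5 + 2*7 = 19.
     */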
    public static class MatrixMultiplicationDriver {
        public static void main(String[] args) throws Exception {
            if (args.length != 2) {
                System.err.println("Usage: MatrixMultiplication <input_path> <output_path>");
                System.exit(-1);
            }
            Configuration conf = new Configuration();
            // The matrix dimensions must be known before the job starts; the
            // values here match the 2x2 sample run below and would normally
            // be passed in as arguments.
            conf.set("m", "2"); // rows of A
            conf.set("p", "2"); // columns of B
            Job job = Job.getInstance(conf, "Matrix Multiplication");
            job.setJarByClass(MatrixMultiplicationDriver.class);
            job.setMapperClass(MatrixMultiplicationMapper.class);
            job.setReducerClass(MatrixMultiplicationReducer.class);
            job.setMapOutputKeyClass(Text.class);
            job.setMapOutputValueClass(Text.class);
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(IntWritable.class);
            FileInputFormat.addInputPath(job, new Path(args[0]));
            FileOutputFormat.setOutputPath(job, new Path(args[1]));
            // Submit the job and block until it completes.
            System.exit(job.waitForCompletion(true) ? 0 : 1);
        }
    }
}
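/*
 * Hypothetical end-to-end run for A = [[1,2],[3,4]] and B = [[5,6],[7,8]]
 * (m = 2, p = 2). Input lines:
 *     A,0,0,1  A,0,1,2  A,1,0,3  A,1,1,4
 *     B,0,0,5  B,0,1,6  B,1,0,7  B,1,1,8
 * Expected output:
 *     0,0    19
 *     0,1    22
 *     1,0    43
 *     1,1    50
 */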
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
import java.io.IOException;
import java.util.Iterator;
import java.util.StringTokenizer;
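/*
 * Classic word count written against the legacy org.apache.hadoop.mapred
 * API: the mapper emits (word, 1) for every whitespace-separated token,
 * and the reducer sums those counts per word.
 */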
class WordCountMapper implements Mapper<LongWritable, Text, Text, IntWritable> {
    private final static IntWritable one = new IntWritable(1);
    private Text word = new Text();

    @Override
    public void configure(JobConf job) {}

    @Override
    public void map(LongWritable key, Text value, OutputCollector<Text, IntWritable> output,
            Reporter reporter) throws IOException {
        // Emit (token, 1) for every whitespace-separated token in the line.
        String line = value.toString();
        StringTokenizer tokenizer = new StringTokenizer(line);
        while (tokenizer.hasMoreTokens()) {
            word.set(tokenizer.nextToken());
            output.collect(word, one);
        }
    }

    @Override
    public void close() throws IOException {}
}
class WordCountReducer implements Reducer<Text, IntWritable, Text, IntWritable> {
    private IntWritable result = new IntWritable();

    @Override
    public void configure(JobConf job) {}

    @Override
    public void reduce(Text key, Iterator<IntWritable> values, OutputCollector<Text, IntWritable> output,
            Reporter reporter) throws IOException {
        // Sum the per-occurrence counts emitted by the mapper.
        int sum = 0;
        while (values.hasNext()) {
            sum += values.next().get();
        }
        result.set(sum);
        output.collect(key, result);
    }

    @Override
    public void close() throws IOException {}
}
public class WordCountDriver {
    public static void main(String[] args) throws Exception {
        if (args.length != 2) {
            System.err.println("Usage: WordCountDriver <input path> <output path>");
            System.exit(-1);
        }
        Configuration conf = new Configuration();
        JobConf job = new JobConf(conf, WordCountDriver.class);
        job.setJobName("Word Count");
        job.setMapperClass(WordCountMapper.class);
        job.setReducerClass(WordCountReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        job.setInputFormat(TextInputFormat.class);
        job.setOutputFormat(TextOutputFormat.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        JobClient.runJob(job);
    }
}
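/*
 * Typical invocation (the jar name and HDFS paths are illustrative only):
 *     hadoop jar wordcount.jar WordCountDriver /user/hadoop/input /user/hadoop/output
 *     hdfs dfs -cat /user/hadoop/output/part-00000
 */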