Hadoop deep dive

main
SoftwareObservatorium 2023-04-04 16:07:42 +02:00
parent 6e580698fd
commit 893b56179c
2 changed files with 30 additions and 2 deletions

@@ -0,0 +1,28 @@
package de.hsma.bdea;

import java.util.Arrays;

public class JavaMVM {
    public static void main(String[] args) {
        double[] vector = {1, 2, 3};
        double[][] matrix = {
                {3, 2, 1},
                {5, 4, 3},
                {7, 8, 9}
        };

        // row-wise matrix-vector multiplication: result[i] = sum over j of matrix[i][j] * vector[j]
        double[] result = new double[matrix.length];
        for (int i = 0; i < matrix.length; i++) {
            double sum = 0;
            for (int j = 0; j < vector.length; j++) { // columns of row i
                sum += matrix[i][j] * vector[j];
            }
            result[i] = sum;
        }

        System.out.println(Arrays.toString(result));
    }
}
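For the sample matrix and vector above, the program prints [10.0, 22.0, 50.0]. This plain-Java loop is the single-machine reference for the MapReduce variant whose job configuration in MVM2 is changed below.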

@@ -33,7 +33,7 @@ public class MVM2 {
        Configuration conf = new Configuration();
        // conf.set("length", "3");
-       length = 3; // determine with a preceding job if necessary
+       length = 3; // number of columns in the matrix (== rows of the vector); determine with a preceding job if necessary

        Job job = Job.getInstance(conf, "mvm1");
        job.setJarByClass(MVM2.class);
@@ -60,7 +60,7 @@ public class MVM2 {
        job.setMapperClass(Mapper.class);
        job.setReducerClass(MVMReducer.class);
        job.setNumReduceTasks(2);
-       job.setPartitionerClass(VectorPartitioner.class);
+       job.setPartitionerClass(VectorPartitioner.class); // for sorting by vector coordinate (optional)

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
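The diff only references VectorPartitioner by name. Below is a minimal sketch of what such a partitioner could look like, assuming the map output key is a Text whose leading token (before a "-" separator) is the vector coordinate and the map output value is an IntWritable; the actual key layout and types used by MVM2 are not visible in this diff.

package de.hsma.bdea;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Partitioner;

// Hypothetical sketch (not taken from MVM2): routes every record whose key starts
// with the same vector coordinate to the same reduce task, so each reducer's
// output stays grouped by coordinate. Assumes keys shaped like "<coordinate>-<rest>".
public class VectorPartitioner extends Partitioner<Text, IntWritable> {
    @Override
    public int getPartition(Text key, IntWritable value, int numPartitions) {
        String coordinate = key.toString().split("-", 2)[0]; // assumed key layout
        return (coordinate.hashCode() & Integer.MAX_VALUE) % numPartitions;
    }
}

With two reduce tasks, as configured above, such a partitioner would spread the vector coordinates across both reducers while keeping all partial products for one coordinate together.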