Stemming.java (sentiment analysis of Twitter data using K-Means Clustering)

                    } else if (dictionary.contains("t" + word.substring(3))) {
                        word = "t" + word.substring(3);
                    } else if (word.startsWith("peng")) {
                        if (dictionary.contains("k" + word.substring(4))) {
                            word = "k" + word.substring(4);
                        } else if (dictionary.contains(word.substring(4))) {
                            word = word.substring(4);
                        }
                    } else if (word.startsWith("peny")) {
                        if (dictionary.contains("s" + word.substring(4))) {
                            word = "s" + word.substring(4);
                        }
                    }
                }
            }
        }

            // "me-" family of prefixes (me-, mem-, men-, meng-, meny-); prefixes are stripped
            // only while the word is not yet in the root-word dictionary (negated guard assumed)
            if (!dictionary.contains(word)) {
                if (word.startsWith("me")) {
                    if (dictionary.contains(word.substring(2))) {
                        word = word.substring(2);
                    } else if (word.startsWith("mem")) {
                        if (word.substring(3, 4).matches("[bpf]")) {
                            word = word.substring(3);
                        } else if (dictionary.contains("p" + word.substring(3))) {
                            word = "p" + word.substring(3);
                        }
                    } else if (word.startsWith("men")) {
                        if (word.substring(3, 4).matches("[cdj]")) {
                            word = word.substring(3);
                        } else if (dictionary.contains("t" + word.substring(3))) {
                            word = "t" + word.substring(3);
                        } else if (word.startsWith("meng")) {
                            if (word.substring(4, 5).matches("[gh]")) {
                                word = word.substring(4);
                            } else if (dictionary.contains("k" + word.substring(4))) {
                                word = "k" + word.substring(4);
                            } else if (dictionary.contains(word.substring(4))) {
                                word = word.substring(4);
                            }
                        } else if (word.startsWith("meny")) {
                            if (dictionary.contains("s" + word.substring(4))) {
                                word = "s" + word.substring(4);
                            }
                        }
                    }
                }
            }

            // informal prefixes common in colloquial tweets (m-, ng-, nge-, ny-, n-)
            if (!dictionary.contains(word)) {
                if (word.startsWith("m")) {
                    if (dictionary.contains("p" + word.substring(1))) {
                        word = "p" + word.substring(1);
                    } else if (dictionary.contains(word.substring(1))) {
                        word = word.substring(1);
                    }
                } else if (word.startsWith("ng")) {
                    if (word.startsWith("nge")) {
                        if (dictionary.contains("nge" + word.substring(3))) {
                            word = word.substring(3);
                        }
                    } else if (dictionary.contains("k" + word.substring(2))) {
                        word = "k" + word.substring(2);
                    } else if (dictionary.contains(word.substring(2))) {
                        word = word.substring(2);
                    }
                } else if (word.startsWith("ny")) {
                    if (dictionary.contains("c" + word.substring(2))) {
                        word = "c" + word.substring(2);
                    } else if (dictionary.contains("s" + word.substring(2))) {
                        word = "s" + word.substring(2);
                    } else if (dictionary.contains(word.substring(2))) {
                        word = word.substring(2);
                    }
                } else if (word.startsWith("n")) {
                    if (dictionary.contains("t" + word.substring(1))) {
                        word = "t" + word.substring(1);
                    }
                }
            }
        } catch (StringIndexOutOfBoundsException e) {
            return word;
        }
        return word;
    }

    private String deleteSuffix() {
        // question/emphatic particles -lah, -kah, -tah, -pun
        if (!dictionary.contains(word)) {
            if (word.endsWith("lah")) {
                word = word.substring(0, word.length() - 3);
            } else if (word.endsWith("kah")) {
                word = word.substring(0, word.length() - 3);
            } else if (word.endsWith("tah")) {
                word = word.substring(0, word.length() - 3);
            } else if (word.endsWith("pun")) {
                word = word.substring(0, word.length() - 3);
            }
        }
        // possessive suffixes -ku, -mu, -nya (and the informal -ny)
        if (!dictionary.contains(word)) {
            if (word.endsWith("ku")) {
                word = word.substring(0, word.length() - 2);
            } else if (word.endsWith("mu")) {
                word = word.substring(0, word.length() - 2);
            } else if (word.endsWith("nya")) {
                word = word.substring(0, word.length() - 3);
            } else if (word.endsWith("ny")) {
                word = word.substring(0, word.length() - 2);
            }
        }
        // derivational suffixes -i, -an/-kan and the informal -in
        if (!dictionary.contains(word)) {
            if (word.endsWith("i")) {
                word = word.substring(0, word.length() - 1);
            }
            if (word.endsWith("an")) {
                if (word.endsWith("kan")) {
                    word = word.substring(0, word.length() - 3);
                } else {
                    word = word.substring(0, word.length() - 2);
                }
            }
            if (!dictionary.contains(word)) {
                if (word.endsWith("in")) {
                    word = word.substring(0, word.length() - 2);
                }
            }
        }
        return word;
    }

    private String deleteReduplicate() {
        // hyphenated reduplication (e.g. "buku-buku"): keep whichever half is a dictionary root,
        // falling back to the second half
        String firstWord;
        String lastWord;
        if (word.contains("-")) {
            firstWord = word.substring(0, word.indexOf("-"));
            lastWord = word.substring(word.indexOf("-") + 1);
            if (dictionary.contains(firstWord)) {
                word = firstWord;
            } else if (dictionary.contains(lastWord)) {
                word = lastWord;
            } else {
                word = lastWord;
            }
        }
        return word;
    }
}
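To make the rules above concrete, the following is a minimal, self-contained sketch, assuming a tiny hard-coded root-word set in place of the dictionary used by Stemming.java; the class StemmingSketch, its root set, and the strip() helper are illustrative assumptions and not part of the thesis code. It only shows how the reduplication and suffix rules behave on example words such as buku-buku and makanannya.

package sentimentanalysis;

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class StemmingSketch {

    // hypothetical root-word set standing in for the dictionary field of Stemming.java
    private static final Set<String> ROOTS =
            new HashSet<>(Arrays.asList("baca", "makan", "buku"));

    static String strip(String word) {
        // reduplication: "buku-buku" -> keep the first half if it is a known root
        if (word.contains("-")) {
            String first = word.substring(0, word.indexOf("-"));
            if (ROOTS.contains(first)) {
                return first;
            }
        }
        // possessive suffix "-nya": "makanannya" -> "makanan"
        if (!ROOTS.contains(word) && word.endsWith("nya")) {
            word = word.substring(0, word.length() - 3);
        }
        // derivational suffix "-an": "makanan" -> "makan"
        if (!ROOTS.contains(word) && word.endsWith("an")) {
            word = word.substring(0, word.length() - 2);
        }
        return word;
    }

    public static void main(String[] args) {
        System.out.println(strip("buku-buku"));   // buku
        System.out.println(strip("makanannya"));  // makan
    }
}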

4. TfIdf.java

package sentimentanalysis;

import java.util.ArrayList;
import java.util.LinkedHashMap;

public class TfIdf {

    private LinkedHashMap<Integer, double[]> tfIdfDocsVector;

    // term frequency: raw count of the term in one tokenized document
    public double tfCalculator(ArrayList<String> docTerms, String termToCheck) {
        int tf = 0;
        for (String s : docTerms) {
            if (s.equalsIgnoreCase(termToCheck)) {
                tf++;
            }
        }
        return tf;
    }

    // inverse document frequency: log10(N / df), smoothed with +1 when the term occurs in every document
    public double idfCalculator(LinkedHashMap<Integer, ArrayList<String>> allTermsMap, String termToCheck) {
        int df = 0;
        for (int key : allTermsMap.keySet()) {
            if (allTermsMap.get(key).contains(termToCheck)) {
                df++;
            }
        }
        if (df != 0) {
            if (df == allTermsMap.size()) {
                return 1 + Math.log10((double) allTermsMap.size() / df);
            } else {
                return Math.log10((double) allTermsMap.size() / df);
            }
        } else {
            return 0;
        }
    }

    // builds the TF-IDF vector of every document over the given term list
    public LinkedHashMap<Integer, double[]> tfIdfCalculator(LinkedHashMap<Integer, ArrayList<String>> allTermsMap, ArrayList<String> terms) {
        double tf;
        double idf;
        double tfIdf;
        tfIdfDocsVector = new LinkedHashMap<>();
        for (int key : allTermsMap.keySet()) {
            double[] tfIdfVector = new double[terms.size()];
            int c = 0;
            for (String term : terms) {
                tf = tfCalculator(allTermsMap.get(key), term);
                idf = idfCalculator(allTermsMap, term);
                tfIdf = tf * idf;
                tfIdfVector[c] = tfIdf;
                c++;
            }
            tfIdfDocsVector.put(key, tfIdfVector);
        }
        return tfIdfDocsVector;
    }
}
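A minimal usage sketch of the TfIdf class above, assuming every tweet has already been tokenized into an ArrayList<String> and that document keys start at 1; the class TfIdfDemo and the example tokens are illustrative assumptions, not part of the thesis code.

package sentimentanalysis;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;

public class TfIdfDemo {
    public static void main(String[] args) {
        // two toy "tweets", already tokenized; document ids start at 1
        LinkedHashMap<Integer, ArrayList<String>> docs = new LinkedHashMap<>();
        docs.put(1, new ArrayList<>(Arrays.asList("layanan", "bagus", "cepat")));
        docs.put(2, new ArrayList<>(Arrays.asList("layanan", "lambat")));

        // global term list that fixes the order of the vector components
        ArrayList<String> terms = new ArrayList<>(Arrays.asList("layanan", "bagus", "cepat", "lambat"));

        LinkedHashMap<Integer, double[]> vectors = new TfIdf().tfIdfCalculator(docs, terms);
        System.out.println(Arrays.toString(vectors.get(1)));
    }
}

Because "layanan" occurs in both toy documents its idf falls in the smoothed branch, 1 + log10(2/2) = 1, while "bagus" and "cepat" each occur in one of two documents and get log10(2) ≈ 0.301, so the printed vector for document 1 is approximately [1.0, 0.301, 0.301, 0.0].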

5. Normalization.java

package sentimentanalysis;

import java.util.LinkedHashMap;

public class Normalization {

    double[] mean;
    double[] std;
    LinkedHashMap<Integer, double[]> zScoreMap;
    double[] min;
    double[] max;
    LinkedHashMap<Integer, double[]> minMaxMap;

    // z-score normalization; mean and std are computed per row, and entries whose
    // integer value equals the sentinel n are left unchanged
    public LinkedHashMap<Integer, double[]> ZScore(LinkedHashMap<Integer, double[]> input, int n) {
        double[][] matrix;
        matrix = new double[input.size()][input.get(1).length];
        for (int i : input.keySet()) {
            int j = 0;
            for (double d : input.get(i)) {
                matrix[i - 1][j] = d;
                j++;
            }
        }
        getMean(matrix);
        getStdDev(matrix);
        for (int i = 0; i < matrix.length; i++) {
            for (int j = 0; j < matrix[i].length; j++) {
                if ((int) matrix[i][j] != n) {
                    matrix[i][j] = (matrix[i][j] - mean[i]) / std[i];
                }
            }
        }
        zScoreMap = new LinkedHashMap<>();
        for (int i = 0; i < matrix.length; i++) {
            zScoreMap.put(i + 1, matrix[i]);
        }
        return zScoreMap;
    }

    // min-max normalization per column; entries whose integer value equals the sentinel n are left unchanged
    public LinkedHashMap<Integer, double[]> MinMax(LinkedHashMap<Integer, double[]> input, int n) {
        double[][] matrix;
        matrix = new double[input.size()][input.get(1).length];
        for (int i : input.keySet()) {
            int j = 0;
            for (double d : input.get(i)) {
                matrix[i - 1][j] = d;
                j++;
            }
        }
        getMin(matrix);
        getMax(matrix);
        for (int i = 0; i < matrix[0].length; i++) {
            for (int j = 0; j < matrix.length; j++) {
                if ((int) matrix[j][i] != n) {
                    if (min[i] == max[i]) {
                        matrix[j][i] = 0.5;
                    } else {
                        matrix[j][i] = (matrix[j][i] - min[i]) / (max[i] - min[i]);
                    }
                }
            }
        }
        minMaxMap = new LinkedHashMap<>();
        for (int i = 0; i < matrix.length; i++) {
            minMaxMap.put(i + 1, matrix[i]);
        }
        return minMaxMap;
    }

    // mean of every row of the matrix
    private double[] getMean(double[][] mat) {
        mean = new double[mat.length];
        for (int i = 0; i < mat.length; i++) {
            double sum = 0;
            for (int j = 0; j < mat[i].length; j++) {
                sum += mat[i][j];
            }
            mean[i] = sum / mat[i].length;
        }
        return mean;
    }

    // population standard deviation of every row of the matrix
    private double[] getStdDev(double[][] mat) {
        std = new double[mat.length];
        for (int i = 0; i < mat.length; i++) {
            double temp = 0;
            for (int j = 0; j < mat[i].length; j++) {
                temp += Math.pow(mat[i][j] - mean[i], 2);
            }
            temp = temp / mat[i].length;
            std[i] = Math.sqrt(temp);
        }
        return std;
    }

    // minimum of every column of the matrix
    private double[] getMin(double[][] mat) {
        min = new double[mat[0].length];
        for (int i = 0; i < mat[0].length; i++) {
            double m = mat[0][i];
            for (int j = 0; j < mat.length; j++) {
                if (mat[j][i] < m) {
                    m = mat[j][i];
                }
            }
            min[i] = m;
        }
        return min;
    }

    // maximum of every column of the matrix
    private double[] getMax(double[][] mat) {
        max = new double[mat[0].length];
        for (int i = 0; i < mat[0].length; i++) {