KmeansClustering.java: Sentiment analysis of Twitter data using K-Means Clustering.

                rightAnswers += d;
            }
        }
        System.out.println();
        }

        float accuration;
        int rows = 0;
        for (int i = 0; i < row.length; i++) {
            rows += row[i];
        }
        System.out.println();
        accuration = (float) rightAnswers / rows * 100;
        System.out.println("Accuration = " + accuration + "%");
    }
}
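The tail of this listing computes the clustering accuracy as the number of correctly assigned tweets divided by the total number of tweets, scaled to a percentage. Below is a minimal standalone sketch of that calculation; the counts are hypothetical stand-ins for the confusion-matrix values built earlier in the class.

// Sketch of the accuracy calculation above. Assumed context: "row" holds
// per-cluster tweet counts and "rightAnswers" the correctly clustered ones.
public class AccuracySketch {
    public static void main(String[] args) {
        int rightAnswers = 160;           // hypothetical number of correctly clustered tweets
        int[] row = {40, 38, 42, 41, 39}; // hypothetical cluster sizes

        int rows = 0;
        for (int i = 0; i < row.length; i++) {
            rows += row[i];               // total number of clustered tweets
        }
        float accuration = (float) rightAnswers / rows * 100; // percentage accuracy
        System.out.println("Accuration = " + accuration + "%");
    }
}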

13. PreprocessingWeighting.java

package sentimentanalysis;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import sentimentanalysis.WordFreq.Word;

public class PreprocessingWeighting {

    public static void main(String[] args) throws IOException, Exception {
        // TODO code application logic here
        int n = 200;

        System.out.print("Tokenizing : ");
        Tokenizing love = new Tokenizing("data ver.4/cinta.txt");
        Tokenizing anger = new Tokenizing("data ver.4/marah.txt");
        Tokenizing sad = new Tokenizing("data ver.4/sedih.txt");
        Tokenizing happy = new Tokenizing("data ver.4/senang.txt");
        Tokenizing fear = new Tokenizing("data ver.4/takut.txt");
        LinkedHashMap<Integer, ArrayList<String>> loveMap = love.getAllToken(1);
        LinkedHashMap<Integer, ArrayList<String>> angerMap = anger.getAllToken(1 + n);
        LinkedHashMap<Integer, ArrayList<String>> sadMap = sad.getAllToken(1 + 2 * n);
        LinkedHashMap<Integer, ArrayList<String>> happyMap = happy.getAllToken(1 + 3 * n);
        LinkedHashMap<Integer, ArrayList<String>> fearMap = fear.getAllToken(1 + 4 * n);
        LinkedHashMap<Integer, ArrayList<String>> tweetsMap = new LinkedHashMap<>();
        tweetsMap.putAll(loveMap);
        tweetsMap.putAll(angerMap);
        tweetsMap.putAll(sadMap);
        tweetsMap.putAll(happyMap);
        tweetsMap.putAll(fearMap);
        LinkedHashMap<Integer, ArrayList<String>> oriTweetsMap = new LinkedHashMap<>(tweetsMap);
        System.out.println("done");
        for (int key : tweetsMap.keySet()) {
            System.out.println("\t" + key + " " + tweetsMap.get(key));
        }
        System.out.println("\t" + "Size = " + tweetsMap.size());
        System.out.println();

        System.out.print("Removing Stopwords : ");
        RemoveStopword removeStopword = new RemoveStopword("stopwords.txt");
        removeStopword.setWordsHashMap(tweetsMap);
        tweetsMap = removeStopword.getWordsHashMap();
        System.out.println("done");
        for (int key : tweetsMap.keySet()) {
            System.out.println("\t" + key + " " + tweetsMap.get(key));
        }
        System.out.println("\t" + "Size = " + tweetsMap.size());
        System.out.println();

        System.out.print("Stemming : ");
        Stemming stemming = new Stemming("dictionary.txt", "synonym.txt");
        tweetsMap = stemming.stem(tweetsMap);
        System.out.println("done");
        for (int key : tweetsMap.keySet()) {
            System.out.println("\t" + key + " " + tweetsMap.get(key));
        }
        System.out.println("\t" + "Size = " + tweetsMap.size());
        System.out.println();

        System.out.print("Negation Handling : ");
        NegationHandling negation = new NegationHandling();
        tweetsMap = negation.negationHandling(tweetsMap);
        System.out.println("done");
        for (int key : tweetsMap.keySet()) {
            System.out.println("\t" + key + " " + tweetsMap.get(key));
        }
        System.out.println("\t" + "Size = " + tweetsMap.size());
        System.out.println();

        System.out.print("Counting Terms Freq : ");
        ArrayList<String> words = new ArrayList<>();
        for (int key : tweetsMap.keySet()) {
            words.addAll(tweetsMap.get(key));
        }
        Word[] wordFreq = new WordFreq().getFreqWords(words);
        LinkedHashMap<String, Integer> wordsCount = new LinkedHashMap<>();
        for (Word w : wordFreq) {
            wordsCount.put(w.getWord(), w.getCount());
        }
        System.out.println("done");
        for (String key : wordsCount.keySet()) {
            System.out.println("\t" + key + " " + wordsCount.get(key));
        }
        System.out.println();

        ArrayList<String> terms = new ArrayList<>();
        terms.addAll(wordsCount.keySet());

        System.out.print("Forming Document Vector : ");
        TfIdf tfIdf = new TfIdf();
        LinkedHashMap<Integer, double[]> features = tfIdf.tfIdfCalculator(tweetsMap, terms);
        System.out.println("done");
        for (int key : features.keySet()) {
            System.out.println("\t" + key + " " + Arrays.toString(features.get(key)));
        }
        System.out.println();

        CSV csv = new CSV();
        csv.writeFreqToCSV(wordsCount, "freq.csv");
        csv.writeFeaturesToCSV(features, "features.csv");
        csv.writeTweetsToCSV(oriTweetsMap, "tweets.csv");
    }
}
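The TfIdf class called above is not shown in this fragment. As a rough illustration of the weighting it is expected to produce, the sketch below computes a standard tf-idf value for a single term of a single tweet. The method name tfIdf and the log(N/df) form are assumptions for illustration, not the thesis implementation.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;

// Illustrative only: a standard tf-idf weight for one term in one tweet.
// The actual TfIdf.tfIdfCalculator used in the thesis may differ.
public class TfIdfSketch {

    static double tfIdf(String term, ArrayList<String> tweet,
                        LinkedHashMap<Integer, ArrayList<String>> allTweets) {
        // term frequency: occurrences of the term in this tweet
        double tf = tweet.stream().filter(t -> t.equals(term)).count();
        // document frequency: number of tweets containing the term
        long df = allTweets.values().stream().filter(t -> t.contains(term)).count();
        if (df == 0) {
            return 0.0;
        }
        // inverse document frequency in the common log(N / df) form
        double idf = Math.log((double) allTweets.size() / df);
        return tf * idf;
    }

    public static void main(String[] args) {
        LinkedHashMap<Integer, ArrayList<String>> tweets = new LinkedHashMap<>();
        tweets.put(1, new ArrayList<>(Arrays.asList("cinta", "senang")));
        tweets.put(2, new ArrayList<>(Arrays.asList("marah", "sedih")));
        System.out.println(tfIdf("cinta", tweets.get(1), tweets));
    }
}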

14. Clustering.java

package sentimentanalysis;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.TreeSet;

public class Clustering {

    public static void main(String[] args) throws IOException {
        int k = 5;
        int minFreq = 3;
        boolean zscore = false;
        boolean minmax = true;
        int n = 5;
        CSV csv = new CSV();
        LinkedHashMap<Integer, double[]> features = csv.readFeaturesFromCSV("features.csv");
        LinkedHashMap<Integer, ArrayList<String>> tweets = csv.readTweetsFromCSV("tweets.csv");
        LinkedHashMap<String, Integer> freq = csv.readFreqFromCSV("freq.csv");
        System.out.println("All Terms = " + features.get(1).length);
        double m = features.get(1)[0];
        for (int key : features.keySet()) {
            for (double d : features.get(key)) {
                if (d > m) {
                    m = d;
                }
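The loop above scans every feature vector and keeps the largest tf-idf value in m, which, together with the minmax flag, points at min-max scaling of the features before clustering. Below is a minimal sketch of such a normalization, assuming the bounds are taken over all vectors and the scaling is applied in place; the thesis code may instead normalize per term or use m alone.

import java.util.Arrays;
import java.util.LinkedHashMap;

// Illustrative min-max normalization of the feature vectors; written as an
// assumption about how m is used, not as the thesis implementation.
public class MinMaxSketch {

    static void normalize(LinkedHashMap<Integer, double[]> features) {
        double min = Double.MAX_VALUE;
        double max = -Double.MAX_VALUE;
        for (double[] vector : features.values()) {
            for (double d : vector) {
                if (d < min) min = d;
                if (d > max) max = d;
            }
        }
        double range = max - min;
        if (range == 0) return;                       // avoid division by zero
        for (double[] vector : features.values()) {
            for (int i = 0; i < vector.length; i++) {
                vector[i] = (vector[i] - min) / range; // scale into [0, 1]
            }
        }
    }

    public static void main(String[] args) {
        LinkedHashMap<Integer, double[]> features = new LinkedHashMap<>();
        features.put(1, new double[]{0.0, 2.0, 4.0});
        features.put(2, new double[]{1.0, 3.0, 5.0});
        normalize(features);
        System.out.println(Arrays.toString(features.get(1)));
    }
}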