package eu.fbk.dkm.pikes.raid.mdfsa;

import edu.stanford.nlp.semgraph.SemanticGraph;
import edu.stanford.nlp.semgraph.SemanticGraphCoreAnnotations.BasicDependenciesAnnotation;
import edu.stanford.nlp.util.CoreMap;
import eu.fbk.shell.mdfsa.data.structures.DomainGraph;
import eu.fbk.shell.mdfsa.data.structures.FuzzyMembership;
import eu.fbk.dkm.pikes.raid.mdfsa.wordnet.WordNetLexicalizer;
import eu.fbk.dkm.pikes.raid.mdfsa.wordnet.WordNetLoader;
import org.tartarus.snowball.ext.porterStemmer;

import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Properties;

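/**
 * Facade of the MDFSA sentiment engine: it loads a serialized {@link DomainGraph} together
 * with the WordNet resources and evaluates the polarity of sentences (or of sentence
 * fragments identified through their dependency graph) against the fuzzy polarity shapes
 * associated with the domain concepts.
 *
 * A minimal usage sketch; the model path is a placeholder and the no-argument constructor
 * expects an {@code mdfsa.properties} file on the classpath:
 * <pre>{@code
 * APIManager am = new APIManager();
 * am.loadModel("/path/to/serialized/model");
 * double polarity = am.evaluateSentence("the staff was not helpful");
 * // -2.0 signals that no concept of the model was found in the sentence
 * }</pre>
 */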
public class APIManager {

  private Properties prp;
  private DomainGraph domain;
  private WordNetLoader wnl;
  private WordNetLexicalizer wnlex;
  private HashMap<String, Long> labels;              // concept label -> feature id in the domain graph
  private HashMap<Long, FuzzyMembership> polarities; // feature id -> fuzzy polarity shape of the concept

  public APIManager() throws IOException {
    this.prp = new Properties();
    InputStream iS = ClassLoader.getSystemClassLoader().getResourceAsStream("mdfsa.properties");
    if(iS == null) {
      throw new IOException("Properties file 'mdfsa.properties' not found on the classpath.");
    }
    prp.load(iS);
  }


  public APIManager(Properties prp) throws IOException {
    this.prp = prp;
  }

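  /**
   * Loads the WordNet resources and the serialized domain graph stored at {@code modelPath},
   * and caches the concept labels and the per-concept fuzzy polarity shapes used by the
   * {@code evaluateSentence} methods.
   */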
  public void loadModel(String modelPath) {
    this.wnl = new WordNetLoader(this.prp);
    this.wnl.load();
    this.domain = new DomainGraph(this.prp);
    this.domain.setDomainGraphFromSerializedData(modelPath);
    this.wnlex = new WordNetLexicalizer(this.wnl.getAllTerms(), this.wnl.getAllExceptions());
    this.labels = this.domain.getGraph().getLabels();
    this.polarities = this.domain.getPolarities();
  }

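  /**
   * Evaluates the polarity of a plain-text sentence.
   *
   * The text is tokenized on blanks, each token is lexicalized through WordNet and stemmed
   * with the Porter stemmer, and the resulting forms are matched against the (possibly
   * multi-word, underscore-joined) concept labels of the domain graph. A simple heuristic on
   * the token "not" flips the polarity shape of negated concepts; the shapes of all matched
   * concepts are then aggregated and the x-axis centroid of the aggregate is returned.
   *
   * @param text the sentence to evaluate
   * @return the polarity score, or -2.0 when no known concept is found (or the centroid is NaN)
   */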
  public double evaluateSentence(String text) {
    double textPolarity = -2.0;
    double fuzzyShapeFound = 0.0;
    double tempPolarity = 0.0;

    HashMap<String, Integer> sentenceTokens = new HashMap<String, Integer>();
    HashMap<String, Integer> stemmedTokens = new HashMap<String, Integer>();

    // Aggregated fuzzy shape; its initial values act as a sentinel, detected below via
    // eT.getA() != 1.0, to know whether at least one concept has been matched.
    FuzzyMembership eT = new FuzzyMembership(1.0, 1.0, -1.0, -1.0);
    String[] tokens = text.split(" ");
    for(int i = 0; i < tokens.length; i++) {
      if(!tokens[i].isEmpty()) {
        // Replace the surface form with its WordNet lexicalization, when one is available.
        String lexToken = this.wnlex.getWordLexicalizationByType(tokens[i].toLowerCase(), "MIX");
        if(lexToken != null) {
          tokens[i] = lexToken;
        }
        sentenceTokens.put(tokens[i], i);

        // Index the stemmed form as well, so that concepts can match either variant.
        porterStemmer stemmer = new porterStemmer();
        stemmer.setCurrent(tokens[i].toLowerCase());
        String stemLink = tokens[i].toLowerCase();
        if(stemmer.stem()) {
          stemLink = stemmer.getCurrent();
        }
        stemmedTokens.put(stemLink, i);
      }
    }
    // For each concept label in the model, check whether all of its underscore-separated
    // terms occur in the sentence, either as (lexicalized) tokens or as stems.
    Iterator<String> it = this.labels.keySet().iterator();
    while(it.hasNext()) {
      String currentConcept = it.next();
      String[] cts = currentConcept.split("_");
      int higherIdx = 0;
      int lowerIdx = tokens.length;
      int foundCT = 0;
      int flagNegation = 1;
      for(String ct: cts) {
        Integer tempIdx = sentenceTokens.get(ct);
        if(tempIdx == null) {
          tempIdx = stemmedTokens.get(ct);
        }
        if(tempIdx != null) {
          if(tempIdx < lowerIdx) lowerIdx = tempIdx;
          if(tempIdx > higherIdx) higherIdx = tempIdx;
          foundCT++;
        }
      }

      // Negation heuristic: a "not" occurring from two positions before the matched concept
      // span onwards flips the polarity of the concept.
      Integer notToken = sentenceTokens.get("not");
      //notToken = sentenceTokens.get("no");
      if(notToken != null &&
        (
         (notToken >= (lowerIdx - 2)) ||
         (
          (notToken > lowerIdx) && (notToken < higherIdx)
         )
        )
        ) {
        flagNegation = -1;
      }
      /*
      if(notToken != null && notToken == lowerIdx - 1) {
        flagNegation = -1;
      }
      */

      // The concept matches when all of its terms are found and they are close enough to each other.
      if(higherIdx >= 0 && foundCT == cts.length && (higherIdx - lowerIdx) < (cts.length + 2)) {

        Long feature = this.labels.get(currentConcept);

        // Distance-based weighting of the shape was disabled; the factor is kept at 1.0.
        double ratioFactor = 1.0;

        // fm may be null when the feature has no polarity shape: treat it as an evaluation failure.
        FuzzyMembership fm = this.polarities.get(feature);
        double a = 0.0;
        try {
          a = fm.getA() * ratioFactor * flagNegation;
        } catch (Exception e) {
          System.out.println("Error on getting fuzzy shape: " + currentConcept);
          return -2.0;
        }

        double b = fm.getB() * ratioFactor * flagNegation;
        double c = fm.getC() * ratioFactor * flagNegation;
        double d = fm.getD() * ratioFactor * flagNegation;

        // A negated shape is mirrored around zero, so its four points are swapped back into order.
        if(flagNegation == -1) {
          double t = a;
          a = d;
          d = t;

          t = b;
          b = c;
          c = t;
        }

        // Widen the aggregated shape so that it also covers the shape of the matched concept.
        double eA = eT.getA();
        double eB = eT.getB();
        double eC = eT.getC();
        double eD = eT.getD();

        if(a < eA) eA = a;
        if(b < eB) eB = b;
        if(c > eC) eC = c;
        if(d > eD) eD = d;

        eT.setA(eA);
        eT.setB(eB);
        eT.setC(eC);
        eT.setD(eD);

        fuzzyShapeFound += 1.0;
        tempPolarity += fm.getCentroidXAxis();
      }
    }

    // eT still holds its initial values when no concept was matched: in that case the
    // -2.0 sentinel is returned.
    if(eT.getA() != 1.0) {
      //textPolarity = eT.getCentroid();
      textPolarity = eT.getCentroidXAxis();
      if(Double.isNaN(textPolarity)) {
        return -2.0;
      }
      //textPolarity = tempPolarity / fuzzyShapeFound;
    }

    return textPolarity;
  }
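  /**
   * Evaluates the polarity of the portion of a parsed sentence that is reachable, in its basic
   * Stanford dependency graph, from the token with index {@code startNodeId}, without passing
   * through the nodes listed in {@code blockedNodes}.
   */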
  public double evaluateSentence(CoreMap sentence, int startNodeId, ArrayList<Integer> blockedNodes) {
    SemanticGraph dependencies = sentence.get(BasicDependenciesAnnotation.class);
    String[] dependenciesList = dependencies.toString("list").split("\n");
    return evaluateSentence(dependenciesList, startNodeId, blockedNodes);
  }

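  /**
   * Evaluates the polarity of a sentence fragment identified by a dependency list.
   *
   * Each entry of {@code dependenciesList} is expected in the Stanford "list" format, one
   * relation per line, e.g. {@code nsubj(liked-2, John-1)}. Starting from {@code startNodeId}
   * the dependency graph is visited breadth-first, skipping the nodes in {@code blockedNodes};
   * the surface forms of the visited tokens are re-assembled in sentence order and the
   * resulting fragment is scored with the same concept-matching logic as
   * {@link #evaluateSentence(String)}.
   */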
  public double evaluateSentence(String[] dependenciesList, int startNodeId, ArrayList<Integer> blockedNodes) {
    HashMap<Integer, String> tokensToPolarize = new HashMap<Integer, String>();
    double textPolarity = -2.0;
    double fuzzyShapeFound = 0.0;
    double tempPolarity = 0.0;

    int tIdx = 0;
    HashMap<Integer, Integer> tokensToBlock = new HashMap<Integer, Integer>();
    ArrayList<Integer> tokensToAnalyze = new ArrayList<Integer>();

    for(Integer bn: blockedNodes) {
      tokensToBlock.put(bn, bn);
    }

    // Breadth-first visit of the dependency graph starting from startNodeId: every relation
    // "reln(governor-i, dependent-j)" whose governor is the current (non-blocked) node
    // contributes its dependent to the set of tokens to polarize.
    tokensToAnalyze.add(startNodeId);
    while(tokensToAnalyze.size() > 0) {
      int currentNode = tokensToAnalyze.get(0);
      for(int i = 0; i < dependenciesList.length; i++) {
        String[] rel = dependenciesList[i].substring(dependenciesList[i].indexOf("(") + 1, dependenciesList[i].indexOf(")")).split(", ");
        String[] gov = rel[0].split("-");
        String[] dep = rel[1].split("-");
        Integer blockFlag = tokensToBlock.get(Integer.valueOf(gov[gov.length - 1]));
        if(Integer.valueOf(gov[gov.length - 1]) == currentNode && blockFlag == null) {

          String tokenToAdd;

          /**
           * If the map is still empty, the starting node has not been saved yet, so it is saved here.
           */
          if(tokensToPolarize.size() == 0) {
            if(gov.length > 2) {
              // Hyphenated surface forms are split by the "-" separator: rebuild them (joined with
              // blanks), keeping the last element, which is the token index, out of the surface form.
              tokenToAdd = gov[0];
              for(int j = 1; j < gov.length - 1; j++) {
                tokenToAdd = tokenToAdd.concat(" " + gov[j]);
              }
              tokenToAdd = tokenToAdd.trim();
            } else {
              tokenToAdd = gov[0];
            }
            tokensToPolarize.put(Integer.valueOf(gov[gov.length - 1]), tokenToAdd);
            if(Integer.valueOf(gov[gov.length - 1]) > tIdx) {
              tIdx = Integer.valueOf(gov[gov.length - 1]);
            }
          }


          /**
           * Saves the dependent.
           */
          if(dep.length > 2) {
            tokenToAdd = dep[0];
            for(int j = 1; j < dep.length - 1; j++) {
              tokenToAdd = tokenToAdd.concat(" " + dep[j]);
            }
            tokenToAdd = tokenToAdd.trim();
          } else {
            tokenToAdd = dep[0];
          }
          tokensToPolarize.put(Integer.valueOf(dep[dep.length - 1]), tokenToAdd);
          tokensToAnalyze.add(Integer.valueOf(dep[dep.length - 1]));
          if(Integer.valueOf(dep[dep.length - 1]) > tIdx) {
            tIdx = Integer.valueOf(dep[dep.length - 1]);
          }
        }
      }
      tokensToAnalyze.remove(0);
    }

    // Re-assemble the collected tokens, in sentence order, into the text to evaluate.
    String text = "";
    for(int i = 0; i <= tIdx; i++) {
      String token = tokensToPolarize.get(i);
      if(token != null) {
        text = text.concat(token + " ");
      }
    }
    text = text.trim();

    HashMap<String, Integer> sentenceTokens = new HashMap<String, Integer>();
    HashMap<String, Integer> stemmedTokens = new HashMap<String, Integer>();

    // Same tokenization, lexicalization and stemming as in evaluateSentence(String).
    FuzzyMembership eT = new FuzzyMembership(1.0, 1.0, -1.0, -1.0);
    String[] tokens = text.split(" ");
    for(int i = 0; i < tokens.length; i++) {
      if(!tokens[i].isEmpty()) {
        String lexToken = this.wnlex.getWordLexicalizationByType(tokens[i].toLowerCase(), "MIX");
        if(lexToken != null) {
          tokens[i] = lexToken;
        }
        sentenceTokens.put(tokens[i], i);

        porterStemmer stemmer = new porterStemmer();
        stemmer.setCurrent(tokens[i].toLowerCase());
        String stemLink = tokens[i].toLowerCase();
        if(stemmer.stem()) {
          stemLink = stemmer.getCurrent();
        }
        stemmedTokens.put(stemLink, i);
      }
    }
    // Same concept matching and fuzzy-shape aggregation as in evaluateSentence(String).
    Iterator<String> it = this.labels.keySet().iterator();
    while(it.hasNext()) {
      String currentConcept = it.next();
      String[] cts = currentConcept.split("_");
      int higherIdx = 0;
      int lowerIdx = tokens.length;
      int foundCT = 0;
      int flagNegation = 1;
      for(String ct: cts) {
        Integer tempIdx = sentenceTokens.get(ct);
        if(tempIdx == null) {
          tempIdx = stemmedTokens.get(ct);
        }
        if(tempIdx != null) {
          if(tempIdx < lowerIdx) lowerIdx = tempIdx;
          if(tempIdx > higherIdx) higherIdx = tempIdx;
          foundCT++;
        }
      }

      // Negation heuristic, as above.
      Integer notToken = sentenceTokens.get("not");
      //notToken = sentenceTokens.get("no");
      if(notToken != null &&
        (
         (notToken >= (lowerIdx - 2)) ||
         (
          (notToken > lowerIdx) && (notToken < higherIdx)
         )
        )
        ) {
        flagNegation = -1;
      }
      /*
      if(notToken != null && notToken == lowerIdx - 1) {
        flagNegation = -1;
      }
      */

      if(higherIdx >= 0 && foundCT == cts.length && (higherIdx - lowerIdx) < (cts.length + 2)) {

        Long feature = this.labels.get(currentConcept);

        // Distance-based weighting of the shape was disabled; the factor is kept at 1.0.
        double ratioFactor = 1.0;

        FuzzyMembership fm = this.polarities.get(feature);
        double a = 0.0;
        try {
          a = fm.getA() * ratioFactor * flagNegation;
        } catch (Exception e) {
          System.out.println("Error on getting fuzzy shape: " + currentConcept);
          return -2.0;
        }

        double b = fm.getB() * ratioFactor * flagNegation;
        double c = fm.getC() * ratioFactor * flagNegation;
        double d = fm.getD() * ratioFactor * flagNegation;

        if(flagNegation == -1) {
          double t = a;
          a = d;
          d = t;

          t = b;
          b = c;
          c = t;
        }

        double eA = eT.getA();
        double eB = eT.getB();
        double eC = eT.getC();
        double eD = eT.getD();

        if(a < eA) eA = a;
        if(b < eB) eB = b;
        if(c > eC) eC = c;
        if(d > eD) eD = d;

        eT.setA(eA);
        eT.setB(eB);
        eT.setC(eC);
        eT.setD(eD);

        fuzzyShapeFound += 1.0;
        tempPolarity += fm.getCentroidXAxis();
      }
    }

    // eT still holds its initial values when no concept was matched: in that case the
    // -2.0 sentinel is returned.
    if(eT.getA() != 1.0) {
      //textPolarity = eT.getCentroid();
      textPolarity = eT.getCentroidXAxis();
      if(Double.isNaN(textPolarity)) {
        return -2.0;
      }
      //textPolarity = tempPolarity / fuzzyShapeFound;
    }

    return textPolarity;
  }

}