diff --git a/WikiRating/src/main/java/computations/BadgeGenerator.java b/WikiRating/src/main/java/computations/BadgeGenerator.java
index 081b081..2afe2e9 100644
--- a/WikiRating/src/main/java/computations/BadgeGenerator.java
+++ b/WikiRating/src/main/java/computations/BadgeGenerator.java
@@ -1,188 +1,191 @@
 package main.java.computations;
 
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Comparator;
+import java.util.List;
+
 import com.tinkerpop.blueprints.Vertex;
 import com.tinkerpop.blueprints.impls.orient.OrientGraph;
 import main.java.utilities.Connections;
 import main.java.utilities.Loggings;
 import main.java.utilities.PropertiesAccess;
 
 /**
  * This class will calculate the Badges that will be assigned
  * to the Pages based on the analysis of Page Rating distribution
  */
 public class BadgeGenerator {
 
     static Class className=BadgeGenerator.class;
 
     //These variables will store the computed cutoffs for the various badges
     static double platinumBadgeRatingCutoff;
     static double goldBadgeRatingCutoff;
     static double silverBadgeRatingCutoff;
     static double bronzeBadgeRatingCutoff;
     static double stoneBadgeRatingCutoff;
 
     /**
      * This ENUM has the percentile ranges for various badges
      */
     public enum Badges {
         PLATINUM(1),
         GOLD(2),
         SILVER(3),
         BRONZE(4),
         STONE(5),
         PLATINUM_BADGE_START_PERCENTILE(80),
         GOLD_BADGE_START_PERCENTILE(60),
         SILVER_BADGE_START_PERCENTILE(40),
         BRONZE_BADGE_START_PERCENTILE(20),
         STONE_BADGE_START_PERCENTILE(0);
 
         private int value;
 
         Badges(int value) {
             this.value = value;
         }
 
         public int getValue() {
             return value;
         }
     }
 
     /**
      * This class will store our PageObjects to insert into ArrayList for
      * percentile calculations
      */
     public class PageRatingData{
         int pid;
         double pageRating;
         String pageName;
 
         public PageRatingData(int pid,double pageRating,String pageName) {
             this.pid=pid;
             this.pageRating=pageRating;
             this.pageName=pageName;
         }
     }
 
     /**
      * This is the custom comparator to sort the pageList in the ascending
      * order of PageRatings
      */
     class PageRatingComparator implements Comparator<PageRatingData>{
         @Override
         public int compare(PageRatingData pageRating1, PageRatingData pageRating2) {
             if(pageRating1.pageRating>pageRating2.pageRating)
                 return 1;
             else
                 return -1;
         }
     }
 
     /**
      * This method will assign badges based on the percentile
      */
     public void generateBadges(){
-        ArrayList<PageRatingData> pageList=new ArrayList<PageRatingData>();
+        ArrayList<PageRatingData> pageList=new ArrayList<>();
         OrientGraph graph = Connections.getInstance().getDbGraph();
-        Vertex currentPageNode=null;
-        int badgeNumber=4;
-        int currentPageID=0,noOfPages=0;
-        double currentPageRating=0,maxPageRating=0;
-        String currentPageName="";
+        Vertex currentPageNode;
+        int badgeNumber;
+        int currentPageID,noOfPages;
+        double currentPageRating,maxPageRating;
+        String currentPageName;
 
         for(Vertex pageNode:graph.getVertices("@class","Page")){
             currentPageID=pageNode.getProperty("pid");
             currentPageRating=pageNode.getProperty("PageRating");
             currentPageName=pageNode.getProperty("title");
             pageList.add(new PageRatingData(currentPageID, currentPageRating,currentPageName));
         }
 
         Collections.sort(pageList,new PageRatingComparator());
         calculateBadgeCutoff(pageList);
+        maxPageRating=0;
         noOfPages=pageList.size();
         int noOfPagesCounter=0;
 
         for(PageRatingData currentPage:pageList){
             badgeNumber=getBadgeNumber(currentPage.pageRating);
             Loggings.getLogs(className).info(currentPage.pageName + " ------with ratings= "+currentPage.pageRating+" earned "+badgeNumber);
             currentPageNode=graph.getVertices("pid",currentPage.pid).iterator().next();
             currentPageNode.setProperty("badgeNumber",badgeNumber);
             graph.commit();
             noOfPagesCounter++;
             if(noOfPagesCounter==noOfPages)
                 maxPageRating=currentPage.pageRating;
         }
 
         //Adding the max value to the Preferences for later access
         PropertiesAccess.putParameter("maxRating", maxPageRating);
         graph.shutdown();
     }
 
     /**
      * This method will calculate the cutoff for the various badges
      * @param pageList The ArrayList containing Page Objects
      */
-    public static void calculateBadgeCutoff(ArrayList<PageRatingData> pageList){
+    public static void calculateBadgeCutoff(List<PageRatingData> pageList){
         int noOfPages=pageList.size();
         int platinumPageIndex;
         int goldPageIndex;
         int silverPageIndex;
         int bronzePageIndex;
         int stonePageIndex;
 
         //Storing index where the cutoff of badges start to get the respective cutoffs
         platinumPageIndex=(int)(noOfPages*(Badges.PLATINUM_BADGE_START_PERCENTILE.value/100.00));
         goldPageIndex=(int)(noOfPages*(Badges.GOLD_BADGE_START_PERCENTILE.value/100.00));
         silverPageIndex=(int)(noOfPages*(Badges.SILVER_BADGE_START_PERCENTILE.value/100.00));
         bronzePageIndex=(int)(noOfPages*(Badges.BRONZE_BADGE_START_PERCENTILE.value/100.00));
         stonePageIndex=(int)(noOfPages*(Badges.STONE_BADGE_START_PERCENTILE.value/100.00));
 
         //Storing cutoffs
         platinumBadgeRatingCutoff=pageList.get(platinumPageIndex).pageRating;
         goldBadgeRatingCutoff=pageList.get(goldPageIndex).pageRating;
         silverBadgeRatingCutoff=pageList.get(silverPageIndex).pageRating;
         bronzeBadgeRatingCutoff=pageList.get(bronzePageIndex).pageRating;
         stoneBadgeRatingCutoff=pageList.get(stonePageIndex).pageRating;
 
         Loggings.getLogs(className).info("Index "+platinumPageIndex+"marks platinum cutoff -------"+platinumBadgeRatingCutoff);
         Loggings.getLogs(className).info("Index "+goldPageIndex+"marks gold cutoff------"+goldBadgeRatingCutoff);
         Loggings.getLogs(className).info("Index "+silverPageIndex+"marks silver cutoff------"+silverBadgeRatingCutoff);
         Loggings.getLogs(className).info("Index "+bronzePageIndex+"marks bronze cutoff------"+bronzeBadgeRatingCutoff);
         Loggings.getLogs(className).info("Index "+stonePageIndex+"marks stone cutoff------"+stoneBadgeRatingCutoff);
     }
 
     /**
      * This method will pick the badge according to the passed pageRating
      * @param pageRating PageRating of the page under consideration
      * @return The number of the Badge earned
      */
     public static int getBadgeNumber(double pageRating){
         if(pageRating>=platinumBadgeRatingCutoff)
             return Badges.PLATINUM.value;
         else if(pageRating>=goldBadgeRatingCutoff)
             return Badges.GOLD.value;
         else if(pageRating>=silverBadgeRatingCutoff)
             return Badges.SILVER.value;
         else if(pageRating>=bronzeBadgeRatingCutoff)
             return Badges.BRONZE.value;
         else
             return Badges.STONE.value;
     }
 }
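For reference, calculateBadgeCutoff() above just indexes the ascending-sorted rating list at each percentile boundary; a minimal standalone sketch of the same idea, with made-up ratings and none of the OrientDB plumbing:

    // Illustrative only -- mirrors calculateBadgeCutoff() on a plain, ascending-sorted array.
    double[] ratings = {0.10, 0.20, 0.35, 0.40, 0.55, 0.60, 0.70, 0.80, 0.90, 0.95};
    int n = ratings.length;                            // 10 pages
    double platinumCutoff = ratings[(int) (n * 0.80)]; // index 8 -> 0.90; pages rated >= 0.90 earn PLATINUM
    double goldCutoff     = ratings[(int) (n * 0.60)]; // index 6 -> 0.70
    double silverCutoff   = ratings[(int) (n * 0.40)]; // index 4 -> 0.55
    double bronzeCutoff   = ratings[(int) (n * 0.20)]; // index 2 -> 0.35; anything below falls to STONE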
diff --git a/WikiRating/src/main/java/computations/NormalisedVotes.java b/WikiRating/src/main/java/computations/NormalisedVotes.java
index 7cb59d6..e7fa427 100644
--- a/WikiRating/src/main/java/computations/NormalisedVotes.java
+++ b/WikiRating/src/main/java/computations/NormalisedVotes.java
@@ -1,168 +1,168 @@
 package main.java.computations;
 
 import com.tinkerpop.blueprints.Direction;
 import com.tinkerpop.blueprints.Edge;
 import com.tinkerpop.blueprints.Vertex;
 import com.tinkerpop.blueprints.impls.orient.OrientGraph;
 import com.tinkerpop.blueprints.impls.orient.OrientVertex;
 import main.java.utilities.Connections;
 import main.java.utilities.Loggings;
 import main.java.utilities.PropertiesAccess;
 
 /**
  * This class will calculate the Normalised Votes
  * of all the revisions and hence the page by using the given
  * recursive formula that keeps scaling the votes of the
  * previous versions with the new ones
  *
  */
 public class NormalisedVotes {
 
     static Class className=NormalisedVotes.class;
 
     //To check for cases where latest version is voted on without any change
     static boolean latestVoteCheck=true;
 
     final static double PHI_POWER_PARAMETER=Double.parseDouble(PropertiesAccess.getParameterProperties("PHI_POWER_PARAMETER"));
 
     /**
      *This method will calculate the Normalised Votes of all the pages on the platform
      *along with their respective revisions.
      * @return void
      */
     public static void calculatePageVotes(){
         OrientGraph graph = Connections.getInstance().getDbGraph();
         double currentPageVote=0;
         Vertex revisionNode=null;
 
         for (Vertex pageNode : graph.getVertices("@class","Page")) {
             latestVoteCheck=true;
             try{
                 revisionNode = pageNode.getEdges(Direction.OUT, "@class", "PreviousVersionOfPage").iterator().next().getVertex(Direction.IN);
                 currentPageVote=recursiveVotes(graph,(int)revisionNode.getProperty("revid"));
                 pageNode.setProperty("currentPageVote",currentPageVote);
                 graph.commit();
             }catch(Exception e){Loggings.getLogs(className).error(e);}
         }
         getTotalVotes(graph);
         graph.shutdown();
     }
 
     /**
      * This method will calculate and store the Normalised votes for all the revisions of a particular page
      * and then return the final Normalised vote for the page itself
      * @param graph OrientGraph object
      * @param revid Revision Id of the latest version connected to the Page
      * @return final vote of the latest version is computed and returned
      */
     public static double recursiveVotes(OrientGraph graph,int revid){
         double lastVote=0,phi=0,normalVote=0,currVote=0;
         Vertex revisionNode=graph.getVertices("revid", revid).iterator().next();
 
-        //Since we can't directly check for equality with floating numebers safetly therefore working with inequalities
+        //Since we can't safely check floating-point numbers for equality, we work with inequalities instead
         if(latestVoteCheck==false&&(double)revisionNode.getProperty("previousVote")>-1){
             Loggings.getLogs(className).info(revisionNode.getProperty("revid")+" of "+revisionNode.getProperty("Page")+" has--- "+revisionNode.getProperty("previousVote"));
             return (double)revisionNode.getProperty("previousVote");
         }
 
         latestVoteCheck=false;
 
         if((int)revisionNode.getProperty("parentid")==0){
             lastVote=simpleVote(graph,revid);
             revisionNode.setProperty("previousVote",lastVote);
             graph.commit();
             Loggings.getLogs(className).info(revisionNode.getProperty("revid")+" of "+revisionNode.getProperty("Page")+" has--- "+lastVote);
             return lastVote;
         }
         else{
             phi=getPhi(graph,revid);
             currVote=simpleVote(graph,revid);
             normalVote=((simpleVote(graph,revid)+phi*recursiveVotes(graph,(int)revisionNode.getProperty("parentid")))/(phi+1));
             revisionNode.setProperty("previousVote",normalVote);
             graph.commit();
             Loggings.getLogs(className).info(revisionNode.getProperty("revid")+" of "+revisionNode.getProperty("Page")+" has--- "+normalVote);
             return normalVote;
         }
     }
 
     /**This method will calculate the weighted average of votes of the current Revision Node
      *
      * @param graph OrientGraph object
      * @param revid Revision Id for the revision node under the calculation
      * @return The calculated Simple weighted average.
      */
     public static double simpleVote(OrientGraph graph,int revid){
         double denominator=0,numerator=0,simpleVote=0;
         Vertex userNode=null;
         Vertex revisionNode=graph.getVertices("revid",revid).iterator().next();
 
         for(Edge reviewEdge:revisionNode.getEdges(Direction.IN,"@class","Review")){
             //userNode=reviewEdge.getVertex(Direction.OUT);
             numerator+=(double)reviewEdge.getProperty("voteCredibility")*(double)reviewEdge.getProperty("vote");
             denominator+=(double)reviewEdge.getProperty("vote");
         }
 
         //denominator=1;
         if(denominator>0)
             simpleVote=numerator/denominator;
         return simpleVote;
     }
 
     /**
      * This will calculate the parameter phi to scale the votes of the previous versions
      * @param graph OrientGraph object
      * @param revid Revision Id for the revision node under the calculation
      * @return The parameter phi
      */
     public static double getPhi(OrientGraph graph,int revid){
         double phi=0;
         double sizePrev=0,newEdits=0,currSize=0;
         Vertex revisionNode=graph.getVertices("revid",revid).iterator().next();
         Vertex parentNode =graph.getVertices("revid",(int)revisionNode.getProperty("parentid")).iterator().next();
 
         sizePrev=(int)parentNode.getProperty("size");
         currSize=(int)revisionNode.getProperty("size");
         newEdits=Math.abs(sizePrev-currSize);
 
         //sizePrev=1;
         if(sizePrev>0)
-            phi=Math.pow(Math.E,-1*(Math.pow(newEdits/sizePrev, PHI_POWER_PARAMETER)));
+            phi=Math.pow(Math.E,-1*(Math.pow(newEdits/sizePrev, PHI_POWER_PARAMETER)));
         return phi;
     }
 
     /**
      * This method will compute the total number of votes given to each page,
      * for all the pages
      * @param graph OrientGraph
      */
     public static void getTotalVotes(OrientGraph graph){
         long totalVotes=0;
         OrientVertex revisionNode=null;
 
         for (Vertex pageNode : graph.getVertices("@class","Page")) {
             totalVotes=0;
             revisionNode=(OrientVertex)pageNode.getEdges(Direction.OUT, "@class", "PreviousVersionOfPage").iterator().next().getVertex(Direction.IN);
 
             while((int)revisionNode.getProperty("parentid")!=0){
                 totalVotes+=revisionNode.countEdges(Direction.IN, "@class","Review");
                 revisionNode=(OrientVertex) graph.getVertices("revid", (int)revisionNode.getProperty("parentid")).iterator().next();
                 Loggings.getLogs(className).info(revisionNode.getProperty("revid"));
             }
 
             totalVotes+=revisionNode.countEdges(Direction.IN, "@class","Review");
             Loggings.getLogs(className).info(pageNode.getProperty("title")+" "+totalVotes);
 
             //Adding the totalVotes into the DB for faster retrieval
             pageNode.setProperty("totalVotes", totalVotes);
             graph.commit();
         }
     }
 }
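The recursion above evaluates normalVote(r) = (simpleVote(r) + phi * normalVote(parent(r))) / (phi + 1), with phi = e^-((|size(r) - size(parent)| / size(parent))^PHI_POWER_PARAMETER). A self-contained sketch of the same recurrence on plain arrays, illustrative only; the real code reads votes and sizes from the graph:

    // Illustrative sketch of recursiveVotes()/getPhi(), detached from OrientDB.
    // vote[i] is the simple weighted vote of revision i, size[i] its size; index 0 is the first revision.
    static double normalisedVote(double[] vote, int[] size, int i, double phiPowerParameter) {
        if (i == 0)
            return vote[0];
        double newEdits = Math.abs(size[i] - size[i - 1]);
        double phi = (size[i - 1] > 0)
                ? Math.pow(Math.E, -1 * Math.pow(newEdits / size[i - 1], phiPowerParameter))
                : 0;
        return (vote[i] + phi * normalisedVote(vote, size, i - 1, phiPowerParameter)) / (phi + 1);
    }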
diff --git a/WikiRating/src/main/java/computations/Reliability.java b/WikiRating/src/main/java/computations/Reliability.java
index 469b8db..c3de6e2 100644
--- a/WikiRating/src/main/java/computations/Reliability.java
+++ b/WikiRating/src/main/java/computations/Reliability.java
@@ -1,139 +1,140 @@
 package main.java.computations;
 
 import com.tinkerpop.blueprints.Direction;
 import com.tinkerpop.blueprints.Edge;
 import com.tinkerpop.blueprints.Vertex;
 import com.tinkerpop.blueprints.impls.orient.OrientGraph;
 import main.java.utilities.Connections;
 import main.java.utilities.PropertiesAccess;
 import main.java.utilities.Loggings;
 
 /**
  * This class will calculate the reliability of the vote given by the users.
  * The structure and the methods of this class are very similar to class NormalisedVotes.java
  */
 public class Reliability {
 
     static Class className=Reliability.class;
 
     //To check for cases where latest version is voted on without any change
     static boolean latestVoteCheck=true;
 
     final static double PHI_POWER_PARAMETER=Double.parseDouble(PropertiesAccess.getParameterProperties("PHI_POWER_PARAMETER"));
 
     /**
      *This method will calculate the reliability of the votes given by the user
      *to the versions.
      * @return void
      */
     public static void calculateReliability(){
         OrientGraph graph = Connections.getInstance().getDbGraph();
         double currentPageReliability=0;
         Vertex revisionNode=null;
         double maxPageReliability=-1;
 
         for (Vertex pageNode : graph.getVertices("@class","Page")) {
             try{
                 revisionNode = pageNode.getEdges(Direction.OUT, "@class", "PreviousVersionOfPage").iterator().next().getVertex(Direction.IN);
                 currentPageReliability=recursiveReliability(graph,(int)revisionNode.getProperty("revid"));
                 if(maxPageReliability<=currentPageReliability){
                     maxPageReliability=currentPageReliability;
                 }
                 pageNode.setProperty("currentPageReliability",currentPageReliability);
                 graph.commit();
             }catch(Exception e){
                 Loggings.getLogs(className).error(e);
             }
         }
 
         //graph.commit();
         PropertiesAccess.putParameter("maxPageReliability", maxPageReliability);
         graph.shutdown();
     }
 
     /**
      * This method will calculate and store the reliability
      * of votes for all the revisions of a particular page
      * and then return the final reliability of vote for the page itself
      * @param graph OrientGraph object
      * @param revid Revision Id of the latest version connected to the Page
      * @return final reliability of the latest version is computed and returned
      */
     public static double recursiveReliability(OrientGraph graph,int revid){
         double lastReliability=0,phi=0,normalReliability=0,currReliability=0;
         Vertex revisionNode=graph.getVertices("revid", revid).iterator().next();
 
-
-        if(latestVoteCheck==false&&(double)revisionNode.getProperty("previousReliability")!=-1){
+        //Since we can't safely check floating-point numbers for equality, we work with inequalities instead
+        if(latestVoteCheck==false&&(double)revisionNode.getProperty("previousReliability")>-1){
             Loggings.getLogs(className).info(revisionNode.getProperty("revid")+" of "+revisionNode.getProperty("Page")+" has--- "+revisionNode.getProperty("previousReliability"));
             return (double)revisionNode.getProperty("previousReliability");
         }
 
         latestVoteCheck=false;
 
         if((int)revisionNode.getProperty("parentid")==0){
             lastReliability=simpleReliability(graph,revid);
             revisionNode.setProperty("previousReliability",lastReliability);
             graph.commit();
             Loggings.getLogs(className).info(revisionNode.getProperty("revid")+" of "+revisionNode.getProperty("Page")+" has--- "+lastReliability);
             return lastReliability;
         }
         else{
             phi=getPhi(graph,revid);
             currReliability=simpleReliability(graph,revid);
             normalReliability=((simpleReliability(graph,revid)+phi*recursiveReliability(graph,(int)graph.getVertices("revid", revid).iterator().next().getProperty("parentid")))/(phi+1));
             revisionNode.setProperty("previousReliability",normalReliability);
             graph.commit();
             Loggings.getLogs(className).info(revisionNode.getProperty("revid")+" of "+revisionNode.getProperty("Page")+" has--- "+normalReliability);
             return normalReliability;
         }
     }
 
     /**
      * This method will calculate the average of reliabilities of the current Revision Node
      *
      * @param graph OrientGraph object
      * @param revid Revision Id for the revision node under the calculation
      * @return The calculated Simple weighted average.
      */
     public static double simpleReliability(OrientGraph graph,int revid){
         double numerator=0,simpleVote=0,globalVote=0,userVote=0;
         Vertex revisionNode=graph.getVertices("revid",revid).iterator().next();
 
         for(Edge reviewEdge:revisionNode.getEdges(Direction.IN,"@class","Review")){
             userVote=reviewEdge.getProperty("vote");
             globalVote=revisionNode.getProperty("previousVote");
             numerator+=(double)reviewEdge.getProperty("voteCredibility")*(1-Math.abs(userVote-globalVote));
         }
 
         simpleVote=numerator;
         return simpleVote;
     }
 
     /**
      * This will calculate the parameter phi to scale the reliabilities of the previous versions
      * @param graph
      * @param revid
      * @return The parameter phi
      */
     public static double getPhi(OrientGraph graph,int revid){
         double phi=0;
         double sizePrev=0,newEdits=0,currSize=0;
         Vertex revisionNode=graph.getVertices("revid",revid).iterator().next();
         Vertex parentNode =graph.getVertices("revid",(int)revisionNode.getProperty("parentid")).iterator().next();
 
         sizePrev=(int)parentNode.getProperty("size");
         currSize=(int)revisionNode.getProperty("size");
         newEdits=Math.abs(sizePrev-currSize);
 
-        if(sizePrev==0)sizePrev=1;
-        phi=Math.pow(Math.E,-1*(Math.pow(newEdits/sizePrev, PHI_POWER_PARAMETER)));
+        //sizePrev=1;
+        if(sizePrev>0)
+            phi=Math.pow(Math.E,-1*(Math.pow(newEdits/sizePrev, PHI_POWER_PARAMETER)));
         return phi;
     }
 }
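As a pointer for the hunk above: simpleReliability() accumulates voteCredibility * (1 - |userVote - previousVote|) over the Review edges of a revision. A toy calculation with invented numbers:

    // Invented numbers, purely illustrative: two reviews on a revision whose normalised vote is 0.7.
    double globalVote = 0.7;
    double r1 = 0.9 * (1 - Math.abs(0.8 - globalVote)); // credibility 0.9, vote 0.8 -> ~0.81
    double r2 = 0.4 * (1 - Math.abs(0.2 - globalVote)); // credibility 0.4, vote 0.2 -> 0.20
    double reliability = r1 + r2;                       // ~1.01, the sum returned by simpleReliability()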
diff --git a/WikiRating/src/main/java/computations/UserCredibility.java b/WikiRating/src/main/java/computations/UserCredibility.java
index 15caa3e..1bfcb08 100644
--- a/WikiRating/src/main/java/computations/UserCredibility.java
+++ b/WikiRating/src/main/java/computations/UserCredibility.java
@@ -1,116 +1,116 @@
 package main.java.computations;
 
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.Map;
 
 import com.tinkerpop.blueprints.Direction;
 import com.tinkerpop.blueprints.Edge;
 import com.tinkerpop.blueprints.Vertex;
 import com.tinkerpop.blueprints.impls.orient.OrientGraph;
 
 import main.java.utilities.Connections;
 import main.java.utilities.PropertiesAccess;
 import main.java.utilities.Loggings;
 
 /**
  * This class will deal with the calculations of User Credibility
  */
 public class UserCredibility {
 
     static Class className=CreditSystem.class;
 
     final static double USER_CONTRI_IMPORTANCE_PARAMETER=Double.parseDouble(PropertiesAccess.getParameterProperties("USER_CONTRI_IMPORTANCE_PARAMETER"));
     final static double USER_VOTE_IMPORTANCE_PARAMETER=Double.parseDouble(PropertiesAccess.getParameterProperties("USER_VOTE_IMPORTANCE_PARAMETER"));
 
     /**
      *This method will compute the credibility for all the Users
      */
     public static void getUserCredibility(){
         OrientGraph graph = Connections.getInstance().getDbGraph();
         double alpha=0,relativeUserContribution=0,voteDeviation=0,credibility=0;
         HashMap pageEditMap=Contribution.getPageEdits();
 
         //To iterate over all the Users for getting their respective Credibility
         try{
             for(Vertex userNode:graph.getVertices("@class", "User")){
                 relativeUserContribution=getRelativeUserContribution(userNode,graph,pageEditMap);
                 voteDeviation=getVoteDeviation(userNode,graph);
                 alpha=(USER_CONTRI_IMPORTANCE_PARAMETER*relativeUserContribution+USER_VOTE_IMPORTANCE_PARAMETER*voteDeviation)/(USER_CONTRI_IMPORTANCE_PARAMETER+USER_VOTE_IMPORTANCE_PARAMETER);
                 credibility=alpha;
                 userNode.setProperty("credibility",credibility);
                 Loggings.getLogs(className).info(userNode.getProperty("username")+" has "+credibility);
                 graph.commit();
             }
         }catch(Exception e){Loggings.getLogs(className).error(e);}
 
         //graph.commit();
         graph.shutdown();
     }
 
     /**
      * This method calculates the parameter 'a'(relativeUserContribution) for credibility calculation
      * @param userNode The Vertex of the User class whose credibility is being calculated
      * @param graph OrientGraph object
      * @param pageEditMap HashMap containing all the edits and their corresponding pid
      * @return The value of parameter 'a'
      */
     public static double getRelativeUserContribution(Vertex userNode,OrientGraph graph,HashMap pageEditMap){
         HashMap userPageContributions=new HashMap();
         int contpid=0,countContribution=0;
         double userEdits=0,totalEdits=1,finalPageVote=0;
         double userPageContributionsTemp=0,userPageContributionsTotal=0;
         int contributionSize=0;
 
         for(Edge contributeEdge:userNode.getEdges(Direction.OUT,"@class","Contribute")){
             contpid=(int)graph.getVertices("title",contributeEdge.getVertex(Direction.IN).getProperty("Page").toString()).iterator().next().getProperty("pid");
             contributionSize=contributeEdge.getProperty("contributionSize");
             if(userPageContributions.containsKey(contpid)){
                 contributionSize+=(int)userPageContributions.get(contpid);
                 userPageContributions.put(contpid,(Integer)contributionSize);
             } else {
                 userPageContributions.put(contpid,(Integer)contributionSize);
             }
         }
 
         Iterator it = userPageContributions.entrySet().iterator();
         while (it.hasNext()) {
             Map.Entry pair = (Map.Entry) it.next();
             contpid=(int)pair.getKey();
             userEdits=(int)userPageContributions.get(contpid);
             totalEdits=(int)pageEditMap.get(contpid);
             finalPageVote=graph.getVertices("pid",contpid).iterator().next().getProperty("currentPageVote");
-            if(totalEdits==0)totalEdits=1;
+            if(totalEdits==0)totalEdits=1; //Not a float comparison
             userPageContributionsTemp=(finalPageVote*userEdits/totalEdits);
             userPageContributionsTotal+=userPageContributionsTemp;
             countContribution++;
         }
 
         if(countContribution==0)countContribution=1;
         return userPageContributionsTotal/countContribution;
     }
 
     /**
      * This method calculates the parameter 'b'(voteDeviation) for credibility calculation
      * @param userNode The Vertex of the User class whose credibility is being calculated
      * @param graph OrientGraph object
      * @return The value of parameter 'b'
      */
     public static double getVoteDeviation(Vertex userNode,OrientGraph graph){
         double voteDeviationTemp=0,voteDeviationTotal=0,userVote,versionVote;
         int countReview=0;
 
         try{
             for(Edge reviewEdge:userNode.getEdges(Direction.OUT,"@class","Review")){
                 userVote=reviewEdge.getProperty("vote");
                 versionVote=reviewEdge.getVertex(Direction.IN).getProperty("previousVote");
                 voteDeviationTemp=1-Math.abs(userVote-versionVote);
                 voteDeviationTotal+=voteDeviationTemp;
                 countReview++;
             }
         }catch(Exception e){Loggings.getLogs(className).error(e);}
 
         if(countReview==0)countReview=1;
         return voteDeviationTotal/countReview;
     }
 }
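For context, the credibility stored above is a weighted mean of the two parameters 'a' (relativeUserContribution) and 'b' (voteDeviation). A toy calculation with assumed weights; the real values are read from the parameter properties:

    // Assumed weights for illustration only; the project reads them via PropertiesAccess.
    double contriImportance = 0.5; // stands in for USER_CONTRI_IMPORTANCE_PARAMETER
    double voteImportance   = 0.5; // stands in for USER_VOTE_IMPORTANCE_PARAMETER
    double a = 0.8;                // relativeUserContribution of some user
    double b = 0.6;                // voteDeviation of the same user
    double credibility = (contriImportance * a + voteImportance * b)
            / (contriImportance + voteImportance); // = 0.7 with these numbers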
diff --git a/WikiRating/src/main/java/controllers/WikiUtil.java b/WikiRating/src/main/java/controllers/WikiUtil.java
index 4f85a26..82bfb9c 100644
--- a/WikiRating/src/main/java/controllers/WikiUtil.java
+++ b/WikiRating/src/main/java/controllers/WikiUtil.java
@@ -1,208 +1,208 @@
 package main.java.controllers;
 
 import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.UnsupportedEncodingException;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.Map;
 
 import org.wikidata.wdtk.wikibaseapi.ApiConnection;
 
 import com.tinkerpop.blueprints.Vertex;
 import com.tinkerpop.blueprints.impls.orient.OrientEdge;
 import com.tinkerpop.blueprints.impls.orient.OrientGraph;
 
 import main.java.utilities.Connections;
 import main.java.utilities.Loggings;
 
 /**This class contains various utility methods for the other classes
  *
  */
 public class WikiUtil {
 
     static Class className=WikiUtil.class;
 
     /**
      * This method converts an InputStream object to String
      * @param in InputStream object to be converted
      * @return Converted String
      */
     public static String streamToString(InputStream in) {
         String result = "";
         BufferedReader reader = null;
         try {
             reader = new BufferedReader(new InputStreamReader(in, "UTF-8"));
         } catch (UnsupportedEncodingException e1) {
-            e1.printStackTrace();
+            Loggings.getLogs(className).error(e1);
         }
         StringBuilder builder = new StringBuilder();
         String line;
         try {
             if(reader!=null){
                 while ((line = reader.readLine()) != null) {
                     builder.append(line);
                 }
             }
             result = builder.toString();
             in.close();
         } catch (IOException e) {
             Loggings.getLogs(className).error(e);
         }
         return result;
     }
 
     /**
      * This method constructs the MAP of parameters to attach with the MediaWiki Query to fetch all the pages
      * residing in all the namespaces
      * @param ns The namespace whose pages are requested
      * @return Map having parameters
      */
     public static Map getPageParam(String ns) {
         Map queryParameterMap = new HashMap();
         queryParameterMap.put("action", "query");
         queryParameterMap.put("list", "allpages");
         queryParameterMap.put("apfrom", "a");
         queryParameterMap.put("aplimit", "max");
         queryParameterMap.put("apnamespace", ns);
         queryParameterMap.put("format", "json");
         return queryParameterMap;
     }
 
     /**
      * This method constructs the MAP of parameters to attach with the MediaWiki Query to fetch all the revisions
      * of the given page
      * @param pid The PageID of the page for which revisions are requested
      * @return Map having parameters
      */
     public static Map getRevisionParam(String pid) {
         Map queryParameterMap = new HashMap();
         queryParameterMap.put("action", "query");
         queryParameterMap.put("prop", "revisions");
         queryParameterMap.put("pageids", pid);
         queryParameterMap.put("rvprop", "userid|ids|timestamp|user|flags|size");
         queryParameterMap.put("rvlimit", "max");
         queryParameterMap.put("rvdir", "newer");
         queryParameterMap.put("format", "json");
         return queryParameterMap;
     }
 
     /**
      * This method constructs the MAP of parameters to attach with the MediaWiki Query to get
      * all the backlinks for the specified page
      * @param pid The PageID of the page for which backlinks are requested
      * @return Map having parameters
      */
     public static Map getLinkParam(String pid) {
         Map queryParameterMap = new HashMap();
         queryParameterMap.put("action", "query");
         queryParameterMap.put("list", "backlinks");
         queryParameterMap.put("blpageid", pid);
         queryParameterMap.put("blfilterredir", "all");
         queryParameterMap.put("bllimit", "max");
         queryParameterMap.put("format", "json");
         return queryParameterMap;
     }
 
     /**
      * This method constructs the MAP of parameters to attach with the MediaWiki Query to get
      * all the users
      * @param username username to continue from in case the results are more than 500
      * @return Map having parameters
      */
     public static Map getUserParam(String username) {
         Map queryParameterMap = new HashMap();
         queryParameterMap.put("action", "query");
         queryParameterMap.put("list", "allusers");
         queryParameterMap.put("aulimit", "max");
         queryParameterMap.put("aufrom", username);
         queryParameterMap.put("rawcontinue", "");
         queryParameterMap.put("format", "json");
         return queryParameterMap;
     }
 
     /**
      * This method constructs the MAP of parameters to attach with the MediaWiki Query to get
      * all the contributions by the specified User
      * @param username Username for whom the contributions have to be fetched
      * @return Map having parameters
      */
     public static Map getUserContriParam(String username) {
         Map queryParameterMap = new HashMap();
         queryParameterMap.put("action", "query");
         queryParameterMap.put("list", "usercontribs");
         queryParameterMap.put("uclimit", "max");
         queryParameterMap.put("ucdir", "newer");
         queryParameterMap.put("ucuser", username);
         queryParameterMap.put("ucshow", "!minor");
         queryParameterMap.put("ucprop", "sizediff|title|ids|flags");
         queryParameterMap.put("format", "json");
         return queryParameterMap;
     }
 
     /**
      * This method sends a POST request to MediaWiki API and then gets back an InputStream
      * @param con The ApiConnection object
      * @param queryParameterMap The Map having all the query parameters
      * @return InputStream object having the requested data
      */
     public static InputStream reqSend(ApiConnection con, Map queryParameterMap) {
         InputStream in = null;
         try {
             in = con.sendRequest("POST", queryParameterMap);
         } catch (IOException e) {
             Loggings.getLogs(className).error(e);
         }
         return in;
     }
 
     /**
      * This method will check for the duplicate entities in the database
      * @param key The name of the class for which redundancy needs to be checked
      * @param value The value to be checked
      * @param graph OrientGraph object
      * @return true or false depending on whether entity is absent or present respectively
      */
     public static boolean rCheck(String key, int value, OrientGraph graph) {
         Iterable checkNode = graph.getVertices(key, value);
         Iterator it = checkNode.iterator();
         if (it.hasNext()) {
             return false;
         } else
             return true;
     }
 
     /**
      * This method prints all the pages
      * @return A formatted string containing all the Page names
      */
     public static String printVertex() {
         String result = "";
         OrientGraph graph = Connections.getInstance().getDbGraph();
         for (Vertex pageNode : graph.getVertices("@class", "Page")) {
             result = result + " \n" + pageNode.getProperty("title");
         }
         return result;
     }
 }
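A short usage sketch of the helpers above, illustrative only: it assumes an already-initialised ApiConnection named con (built wherever the project performs its API login) and a made-up page id.

    // Illustrative only: fetch the revision list of one page through the WikiUtil helpers.
    Map params = WikiUtil.getRevisionParam("1234");  // "1234" is a made-up page id
    InputStream in = WikiUtil.reqSend(con, params);  // con: an already-initialised ApiConnection
    String json = WikiUtil.streamToString(in);       // raw JSON from the MediaWiki API, ready for parsing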