This article collects code examples of the Java method de.tudarmstadt.ukp.wikipedia.api.Wikipedia.getMetaData(), showing how Wikipedia.getMetaData() is used in practice. The examples are extracted from selected open-source projects on platforms such as GitHub, Stack Overflow, and Maven, and should serve as useful references. Details of the Wikipedia.getMetaData() method:
Package: de.tudarmstadt.ukp.wikipedia.api
Class: Wikipedia
Method: getMetaData
Description: none available.
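Since no upstream description is available, here is a minimal, self-contained sketch of how a Wikipedia instance is typically constructed and its MetaData queried. The connection parameters (host, database name, user, password) are placeholders and must be adapted to an existing JWPL database; the MetaData accessors printed here are the ones used throughout the examples below.

import de.tudarmstadt.ukp.wikipedia.api.DatabaseConfiguration;
import de.tudarmstadt.ukp.wikipedia.api.MetaData;
import de.tudarmstadt.ukp.wikipedia.api.WikiConstants.Language;
import de.tudarmstadt.ukp.wikipedia.api.Wikipedia;
import de.tudarmstadt.ukp.wikipedia.api.exception.WikiApiException;

public class GetMetaDataExample {
    public static void main(String[] args) throws WikiApiException {
        // Placeholder connection settings; point these at your own JWPL database.
        DatabaseConfiguration dbConfig = new DatabaseConfiguration();
        dbConfig.setHost("localhost");
        dbConfig.setDatabase("wikiapi_en");
        dbConfig.setUser("user");
        dbConfig.setPassword("password");
        dbConfig.setLanguage(Language.english);

        Wikipedia wiki = new Wikipedia(dbConfig);
        MetaData metaData = wiki.getMetaData();

        // Dump the statistics that the snippets below rely on.
        System.out.println("Language: " + metaData.getLanguage());
        System.out.println("Version: " + metaData.getVersion());
        System.out.println("Pages: " + metaData.getNumberOfPages());
        System.out.println("Redirect pages: " + metaData.getNumberOfRedirectPages());
        System.out.println("Disambiguation pages: " + metaData.getNumberOfDisambiguationPages());
        System.out.println("Categories: " + metaData.getNumberOfCategories());
    }
}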
// Counts articles: all pages minus redirect pages.
public int getNumberOfEntities() {
    long numberOfEntities = wiki.getMetaData().getNumberOfPages() - wiki.getMetaData().getNumberOfRedirectPages();
    return new Long(numberOfEntities).intValue();
}

// A variant that counts categories rather than articles.
public int getNumberOfEntities() {
    return new Long(wiki.getMetaData().getNumberOfCategories()).intValue();
}
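Both variants convert the long count via the deprecated new Long(...).intValue() boxing, which silently truncates values that do not fit into an int. A hedged alternative (not from the quoted projects) is Math.toIntExact, available since Java 8, which fails fast on overflow instead:

public int getNumberOfEntities() {
    long numberOfEntities = wiki.getMetaData().getNumberOfPages()
            - wiki.getMetaData().getNumberOfRedirectPages();
    // Throws ArithmeticException if the value does not fit into an int.
    return Math.toIntExact(numberOfEntities);
}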
Source: dkpro/dkpro-jwpl
public void getCategorizedArticles(Wikipedia pWiki, CategoryGraph catGraph) throws WikiApiException {
    double startTime = System.currentTimeMillis();
    int numberOfCategorizedArticles = getNumberOfCategorizedArticles(pWiki, catGraph);
    double categorizedArticlesRatio = (double) numberOfCategorizedArticles / (double) pWiki.getMetaData().getNumberOfPages();
    logger.info("Categorized articles: {}", numberOfCategorizedArticles);
    logger.info("All articles: {}", pWiki.getMetaData().getNumberOfPages());
    logger.info("Ratio: {}", categorizedArticlesRatio);
    // The elapsed time is divided by 1000, so it is in seconds, not milliseconds.
    double elapsedSeconds = (System.currentTimeMillis() - startTime) / 1000.0;
    logger.debug("{} s", elapsedSeconds);
}
Source: dkpro/dkpro-similarity
public File getSerializedCacheFile(Wikipedia wiki) throws WikiApiException {
    MetaData metaData = wiki.getMetaData();
    StringBuilder sb = new StringBuilder();
    sb.append("WikipediaInlinkCache_");
    sb.append(metaData.getLanguage());
    sb.append("_");
    sb.append(metaData.getVersion());
    return new File(sb.toString());
}
Source: dkpro/dkpro-similarity
public WikiLinkComparator(Wikipedia pWiki, boolean useCache, boolean useOutboundLinks) {
    super(pWiki, Measure.WikiLinkMeasure, CombinationStrategy.Best);
    this.numberOfArticles = pWiki.getMetaData().getNumberOfPages()
            - pWiki.getMetaData().getNumberOfRedirectPages()
            - pWiki.getMetaData().getNumberOfDisambiguationPages();
    if (useCache) {
        this.cache = new WikiLinkCache(pWiki);
    }
    this.useOutboundLinks = useOutboundLinks;
}

public WikiLinkComparator(Wikipedia pWiki, boolean useCache) {
    super(pWiki, Measure.WikiLinkMeasure, CombinationStrategy.Best);
    this.numberOfArticles = pWiki.getMetaData().getNumberOfPages()
            - pWiki.getMetaData().getNumberOfRedirectPages()
            - pWiki.getMetaData().getNumberOfDisambiguationPages();
    if (useCache) {
        this.cache = new WikiLinkCache(pWiki);
    }
}
Source: dkpro/dkpro-jwpl
/**
 * Deletes the root path map file.
 * @throws WikiApiException Thrown if errors occurred.
 */
public void deleteRootPathMap() throws WikiApiException {
    File rootPathFile = new File(this.rootPathMapFilename + "_" + wiki.getLanguage() + "_" + wiki.getMetaData().getVersion());
    rootPathFile.delete();
}
Source: dkpro/dkpro-jwpl
/**
 * Articles in Wikipedia may be tagged with multiple categories.
 * It may be interesting to know how many articles have at least one category in common.
 * Such articles would have a very high semantic relatedness even if they share only a minor category.
 * @param pWiki The Wikipedia object.
 * @param catGraph The category graph.
 * @throws WikiApiException Thrown if errors occurred.
 */
public void getOverlapping(Wikipedia pWiki, CategoryGraph catGraph) throws WikiApiException {
    double startTime = System.currentTimeMillis();
    int articlesWithOverlappingCategories = getArticlesWithOverlappingCategories(pWiki, catGraph);
    double overlappingCategoriesRatio = (double) articlesWithOverlappingCategories / (double) pWiki.getMetaData().getNumberOfPages();
    logger.info("{} - {} - {}", articlesWithOverlappingCategories, pWiki.getMetaData().getNumberOfPages(), overlappingCategoriesRatio);
    // The elapsed time is divided by 1000, so it is in seconds, not milliseconds.
    double elapsedSeconds = (System.currentTimeMillis() - startTime) / 1000.0;
    logger.debug("{} s", elapsedSeconds);
}
public String getResourceVersion() {
    StringBuilder sb = new StringBuilder();
    String version = "";
    String language = "";
    try {
        MetaData metaData = wiki.getMetaData();
        // Read the language defensively: calling toString() on a null
        // language would throw before the null check below is reached.
        language = metaData.getLanguage() != null ? metaData.getLanguage().toString() : null;
        version = metaData.getVersion();
        if (language == null) {
            language = "unknown-language";
        }
        if (version == null) {
            version = "unknown-version";
        }
    } catch (WikiApiException e) {
        language = "unknown-language";
        version = "unknown-version";
    }
    sb.append(language);
    sb.append("_");
    sb.append(version);
    return sb.toString();
}
Source: dkpro/dkpro-jwpl
/**
 * Computes the depth of the category graph, i.e. the maximum path length starting with the root node.
 * @return The depth of the hierarchy.
 * @throws WikiApiException Thrown if errors occurred.
 */
private double computeDepth() throws WikiApiException {
    Category root = wiki.getMetaData().getMainCategory();
    if (root == null) {
        logger.error("There is no root node for this wiki. Check the parameter that provides the name of the root node.");
        return 0.0;
    }
    // Test whether the root category is in this graph.
    if (!graph.containsVertex(root.getPageId())) {
        logger.error("The root node is not part of this graph. Cannot compute depth of this graph. Setting depth to 0.0");
        return 0.0;
    }
    double maxPathLength = 0.0;
    double[] returnValues = computeShortestPathLenghts(root.getPageId(), 0.0, maxPathLength, new HashSet<Integer>());
    maxPathLength = returnValues[1];
    return maxPathLength;
}
Source: dkpro/dkpro-jwpl
public double getAveragePathLengthFromRoot(Wikipedia pWiki, CategoryGraph connectedCatGraph) throws WikiApiException {
    // get root node
    Category rootCategory = pWiki.getMetaData().getMainCategory();
    int root = rootCategory.getPageId();
    int pathLengthSum = computeShortestPathLenghts(root, connectedCatGraph);
    return (double) pathLengthSum / (connectedCatGraph.getGraph().vertexSet().size() - 1);
}

@Override
public Entity getRoot() throws LexicalSemanticResourceException {
    try {
        String rootTitle = this.wiki.getMetaData().getMainCategory().getTitle().getWikiStyleTitle();
        Map<String, String> rootLexemes = new HashMap<String, String>();
        rootLexemes.put(rootTitle, Entity.UNKNOWN_SENSE);
        try {
            return this.getEntity(rootLexemes, Entity.UNKNOWN_POS);
        } catch (UnsupportedOperationException e) {
            return null;
        }
    } catch (WikiTitleParsingException e) {
        throw new LexicalSemanticResourceException(e);
    } catch (WikiApiException e) {
        throw new LexicalSemanticResourceException(e);
    }
}
Source: dkpro/dkpro-core
@Override
public void initialize(UimaContext context)
        throws ResourceInitializationException {
    super.initialize(context);
    MetaData md = wiki.getMetaData();
    this.nrOfArticles = md.getNumberOfPages() - md.getNumberOfDisambiguationPages()
            - md.getNumberOfRedirectPages();
    this.currentArticleIndex = 0;
    RevisionAPIConfiguration revConfig = new RevisionAPIConfiguration(dbconfig);
    try {
        revApi = new RevisionApi(revConfig);
    }
    catch (WikiApiException e) {
        throw new ResourceInitializationException(e);
    }
    idIter = wiki.getPageIds().iterator();
}
Source: dkpro/dkpro-similarity

Category root = wiki.getMetaData().getMainCategory();