yacy_search_server/htroot/api/getpageinfo_p.java


import java.io.IOException;
import java.net.MalformedURLException;
import java.util.Set;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import net.yacy.cora.document.MultiProtocolURI;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.services.federated.yacy.CacheStrategy;
import net.yacy.document.parser.html.ContentScraper;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import de.anomic.crawler.RobotsTxtEntry;
import de.anomic.server.serverObjects;
import de.anomic.server.serverSwitch;
public class getpageinfo_p {
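
    /**
     * Servlet behind htroot/api/getpageinfo_p.java. It loads the document named in the
     * "url" parameter and fills the page template with information about it.
     *
     * Supported parameters:
     *   url     - the page to inspect (a missing protocol prefix defaults to http://)
     *   actions - comma-separated list of checks to run: "title", "robots" and/or "oai";
     *             defaults to "title,robots"
     *
     * Filled template fields include: title, desc, lang, favicon, tags_*, links_*,
     * sitelist, filter, robots-allowed, robotsInfo, sitemap and oai.
     *
     * Illustrative request (host, port and template suffix depend on the local YaCy
     * configuration; the "_p" suffix usually means the call requires admin rights):
     *   http://localhost:8090/api/getpageinfo_p.xml?url=example.org&actions=title,robots
     */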
    public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) {
        final Switchboard sb = (Switchboard) env;
        final serverObjects prop = new serverObjects();

        // avoid UNRESOLVED PATTERN
        prop.put("title", "");
        prop.put("desc", "");
        prop.put("lang", "");
prop.put("robots-allowed", "3"); //unknown
prop.put("robotsInfo", ""); //unknown
prop.put("sitemap", "");
prop.put("favicon","");
prop.put("sitelist", "");
prop.put("filter", ".*");
prop.put("oai", 0);
// default actions
String actions = "title,robots";
if (post != null && post.containsKey("url")) {
if (post.containsKey("actions"))
actions=post.get("actions");
String url=post.get("url");
if (url.toLowerCase().startsWith("ftp://")) {
prop.put("robots-allowed", "1"); // ok to crawl
prop.put("robotsInfo", "ftp does not follow robots.txt");
prop.putXML("title", "FTP: " + url);
return prop;
} else if (!url.startsWith("http://") &&
!url.startsWith("https://") &&
!url.startsWith("ftp://") &&
!url.startsWith("smb://") &&
!url.startsWith("file://")) {
url = "http://" + url;
}
if (actions.indexOf("title") >= 0) {
DigestURI u = null;
try {
u = new DigestURI(url);
} catch (final MalformedURLException e) {
Log.logException(e);
}
ContentScraper scraper = null;
if (u != null) try {
scraper = sb.loader.parseResource(u, CacheStrategy.IFEXIST);
} catch (final IOException e) {
Log.logException(e);
// bad things are possible, i.e. that the Server responds with "403 Bad Behavior"
// that should not affect the robots.txt validity
}
if (scraper != null) {
// put the document title
prop.putXML("title", scraper.getTitle());
// put the favicon that belongs to the document
prop.put("favicon", (scraper.getFavicon()==null) ? "" : scraper.getFavicon().toString());
// put keywords
final String list[] = scraper.getKeywords();
int count = 0;
for (final String element: list) {
final String tag = element;
if (!tag.equals("")) {
prop.putXML("tags_"+count+"_tag", tag);
count++;
}
}
prop.put("tags", count);
// put description
prop.putXML("desc", scraper.getDescription());
// put language
final Set<String> languages = scraper.getContentLanguages();
prop.putXML("lang", (languages == null) ? "unknown" : languages.iterator().next());
                    // get links and put them into a semicolon-separated list
                    final Set<MultiProtocolURI> uris = scraper.getAnchors().keySet();
                    final StringBuilder links = new StringBuilder(uris.size() * 80);
                    final StringBuilder filter = new StringBuilder(uris.size() * 40);
                    count = 0;
                    for (final MultiProtocolURI uri : uris) {
                        links.append(';').append(uri.toNormalform(true, false));
                        filter.append('|').append(uri.getProtocol()).append("://").append(uri.getHost()).append(".*");
                        prop.putXML("links_" + count + "_link", uri.toNormalform(true, false));
                        count++;
                    }
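                    // "sitelist" becomes the semicolon-separated list of all anchors found on the page,
                    // "filter" a regular expression matching the hosts of those anchors,
                    // e.g. "http://example.org.*|https://example.net.*" (hosts here are only illustrative)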
prop.put("links", count);
prop.putXML("sitelist", links.length() > 0 ? links.substring(1) : "");
prop.putXML("filter", filter.length() > 0 ? filter.substring(1) : ".*");
}
}
if (actions.indexOf("robots") >= 0) {
try {
final DigestURI theURL = new DigestURI(url);
// determine if crawling of the current URL is allowed
RobotsTxtEntry robotsEntry;
try {
robotsEntry = sb.robots.getEntry(theURL, sb.peers.myBotIDs());
} catch (final IOException e) {
robotsEntry = null;
Log.logException(e);
}
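                    // robots-allowed: 0 = disallowed by robots.txt, 1 = allowed (or no robots.txt entry),
                    // 3 = unknown (the default set above when no check was possible)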
prop.put("robots-allowed", robotsEntry == null ? 1 : robotsEntry.isDisallowed(theURL) ? 0 : 1);
prop.putHTML("robotsInfo", robotsEntry.getInfo());
                    // get the sitemap URL of the domain
                    final MultiProtocolURI sitemapURL = robotsEntry == null ? null : robotsEntry.getSitemap();
                    prop.putXML("sitemap", sitemapURL == null ? "" : sitemapURL.toString());
                } catch (final MalformedURLException e) {
                    Log.logException(e);
                }
            }
if (actions.indexOf("oai") >= 0) {
try {
final DigestURI theURL = new DigestURI(url
+ "?verb=Identify");
final String oairesult = checkOAI(theURL.toString());
prop.put("oai", oairesult == "" ? 0 : 1);
if (oairesult != "") {
prop.putXML("title", oairesult);
}
} catch (final MalformedURLException e) {
}
}
        }
        // return rewrite properties
        return prop;
    }
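
    /**
     * Fetch the given URL (expected to be an OAI-PMH Identify request) and try to
     * extract the repository name from the XML response.
     *
     * @param url the complete request URL, including the "?verb=Identify" query
     * @return the repositoryName reported by the OAI-PMH endpoint, or an empty string on any failure
     */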
    private static String checkOAI(final String url) {
        final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        try {
            final DocumentBuilder builder = factory.newDocumentBuilder();
            return parseXML(builder.parse(url));
        } catch (final ParserConfigurationException ex) {
            Log.logException(ex);
        } catch (final SAXException ex) {
            Log.logException(ex);
        } catch (final IOException ex) {
            Log.logException(ex);
        }
        return "";
    }
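
    /**
     * Scan an OAI-PMH Identify response for the repositoryName element.
     *
     * @param doc the parsed XML document
     * @return the text of the first repositoryName child of an Identify element, or "" if none is found
     */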
    private static String parseXML(final Document doc) {
        String repositoryName = null;
        final NodeList items = doc.getDocumentElement().getElementsByTagName("Identify");
        if (items.getLength() == 0) {
            return "";
        }
        for (int i = 0, n = items.getLength(); i < n; ++i) {
            if (!"Identify".equals(items.item(i).getNodeName())) {
                continue;
            }
            final NodeList currentNodeChildren = items.item(i).getChildNodes();
            for (int j = 0, m = currentNodeChildren.getLength(); j < m; ++j) {
                final Node currentNode = currentNodeChildren.item(j);
if ("repositoryName".equals(currentNode.getNodeName())) {
repositoryName = currentNode.getFirstChild().getNodeValue();
}
            }
            if (repositoryName == null) {
                return "";
            }
        }
        return repositoryName;
    }
}