// getpageinfo_p
// (C) 2011 by Michael Peter Christen; mc@yacy.net, Frankfurt a. M., Germany
// first published 11.11.2011 on http://yacy.net
//
// This is a part of YaCy, a peer-to-peer based web search engine
//
// $LastChangedDate$
// $LastChangedRevision$
// $LastChangedBy$
//
// LICENSE
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

import java.io.IOException;
import java.net.MalformedURLException;
import java.util.Locale;
import java.util.Set;

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;

import net.yacy.cora.document.MultiProtocolURI;
import net.yacy.cora.federate.yacy.CacheStrategy;
import net.yacy.cora.protocol.ClientIdentification;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.crawler.robots.RobotsTxtEntry;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.repository.Blacklist.BlacklistType;
import net.yacy.search.Switchboard;
import net.yacy.server.serverObjects;
import net.yacy.server.serverSwitch;

import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
public class getpageinfo_p {
|
2011-06-13 23:44:03 +02:00
|
|
|
|
2012-07-05 09:14:04 +02:00
|
|
|
public static serverObjects respond(@SuppressWarnings("unused") final RequestHeader header, final serverObjects post, final serverSwitch env) {
|
2009-07-19 22:37:44 +02:00
|
|
|
final Switchboard sb = (Switchboard) env;
|
2008-08-02 14:12:04 +02:00
|
|
|
final serverObjects prop = new serverObjects();
|
2011-06-13 23:44:03 +02:00
|
|
|
|
|
|
|
// avoid UNRESOLVED PATTERN
|
|
|
|
prop.put("title", "");
|
2008-09-19 16:27:44 +02:00
|
|
|
prop.put("desc", "");
|
|
|
|
prop.put("lang", "");
|
2007-10-24 23:38:19 +02:00
|
|
|
prop.put("robots-allowed", "3"); //unknown
|
2011-11-15 01:33:54 +01:00
|
|
|
prop.put("robotsInfo", ""); //unknown
|
2008-09-19 16:27:44 +02:00
|
|
|
prop.put("sitemap", "");
|
2011-06-13 23:44:03 +02:00
|
|
|
prop.put("favicon","");
|
2010-10-01 01:57:58 +02:00
|
|
|
prop.put("sitelist", "");
|
|
|
|
prop.put("filter", ".*");
|
2011-11-15 13:22:19 +01:00
|
|
|
prop.put("oai", 0);
|
2011-06-13 23:44:03 +02:00
|
|
|
|
2008-09-19 16:27:44 +02:00
|
|
|
// default actions
|
2011-11-15 01:22:40 +01:00
|
|
|
String actions = "title,robots";
|
2011-06-13 23:44:03 +02:00
|
|
|
|
2010-10-01 01:57:58 +02:00
|
|
|
if (post != null && post.containsKey("url")) {
|
2011-11-15 01:22:40 +01:00
|
|
|
if (post.containsKey("actions"))
|
2008-06-06 18:01:27 +02:00
|
|
|
actions=post.get("actions");
|
|
|
|
String url=post.get("url");
|
2011-11-15 01:22:40 +01:00
|
|
|
if (url.toLowerCase().startsWith("ftp://")) {
|
2011-11-16 02:03:49 +01:00
|
|
|
prop.put("robots-allowed", "1"); // ok to crawl
|
2011-11-15 01:33:54 +01:00
|
|
|
prop.put("robotsInfo", "ftp does not follow robots.txt");
|
2011-11-15 01:22:40 +01:00
|
|
|
prop.putXML("title", "FTP: " + url);
|
2006-09-09 14:34:24 +02:00
|
|
|
return prop;
|
2010-03-11 16:43:06 +01:00
|
|
|
} else if (!url.startsWith("http://") &&
|
|
|
|
!url.startsWith("https://") &&
|
|
|
|
!url.startsWith("ftp://") &&
|
|
|
|
!url.startsWith("smb://") &&
|
|
|
|
!url.startsWith("file://")) {
|
2006-02-17 21:55:31 +01:00
|
|
|
url = "http://" + url;
|
|
|
|
}
|
2011-11-25 12:23:52 +01:00
|
|
|
if (actions.indexOf("title",0) >= 0) {
|
2009-10-11 02:12:19 +02:00
|
|
|
DigestURI u = null;
|
2006-02-17 21:55:31 +01:00
|
|
|
try {
|
2010-10-26 18:10:20 +02:00
|
|
|
u = new DigestURI(url);
|
2009-07-25 23:38:57 +02:00
|
|
|
} catch (final MalformedURLException e) {
|
2013-07-09 14:28:25 +02:00
|
|
|
ConcurrentLog.logException(e);
|
2009-07-25 23:38:57 +02:00
|
|
|
}
|
2012-01-23 17:27:29 +01:00
|
|
|
net.yacy.document.Document scraper = null;
|
2009-07-25 23:38:57 +02:00
|
|
|
if (u != null) try {
|
2013-08-22 14:23:47 +02:00
|
|
|
ClientIdentification.Agent agent = ClientIdentification.getAgent(post.get("agentName", ClientIdentification.yacyInternetCrawlerAgentName));
|
|
|
|
scraper = sb.loader.loadDocument(u, CacheStrategy.IFEXIST, BlacklistType.CRAWLER, agent);
|
2009-07-25 23:38:57 +02:00
|
|
|
} catch (final IOException e) {
|
2013-07-09 14:28:25 +02:00
|
|
|
ConcurrentLog.logException(e);
|
2011-11-16 02:03:49 +01:00
|
|
|
// bad things are possible, i.e. that the Server responds with "403 Bad Behavior"
|
|
|
|
// that should not affect the robots.txt validity
|
2011-06-13 23:44:03 +02:00
|
|
|
}
|
2009-07-25 23:38:57 +02:00
|
|
|
if (scraper != null) {
|
2011-06-13 23:44:03 +02:00
|
|
|
// put the document title
|
2012-01-23 17:27:29 +01:00
|
|
|
prop.putXML("title", scraper.dc_title());
|
2011-06-13 23:44:03 +02:00
|
|
|
|
2007-06-09 17:22:37 +02:00
|
|
|
// put the favicon that belongs to the document
|
2007-10-24 23:38:19 +02:00
|
|
|
prop.put("favicon", (scraper.getFavicon()==null) ? "" : scraper.getFavicon().toString());
|
2011-06-13 23:44:03 +02:00
|
|
|
|
2007-06-09 17:22:37 +02:00
|
|
|
// put keywords
|
2012-01-23 17:27:29 +01:00
|
|
|
final String list[] = scraper.dc_subject();
|
2008-05-18 14:48:57 +02:00
|
|
|
int count = 0;
|
2011-11-15 01:22:40 +01:00
|
|
|
for (final String element: list) {
|
2011-06-13 23:44:03 +02:00
|
|
|
final String tag = element;
|
|
|
|
if (!tag.equals("")) {
|
2009-07-25 23:38:57 +02:00
|
|
|
prop.putXML("tags_"+count+"_tag", tag);
|
|
|
|
count++;
|
|
|
|
}
|
2006-02-17 21:55:31 +01:00
|
|
|
}
|
2008-05-18 14:48:57 +02:00
|
|
|
prop.put("tags", count);
|
2011-06-13 23:44:03 +02:00
|
|
|
// put description
|
2013-07-30 12:48:57 +02:00
|
|
|
prop.putXML("desc", scraper.dc_description().length > 0 ? scraper.dc_description()[0] : "");
|
2008-09-20 00:19:11 +02:00
|
|
|
// put language
|
2011-06-13 23:44:03 +02:00
|
|
|
final Set<String> languages = scraper.getContentLanguages();
|
2012-08-31 10:30:43 +02:00
|
|
|
prop.putXML("lang", (languages == null || languages.size() == 0) ? "unknown" : languages.iterator().next());
|
2011-06-13 23:44:03 +02:00
|
|
|
|
2010-09-30 14:50:34 +02:00
|
|
|
// get links and put them into a semicolon-separated list
|
2013-02-22 15:45:15 +01:00
|
|
|
final Set<DigestURI> uris = scraper.getAnchors().keySet();
|
2011-06-13 23:44:03 +02:00
|
|
|
final StringBuilder links = new StringBuilder(uris.size() * 80);
|
|
|
|
final StringBuilder filter = new StringBuilder(uris.size() * 40);
|
2010-09-30 14:50:34 +02:00
|
|
|
count = 0;
|
2013-02-22 15:45:15 +01:00
|
|
|
for (final DigestURI uri: uris) {
|
2011-11-22 01:03:20 +01:00
|
|
|
if (uri == null) continue;
|
2012-10-10 11:46:22 +02:00
|
|
|
links.append(';').append(uri.toNormalform(true));
|
2010-09-30 14:50:34 +02:00
|
|
|
filter.append('|').append(uri.getProtocol()).append("://").append(uri.getHost()).append(".*");
|
2012-10-10 11:46:22 +02:00
|
|
|
prop.putXML("links_" + count + "_link", uri.toNormalform(true));
|
2010-09-30 14:50:34 +02:00
|
|
|
count++;
|
|
|
|
}
|
|
|
|
prop.put("links", count);
|
|
|
|
prop.putXML("sitelist", links.length() > 0 ? links.substring(1) : "");
|
|
|
|
prop.putXML("filter", filter.length() > 0 ? filter.substring(1) : ".*");
|
2005-12-29 18:45:50 +01:00
|
|
|
}
|
2006-02-17 21:55:31 +01:00
|
|
|
}
|
2011-11-25 12:23:52 +01:00
|
|
|
if (actions.indexOf("robots",0) >= 0) {
|
2006-02-17 21:55:31 +01:00
|
|
|
try {
|
2010-10-26 18:10:20 +02:00
|
|
|
final DigestURI theURL = new DigestURI(url);
|
2011-06-13 23:44:03 +02:00
|
|
|
|
2007-05-06 11:52:04 +02:00
|
|
|
// determine if crawling of the current URL is allowed
|
2013-08-22 14:23:47 +02:00
|
|
|
ClientIdentification.Agent agent = ClientIdentification.getAgent(post.get("agentName", ClientIdentification.yacyInternetCrawlerAgentName));
|
|
|
|
sb.robots.ensureExist(theURL, agent, true);
|
|
|
|
RobotsTxtEntry robotsEntry = sb.robots.getEntry(theURL, agent);
|
2011-04-04 01:39:45 +02:00
|
|
|
prop.put("robots-allowed", robotsEntry == null ? 1 : robotsEntry.isDisallowed(theURL) ? 0 : 1);
|
2012-01-19 23:07:15 +01:00
|
|
|
prop.putHTML("robotsInfo", robotsEntry == null ? "" : robotsEntry.getInfo());
|
2011-04-04 01:39:45 +02:00
|
|
|
|
2007-05-06 11:52:04 +02:00
|
|
|
// get the sitemap URL of the domain
|
2011-04-04 01:39:45 +02:00
|
|
|
final MultiProtocolURI sitemapURL = robotsEntry == null ? null : robotsEntry.getSitemap();
|
|
|
|
prop.putXML("sitemap", sitemapURL == null ? "" : sitemapURL.toString());
|
2011-11-15 01:22:40 +01:00
|
|
|
} catch (final MalformedURLException e) {
|
2013-07-09 14:28:25 +02:00
|
|
|
ConcurrentLog.logException(e);
|
2011-11-15 01:22:40 +01:00
|
|
|
}
|
2006-02-17 21:55:31 +01:00
|
|
|
}
|
2011-11-25 12:23:52 +01:00
|
|
|
if (actions.indexOf("oai",0) >= 0) {
|
2011-11-15 13:22:19 +01:00
|
|
|
try {
|
|
|
|
final DigestURI theURL = new DigestURI(url
|
|
|
|
+ "?verb=Identify");
|
|
|
|
|
2011-11-16 02:03:49 +01:00
|
|
|
final String oairesult = checkOAI(theURL.toString());
|
2011-11-15 13:22:19 +01:00
|
|
|
|
|
|
|
prop.put("oai", oairesult == "" ? 0 : 1);
|
|
|
|
|
|
|
|
if (oairesult != "") {
|
|
|
|
prop.putXML("title", oairesult);
|
|
|
|
}
|
|
|
|
|
|
|
|
} catch (final MalformedURLException e) {
|
|
|
|
}
|
|
|
|
}
|
2011-06-13 23:44:03 +02:00
|
|
|
|
2005-12-29 18:45:50 +01:00
|
|
|
}
|
|
|
|
// return rewrite properties
|
|
|
|
return prop;
|
|
|
|
}
|
2011-11-16 02:03:49 +01:00
|
|
|
|
2011-11-15 13:22:19 +01:00
|
|
|
private static String checkOAI(final String url) {
|
|
|
|
final DocumentBuilderFactory factory = DocumentBuilderFactory
|
|
|
|
.newInstance();
|
|
|
|
try {
|
|
|
|
final DocumentBuilder builder = factory.newDocumentBuilder();
|
|
|
|
return parseXML(builder.parse(url));
|
|
|
|
} catch (final ParserConfigurationException ex) {
|
2013-07-09 14:28:25 +02:00
|
|
|
ConcurrentLog.logException(ex);
|
2011-11-15 13:22:19 +01:00
|
|
|
} catch (final SAXException ex) {
|
2013-07-09 14:28:25 +02:00
|
|
|
ConcurrentLog.logException(ex);
|
2011-11-15 13:22:19 +01:00
|
|
|
} catch (final IOException ex) {
|
2013-07-09 14:28:25 +02:00
|
|
|
ConcurrentLog.logException(ex);
|
2011-11-15 13:22:19 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
return "";
|
|
|
|
}
|
2011-11-16 02:03:49 +01:00
|
|
|
|
2011-11-15 13:22:19 +01:00
|
|
|
private static String parseXML(final Document doc) {
|
|
|
|
|
|
|
|
String repositoryName = null;
|
|
|
|
|
|
|
|
final NodeList items = doc.getDocumentElement().getElementsByTagName(
|
|
|
|
"Identify");
|
|
|
|
if (items.getLength() == 0) {
|
|
|
|
return "";
|
|
|
|
}
|
|
|
|
|
|
|
|
for (int i = 0, n = items.getLength(); i < n; ++i) {
|
|
|
|
|
|
|
|
if (!"Identify".equals(items.item(i).getNodeName()))
|
|
|
|
continue;
|
|
|
|
|
|
|
|
final NodeList currentNodeChildren = items.item(i).getChildNodes();
|
|
|
|
|
|
|
|
for (int j = 0, m = currentNodeChildren.getLength(); j < m; ++j) {
|
|
|
|
final Node currentNode = currentNodeChildren.item(j);
|
|
|
|
if ("repositoryName".equals(currentNode.getNodeName())) {
|
|
|
|
repositoryName = currentNode.getFirstChild().getNodeValue();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if (repositoryName == null) {
|
|
|
|
return "";
|
|
|
|
}
|
|
|
|
|
|
|
|
}
|
|
|
|
return repositoryName;
|
|
|
|
}
|
2011-11-16 02:03:49 +01:00
|
|
|
|
2011-06-13 23:44:03 +02:00
|
|
|
|
2005-12-29 18:45:50 +01:00
|
|
|
}