reduce creation of empty legacy RequestHeader() in situations where null
is acceptable (less work for garbage collection).
This commit is contained in:
reger 2016-12-18 02:38:43 +01:00
parent 87f6631a2a
commit c50e23c495
8 changed files with 29 additions and 20 deletions

View File

@@ -79,7 +79,6 @@ public class push_p {
if ((data == null || data.length == 0) && data64.length() > 0) data = UTF8.getBytes(data64); // for test cases
// create response header
final RequestHeader requestHeader = new RequestHeader();
final ResponseHeader responseHeader = new ResponseHeader(200);
responseHeader.put(HeaderFramework.LAST_MODIFIED, lastModified);
responseHeader.put(HeaderFramework.CONTENT_TYPE, contentType);
@@ -107,7 +106,7 @@ public class push_p {
profile.timezoneOffset());
Response response = new Response(
request,
requestHeader,
null,
responseHeader,
profile,
false, // from cache?

View File

@@ -117,10 +117,13 @@ public class FTPLoader {
if (file.isEmpty()) {
// directory -> get list of files
final RequestHeader requestHeader = new RequestHeader();
RequestHeader requestHeader = null;
if (request.referrerhash() != null) {
final DigestURL u = this.sb.getURL(request.referrerhash());
if (u != null) requestHeader.put(RequestHeader.REFERER, u.toNormalform(true));
if (u != null) {
requestHeader = new RequestHeader();
requestHeader.put(RequestHeader.REFERER, u.toNormalform(true));
}
}
final StringBuilder dirList = ftpClient.dirhtml(path);
@@ -223,10 +226,13 @@ public class FTPLoader {
final Date fileDate = ftpClient.entryDate(path);
// create response header
final RequestHeader requestHeader = new RequestHeader();
RequestHeader requestHeader = null;
if (request.referrerhash() != null) {
final DigestURL refurl = this.sb.getURL(request.referrerhash());
if (refurl != null) requestHeader.put(RequestHeader.REFERER, refurl.toNormalform(true));
if (refurl != null) {
requestHeader = new RequestHeader();
requestHeader.put(RequestHeader.REFERER, refurl.toNormalform(true));
}
}
final ResponseHeader responseHeader = new ResponseHeader(200);
responseHeader.put(HeaderFramework.LAST_MODIFIED, HeaderFramework.formatRFC1123(fileDate));

View File

@@ -62,10 +62,13 @@ public class FileLoader {
DigestURL url = request.url();
if (!url.getProtocol().equals("file")) throw new IOException("wrong protocol for FileLoader: " + url.getProtocol());
RequestHeader requestHeader = new RequestHeader();
RequestHeader requestHeader = null;
if (request.referrerhash() != null) {
DigestURL ur = this.sb.getURL(request.referrerhash());
if (ur != null) requestHeader.put(RequestHeader.REFERER, ur.toNormalform(true));
if (ur != null) {
requestHeader = new RequestHeader();
requestHeader.put(RequestHeader.REFERER, ur.toNormalform(true));
}
}
// process directories: transform them to html with meta robots=noindex (using the ftpc lib)

View File

@@ -284,13 +284,13 @@ public final class HTTPLoader {
throws IOException {
final RequestHeader requestHeader = new RequestHeader();
requestHeader.put(HeaderFramework.USER_AGENT, agent.userAgent);
DigestURL refererURL = null;
if (request.referrerhash() != null) {
refererURL = this.sb.getURL(request.referrerhash());
}
if (refererURL != null) {
requestHeader.put(RequestHeader.REFERER, refererURL.toNormalform(true));
DigestURL refererURL = this.sb.getURL(request.referrerhash());
if (refererURL != null) {
requestHeader.put(RequestHeader.REFERER, refererURL.toNormalform(true));
}
}
requestHeader.put(HeaderFramework.ACCEPT, this.sb.getConfig("crawler.http.accept", DEFAULT_ACCEPT));
requestHeader.put(HeaderFramework.ACCEPT_LANGUAGE,
this.sb.getConfig("crawler.http.acceptLanguage", DEFAULT_LANGUAGE));

View File

@@ -212,7 +212,7 @@ public class Response {
public Response(final Request request, final CrawlProfile profile) {
this.request = request;
// request and response headers may be zero in case that we process surrogates
this.requestHeader = new RequestHeader();
this.requestHeader = null;
this.responseHeader = new ResponseHeader(200);
this.responseHeader.put(HeaderFramework.CONTENT_TYPE, Classification.ext2mime(MultiProtocolURL.getFileExtension(request.url().getFileName()), "text/plain")); // tell parser how to handle the content
this.profile = profile;

View File

@@ -73,10 +73,13 @@ public class SMBLoader {
DigestURL url = request.url();
if (!url.getProtocol().equals("smb")) throw new IOException("wrong loader for SMBLoader: " + url.getProtocol());
RequestHeader requestHeader = new RequestHeader();
RequestHeader requestHeader = null;
if (request.referrerhash() != null) {
DigestURL ur = this.sb.getURL(request.referrerhash());
if (ur != null) requestHeader.put(RequestHeader.REFERER, ur.toNormalform(true));
if (ur != null) {
requestHeader = new RequestHeader();
requestHeader.put(RequestHeader.REFERER, ur.toNormalform(true));
}
}
// process directories: transform them to html with meta robots=noindex (using the ftpc lib)

View File

@@ -115,9 +115,8 @@ public class sitemapParser extends AbstractParser implements Parser {
public static SitemapReader parse(final DigestURL sitemapURL, final ClientIdentification.Agent agent) throws IOException {
// download document
ConcurrentLog.info("SitemapReader", "loading sitemap from " + sitemapURL.toNormalform(true));
final RequestHeader requestHeader = new RequestHeader();
final HTTPClient client = new HTTPClient(agent);
client.setHeader(requestHeader.entrySet());
// client.setHeader(requestHeader.entrySet());
try {
client.GET(sitemapURL.toNormalform(false), false);
if (client.getStatusCode() != 200) {

View File

@@ -566,9 +566,8 @@ public final class yacy {
if (encodedPassword == null) encodedPassword = ""; // not defined
// send 'wget' to web interface
final RequestHeader requestHeader = new RequestHeader();
final HTTPClient con = new HTTPClient(ClientIdentification.yacyInternetCrawlerAgent);
con.setHeader(requestHeader.entrySet());
// con.setHeader(requestHeader.entrySet());
try {
con.GETbytes("http://localhost:"+ port +"/" + path, config.getProperty(SwitchboardConstants.ADMIN_ACCOUNT_USER_NAME,"admin"), encodedPassword, false);
if (con.getStatusCode() > 199 && con.getStatusCode() < 300) {