mirror of
https://github.com/yacy/yacy_search_server.git
synced 2024-09-19 00:01:41 +02:00
Customized names for Threads still using the default "Thread-n" pattern.
This makes thread monitoring easier to read.
This commit is contained in:
parent
c0379c3cd3
commit
f0639d810c
|
@ -324,7 +324,7 @@ public class FederateSearchManager {
|
|||
|
||||
// job to iterate through Solr index to find links to opensearchdescriptions
|
||||
// started as background job as connect timeouts may cause it run a long time
|
||||
final Thread job = new Thread() {
|
||||
final Thread job = new Thread(FederateSearchManager.class.getSimpleName() + ".discoverFromSolrIndex") {
|
||||
@Override
|
||||
public void run() {
|
||||
try {
|
||||
|
|
|
@ -56,7 +56,7 @@ public class Peers extends TreeMap<byte[], Peer> implements Serializable {
|
|||
public void refresh() {
|
||||
if (System.currentTimeMillis() - this.lastBootstrap < 60000) return;
|
||||
lastBootstrap = System.currentTimeMillis();
|
||||
new Thread() {
|
||||
new Thread("Peers.refresh") {
|
||||
@Override
|
||||
public void run() {
|
||||
String[] peers = bootstrapList(select(false, false));
|
||||
|
@ -81,7 +81,7 @@ public class Peers extends TreeMap<byte[], Peer> implements Serializable {
|
|||
List<Thread> t = new ArrayList<Thread>();
|
||||
for (Integer pn: s) {
|
||||
final String bp = peers[pn.intValue()];
|
||||
Thread t0 = new Thread() {
|
||||
Thread t0 = new Thread("Peers.bootstrap-" + bp) {
|
||||
@Override
|
||||
public void run() {
|
||||
Peers ps;
|
||||
|
|
|
@ -91,7 +91,7 @@ public class HostBalancer implements Balancer {
|
|||
* return immediately (as large unfinished crawls may take longer to load)
|
||||
*/
|
||||
private void init() {
|
||||
Thread t = new Thread() {
|
||||
Thread t = new Thread("HostBalancer.init") {
|
||||
@Override
|
||||
public void run() {
|
||||
final String[] hostlist = hostsPath.list();
|
||||
|
|
|
@ -211,6 +211,7 @@ public class Transactions {
|
|||
Thread t = new Thread(){
|
||||
@Override
|
||||
public void run() {
|
||||
this.setName("Transactions.store");
|
||||
executorRunning.incrementAndGet();
|
||||
try {
|
||||
Html2Image.writeWkhtmltopdf(urls, proxy, ClientIdentification.browserAgent.userAgent, acceptLanguage, pdfPath);
|
||||
|
|
|
@ -63,6 +63,7 @@ public class RSSLoader extends Thread {
|
|||
private final ClientIdentification.Agent agent;
|
||||
|
||||
public RSSLoader(final Switchboard sb, final DigestURL urlf, final Map<String, Pattern> collections, final ClientIdentification.Agent agent) {
|
||||
super("RSSLoader(" + urlf != null ? urlf.toNormalform(true) : "" + ")");
|
||||
this.sb = sb;
|
||||
this.urlf = urlf;
|
||||
this.collections = collections;
|
||||
|
|
|
@ -47,6 +47,7 @@ public class SitemapImporter extends Thread {
|
|||
private final Switchboard sb;
|
||||
|
||||
public SitemapImporter(final Switchboard sb, final DigestURL sitemapURL, final CrawlProfile profileEntry) {
|
||||
super("SitemapImporter(" + sitemapURL != null ? sitemapURL.toNormalform(false) : "" + ")");
|
||||
assert sitemapURL != null;
|
||||
this.sb = sb;
|
||||
this.siteMapURL = sitemapURL;
|
||||
|
|
|
@ -384,7 +384,7 @@ public class RobotsTxt {
|
|||
final BlockingQueue<CheckEntry> out = new LinkedBlockingQueue<CheckEntry>();
|
||||
final Thread[] threads = new Thread[concurrency];
|
||||
for (int i = 0; i < concurrency; i++) {
|
||||
threads[i] = new Thread() {
|
||||
threads[i] = new Thread("RobotsTxt.massCrawlCheck-" + i) {
|
||||
@Override
|
||||
public void run() {
|
||||
DigestURL u;
|
||||
|
|
|
@ -399,6 +399,11 @@ public class DidYouMean {
|
|||
* <b>Note:</b> the loop runs (alphabet.length * len) tests.
|
||||
*/
|
||||
public class ChangingOneLetter extends Thread {
|
||||
|
||||
/** Names this suggestion-producer thread "ChangingOneLetter" so it is identifiable in thread monitoring. */
public ChangingOneLetter() {
|
||||
super("ChangingOneLetter");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void run() {
|
||||
char m;
|
||||
|
@ -421,6 +426,10 @@ public class DidYouMean {
|
|||
* <b>Note:</b> the loop runs (len) tests.
|
||||
*/
|
||||
private class DeletingOneLetter extends Thread {
|
||||
/** Names this suggestion-producer thread "DeletingOneLetter" so it is identifiable in thread monitoring. */
public DeletingOneLetter() {
|
||||
super("DeletingOneLetter");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void run() {
|
||||
for (int i = 0; i < DidYouMean.this.wordLen; i++) {
|
||||
|
@ -437,6 +446,10 @@ public class DidYouMean {
|
|||
* <b>Note:</b> the loop runs (alphabet.length * len) tests.
|
||||
*/
|
||||
private class AddingOneLetter extends Thread {
|
||||
/** Names this suggestion-producer thread "AddingOneLetter" so it is identifiable in thread monitoring. */
public AddingOneLetter() {
|
||||
super("AddingOneLetter");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void run() {
|
||||
for (int i = 0; i <= DidYouMean.this.wordLen; i++) {
|
||||
|
@ -455,6 +468,10 @@ public class DidYouMean {
|
|||
* <b>Note:</b> the loop runs (len-1) tests.
|
||||
*/
|
||||
private class ReversingTwoConsecutiveLetters extends Thread {
|
||||
/** Names this suggestion-producer thread "ReversingTwoConsecutiveLetters" so it is identifiable in thread monitoring. */
public ReversingTwoConsecutiveLetters() {
|
||||
super("ReversingTwoConsecutiveLetters");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void run() {
|
||||
for (int i = 0; i < DidYouMean.this.wordLen - 1; i++) {
|
||||
|
|
|
@ -95,6 +95,7 @@ public class MediawikiImporter extends Thread implements Importer {
|
|||
|
||||
|
||||
public MediawikiImporter(final File sourcefile, final File targetdir) {
|
||||
super("MediawikiImporter(" + sourcefile != null ? sourcefile.getAbsolutePath() : "null sourcefile" +")");
|
||||
this.sourcefile = sourcefile;
|
||||
this.docsize = sourcefile.length();
|
||||
this.approxdocs = (int) (this.docsize * docspermbinxmlbz2 / 1024L / 1024L);
|
||||
|
@ -296,7 +297,9 @@ public class MediawikiImporter extends Thread implements Importer {
|
|||
public static class indexMaker extends Thread {
|
||||
|
||||
File mediawikixml;
|
||||
|
||||
/**
 * Worker thread producing an index for the given MediaWiki XML dump file.
 * The thread is named after the dump file so it is identifiable in thread monitoring.
 *
 * @param mediawikixml the MediaWiki XML dump file to index
 */
public indexMaker(final File mediawikixml) {
    // Parentheses around the ternary are required: '+' binds tighter than '?:',
    // so without them the condition parses as ("MediawikiImporter.indexMaker " + mediawikixml) != null,
    // which is always true and never yields the intended fallback name.
    super("MediawikiImporter.indexMaker " + (mediawikixml != null ? mediawikixml.getName() : ""));
    this.mediawikixml = mediawikixml;
}
|
||||
|
||||
|
|
|
@ -62,6 +62,7 @@ public class OAIPMHImporter extends Thread implements Importer, Comparable<OAIPM
|
|||
private final ClientIdentification.Agent agent;
|
||||
|
||||
public OAIPMHImporter(final LoaderDispatcher loader, final ClientIdentification.Agent agent, final DigestURL source) {
|
||||
super(OAIPMHImporter.class.getSimpleName());
|
||||
this.agent = agent;
|
||||
this.serialNumber = importerCounter--;
|
||||
this.loader = loader;
|
||||
|
|
|
@ -151,6 +151,7 @@ public class sitemapParser extends AbstractParser implements Parser {
|
|||
private final BlockingQueue<URLEntry> queue;
|
||||
private final ClientIdentification.Agent agent;
|
||||
public SitemapReader(final InputStream source, final ClientIdentification.Agent agent) {
|
||||
super(SitemapReader.class.getSimpleName());
|
||||
this.source = source;
|
||||
this.queue = new ArrayBlockingQueue<URLEntry>(10000);
|
||||
this.agent = agent;
|
||||
|
|
|
@ -125,6 +125,11 @@ public final class Tray {
|
|||
}
|
||||
|
||||
private class TrayAnimation extends Thread {
|
||||
|
||||
/** Names this animation thread "TrayAnimation" so it is identifiable in thread monitoring. */
public TrayAnimation() {
|
||||
super(TrayAnimation.class.getSimpleName());
|
||||
}
|
||||
|
||||
int ic = 0;
|
||||
@Override
|
||||
public void run() {
|
||||
|
|
|
@ -81,6 +81,7 @@ public class Switchboard {
|
|||
public static class InfoUpdater extends Thread {
|
||||
long steptime;
|
||||
/**
 * Periodic updater thread, named "Switchboard.InfoUpdater" for thread monitoring.
 *
 * @param steptime interval between update steps (presumably milliseconds — confirm against run())
 */
public InfoUpdater(long steptime) {
|
||||
super("Switchboard.InfoUpdater");
|
||||
this.steptime = steptime;
|
||||
}
|
||||
@Override
|
||||
|
@ -105,7 +106,7 @@ public class Switchboard {
|
|||
private final Semaphore shutdownSemaphore;
|
||||
|
||||
/**
 * JVM shutdown hook coordinating a clean shutdown with the main thread.
 * The hook thread is named so it is identifiable in thread dumps.
 *
 * @param mainThread the application main thread to coordinate with
 * @param semaphore semaphore used to signal shutdown completion
 */
public shutdownHookThread(final Thread mainThread, Semaphore semaphore) {
    // A constructor may contain at most one explicit super(...) invocation;
    // the plain super() call was removed in favor of the naming variant.
    super("Switchboard.shutdownHookThread");
    this.mainThread = mainThread;
    this.shutdownSemaphore = semaphore;
}
|
||||
|
|
|
@ -62,7 +62,7 @@ abstract public class AbstractRemoteHandler extends ConnectHandler implements Ha
|
|||
|
||||
// Add some other known local host names
|
||||
// The remote DNS sometimes takes very long when it is waiting for timeout, therefore we do this concurrently
|
||||
new Thread() {
|
||||
new Thread(AbstractRemoteHandler.class.getSimpleName() + ".doStart") {
|
||||
@Override
|
||||
public void run() {
|
||||
for (InetAddress localInetAddress : Domains.myPublicIPv4()) {
|
||||
|
|
|
@ -1223,7 +1223,7 @@ public class YaCyDefaultServlet extends HttpServlet {
|
|||
Thread[] p = new Thread[t];
|
||||
for (int j = 0; j < t; j++) {
|
||||
files.put(POISON);
|
||||
p[j] = new Thread() {
|
||||
p[j] = new Thread("YaCyDefaultServlet.parseMultipart-" + j) {
|
||||
@Override
|
||||
public void run() {
|
||||
Map.Entry<String, byte[]> job;
|
||||
|
|
|
@ -503,6 +503,7 @@ public class WordReferenceVars extends AbstractReference implements WordReferenc
|
|||
private long maxtime;
|
||||
private final boolean local;
|
||||
private TransformDistributor(final ReferenceContainer<WordReference> container, final BlockingQueue<WordReferenceVars> out, final long maxtime, final boolean local) {
|
||||
super("WordReferenceVars.TransformDistributor");
|
||||
this.container = container;
|
||||
this.out = out;
|
||||
this.maxtime = maxtime;
|
||||
|
@ -556,6 +557,7 @@ public class WordReferenceVars extends AbstractReference implements WordReferenc
|
|||
private final boolean local;
|
||||
|
||||
private TransformWorker(final BlockingQueue<WordReferenceVars> out, final long maxtime, final boolean local) {
|
||||
super("WordReferenceVars.TransformWorker");
|
||||
this.in = new LinkedBlockingQueue<Row.Entry>();
|
||||
this.out = out;
|
||||
this.maxtime = maxtime;
|
||||
|
|
|
@ -43,6 +43,7 @@ public class ConsoleInterface extends Thread {
|
|||
|
||||
|
||||
private ConsoleInterface(final InputStream stream, final ConcurrentLog log) {
|
||||
super("ConsoleInterface");
|
||||
this.log = log;
|
||||
this.stream = stream;
|
||||
// block reading {@see getOutput()}
|
||||
|
|
|
@ -965,7 +965,7 @@ public final class FileUtils {
|
|||
* @param concurrent if this shall run concurrently
|
||||
*/
|
||||
public static void checkCharset(final File file, final String givenCharset, final boolean concurrent) {
|
||||
Thread t = new Thread() {
|
||||
Thread t = new Thread("FileUtils.checkCharset") {
|
||||
@Override
|
||||
public void run() {
|
||||
try {
|
||||
|
|
|
@ -821,6 +821,7 @@ public final class Protocol {
|
|||
* @param storeDocs solr documents collection to put to segment
|
||||
*/
|
||||
/**
 * Worker thread writing metadata nodes to the local index; named for thread monitoring.
 *
 * @param segment target index segment
 * @param storeDocs solr documents collection to put to segment
 */
public WriteMetadataNodeToLocalIndexThread(Segment segment, Collection<URIMetadataNode> storeDocs) {
|
||||
super("WriteMetadataNodeToLocalIndexThread");
|
||||
this.segment = segment;
|
||||
this.storeDocs = storeDocs;
|
||||
}
|
||||
|
@ -1282,6 +1283,7 @@ public final class Protocol {
|
|||
* @param docs solr documents collection to put to segment
|
||||
*/
|
||||
/**
 * Worker thread writing solr documents to the local index; named for thread monitoring.
 *
 * @param segment target index segment
 * @param docs solr documents collection to put to segment
 */
public WriteToLocalIndexThread(Segment segment, Collection<SolrInputDocument> docs) {
|
||||
super("WriteToLocalIndexThread");
|
||||
this.segment = segment;
|
||||
this.docs = docs;
|
||||
}
|
||||
|
|
|
@ -1098,7 +1098,7 @@ public final class SeedDB implements AlternativeDomainNames {
|
|||
public void loadSeedListConcurrently(final String seedListFileURL, final AtomicInteger scc, final int timeout, final boolean checkAge) {
|
||||
// uses the superseed to initialize the database with known seeds
|
||||
|
||||
Thread seedLoader = new Thread() {
|
||||
Thread seedLoader = new Thread("SeedDB.loadSeedListConcurrently") {
|
||||
@Override
|
||||
public void run() {
|
||||
// load the seed list
|
||||
|
|
|
@ -82,6 +82,7 @@ public class OSMTile {
|
|||
RasterPlotter m;
|
||||
int xt, yt, xc, yc, z;
|
||||
/**
 * Tile-drawing worker; the thread name embeds xt/yt so individual tile jobs
 * are distinguishable in thread monitoring.
 *
 * @param m raster target to draw on
 * @param xt tile x index (used in the thread name)
 * @param yt tile y index (used in the thread name)
 * @param xc presumably pixel/center x coordinate — confirm against run()
 * @param yc presumably pixel/center y coordinate — confirm against run()
 * @param z presumably the zoom level — confirm against run()
 */
public Place(final RasterPlotter m, final int xt, final int yt, final int xc, final int yc, final int z) {
|
||||
super("OSMTile.Place(" + xt + "," + yt+ ")");
|
||||
this.m = m; this.xt = xt; this.yt = yt; this.xc = xc; this.yc = yc; this.z = z;
|
||||
}
|
||||
@Override
|
||||
|
|
|
@ -610,6 +610,7 @@ public final class LoaderDispatcher {
|
|||
private final ClientIdentification.Agent agent;
|
||||
|
||||
public Loader(final DigestURL url, final File cache, final int maxFileSize, final CacheStrategy cacheStrategy, BlacklistType blacklistType, final ClientIdentification.Agent agent) {
|
||||
super("LoaderDispatcher.Loader");
|
||||
this.url = url;
|
||||
this.cache = cache;
|
||||
this.maxFileSize = maxFileSize;
|
||||
|
|
|
@ -28,6 +28,7 @@ public class Shutdown extends Thread {
|
|||
private final String reason;
|
||||
|
||||
public Shutdown(final Switchboard sb, final long delay, final String reason) {
|
||||
super(Shutdown.class.getSimpleName());
|
||||
this.sb = sb;
|
||||
this.delay = delay;
|
||||
this.reason = reason;
|
||||
|
|
|
@ -2045,7 +2045,7 @@ public final class Switchboard extends serverSwitch {
|
|||
assert this.crawlStacker != null;
|
||||
Thread[] indexer = new Thread[concurrency];
|
||||
for (int t = 0; t < concurrency; t++) {
|
||||
indexer[t] = new Thread() {
|
||||
indexer[t] = new Thread("Switchboard.processSurrogate-" + t) {
|
||||
@Override
|
||||
public void run() {
|
||||
VocabularyScraper scraper = new VocabularyScraper();
|
||||
|
@ -3145,6 +3145,7 @@ public final class Switchboard extends serverSwitch {
|
|||
Thread t = new Thread() {
|
||||
@Override
|
||||
public void run() {
|
||||
this.setName("Switchboard.stackURLs");
|
||||
String failreason;
|
||||
if ((failreason = Switchboard.this.stackUrl(profile, turl)) == null) successurls.add(turl); else failurls.put(turl, failreason);
|
||||
}
|
||||
|
@ -3745,7 +3746,7 @@ public final class Switchboard extends serverSwitch {
|
|||
* @param resulturl the result doc which outbound links to add to crawler
|
||||
*/
|
||||
public final void heuristicSearchResults(final URIMetadataNode resulturl) {
|
||||
new Thread() {
|
||||
new Thread("Switchboard.heuristicSearchResults") {
|
||||
|
||||
@Override
|
||||
public void run() {
|
||||
|
|
|
@ -51,6 +51,7 @@ public class ErrorCacheFiller extends Thread {
|
|||
* @param cache error cache to fill. Must not be null.
|
||||
*/
|
||||
public ErrorCacheFiller(Switchboard sb, ErrorCache cache) {
|
||||
super(ErrorCacheFiller.class.getSimpleName());
|
||||
if(sb == null || cache == null) {
|
||||
throw new IllegalArgumentException("Unexpected null parameters");
|
||||
}
|
||||
|
|
|
@ -714,6 +714,7 @@ public final class Fulltext {
|
|||
private final boolean dom, text;
|
||||
|
||||
private Export(final File f, final String filter, final String query, final ExportFormat format, final boolean dom, final boolean text) {
|
||||
super("Fulltext.Export");
|
||||
// format: 0=text, 1=html, 2=rss/xml
|
||||
this.f = f;
|
||||
this.pattern = filter == null ? null : Pattern.compile(filter);
|
||||
|
|
|
@ -188,7 +188,7 @@ public class AccessTracker {
|
|||
while (!remoteSearches.isEmpty()) {
|
||||
addToDump(remoteSearches.removeFirst(), 0);
|
||||
}
|
||||
Thread t = new Thread() {
|
||||
Thread t = new Thread("AccessTracker.dumpLog") {
|
||||
@Override
|
||||
public void run() {
|
||||
ArrayList<String> logCopy = new ArrayList<String>();
|
||||
|
|
|
@ -462,7 +462,7 @@ public final class SearchEvent {
|
|||
final Thread waitForThread;
|
||||
|
||||
/**
 * RWI processing thread; the thread name embeds the name of the thread it
 * waits for, so it is identifiable in thread monitoring.
 *
 * @param waitForThread thread to wait for before processing, or null for none
 */
public RWIProcess(final Thread waitForThread) {
    // Only one explicit super(...) invocation is allowed, and the ternary must be
    // parenthesized: '+' binds tighter than '?:', so without parentheses the
    // condition parses as ("SearchEvent.RWIProcess(" + waitForThread) != null
    // (always true) and the trailing ")" is swallowed into the else branch
    // instead of being appended to the thread name.
    super("SearchEvent.RWIProcess(" + (waitForThread != null ? waitForThread.getName() : "") + ")");
    this.waitForThread = waitForThread;
}
|
||||
|
||||
|
@ -1389,7 +1389,7 @@ public final class SearchEvent {
|
|||
success = true;
|
||||
} else {
|
||||
|
||||
new Thread() {
|
||||
new Thread("SearchEvent.drainStacksToResult.getSnippet") {
|
||||
@Override
|
||||
public void run() {
|
||||
SearchEvent.this.oneFeederStarted();
|
||||
|
|
|
@ -27,6 +27,7 @@ public class SecondarySearchSuperviser extends Thread {
|
|||
private final SearchEvent searchEvent;
|
||||
|
||||
protected SecondarySearchSuperviser(SearchEvent searchEvent) {
|
||||
super("SecondarySearchSuperviser");
|
||||
this.abstractsCache = Collections.synchronizedSortedMap(new TreeMap<String, SortedMap<String, Set<String>>>());
|
||||
this.checkedPeers = Collections.synchronizedSortedSet(new TreeSet<String>());
|
||||
this.trigger = new Semaphore(0);
|
||||
|
@ -50,7 +51,7 @@ public class SecondarySearchSuperviser extends Thread {
|
|||
new Thread() {
|
||||
@Override
|
||||
public void run() {
|
||||
Thread.currentThread().setName("SearchEvent.addAbstract:" + wordhash);
|
||||
Thread.currentThread().setName("SecondarySearch.addAbstract:" + wordhash);
|
||||
for ( final Map.Entry<String, Set<String>> oneref : singleAbstract.entrySet() ) {
|
||||
final String urlhash = oneref.getKey();
|
||||
final Set<String> peerlistNew = oneref.getValue();
|
||||
|
|
|
@ -89,6 +89,7 @@ public class BlockRank {
|
|||
Seed seed;
|
||||
|
||||
/**
 * Worker thread retrieving a host-reference index from a peer; named for thread monitoring.
 *
 * @param index cache receiving the retrieved references
 * @param seed the peer seed to retrieve from
 */
public IndexRetrieval(final ReferenceContainerCache<HostReference> index, final Seed seed) {
|
||||
super("BlockRank.IndexRetrieval");
|
||||
this.index = index;
|
||||
this.seed = seed;
|
||||
}
|
||||
|
|
|
@ -87,6 +87,7 @@ public class ReferenceOrder {
|
|||
private final boolean local;
|
||||
|
||||
public NormalizeDistributor(final ReferenceContainer<WordReference> container, final LinkedBlockingQueue<WordReferenceVars> out, final int threads, final long maxtime, final boolean local) {
|
||||
super("ReferenceOrder.NormalizeDistributor");
|
||||
this.container = container;
|
||||
this.out = out;
|
||||
this.threads = threads;
|
||||
|
@ -144,6 +145,7 @@ public class ReferenceOrder {
|
|||
private final long maxtime;
|
||||
|
||||
public NormalizeWorker(final BlockingQueue<WordReferenceVars> out, final Semaphore termination, long maxtime) {
|
||||
super("ReferenceOrder.NormalizeWorker");
|
||||
this.out = out;
|
||||
this.termination = termination;
|
||||
this.decodedEntries = new LinkedBlockingQueue<WordReferenceVars>();
|
||||
|
|
|
@ -109,6 +109,7 @@ public class loaderThreads {
|
|||
final ClientIdentification.Agent agent;
|
||||
|
||||
public loaderThread(final DigestURL url, final loaderProcess process, final ClientIdentification.Agent agent) {
|
||||
super("loaderThreads.loaderThread");
|
||||
this.url = url;
|
||||
this.process = process;
|
||||
this.error = null;
|
||||
|
|
|
@ -421,7 +421,7 @@ public final class yacy {
|
|||
final String iframesource = switchBoard.getConfig("donation.iframesource", "");
|
||||
final String iframetarget = switchBoard.getConfig("donation.iframetarget", "");
|
||||
final File iframefile = new File(htDocsDirectory, iframetarget);
|
||||
if (!iframefile.exists()) new Thread() {
|
||||
if (!iframefile.exists()) new Thread("yacy.importDonationIFrame") {
|
||||
@Override
|
||||
public void run() {
|
||||
final ClientIdentification.Agent agent = ClientIdentification.getAgent(ClientIdentification.yacyInternetCrawlerAgentName);
|
||||
|
@ -788,7 +788,7 @@ class shutdownHookThread extends Thread {
|
|||
private final Thread mainThread;
|
||||
|
||||
/**
 * JVM shutdown hook performing a clean Switchboard shutdown when the runtime
 * terminates. The hook thread is named so it is identifiable in thread dumps.
 *
 * @param mainThread the application main thread to coordinate with
 * @param sb the Switchboard instance to shut down
 */
public shutdownHookThread(final Thread mainThread, final Switchboard sb) {
    // A constructor may contain at most one explicit super(...) invocation;
    // the plain super() call was removed in favor of the naming variant.
    super("yacy.shutdownHookThread");
    this.sb = sb;
    this.mainThread = mainThread;
}
|
||||
|
|
Loading…
Reference in New Issue
Block a user