Mirror of https://github.com/yacy/yacy_search_server.git, synced 2025-09-15 17:06:13 -04:00
This is a complete redesign of the serverObjects data structure, which holds all data submitted in HTTP POST requests to YaCy. Before this change, POST attributes were stored as Strings, which cannot hold more than 2 GB, and byte[] uploads were Base64-encoded to fit into that structure. Those Strings are now replaced by a new data structure, ChunkedBytes, an object that can hold more than 2 GB of data using a list of byte[] objects. All required streaming functions are implemented, and streaming from an HTTP POST upload into this data structure works. The Base64 encoding has been removed. The ZIM and WARC readers make use of the new data structure.
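The commit text describes a container that chains byte[] chunks so the total payload can grow past Java's 2 GB array limit (arrays and Strings are indexed by int), plus stream adapters so a POST upload can be copied in without ever materializing a single huge array. The sketch below only illustrates that idea; it is not the actual ChunkedBytes class from the YaCy sources, and the class name, chunk size, and method names are assumptions made for illustration.

    import java.io.InputStream;
    import java.io.OutputStream;
    import java.util.ArrayList;
    import java.util.List;

    // Minimal sketch of a >2GB byte container backed by a list of byte[] chunks.
    // This illustrates the idea described above; it is not YaCy's actual
    // ChunkedBytes implementation. Names and the chunk size are assumptions.
    public class ChunkedBytesSketch {

        private static final int CHUNK_SIZE = 1 << 20; // 1 MiB per chunk (assumed)

        private final List<byte[]> chunks = new ArrayList<>();
        private long length = 0; // long, so the total may exceed Integer.MAX_VALUE

        /** total number of bytes stored; a long because it may exceed 2 GB */
        public long length() {
            return this.length;
        }

        /** write adapter: an HTTP POST upload can be copied straight into the chunk list */
        public OutputStream outputStream() {
            return new OutputStream() {
                @Override
                public void write(final int b) {
                    final int offsetInChunk = (int) (length % CHUNK_SIZE);
                    if (offsetInChunk == 0) chunks.add(new byte[CHUNK_SIZE]); // start a new chunk
                    chunks.get(chunks.size() - 1)[offsetInChunk] = (byte) b;
                    length++;
                }
            };
        }

        /** read adapter: consumers (e.g. ZIM/WARC parsing) can stream the stored bytes back out */
        public InputStream inputStream() {
            return new InputStream() {
                private long pos = 0;
                @Override
                public int read() {
                    if (pos >= length) return -1;
                    final byte b = chunks.get((int) (pos / CHUNK_SIZE))[(int) (pos % CHUNK_SIZE)];
                    pos++;
                    return b & 0xff;
                }
            };
        }
    }

A production version would additionally override write(byte[], int, int) and read(byte[], int, int) to move whole buffers per call; the single-byte forms are used here only to keep the sketch short.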
131 lines · 5.3 KiB · Java
// rct_p.java
// -----------------------
// (C) 2007 by Michael Peter Christen; mc@yacy.net, Frankfurt a. M., Germany
// first published 28.11.2007 on http://yacy.net
//
// This is a part of YaCy, a peer-to-peer based web search engine
//
// $LastChangedDate$
// $LastChangedRevision$
// $LastChangedBy$
//
// LICENSE
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

package net.yacy.htroot;

import java.net.MalformedURLException;
import java.util.Date;
import java.util.Iterator;

import net.yacy.cora.document.feed.Hit;
import net.yacy.cora.document.feed.RSSFeed;
import net.yacy.cora.document.id.DigestURL;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.crawler.retrieval.Request;
import net.yacy.peers.DHTSelection;
import net.yacy.peers.Protocol;
import net.yacy.peers.Seed;
import net.yacy.search.Switchboard;
import net.yacy.search.SwitchboardConstants;
import net.yacy.server.serverObjects;
import net.yacy.server.serverSwitch;

public class rct_p {

    public static serverObjects respond(@SuppressWarnings("unused") final RequestHeader header, final serverObjects post, final serverSwitch env) {
        // return variable that accumulates replacements
        final Switchboard sb = (Switchboard) env;
        final serverObjects prop = new serverObjects();

        if (post != null) {
            if (post.containsKey("retrieve")) {
                final String peerhash = post.get("peer", "");
                final Seed seed = (peerhash.length() == 0) ? null : sb.peers.getConnected(peerhash);
                final boolean preferHttps = sb.getConfigBool(SwitchboardConstants.NETWORK_PROTOCOL_HTTPS_PREFERRED,
                        SwitchboardConstants.NETWORK_PROTOCOL_HTTPS_PREFERRED_DEFAULT);
                final RSSFeed feed = (seed == null) ? null : Protocol.queryRemoteCrawlURLs(sb.peers, seed, 20, 60000, preferHttps);
                if (feed != null) {
                    for (final Hit item: feed) {
                        //System.out.println("URL=" + item.getLink() + ", desc=" + item.getDescription() + ", pubDate=" + item.getPubDate());

                        // put url on remote crawl stack
                        DigestURL url;
                        try {
                            url = new DigestURL(item.getLink());
                        } catch (final MalformedURLException e) {
                            url = null;
                        }
                        if (url == null) continue; // skip malformed feed links instead of passing null on
                        final Date loaddate = item.getPubDate();
                        final DigestURL referrer = null; // referrer needed!
                        final String urlRejectReason = sb.crawlStacker.urlInAcceptedDomain(url);
                        if (urlRejectReason == null) {
                            // stack url
                            if (sb.getLog().isFinest()) sb.getLog().finest("crawlOrder: stack: url='" + url + "'");
                            sb.crawlStacker.enqueueEntry(new Request(
                                    peerhash.getBytes(),
                                    url,
                                    (referrer == null) ? null : referrer.hash(),
                                    "REMOTE-CRAWLING",
                                    loaddate,
                                    sb.crawler.defaultRemoteProfile.handle(),
                                    0,
                                    sb.crawler.defaultRemoteProfile.timezoneOffset()));
                        } else {
                            env.getLog().warn("crawlOrder: Rejected URL '" + urlToString(url) + "': " + urlRejectReason);
                        }
                    }
                }
            }
        }

        listHosts(sb, prop);

        // return rewrite properties
        return prop;
    }

    /**
     * Render a URL for log output.
     * @param url the URL to render; may be null
     * @return the normalized form of the URL, or "null" if none is given
     */
    private static String urlToString(final DigestURL url) {
        return (url == null ? "null" : url.toNormalform(true));
    }

    private static void listHosts(final Switchboard sb, final serverObjects prop) {
        // list known hosts
        Seed seed;
        int hc = 0;
        if (sb.peers != null && sb.peers.sizeConnected() > 0) {
            final Iterator<Seed> e = DHTSelection.getProvidesRemoteCrawlURLs(sb.peers);
            while (e.hasNext()) {
                seed = e.next();
                if (seed != null) {
                    prop.put("hosts_" + hc + "_hosthash", seed.hash);
                    prop.putHTML("hosts_" + hc + "_hostname", seed.hash + " " + seed.get(Seed.NAME, "nameless") + " (" + seed.getLong(Seed.RCOUNT, 0) + ")");
                    hc++;
                }
            }
            prop.put("hosts", hc);
        } else {
            prop.put("hosts", "0");
        }
    }

}