Mirror of https://github.com/yacy/yacy_search_server.git (synced 2025-07-22 09:14:38 -04:00)
use configured admin-username for api calls
- The admin user name can be configured, but the apiExec calls so far used the default "admin" user name; they now pass the configured name. TODO: the bin/apicall.sh script should likely take this into account.
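For illustration, the calling convention this commit establishes can be summarized as follows. This is a minimal sketch, not part of the commit: the imports assume the YaCy package layout, and the helper class and method names are hypothetical. The config keys SwitchboardConstants.ADMIN_ACCOUNT_USER_NAME (default "admin") and ADMIN_ACCOUNT_B64MD5 are the ones used at the changed call sites below.

import java.util.Collection;
import java.util.Map;

import net.yacy.cora.protocol.Domains;
import net.yacy.search.Switchboard;
import net.yacy.search.SwitchboardConstants;

// Hypothetical helper (not in the commit) showing how API exec calls now pass
// the configured admin user name together with the B64MD5 password hash.
final class ConfiguredAdminApiCall {
    static Map<String, Integer> exec(final Switchboard sb, final Collection<String> pks) {
        // resolve the configured admin account instead of assuming the user "admin"
        final String user = sb.getConfig(SwitchboardConstants.ADMIN_ACCOUNT_USER_NAME, "admin");
        final String pwHash = sb.getConfig(SwitchboardConstants.ADMIN_ACCOUNT_B64MD5, "");
        // execAPICalls(host, port, pks, username, pass) is the widened signature from WorkTables
        return sb.tables.execAPICalls(Domains.LOCALHOST, (int) sb.getConfigLong("port", 8090), pks, user, pwHash);
    }
}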
Changed files:
  htroot/ConfigAppearance_p.java
  htroot/ConfigLanguage_p.java
  htroot/CrawlStartScanner_p.java
  htroot/Table_API_p.java
  htroot/sharedBlacklist_p.java
  source/net/yacy
htroot/ConfigAppearance_p.java
@@ -102,7 +102,7 @@ public class ConfigAppearance_p {
         final Iterator<String> it;
         try {
             final DigestURL u = new DigestURL(url);
-            it = FileUtils.strings(u.get(ClientIdentification.yacyInternetCrawlerAgent, sb.getConfig(SwitchboardConstants.ADMIN_ACCOUNT_B64MD5, "")));
+            it = FileUtils.strings(u.get(ClientIdentification.yacyInternetCrawlerAgent, sb.getConfig(SwitchboardConstants.ADMIN_ACCOUNT_USER_NAME, "admin"), sb.getConfig(SwitchboardConstants.ADMIN_ACCOUNT_B64MD5, "")));
         } catch (final IOException e) {
             prop.put("status", "1");// unable to get URL
             prop.put("status_url", url);
htroot/ConfigLanguage_p.java
@@ -102,7 +102,7 @@ public class ConfigLanguage_p {
         Iterator<String> it;
         try {
             final DigestURL u = new DigestURL(url);
-            it = FileUtils.strings(u.get(ClientIdentification.yacyInternetCrawlerAgent, sb.getConfig(SwitchboardConstants.ADMIN_ACCOUNT_B64MD5, "")));
+            it = FileUtils.strings(u.get(ClientIdentification.yacyInternetCrawlerAgent, sb.getConfig(SwitchboardConstants.ADMIN_ACCOUNT_USER_NAME, "admin"), sb.getConfig(SwitchboardConstants.ADMIN_ACCOUNT_B64MD5, "")));
         } catch(final IOException e) {
             prop.put("status", "1");//unable to get url
             prop.put("status_url", url);
htroot/CrawlStartScanner_p.java
@@ -217,6 +217,7 @@ public class CrawlStartScanner_p
                             (int) sb.getConfigLong("port", 8090),
                             path,
                             pk,
+                            sb.getConfig(SwitchboardConstants.ADMIN_ACCOUNT_USER_NAME, "admin"),
                             sb.getConfig(SwitchboardConstants.ADMIN_ACCOUNT_B64MD5, ""));
                 }
             }
@@ -263,6 +264,7 @@ public class CrawlStartScanner_p
                             (int) sb.getConfigLong("port", 8090),
                             path,
                             u.hash(),
+                            sb.getConfig(SwitchboardConstants.ADMIN_ACCOUNT_USER_NAME, "admin"),
                             sb.getConfig(SwitchboardConstants.ADMIN_ACCOUNT_B64MD5, ""));
                 }
             } catch (final MalformedURLException e ) {
htroot/Table_API_p.java
@@ -206,7 +206,7 @@ public class Table_API_p {
            }

            // now call the api URLs and store the result status
-           final Map<String, Integer> l = sb.tables.execAPICalls(Domains.LOCALHOST, (int) sb.getConfigLong("port", 8090), pks, sb.getConfig(SwitchboardConstants.ADMIN_ACCOUNT_B64MD5, ""));
+           final Map<String, Integer> l = sb.tables.execAPICalls(Domains.LOCALHOST, (int) sb.getConfigLong("port", 8090), pks, sb.getConfig(SwitchboardConstants.ADMIN_ACCOUNT_USER_NAME, "admin"), sb.getConfig(SwitchboardConstants.ADMIN_ACCOUNT_B64MD5, ""));

            // construct result table
            prop.put("showexec", l.isEmpty() ? 0 : 1);
htroot/sharedBlacklist_p.java
@@ -140,7 +140,7 @@ public class sharedBlacklist_p {
                    // get List
                    final DigestURL u = new DigestURL(downloadURLOld);

-                   otherBlacklist = FileUtils.strings(u.get(agent, sb.getConfig(SwitchboardConstants.ADMIN_ACCOUNT_B64MD5, "")));
+                   otherBlacklist = FileUtils.strings(u.get(agent, sb.getConfig(SwitchboardConstants.ADMIN_ACCOUNT_USER_NAME, "admin"), sb.getConfig(SwitchboardConstants.ADMIN_ACCOUNT_B64MD5, "")));
                } catch (final Exception e) {
                    prop.put("status", STATUS_PEER_UNKNOWN);
                    prop.putHTML("status_name", hash);
@@ -157,7 +157,7 @@ public class sharedBlacklist_p {

            try {
                final DigestURL u = new DigestURL(downloadURL);
-               otherBlacklist = FileUtils.strings(u.get(agent, sb.getConfig(SwitchboardConstants.ADMIN_ACCOUNT_B64MD5, "")));
+               otherBlacklist = FileUtils.strings(u.get(agent, sb.getConfig(SwitchboardConstants.ADMIN_ACCOUNT_USER_NAME, "admin"), sb.getConfig(SwitchboardConstants.ADMIN_ACCOUNT_B64MD5, "")));
            } catch (final Exception e) {
                prop.put("status", STATUS_URL_PROBLEM);
                prop.putHTML("status_address",downloadURL);
SMWListSyncThread.java
@@ -79,7 +79,7 @@ public class SMWListSyncThread {
                        + "/limit%3D200000"
                        + "/format%3Dystat");

-               String reply = UTF8.String(new HTTPClient(ClientIdentification.yacyInternetCrawlerAgent).GETbytes(urlCount.toString(), null));
+               String reply = UTF8.String(new HTTPClient(ClientIdentification.yacyInternetCrawlerAgent).GETbytes(urlCount.toString(), null, null));
                String overallcount = reply.split(",")[0];
                String lastsyncstring = reply.split(",")[1];
                this.currentmax = Integer.parseInt(overallcount);
MultiProtocolURL.java
@@ -2046,7 +2046,7 @@ public class MultiProtocolURL implements Serializable, Comparable<MultiProtocolURL>
        return null;
    }

-   public InputStream getInputStream(final ClientIdentification.Agent agent, final String pass) throws IOException {
+   public InputStream getInputStream(final ClientIdentification.Agent agent, final String username, final String pass) throws IOException {
        if (isFile()) return new BufferedInputStream(new FileInputStream(getFSFile()));
        if (isSMB()) return new BufferedInputStream(new SmbFileInputStream(getSmbFile()));
        if (isFTP()) {
@@ -2059,13 +2059,13 @@ public class MultiProtocolURL implements Serializable, Comparable<MultiProtocolURL>
        if (isHTTP() || isHTTPS()) {
            final HTTPClient client = new HTTPClient(agent);
            client.setHost(getHost());
-           return new ByteArrayInputStream(client.GETbytes(this, pass));
+           return new ByteArrayInputStream(client.GETbytes(this, username, pass));
        }

        return null;
    }

-   public byte[] get(final ClientIdentification.Agent agent, final String pass) throws IOException {
+   public byte[] get(final ClientIdentification.Agent agent, final String username, final String pass) throws IOException {
        if (isFile()) return read(new FileInputStream(getFSFile()));
        if (isSMB()) return read(new SmbFileInputStream(getSmbFile()));
        if (isFTP()) {
@@ -2078,7 +2078,7 @@ public class MultiProtocolURL implements Serializable, Comparable<MultiProtocolURL>
        if (isHTTP() || isHTTPS()) {
            final HTTPClient client = new HTTPClient(agent);
            client.setHost(getHost());
-           return client.GETbytes(this, pass);
+           return client.GETbytes(this, username, pass);
        }

        return null;
Network.java
@@ -50,7 +50,7 @@ public class Network {
    public static Peers getNetwork(final String address) throws IOException {
        Peers peers = new Peers();
        final HTTPClient httpclient = new HTTPClient(ClientIdentification.yacyInternetCrawlerAgent);
-       final byte[] content = httpclient.GETbytes("http://" + address + "/Network.xml?page=1&maxCount=1000&ip=", null);
+       final byte[] content = httpclient.GETbytes("http://" + address + "/Network.xml?page=1&maxCount=1000&ip=", null, null);
        ByteArrayInputStream bais = new ByteArrayInputStream(content);
        Document doc = null;
        try {
HTTPClient.java
@@ -307,8 +307,8 @@ public class HTTPClient {
     * @return content bytes
     * @throws IOException
     */
-   public byte[] GETbytes(final String uri, final String pass) throws IOException {
-       return GETbytes(uri, pass, Integer.MAX_VALUE);
+   public byte[] GETbytes(final String uri, final String username, final String pass) throws IOException {
+       return GETbytes(uri, username, pass, Integer.MAX_VALUE);
    }

    /**
@@ -318,8 +318,8 @@ public class HTTPClient {
     * @return content bytes
     * @throws IOException
     */
-   public byte[] GETbytes(final MultiProtocolURL url, final String pass) throws IOException {
-       return GETbytes(url, pass, Integer.MAX_VALUE);
+   public byte[] GETbytes(final MultiProtocolURL url, final String username, final String pass) throws IOException {
+       return GETbytes(url, username, pass, Integer.MAX_VALUE);
    }

    /**
@@ -330,8 +330,8 @@ public class HTTPClient {
     * @return content bytes
     * @throws IOException
     */
-   public byte[] GETbytes(final String uri, final String pass, final int maxBytes) throws IOException {
-       return GETbytes(new MultiProtocolURL(uri), pass, maxBytes);
+   public byte[] GETbytes(final String uri, final String username, final String pass, final int maxBytes) throws IOException {
+       return GETbytes(new MultiProtocolURL(uri), username, pass, maxBytes);
    }


@@ -343,7 +343,7 @@ public class HTTPClient {
     * @return content bytes
     * @throws IOException
     */
-   public byte[] GETbytes(final MultiProtocolURL url, final String pass, final int maxBytes) throws IOException {
+   public byte[] GETbytes(final MultiProtocolURL url, final String username, final String pass, final int maxBytes) throws IOException {
        final boolean localhost = Domains.isLocalhost(url.getHost());
        final String urix = url.toNormalform(true);
        HttpGet httpGet = null;
@@ -357,7 +357,7 @@ public class HTTPClient {
            CredentialsProvider credsProvider = new BasicCredentialsProvider();
            credsProvider.setCredentials(
                    AuthScope.ANY, // thats ok since we tested for localhost!
-                   new UsernamePasswordCredentials("admin", pass));
+                   new UsernamePasswordCredentials(username, pass));
            CloseableHttpClient httpclient = HttpClients.custom().setDefaultCredentialsProvider(credsProvider).build();
            byte[] content = null;
            try {
@@ -822,7 +822,7 @@ public class HTTPClient {
            url = "http://" + url;
        }
        try {
-           System.out.println(UTF8.String(client.GETbytes(url, null)));
+           System.out.println(UTF8.String(client.GETbytes(url, null, null)));
        } catch (final IOException e) {
            e.printStackTrace();
        }
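As a quick reference for the widened HTTPClient API, a hedged usage sketch follows. It is not part of the commit; the imports assume the YaCy package layout and the class and method names are hypothetical. Passing null for both username and pass keeps the previous unauthenticated behaviour, and credentials are only attached for localhost targets, as the AuthScope comment in the hunk above notes.

import java.io.IOException;

import net.yacy.cora.protocol.ClientIdentification;
import net.yacy.cora.protocol.http.HTTPClient;

// Hypothetical example (not in the commit) of the new GETbytes(uri, username, pass) overload.
final class GetBytesExample {
    static byte[] fetchAsAdmin(final String uri, final String adminUser, final String adminPwHash) throws IOException {
        final HTTPClient client = new HTTPClient(ClientIdentification.yacyInternetCrawlerAgent);
        // authenticated request with the configured admin account
        return client.GETbytes(uri, adminUser, adminPwHash);
    }

    static byte[] fetchAnonymously(final String uri) throws IOException {
        final HTTPClient client = new HTTPClient(ClientIdentification.yacyInternetCrawlerAgent);
        // no credentials, as in the non-admin call sites below
        return client.GETbytes(uri, null, null);
    }
}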
FileLoader.java
@@ -135,7 +135,7 @@ public class FileLoader {
        }

        // load the resource
-       InputStream is = url.getInputStream(ClientIdentification.yacyInternetCrawlerAgent, null);
+       InputStream is = url.getInputStream(ClientIdentification.yacyInternetCrawlerAgent, null, null);
        byte[] b = FileUtils.read(is);
        is.close();

HTTPLoader.java
@@ -128,7 +128,7 @@ public final class HTTPLoader {
        client.setHeader(requestHeader.entrySet());

        // send request
-       final byte[] responseBody = client.GETbytes(url, sb.getConfig(SwitchboardConstants.ADMIN_ACCOUNT_B64MD5, ""), maxFileSize);
+       final byte[] responseBody = client.GETbytes(url, sb.getConfig(SwitchboardConstants.ADMIN_ACCOUNT_USER_NAME, "admin"), sb.getConfig(SwitchboardConstants.ADMIN_ACCOUNT_B64MD5, ""), maxFileSize);
        final int statusCode = client.getHttpResponse().getStatusLine().getStatusCode();
        final ResponseHeader responseHeader = new ResponseHeader(statusCode, client.getHttpResponse().getAllHeaders());
        String requestURLString = request.url().toNormalform(true);
@@ -243,7 +243,7 @@ public final class HTTPLoader {
        final HTTPClient client = new HTTPClient(agent);
        client.setTimout(20000);
        client.setHeader(requestHeader.entrySet());
-       final byte[] responseBody = client.GETbytes(request.url(), null);
+       final byte[] responseBody = client.GETbytes(request.url(), null, null);
        final int code = client.getHttpResponse().getStatusLine().getStatusCode();
        final ResponseHeader header = new ResponseHeader(code, client.getHttpResponse().getAllHeaders());
        // FIXME: 30*-handling (bottom) is never reached
SMBLoader.java
@@ -153,7 +153,7 @@ public class SMBLoader {
        }

        // load the resource
-       InputStream is = url.getInputStream(ClientIdentification.yacyInternetCrawlerAgent, null);
+       InputStream is = url.getInputStream(ClientIdentification.yacyInternetCrawlerAgent, null, null);
        byte[] b = FileUtils.read(is);
        is.close();

WorkTables.java
@@ -217,7 +217,7 @@ public class WorkTables extends Tables {
     * @param port the port on the host
     * @return a map of the called urls and the http status code of the api call or -1 if any other IOException occurred
     */
-   public Map<String, Integer> execAPICalls(String host, int port, Collection<String> pks, final String pass) {
+   public Map<String, Integer> execAPICalls(String host, int port, Collection<String> pks, final String username, final String pass) {
        // now call the api URLs and store the result status
        final HTTPClient client = new HTTPClient(ClientIdentification.yacyInternetCrawlerAgent);
        client.setTimout(120000);
@@ -238,7 +238,7 @@ public class WorkTables extends Tables {
            url += "&" + WorkTables.TABLE_API_COL_APICALL_PK + "=" + UTF8.String(row.getPK());
            ConcurrentLog.info("WorkTables", "executing url: " + url);
            try {
-               client.GETbytes(url, pass);
+               client.GETbytes(url, username, pass);
                l.put(url, client.getStatusCode());
            } catch (final IOException e) {
                ConcurrentLog.logException(e);
@@ -248,14 +248,14 @@ public class WorkTables extends Tables {
        return l;
    }

-   public static int execAPICall(String host, int port, String path, byte[] pk, final String pass) {
+   public static int execAPICall(String host, int port, String path, byte[] pk, final String username, final String pass) {
        // now call the api URLs and store the result status
        final HTTPClient client = new HTTPClient(ClientIdentification.yacyInternetCrawlerAgent);
        client.setTimout(120000);
        String url = "http://" + host + ":" + port + path;
        if (pk != null) url += "&" + WorkTables.TABLE_API_COL_APICALL_PK + "=" + UTF8.String(pk);
        try {
-           client.GETbytes(url, pass);
+           client.GETbytes(url, username, pass);
            return client.getStatusCode();
        } catch (final IOException e) {
            ConcurrentLog.logException(e);
@@ -271,10 +271,10 @@ public class WorkTables extends Tables {
     * @param realm authentification realm
     * @return the http status code of the api call or -1 if any other IOException occurred
     */
-   public int execAPICall(String pk, String host, int port, final String pass) {
+   public int execAPICall(String pk, String host, int port, final String username, final String pass) {
        ArrayList<String> pks = new ArrayList<String>();
        pks.add(pk);
-       Map<String, Integer> m = execAPICalls(host, port, pks, pass);
+       Map<String, Integer> m = execAPICalls(host, port, pks, username, pass);
        if (m.isEmpty()) return -1;
        return m.values().iterator().next().intValue();
    }
YMarkCrawlStart.java
@@ -110,8 +110,8 @@ public class YMarkCrawlStart extends HashMap<String,String>{
        }
    }

-   public int exec(final String host, final int port, final String pass) {
-       return this.worktables.execAPICall(this.apicall_pk, host, port, pass);
+   public int exec(final String host, final int port, final String username, final String pass) {
+       return this.worktables.execAPICall(this.apicall_pk, host, port, username, pass);
    }

    private void load() {
htmlParser.java
@@ -305,7 +305,7 @@ public class htmlParser extends AbstractParser implements Parser {
        AnchorURL url;
        try {
            url = new AnchorURL(args[0]);
-           final byte[] content = url.get(ClientIdentification.yacyInternetCrawlerAgent, null);
+           final byte[] content = url.get(ClientIdentification.yacyInternetCrawlerAgent, null, null);
            final Document[] document = new htmlParser().parse(url, "text/html", null, new ByteArrayInputStream(content));
            final String title = document[0].dc_title();
            System.out.println(title);
SeedDB.java
@@ -816,7 +816,7 @@ public final class SeedDB implements AlternativeDomainNames {
        byte[] content = null;
        try {
            // send request
-           content = client.GETbytes(seedURL, null);
+           content = client.GETbytes(seedURL, null, null);
        } catch (final Exception e) {
            throw new IOException("Unable to download seed file '" + seedURL + "'. " + e.getMessage());
        }
yacyRelease.java
@@ -294,7 +294,7 @@ public final class yacyRelease extends yacyVersion {
            // download signature first, if public key is available
            try {
                if (this.publicKey != null) {
-                   final byte[] signatureData = client.GETbytes(getUrl().toString() + ".sig", null);
+                   final byte[] signatureData = client.GETbytes(getUrl().toString() + ".sig", null, null);
                    if (signatureData == null) {
                        ConcurrentLog.warn("yacyVersion", "download of signature " + getUrl().toString() + " failed. ignoring signature file.");
                    }
Switchboard.java
@@ -2460,7 +2460,7 @@ public final class Switchboard extends serverSwitch {
                    startupAction = false;

                    // execute api calls
-                   final Map<String, Integer> callResult = this.tables.execAPICalls("localhost", (int) getConfigLong("port", 8090), pks, getConfig(SwitchboardConstants.ADMIN_ACCOUNT_B64MD5, ""));
+                   final Map<String, Integer> callResult = this.tables.execAPICalls("localhost", (int) getConfigLong("port", 8090), pks, getConfig(SwitchboardConstants.ADMIN_ACCOUNT_USER_NAME, "admin"), getConfig(SwitchboardConstants.ADMIN_ACCOUNT_B64MD5, ""));
                    for ( final Map.Entry<String, Integer> call : callResult.entrySet() ) {
                        this.log.info("Scheduler executed api call, response " + call.getValue() + ": " + call.getKey());
                    }
@@ -3739,7 +3739,7 @@ public final class Switchboard extends serverSwitch {
                        }
                    }
                    scc.incrementAndGet();
-                   final byte[] content = client.GETbytes(url, null);
+                   final byte[] content = client.GETbytes(url, null, null);
                    Iterator<String> enu = FileUtils.strings(content);
                    int lc = 0;
                    while ( enu.hasNext() ) {
DocumentIndex.java
@@ -153,7 +153,7 @@ public class DocumentIndex extends Segment {
            length = -1;
        }
        try {
-           documents = TextParser.parseSource(url, null, null, length, url.getInputStream(ClientIdentification.yacyInternetCrawlerAgent, null));
+           documents = TextParser.parseSource(url, null, null, length, url.getInputStream(ClientIdentification.yacyInternetCrawlerAgent, null, null));
        } catch (final Exception e ) {
            throw new IOException("cannot parse " + url.toString() + ": " + e.getMessage());
        }
serverSwitch.java
@@ -537,7 +537,7 @@ public class serverSwitch
            reqHeader.put(HeaderFramework.USER_AGENT, ClientIdentification.yacyInternetCrawlerAgent.userAgent);
            final HTTPClient client = new HTTPClient(ClientIdentification.yacyInternetCrawlerAgent);
            client.setHeader(reqHeader.entrySet());
-           byte[] data = client.GETbytes(uri, getConfig(SwitchboardConstants.ADMIN_ACCOUNT_B64MD5, ""));
+           byte[] data = client.GETbytes(uri, getConfig(SwitchboardConstants.ADMIN_ACCOUNT_USER_NAME, "admin"),getConfig(SwitchboardConstants.ADMIN_ACCOUNT_B64MD5, ""));
            if ( data == null || data.length == 0 ) {
                continue;
            }
loaderThreads.java
@@ -122,7 +122,7 @@ public class loaderThreads {
        @Override
        public void run() {
            try {
-               this.page = this.url.get(this.agent, null);
+               this.page = this.url.get(this.agent, null, null);
                this.loaded = true;
                this.process.feed(this.page);
                if (this.process.status() == loaderCore.STATUS_FAILED) {
yacy.java
@@ -523,7 +523,7 @@ public final class yacy {
            final HTTPClient con = new HTTPClient(ClientIdentification.yacyInternetCrawlerAgent);
            con.setHeader(requestHeader.entrySet());
            try {
-               con.GETbytes("http://localhost:"+ port +"/" + path, encodedPassword);
+               con.GETbytes("http://localhost:"+ port +"/" + path, sb.getConfig(SwitchboardConstants.ADMIN_ACCOUNT_USER_NAME,"admin"), encodedPassword);
                if (con.getStatusCode() > 199 && con.getStatusCode() < 300) {
                    ConcurrentLog.config("COMMAND-STEERING", "YACY accepted steering command: " + processdescription);