public abstract class HttpBase extends Object implements Protocol
Modifier and Type | Field and Description |
---|---|
protected String | accept - The "Accept" request header value. |
protected String | acceptLanguage - The "Accept-Language" request header value. |
static int | BUFFER_SIZE |
protected int | maxContent - The length limit for downloaded content, in bytes. |
protected String | proxyHost - The proxy hostname. |
protected int | proxyPort - The proxy port. |
protected boolean | responseTime - Whether to record the response time. |
protected int | timeout - The network timeout, in milliseconds. |
protected Set<String> | tlsPreferredCipherSuites - The TLS/SSL cipher suites to support. |
protected Set<String> | tlsPreferredProtocols - The TLS/SSL protocols to support. |
protected boolean | useHttp11 - Whether to use HTTP/1.1. |
protected boolean | useProxy - Indicates whether a proxy is used. |
protected String | userAgent - The Nutch "User-Agent" request header value. |
Fields inherited from interface Protocol: CHECK_BLOCKING, CHECK_ROBOTS, X_POINT_ID
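Most of these fields are populated from the Hadoop Configuration handed to setConf(Configuration). A minimal sketch of how they are typically driven by configuration properties; the property names used here (http.timeout, http.content.limit, http.agent.name, http.proxy.host, http.proxy.port, http.useHttp11) are assumptions that should be checked against nutch-default.xml:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.nutch.util.NutchConfiguration;

public class HttpBaseConfigSketch {
  public static void main(String[] args) {
    // Start from the standard Nutch configuration (nutch-default.xml / nutch-site.xml).
    Configuration conf = NutchConfiguration.create();

    // Assumed property names; verify against nutch-default.xml before relying on them.
    conf.setInt("http.timeout", 10000);                // -> timeout, in milliseconds
    conf.setInt("http.content.limit", 65536);          // -> maxContent, in bytes
    conf.set("http.agent.name", "MyCrawler");          // -> part of userAgent
    conf.set("http.proxy.host", "proxy.example.com");  // -> proxyHost (a non-empty value also enables useProxy)
    conf.setInt("http.proxy.port", 8080);              // -> proxyPort
    conf.setBoolean("http.useHttp11", true);           // -> useHttp11

    // Passing this Configuration to a concrete HttpBase subclass via setConf(conf)
    // is what fills in the protected fields listed above.
    System.out.println("http.timeout = " + conf.getInt("http.timeout", -1));
  }
}
```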
Constructor and Description |
---|
HttpBase() - Creates a new instance of HttpBase. |
HttpBase(org.slf4j.Logger logger) - Creates a new instance of HttpBase. |
Modifier and Type | Method and Description |
---|---|
String | getAccept() |
String | getAcceptLanguage() - Value of the "Accept-Language" request header sent by Nutch. |
Configuration | getConf() |
int | getMaxContent() |
ProtocolOutput | getProtocolOutput(String url, WebPage page) |
String | getProxyHost() |
int | getProxyPort() |
protected abstract Response | getResponse(URL url, WebPage page, boolean followRedirects) |
crawlercommons.robots.BaseRobotRules | getRobotRules(String url, WebPage page) - Retrieve robot rules applicable for this URL. |
int | getTimeout() |
Set<String> | getTlsPreferredCipherSuites() |
Set<String> | getTlsPreferredProtocols() |
boolean | getUseHttp11() |
String | getUserAgent() |
protected void | logConf() |
protected static void | main(HttpBase http, String[] args) |
byte[] | processDeflateEncoded(byte[] compressed, URL url) |
byte[] | processGzipEncoded(byte[] compressed, URL url) |
void | setConf(Configuration conf) |
boolean | useProxy() |
Methods inherited from class java.lang.Object: clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait
Methods inherited from interface Protocol: getFields
public static final int BUFFER_SIZE
protected String proxyHost
protected int proxyPort
protected boolean useProxy
protected int timeout
protected int maxContent
protected String userAgent
protected String acceptLanguage
protected String accept
protected boolean useHttp11
protected boolean responseTime
protected Set<String> tlsPreferredProtocols
public HttpBase()
public HttpBase(org.slf4j.Logger logger)
public void setConf(Configuration conf)
Specified by: setConf in interface Configurable
public Configuration getConf()
Specified by: getConf in interface Configurable
public ProtocolOutput getProtocolOutput(String url, WebPage page)
Specified by: getProtocolOutput in interface Protocol
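A hedged usage sketch of getProtocolOutput. It assumes a working concrete subclass (the hypothetical MyHttp shown after the getResponse detail below) and the Avro-generated WebPage.newBuilder() of Nutch 2.3; older 2.x releases construct the page with new WebPage():

```java
import org.apache.nutch.protocol.Content;
import org.apache.nutch.protocol.ProtocolOutput;
import org.apache.nutch.storage.WebPage;
import org.apache.nutch.util.NutchConfiguration;

public class FetchSketch {
  public static void main(String[] args) throws Exception {
    // MyHttp is a hypothetical concrete HttpBase subclass (see the getResponse sketch below).
    MyHttp http = new MyHttp();
    http.setConf(NutchConfiguration.create());

    // An empty WebPage is enough for a one-off fetch.
    WebPage page = WebPage.newBuilder().build();

    ProtocolOutput output = http.getProtocolOutput("http://example.com/", page);
    Content content = output.getContent();
    if (content != null) {
      System.out.println(content.getContentType() + ", "
          + content.getContent().length + " bytes");
    }
  }
}
```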
public String getProxyHost()
public int getProxyPort()
public boolean useProxy()
public int getTimeout()
public int getMaxContent()
public String getUserAgent()
public String getAcceptLanguage()
public String getAccept()
public boolean getUseHttp11()
protected void logConf()
public byte[] processGzipEncoded(byte[] compressed, URL url) throws IOException
Throws: IOException
public byte[] processDeflateEncoded(byte[] compressed, URL url) throws IOException
Throws: IOException
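processGzipEncoded and processDeflateEncoded are convenience decoders a plugin can apply to a response body according to its Content-Encoding header. A minimal sketch; DecodeSketch, decodeBody, rawBytes and contentEncoding are hypothetical names, and http stands for any configured concrete HttpBase instance:

```java
import java.io.IOException;
import java.net.URL;

import org.apache.nutch.protocol.http.api.HttpBase;

public class DecodeSketch {
  // Pick the matching decoder for the Content-Encoding of a fetched response.
  static byte[] decodeBody(HttpBase http, byte[] rawBytes, String contentEncoding, URL url)
      throws IOException {
    if ("gzip".equalsIgnoreCase(contentEncoding) || "x-gzip".equalsIgnoreCase(contentEncoding)) {
      return http.processGzipEncoded(rawBytes, url);      // delegate gzip decompression to HttpBase
    } else if ("deflate".equalsIgnoreCase(contentEncoding)) {
      return http.processDeflateEncoded(rawBytes, url);   // delegate deflate decompression to HttpBase
    }
    return rawBytes;                                       // identity or unknown encoding: keep as-is
  }
}
```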
protected abstract Response getResponse(URL url, WebPage page, boolean followRedirects) throws ProtocolException, IOException
Throws: ProtocolException, IOException
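getResponse is the abstract method a protocol plugin must supply (together with getFields from the Protocol interface, which HttpBase leaves unimplemented). A skeletal sketch of such a subclass; MyHttp is hypothetical and only illustrates the shape, the real protocol-http plugin ships its own implementation, and the imports assume the standard Nutch 2.x package layout:

```java
import java.io.IOException;
import java.net.URL;
import java.util.Collection;
import java.util.Collections;

import org.apache.nutch.net.protocols.Response;
import org.apache.nutch.protocol.ProtocolException;
import org.apache.nutch.protocol.http.api.HttpBase;
import org.apache.nutch.storage.WebPage;
import org.apache.nutch.util.NutchConfiguration;

// Hypothetical concrete subclass of HttpBase, for illustration only.
public class MyHttp extends HttpBase {

  @Override
  protected Response getResponse(URL url, WebPage page, boolean followRedirects)
      throws ProtocolException, IOException {
    // A real implementation opens the connection (honouring timeout, proxy settings,
    // userAgent, etc.) and returns a Response carrying status, headers and content.
    throw new UnsupportedOperationException("sketch only");
  }

  @Override
  public Collection<WebPage.Field> getFields() {
    // Declared on Protocol and not implemented by HttpBase, so a concrete
    // subclass must provide it; returning no extra fields keeps the sketch minimal.
    return Collections.<WebPage.Field>emptySet();
  }

  // HttpBase.main(HttpBase, String[]) gives subclasses a simple command-line fetcher.
  public static void main(String[] args) throws Exception {
    MyHttp http = new MyHttp();
    http.setConf(NutchConfiguration.create());
    main(http, args);
  }
}
```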
public crawlercommons.robots.BaseRobotRules getRobotRules(String url, WebPage page)
Description copied from interface: Protocol
Specified by: getRobotRules in interface Protocol
Parameters: url - URL to check
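A hedged sketch of querying robot rules through a configured plugin instance. It assumes a working concrete subclass (the hypothetical MyHttp from above, with a real getResponse implementation so that robots.txt can actually be fetched) and crawler-commons' BaseRobotRules.isAllowed(String):

```java
import crawlercommons.robots.BaseRobotRules;
import org.apache.nutch.storage.WebPage;
import org.apache.nutch.util.NutchConfiguration;

public class RobotsSketch {
  public static void main(String[] args) throws Exception {
    // MyHttp is the hypothetical subclass sketched above.
    MyHttp http = new MyHttp();
    http.setConf(NutchConfiguration.create());

    String url = "http://example.com/private/page.html";
    WebPage page = WebPage.newBuilder().build();  // Nutch 2.3 style; older 2.x uses new WebPage()

    BaseRobotRules rules = http.getRobotRules(url, page);
    if (rules.isAllowed(url)) {
      System.out.println("fetch allowed, crawl delay: " + rules.getCrawlDelay());
    } else {
      System.out.println("blocked by robots.txt");
    }
  }
}
```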