📄 preconditionenforcer.html
/* Copyright (C) 2003 Internet Archive.
 *
 * This file is part of the Heritrix web crawler (crawler.archive.org).
 *
 * Heritrix is free software; you can redistribute it and/or modify
 * it under the terms of the GNU Lesser Public License as published by
 * the Free Software Foundation; either version 2.1 of the License, or
 * any later version.
 *
 * Heritrix is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser Public License for more details.
 *
 * You should have received a copy of the GNU Lesser Public License
 * along with Heritrix; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 *
 * SimplePolitenessEnforcer.java
 * Created on May 22, 2003
 *
 * $Header: /cvsroot/archive-crawler/ArchiveOpenCrawler/src/java/org/archive/crawler/prefetch/PreconditionEnforcer.java,v 1.25 2006/08/30 21:24:45 stack-sf Exp $
 */
package org.archive.crawler.prefetch;

import java.util.Iterator;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;

import javax.management.AttributeNotFoundException;

import org.apache.commons.httpclient.URIException;
import org.archive.crawler.datamodel.CoreAttributeConstants;
import org.archive.crawler.datamodel.CrawlHost;
import org.archive.crawler.datamodel.CrawlServer;
import org.archive.crawler.datamodel.CrawlURI;
import org.archive.crawler.datamodel.CredentialStore;
import org.archive.crawler.datamodel.FetchStatusCodes;
import org.archive.crawler.datamodel.credential.Credential;
import org.archive.crawler.datamodel.credential.CredentialAvatar;
import org.archive.crawler.framework.Processor;
import org.archive.crawler.settings.SimpleType;
import org.archive.crawler.settings.Type;
import org.archive.net.UURI;

/**
 * Ensures the preconditions for a fetch -- such as DNS lookup
 * or acquiring and respecting a robots.txt policy -- are
 * satisfied before a URI is passed to subsequent stages.
 *
 * @author gojomo
 */
public class PreconditionEnforcer
extends Processor
implements CoreAttributeConstants, FetchStatusCodes {

    private static final Logger logger =
        Logger.getLogger(PreconditionEnforcer.class.getName());

    private final static Integer DEFAULT_IP_VALIDITY_DURATION =
        new Integer(60*60*6); // six hours
    private final static Integer DEFAULT_ROBOTS_VALIDITY_DURATION =
        new Integer(60*60*24); // one day

    /** seconds to keep IP information for */
    public final static String ATTR_IP_VALIDITY_DURATION
        = "ip-validity-duration-seconds";
    /** seconds to cache robots info */
    public final static String ATTR_ROBOTS_VALIDITY_DURATION
        = "robot-validity-duration-seconds";

    /** whether to calculate robots exclusion without applying */
    public final static Boolean DEFAULT_CALCULATE_ROBOTS_ONLY = Boolean.FALSE;
    public final static String ATTR_CALCULATE_ROBOTS_ONLY
        = "calculate-robots-only";

    public PreconditionEnforcer(String name) {
        super(name, "Precondition enforcer");

        Type e;

        e = addElementToDefinition(new SimpleType(ATTR_IP_VALIDITY_DURATION,
            "The minimum interval for which a dns-record will be considered " +
            "valid (in seconds). " +
            "If the record's DNS TTL is larger, that will be used instead.",
            DEFAULT_IP_VALIDITY_DURATION));
        e.setExpertSetting(true);

        e = addElementToDefinition(new SimpleType(ATTR_ROBOTS_VALIDITY_DURATION,
            "The time in seconds that fetched robots.txt information is " +
            "considered to be valid. " +
            "If the value is set to '0', then the robots.txt information" +
            " will never expire.",
            DEFAULT_ROBOTS_VALIDITY_DURATION));
        e.setExpertSetting(true);

        e = addElementToDefinition(new SimpleType(ATTR_CALCULATE_ROBOTS_ONLY,
            "Whether to only calculate the robots status of an URI, " +
            "without actually applying any exclusions found. If true, " +
            "excluded URIs will only be annotated in the crawl.log, but " +
            "still fetched. Default is false.",
            DEFAULT_CALCULATE_ROBOTS_ONLY));
        e.setExpertSetting(true);
    }

    protected void innerProcess(CrawlURI curi) {

        if (considerDnsPreconditions(curi)) {
            return;
        }

        // make sure we only process schemes we understand (i.e. not dns)
        String scheme = curi.getUURI().getScheme().toLowerCase();
        if (! (scheme.equals("http") || scheme.equals("https"))) {
            logger.fine("PreconditionEnforcer doesn't understand uri's of type " +
                scheme + " (ignoring)");
            return;
        }

        if (considerRobotsPreconditions(curi)) {
            return;
        }

        if (!curi.isPrerequisite() && credentialPrecondition(curi)) {
            return;
        }

        // OK, it's allowed

        // For all curis that will in fact be fetched, set appropriate delays.
        // TODO: SOMEDAY: allow per-host, per-protocol, etc. factors
        // curi.setDelayFactor(getDelayFactorFor(curi));
        // curi.setMinimumDelay(getMinimumDelayFor(curi));

        return;
    }

    /**
     * Consider the robots precondition.
     *
     * @param curi CrawlURI we're checking for any required preconditions.
     * @return True, if this <code>curi</code> has a precondition or processing
     * should be terminated for some other reason. False if
     * we can proceed to process this URI.
     */
    private boolean considerRobotsPreconditions(CrawlURI curi) {
        // treat /robots.txt fetches specially
        UURI uuri = curi.getUURI();
        try {
            if (uuri != null && uuri.getPath() != null &&
                    curi.getUURI().getPath().equals("/robots.txt")) {
                // allow processing to continue
                curi.setPrerequisite(true);
                return false;
            }
        }
        catch (URIException e) {
            logger.severe("Failed get of path for " + curi);
        }
        // require /robots.txt if not present
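The listing above ends partway through considerRobotsPreconditions, so the accessors that read back the settings registered in the constructor are not shown here. As an illustration only, the sketch below shows how such a per-URI setting is typically read in a Heritrix 1.x processor, assuming the getAttribute(String, CrawlURI) accessor inherited through Processor; the method name getIPValidityDuration and its exact body are an assumption, not a quotation from the file.

    // Illustrative sketch (not part of the listing above): reading the
    // "ip-validity-duration-seconds" setting for a particular CrawlURI.
    public long getIPValidityDuration(CrawlURI curi) {
        Integer duration;
        try {
            // Settings declared via addElementToDefinition() in the constructor
            // can be resolved per-URI through the inherited accessor.
            duration = (Integer) getAttribute(ATTR_IP_VALIDITY_DURATION, curi);
        } catch (AttributeNotFoundException e) {
            // Should not happen for an attribute defined in the constructor;
            // fall back to the compiled-in default.
            logger.severe(e.getLocalizedMessage());
            duration = DEFAULT_IP_VALIDITY_DURATION;
        }
        return duration.longValue();
    }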