📄 PreconditionEnforcer.java (excerpt)
<a name="162" href="#162">162</a> <strong>if</strong> (isRobotsExpired(curi)) {<a name="163" href="#163">163</a> <em class="comment">// Need to get robots</em><a name="164" href="#164">164</a> <strong>if</strong> (logger.isLoggable(Level.FINE)) {<a name="165" href="#165">165</a> logger.fine( <span class="string">"No valid robots for "</span> +<a name="166" href="#166">166</a> getController().getServerCache().getServerFor(curi) +<a name="167" href="#167">167</a> <span class="string">"; deferring "</span> + curi);<a name="168" href="#168">168</a> }<a name="169" href="#169">169</a> <a name="170" href="#170">170</a> <em class="comment">// Robots expired - should be refetched even though its already</em><a name="171" href="#171">171</a> <em class="comment">// crawled.</em><a name="172" href="#172">172</a> <strong>try</strong> {<a name="173" href="#173">173</a> String prereq = curi.getUURI().resolve(<span class="string">"/robots.txt"</span>).toString();<a name="174" href="#174">174</a> curi.markPrerequisite(prereq,<a name="175" href="#175">175</a> getController().getPostprocessorChain());<a name="176" href="#176">176</a> }<a name="177" href="#177">177</a> <strong>catch</strong> (URIException e1) {<a name="178" href="#178">178</a> logger.severe(<span class="string">"Failed resolve using "</span> + curi);<a name="179" href="#179">179</a> <strong>throw</strong> <strong>new</strong> RuntimeException(e1); <em class="comment">// shouldn't ever happen</em><a name="180" href="#180">180</a> }<a name="181" href="#181">181</a> <strong>return</strong> <strong>true</strong>;<a name="182" href="#182">182</a> }<a name="183" href="#183">183</a> <em class="comment">// test against robots.txt if available</em><a name="184" href="#184">184</a> <a href="../../../../org/archive/crawler/datamodel/CrawlServer.html">CrawlServer</a> cs = getController().getServerCache().getServerFor(curi);<a name="185" href="#185">185</a> <strong>if</strong>(cs.isValidRobots()){<a name="186" href="#186">186</a> String ua = getController().getOrder().getUserAgent(curi);<a name="187" href="#187">187</a> <strong>if</strong>(cs.getRobots().disallows(curi, ua)) {<a name="188" href="#188">188</a> <strong>if</strong>(((Boolean)getUncheckedAttribute(curi,ATTR_CALCULATE_ROBOTS_ONLY)).booleanValue() == <strong>true</strong>) {<a name="189" href="#189">189</a> <em class="comment">// annotate URI as excluded, but continue to process normally</em><a name="190" href="#190">190</a> curi.addAnnotation(<span class="string">"robotExcluded"</span>);<a name="191" href="#191">191</a> <strong>return</strong> false; <a name="192" href="#192">192</a> }<a name="193" href="#193">193</a> <em class="comment">// mark as precluded; in FetchHTTP, this will</em><a name="194" href="#194">194</a> <em class="comment">// prevent fetching and cause a skip to the end</em><a name="195" href="#195">195</a> <em class="comment">// of processing (unless an intervening processor</em><a name="196" href="#196">196</a> <em class="comment">// overrules)</em><a name="197" href="#197">197</a> curi.setFetchStatus(S_ROBOTS_PRECLUDED);<a name="198" href="#198">198</a> curi.putString(<span class="string">"error"</span>,<span class="string">"robots.txt exclusion"</span>);<a name="199" href="#199">199</a> logger.fine(<span class="string">"robots.txt precluded "</span> + curi);<a name="200" href="#200">200</a> <strong>return</strong> <strong>true</strong>;<a name="201" href="#201">201</a> }<a name="202" href="#202">202</a> <strong>return</strong> false;<a name="203" href="#203">203</a> }<a 
name="204" href="#204">204</a> <em class="comment">// No valid robots found => Attempt to get robots.txt failed</em><a name="205" href="#205">205</a> curi.skipToProcessorChain(getController().getPostprocessorChain());<a name="206" href="#206">206</a> curi.setFetchStatus(S_ROBOTS_PREREQUISITE_FAILURE);<a name="207" href="#207">207</a> curi.putString(<span class="string">"error"</span>,<span class="string">"robots.txt prerequisite failed"</span>);<a name="208" href="#208">208</a> <strong>if</strong> (logger.isLoggable(Level.FINE)) {<a name="209" href="#209">209</a> logger.fine(<span class="string">"robots.txt prerequisite failed "</span> + curi);<a name="210" href="#210">210</a> }<a name="211" href="#211">211</a> <strong>return</strong> <strong>true</strong>;<a name="212" href="#212">212</a> }<a name="213" href="#213">213</a> <a name="214" href="#214">214</a> <em>/**<em>*</em></em><a name="215" href="#215">215</a> <em> * @param curi CrawlURI whose dns prerequisite we're to check.</em><a name="216" href="#216">216</a> <em> * @return true if no further processing in this module should occur</em><a name="217" href="#217">217</a> <em> */</em><a name="218" href="#218">218</a> <strong>private</strong> <strong>boolean</strong> considerDnsPreconditions(<a href="../../../../org/archive/crawler/datamodel/CrawlURI.html">CrawlURI</a> curi) {<a name="219" href="#219">219</a> <strong>if</strong>(curi.getUURI().getScheme().equals(<span class="string">"dns"</span>)){<a name="220" href="#220">220</a> <em class="comment">// DNS URIs never have a DNS precondition</em><a name="221" href="#221">221</a> curi.setPrerequisite(<strong>true</strong>);<a name="222" href="#222">222</a> <strong>return</strong> false; <a name="223" href="#223">223</a> }<a name="224" href="#224">224</a> <a name="225" href="#225">225</a> <a href="../../../../org/archive/crawler/datamodel/CrawlServer.html">CrawlServer</a> cs = getController().getServerCache().getServerFor(curi);<a name="226" href="#226">226</a> <strong>if</strong>(cs == <strong>null</strong>) {<a name="227" href="#227">227</a> curi.setFetchStatus(S_UNFETCHABLE_URI);<a name="228" href="#228">228</a> curi.skipToProcessorChain(getController().getPostprocessorChain());<a name="229" href="#229">229</a> <strong>return</strong> <strong>true</strong>;<a name="230" href="#230">230</a> }<a name="231" href="#231">231</a> <a name="232" href="#232">232</a> <em class="comment">// If we've done a dns lookup and it didn't resolve a host</em><a name="233" href="#233">233</a> <em class="comment">// cancel further fetch-processing of this URI, because</em><a name="234" href="#234">234</a> <em class="comment">// the domain is unresolvable</em><a name="235" href="#235">235</a> <a href="../../../../org/archive/crawler/datamodel/CrawlHost.html">CrawlHost</a> ch = getController().getServerCache().getHostFor(curi);<a name="236" href="#236">236</a> <strong>if</strong> (ch == <strong>null</strong> || ch.hasBeenLookedUp() && ch.getIP() == <strong>null</strong>) {<a name="237" href="#237">237</a> <strong>if</strong> (logger.isLoggable(Level.FINE)) {<a name="238" href="#238">238</a> logger.fine( <span class="string">"no dns for "</span> + ch +<a name="239" href="#239">239</a> <span class="string">" cancelling processing for CrawlURI "</span> + curi.toString());<a name="240" href="#240">240</a> }<a name="241" href="#241">241</a> curi.setFetchStatus(S_DOMAIN_PREREQUISITE_FAILURE);<a name="242" href="#242">242</a> curi.skipToProcessorChain(getController().getPostprocessorChain());<a name="243" 
href="#243">243</a> <strong>return</strong> <strong>true</strong>;<a name="244" href="#244">244</a> }<a name="245" href="#245">245</a> <a name="246" href="#246">246</a> <em class="comment">// If we haven't done a dns lookup and this isn't a dns uri</em><a name="247" href="#247">247</a> <em class="comment">// shoot that off and defer further processing</em><a name="248" href="#248">248</a> <strong>if</strong> (isIpExpired(curi) && !curi.getUURI().getScheme().equals(<span class="string">"dns"</span>)) {<a name="249" href="#249">249</a> logger.fine(<span class="string">"Deferring processing of CrawlURI "</span> + curi.toString()<a name="250" href="#250">250</a> + <span class="string">" for dns lookup."</span>);<a name="251" href="#251">251</a> String preq = <span class="string">"dns:"</span> + ch.getHostName();<a name="252" href="#252">252</a> <strong>try</strong> {<a name="253" href="#253">253</a> curi.markPrerequisite(preq,<a name="254" href="#254">254</a> getController().getPostprocessorChain());<a name="255" href="#255">255</a> } <strong>catch</strong> (URIException e) {<a name="256" href="#256">256</a> <strong>throw</strong> <strong>new</strong> RuntimeException(e); <em class="comment">// shouldn't ever happen</em><a name="257" href="#257">257</a> }<a name="258" href="#258">258</a> <strong>return</strong> <strong>true</strong>;<a name="259" href="#259">259</a> }<a name="260" href="#260">260</a> <a name="261" href="#261">261</a> <em class="comment">// DNS preconditions OK</em><a name="262" href="#262">262</a> <strong>return</strong> false;<a name="263" href="#263">263</a> }<a name="264" href="#264">264</a> <a name="265" href="#265">265</a> <em>/**<em>*</em></em><a name="266" href="#266">266</a> <em> * Get the maximum time a dns-record is valid.</em><a name="267" href="#267">267</a> <em> *</em><a name="268" href="#268">268</a> <em> * @param curi the uri this time is valid for.</em><a name="269" href="#269">269</a> <em> * @return the maximum time a dns-record is valid -- in seconds -- or</em><a name="270" href="#270">270</a> <em> * negative if record's ttl should be used.</em><a name="271" href="#271">271</a> <em> */</em><a name="272" href="#272">272</a> <strong>public</strong> <strong>long</strong> getIPValidityDuration(<a href="../../../../org/archive/crawler/datamodel/CrawlURI.html">CrawlURI</a> curi) {<a name="273" href="#273">273</a> Integer d;<a name="274" href="#274">274</a> <strong>try</strong> {<a name="275" href="#275">275</a> d = (Integer)getAttribute(ATTR_IP_VALIDITY_DURATION, curi);<a name="276" href="#276">276</a> } <strong>catch</strong> (AttributeNotFoundException e) {<a name="277" href="#277">277</a> d = DEFAULT_IP_VALIDITY_DURATION;<a name="278" href="#278">278</a> }<a name="279" href="#279">279</a> <a name="280" href="#280">280</a> <strong>return</strong> d.longValue();<a name="281" href="#281">281</a> }<a name="282" href="#282">282</a> <a name="283" href="#283">283</a> <em>/**<em>* Return true if ip should be looked up.</em></em><a name="284" href="#284">284</a> <em> *</em><a name="285" href="#285">285</a> <em> * @param curi the URI to check.</em><a name="286" href="#286">286</a> <em> * @return true if ip should be looked up.</em><a name="287" href="#287">287</a> <em> */</em><a name="288" href="#288">288</a> <strong>public</strong> <strong>boolean</strong> isIpExpired(<a href="../../../../org/archive/crawler/datamodel/CrawlURI.html">CrawlURI</a> curi) {<a name="289" href="#289">289</a> <a href="../../../../org/archive/crawler/datamodel/CrawlHost.html">CrawlHost</a> host 

    /** Get the maximum time a robots.txt is valid.
     *
     * @param curi
     * @return the time a robots.txt is valid in milliseconds.