📄 LinksScoper.java (org.archive.crawler.postprocessor) -- Heritrix source cross-reference
/* LinksScoper
 *
 * $Id: LinksScoper.java,v 1.8 2006/08/04 18:06:29 stack-sf Exp $
 *
 * Created on Oct 2, 2003
 *
 * Copyright (C) 2003 Internet Archive.
 *
 * This file is part of the Heritrix web crawler (crawler.archive.org).
 *
 * Heritrix is free software; you can redistribute it and/or modify
 * it under the terms of the GNU Lesser Public License as published by
 * the Free Software Foundation; either version 2.1 of the License, or
 * any later version.
 *
 * Heritrix is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser Public License for more details.
 *
 * You should have received a copy of the GNU Lesser Public License
 * along with Heritrix; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 *
 */
package org.archive.crawler.postprocessor;

import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.logging.Level;
import java.util.logging.Logger;

import org.apache.commons.httpclient.URIException;
import org.archive.crawler.datamodel.CandidateURI;
import org.archive.crawler.datamodel.CrawlURI;
import org.archive.crawler.datamodel.FetchStatusCodes;
import org.archive.crawler.extractor.Link;
import org.archive.crawler.framework.Filter;
import org.archive.crawler.framework.Scoper;
import org.archive.crawler.settings.MapType;
import org.archive.crawler.settings.SimpleType;
import org.archive.crawler.settings.Type;

/**
 * Determine which extracted links are within scope.
 * TODO: To test scope, requires that Link be converted to
 * a CandidateURI. Make it so don't have to make a CandidateURI to test
 * if Link is in scope.
 * <p>Since this scoper has to create CandidateURIs, no sense
 * discarding them since later in the processing chain CandidateURIs rather
 * than Links are whats needed scheduling extracted links w/ the
 * Frontier (Frontier#schedule expects CandidateURI, not Link). This class
 * replaces Links w/ the CandidateURI that wraps the Link in the CrawlURI.
 *
 * @author gojomo
 * @author stack
 */
public class LinksScoper extends Scoper
implements FetchStatusCodes {
    private static Logger LOGGER =
        Logger.getLogger(LinksScoper.class.getName());

    private final static String ATTR_SEED_REDIRECTS_NEW_SEEDS =
        "seed-redirects-new-seed";

    private final static Boolean DEFAULT_SEED_REDIRECTS_NEW_SEEDS =
        new Boolean(true);

    public static final String ATTR_LOG_REJECT_FILTERS =
        "scope-rejected-url-filters";

    public static final String ATTR_PREFERENCE_DEPTH_HOPS =
        "preference-depth-hops";

    private final static Integer DEFAULT_PREFERENCE_DEPTH_HOPS =
        new Integer(-1);

    /**
     * Instance of rejected uris log filters.
     */
    private MapType rejectLogFilters = null;

    /**
     * @param name Name of this filter.
     */
    public LinksScoper(String name) {
        super(name, "LinksScoper. Rules on which extracted links " +
            "are within configured scope.");

        Type t;
        t = addElementToDefinition(
            new SimpleType(ATTR_SEED_REDIRECTS_NEW_SEEDS,
            "If enabled, any URL found because a seed redirected to it " +
            "(original seed returned 301 or 302), will also be treated " +
            "as a seed.", DEFAULT_SEED_REDIRECTS_NEW_SEEDS));
        t.setExpertSetting(true);

        t = addElementToDefinition(new SimpleType(ATTR_PREFERENCE_DEPTH_HOPS,
            "Number of hops (of any sort) from a seed up to which a URI has higher " +
            "priority scheduling than any remaining seed. For example, if set to 1 items one " +
            "hop (link, embed, redirect, etc.) away from a seed will be scheduled " +
            "with HIGH priority. If set to -1, no " +
            "preferencing will occur, and a breadth-first search with seeds " +
            "processed before discovered links will proceed. If set to zero, a " +
            "purely depth-first search will proceed, with all discovered links processed " +
            "before remaining seeds. Seed redirects are treated as one hop from a seed.",
            DEFAULT_PREFERENCE_DEPTH_HOPS));
        t.setExpertSetting(true);

        this.rejectLogFilters = (MapType)addElementToDefinition(
            new MapType(ATTR_LOG_REJECT_FILTERS, "Filters applied after " +
                "an URI has been rejected. If filter return " +
                "TRUE, the URI is logged (if the logging level is INFO). " +
                "Depends on " + ATTR_OVERRIDE_LOGGER_ENABLED +
                " being enabled.", Filter.class));
        this.rejectLogFilters.setExpertSetting(true);
    }

    protected void innerProcess(final CrawlURI curi) {
        if (LOGGER.isLoggable(Level.FINEST)) {
            LOGGER.finest(getName() + " processing " + curi);
        }

        // If prerequisites, nothing to be done in here.
        if (curi.hasPrerequisiteUri()) {
            handlePrerequisite(curi);
            return;
        }

        // Don't extract links of error pages.
        if (curi.getFetchStatus() < 200 || curi.getFetchStatus() >= 400) {
            curi.clearOutlinks();
            return;
        }

        if (curi.outlinksSize() <= 0) {
            // No outlinks to process.
            return;