writerpoolprocessor.html
来自「网络爬虫开源代码」· Java 代码 · 共 732 行 · 第 1/5 页
Java
732 行
<a name="139" href="#139">139</a> <em> * CrawlURI annotation indicating no record was written</em><a name="140" href="#140">140</a> <em> */</em><a name="141" href="#141">141</a> <strong>protected</strong> <strong>static</strong> <strong>final</strong> String ANNOTATION_UNWRITTEN = <span class="string">"unwritten"</span>;<a name="142" href="#142">142</a> <a name="143" href="#143">143</a> <em>/**<em>*</em></em><a name="144" href="#144">144</a> <em> * Default maximum file size.</em><a name="145" href="#145">145</a> <em> * TODO: Check that subclasses can set a different MAX_FILE_SIZE and</em><a name="146" href="#146">146</a> <em> * it will be used in the constructor as default.</em><a name="147" href="#147">147</a> <em> */</em><a name="148" href="#148">148</a> <strong>private</strong> <strong>static</strong> <strong>final</strong> <strong>int</strong> DEFAULT_MAX_FILE_SIZE = 100000000;<a name="149" href="#149">149</a> <a name="150" href="#150">150</a> <em>/**<em>*</em></em><a name="151" href="#151">151</a> <em> * Default path list.</em><a name="152" href="#152">152</a> <em> * </em><a name="153" href="#153">153</a> <em> * TODO: Confirm this one gets picked up.</em><a name="154" href="#154">154</a> <em> */</em><a name="155" href="#155">155</a> <strong>private</strong> <strong>static</strong> <strong>final</strong> String [] DEFAULT_PATH = {<span class="string">"crawl-store"</span>};<a name="156" href="#156">156</a> <a name="157" href="#157">157</a> <em>/**<em>*</em></em><a name="158" href="#158">158</a> <em> * Reference to pool.</em><a name="159" href="#159">159</a> <em> */</em><a name="160" href="#160">160</a> <strong>transient</strong> <strong>private</strong> <a href="../../../../org/archive/io/WriterPool.html">WriterPool</a> pool = <strong>null</strong>;<a name="161" href="#161">161</a> <a name="162" href="#162">162</a> <em>/**<em>*</em></em><a name="163" href="#163">163</a> <em> * Total number of bytes written to disc.</em><a name="164" href="#164">164</a> <em> 
*/</em><a name="165" href="#165">165</a> <strong>private</strong> <strong>long</strong> totalBytesWritten = 0;<a name="166" href="#166">166</a> <a name="167" href="#167">167</a> <em>/**<em>*</em></em><a name="168" href="#168">168</a> <em> * Calculate metadata once only.</em><a name="169" href="#169">169</a> <em> */</em><a name="170" href="#170">170</a> <strong>transient</strong> <strong>private</strong> List<String> cachedMetadata = <strong>null</strong>;<a name="171" href="#171">171</a> <a name="172" href="#172">172</a> <a name="173" href="#173">173</a> <em>/**<em>*</em></em><a name="174" href="#174">174</a> <em> * @param name Name of this processor.</em><a name="175" href="#175">175</a> <em> */</em><a name="176" href="#176">176</a> <strong>public</strong> <a href="../../../../org/archive/crawler/framework/WriterPoolProcessor.html">WriterPoolProcessor</a>(String name) {<a name="177" href="#177">177</a> <strong>this</strong>(name, <span class="string">"Pool of files processor"</span>);<a name="178" href="#178">178</a> }<a name="179" href="#179">179</a> <a name="180" href="#180">180</a> <em>/**<em>*</em></em><a name="181" href="#181">181</a> <em> * @param name Name of this processor.</em><a name="182" href="#182">182</a> <em> * @param description Description for this processor.</em><a name="183" href="#183">183</a> <em> */</em><a name="184" href="#184">184</a> <strong>public</strong> <a href="../../../../org/archive/crawler/framework/WriterPoolProcessor.html">WriterPoolProcessor</a>(<strong>final</strong> String name,<a name="185" href="#185">185</a> <strong>final</strong> String description) {<a name="186" href="#186">186</a> <strong>super</strong>(name, description);<a name="187" href="#187">187</a> <a href="../../../../org/archive/crawler/settings/Type.html">Type</a> e = addElementToDefinition(<a name="188" href="#188">188</a> <strong>new</strong> <a href="../../../../org/archive/crawler/settings/SimpleType.html">SimpleType</a>(ATTR_COMPRESS, <span 
class="string">"Compress files when "</span> +<a name="189" href="#189">189</a> <span class="string">"writing to disk."</span>, <strong>new</strong> Boolean(DEFAULT_COMPRESS)));<a name="190" href="#190">190</a> e.setOverrideable(false);<a name="191" href="#191">191</a> e = addElementToDefinition(<a name="192" href="#192">192</a> <strong>new</strong> <a href="../../../../org/archive/crawler/settings/SimpleType.html">SimpleType</a>(ATTR_PREFIX, <a name="193" href="#193">193</a> <span class="string">"File prefix. "</span> +<a name="194" href="#194">194</a> <span class="string">"The text supplied here will be used as a prefix naming "</span> +<a name="195" href="#195">195</a> <span class="string">"writer files. For example if the prefix is 'IAH', "</span> +<a name="196" href="#196">196</a> <span class="string">"then file names will look like "</span> +<a name="197" href="#197">197</a> <span class="string">"IAH-20040808101010-0001-HOSTNAME.arc.gz "</span> +<a name="198" href="#198">198</a> <span class="string">"...if writing ARCs (The prefix will be "</span> +<a name="199" href="#199">199</a> <span class="string">"separated from the date by a hyphen)."</span>,<a name="200" href="#200">200</a> WriterPoolMember.DEFAULT_PREFIX));<a name="201" href="#201">201</a> e = addElementToDefinition(<a name="202" href="#202">202</a> <strong>new</strong> <a href="../../../../org/archive/crawler/settings/SimpleType.html">SimpleType</a>(ATTR_SUFFIX, <span class="string">"Suffix to tag onto "</span> +<a name="203" href="#203">203</a> <span class="string">"files. If value is '${HOSTNAME}', will use hostname for "</span> +<a name="204" href="#204">204</a> <span class="string">"suffix. 
If empty, no suffix will be added."</span>,<a name="205" href="#205">205</a> WriterPoolMember.DEFAULT_SUFFIX));<a name="206" href="#206">206</a> e.setOverrideable(false);<a name="207" href="#207">207</a> e = addElementToDefinition(<a name="208" href="#208">208</a> <strong>new</strong> <a href="../../../../org/archive/crawler/settings/SimpleType.html">SimpleType</a>(ATTR_MAX_SIZE_BYTES, <span class="string">"Max size of each file"</span>,<a name="209" href="#209">209</a> <strong>new</strong> Long(DEFAULT_MAX_FILE_SIZE)));<a name="210" href="#210">210</a> e.setOverrideable(false);<a name="211" href="#211">211</a> e = addElementToDefinition(<a name="212" href="#212">212</a> <strong>new</strong> <a href="../../../../org/archive/crawler/settings/StringList.html">StringList</a>(ATTR_PATH, <span class="string">"Where to files. "</span> +<a name="213" href="#213">213</a> <span class="string">"Supply absolute or relative path. If relative, files "</span> +<a name="214" href="#214">214</a> <span class="string">"will be written relative to "</span> +<a name="215" href="#215">215</a> <span class="string">"the "</span> + CrawlOrder.ATTR_DISK_PATH + <span class="string">"setting."</span> +<a name="216" href="#216">216</a> <span class="string">" If more than one path specified, we'll round-robin"</span> +<a name="217" href="#217">217</a> <span class="string">" dropping files to each. This setting is safe"</span> +<a name="218" href="#218">218</a> <span class="string">" to change midcrawl (You can remove and add new dirs"</span> +<a name="219" href="#219">219</a> <span class="string">" as the crawler progresses)."</span>, getDefaultPath()));<a name="220" href="#220">220</a> e.setOverrideable(false);<a name="221" href="#221">221</a> e = addElementToDefinition(<strong>new</strong> <a href="../../../../org/archive/crawler/settings/SimpleType.html">SimpleType</a>(ATTR_POOL_MAX_ACTIVE,<a name="222" href="#222">222</a> <span class="string">"Maximum active files in pool. 
"</span> +<a name="223" href="#223">223</a> <span class="string">"This setting cannot be varied over the life of a crawl."</span>,<a name="224" href="#224">224</a> <strong>new</strong> Integer(WriterPool.DEFAULT_MAX_ACTIVE)));<a name="225" href="#225">225</a> e.setOverrideable(false);<a name="226" href="#226">226</a> e = addElementToDefinition(<strong>new</strong> <a href="../../../../org/archive/crawler/settings/SimpleType.html">SimpleType</a>(ATTR_POOL_MAX_WAIT,<a name="227" href="#227">227</a> <span class="string">"Maximum time to wait on pool element"</span> +<a name="228" href="#228">228</a> <span class="string">" (milliseconds). This setting cannot be varied over the life"</span> +<a name="229" href="#229">229</a> <span class="string">" of a crawl."</span>,<a name="230" href="#230">230</a> <strong>new</strong> Integer(WriterPool.DEFAULT_MAXIMUM_WAIT)));<a name="231" href="#231">231</a> e.setOverrideable(false);<a name="232" href="#232">232</a> e = addElementToDefinition(<strong>new</strong> <a href="../../../../org/archive/crawler/settings/SimpleType.html">SimpleType</a>(ATTR_MAX_BYTES_WRITTEN,<a name="233" href="#233">233</a> <span class="string">"Total file bytes to write to disk."</span> +<a name="234" href="#234">234</a> <span class="string">" Once the size of all files on disk has exceeded this "</span> +<a name="235" href="#235">235</a> <span class="string">"limit, this processor will stop the crawler. 
"</span> +<a name="236" href="#236">236</a> <span class="string">"A value of zero means no upper limit."</span>, <strong>new</strong> Long(0)));<a name="237" href="#237">237</a> e.setOverrideable(false);<a name="238" href="#238">238</a> e.setExpertSetting(<strong>true</strong>);<a name="239" href="#239">239</a> e = addElementToDefinition(<strong>new</strong> <a href="../../../../org/archive/crawler/settings/SimpleType.html">SimpleType</a>(ATTR_SKIP_IDENTICAL_DIGESTS,<a name="240" href="#240">240</a> <span class="string">"Whether to skip the writing of a record when URI "</span> +<a name="241" href="#241">241</a> <span class="string">"history information is available and indicates the "</span> +<a name="242" href="#242">242</a> <span class="string">"prior fetch had an identical content digest. "</span> +<a name="243" href="#243">243</a> <span class="string">"Default is false."</span>, <strong>new</strong> Boolean(false)));<a name="244" href="#244">244</a> e.setOverrideable(<strong>true</strong>);<a name="245" href="#245">245</a> e.setExpertSetting(<strong>true</strong>);<a name="246" href="#246">246</a> }<a name="247" href="#247">247</a> <a name="248" href="#248">248</a> <strong>protected</strong> String [] getDefaultPath() {<a name="249" href="#249">249</a> <strong>return</strong> DEFAULT_PATH;<a name="250" href="#250">250</a> }<a name="251" href="#251">251</a> <a name="252" href="#252">252</a> <strong>public</strong> <strong>synchronized</strong> <strong>void</strong> initialTasks() {<a name="253" href="#253">253</a> <em class="comment">// Add this class to crawl state listeners and setup pool.</em><a name="254" href="#254">254</a> getSettingsHandler().getOrder().getController().<a name="255" href="#255">255</a> addCrawlStatusListener(<strong>this</strong>);<a name="256" href="#256">256</a> setupPool(<strong>new</strong> AtomicInteger());<a name="257" href="#257">257</a> <em class="comment">// Run checkpoint recovery code.</em><a name="258" href="#258">258</a> 
<strong>if</strong> (getSettingsHandler().getOrder().getController().<a name="259" href="#259">259</a> isCheckpointRecover()) {<a name="260" href="#260">260</a> checkpointRecover();<a name="261" href="#261">261</a> }<a name="262" href="#262">262</a> }<a name="263" href="#263">263</a> <a name="264" href="#264">264</a> <strong>protected</strong> AtomicInteger getSerialNo() {<a name="265" href="#265">265</a> <strong>return</strong> ((WriterPool)getPool()).getSerialNo();<a name="266" href="#266">266</a> }<a name="267" href="#267">267</a> <a name="268" href="#268">268</a> <em>/**<em>*</em></em><a name="269" href="#269">269</a> <em> * Set up pool of files.</em><a name="270" href="#270">270</a> <em> */</em><a name="271" href="#271">271</a> <strong>protected</strong> <strong>abstract</strong> <strong>void</strong> setupPool(<strong>final</strong> AtomicInteger serialNo);<a name="272" href="#272">272</a> <a name="273" href="#273">273</a> <em>/**<em>*</em></em><a name="274" href="#274">274</a> <em> * Writes a CrawlURI and its associated data to store file.</em><a name="275" href="#275">275</a> <em> *</em><a name="276" href="#276">276</a> <em> * Currently this method understands the following uri types: dns, http, </em><a name="277" href="#277">277</a> <em> * and https.</em><a name="278" href="#278">278</a> <em> *</em><a name="279" href="#279">279</a> <em> * @param curi CrawlURI to process.</em><a name="280" href="#280">280</a> <em> */</em><a name="281" href="#281">281</a> <strong>protected</strong> <strong>abstract</strong> <strong>void</strong> innerProcess(<a href="../../../../org/archive/crawler/datamodel/CrawlURI.html">CrawlURI</a> curi);<a name="282" href="#282">282</a> <a name="283" href="#283">283</a> <strong>protected</strong> <strong>void</strong> checkBytesWritten() {<a name="284" href="#284">284</a> <strong>long</strong> max = getMaxToWrite();<a name="285" href="#285">285</a> <strong>if</strong> (max <= 0) {
⌨️ 快捷键说明
复制代码Ctrl + C
搜索代码Ctrl + F
全屏模式F11
增大字号Ctrl + =
减小字号Ctrl + -
显示快捷键?