recoverylogmapper.html

来自「网络爬虫开源代码」· Java 代码（JXR HTML 渲染） · 共 339 行 · 第 1/2 页

HTML
339
字号
<a name="162" href="#162">162</a>                         crawledUrlToSeedMap.put(firstUrl, viaUrl);<a name="163" href="#163">163</a>                         seedForFirstUrl = viaUrl;<a name="164" href="#164">164</a>                     } <strong>else</strong> {<a name="165" href="#165">165</a>                         <strong>if</strong> (logger.isLoggable(Level.FINE)) {<a name="166" href="#166">166</a>                             logger.fine(<span class="string">"\tvia URL discovered via seed URL "</span> +<a name="167" href="#167">167</a>                                 seedForFirstUrl);<a name="168" href="#168">168</a>                         }<a name="169" href="#169">169</a>                         <em class="comment">// Collapse</em><a name="170" href="#170">170</a>                         crawledUrlToSeedMap.put(firstUrl, seedForFirstUrl);<a name="171" href="#171">171</a>                     }<a name="172" href="#172">172</a>                     Set&lt;String> theSeedUrlList =<a name="173" href="#173">173</a>                         seedUrlToDiscoveredUrlsMap.get(seedForFirstUrl);<a name="174" href="#174">174</a>                         <strong>if</strong> (theSeedUrlList == <strong>null</strong>) {<a name="175" href="#175">175</a>                         String message = <span class="string">"recover log "</span> +<a name="176" href="#176">176</a>                                          recoverLogFileName + <span class="string">" at line "</span> +<a name="177" href="#177">177</a>                                          reader.getLineNumber() +<a name="178" href="#178">178</a>                                          <span class="string">" listed F+ URL ("</span> + viaUrl +<a name="179" href="#179">179</a>                                          <span class="string">") for which found no seed list."</span>;<a name="180" href="#180">180</a>                         <strong>if</strong> (seedNotFoundPrintWriter != <strong>null</strong>) {<a name="181" 
href="#181">181</a>                             seedNotFoundPrintWriter.println(message);<a name="182" href="#182">182</a>                         } <strong>else</strong> {<a name="183" href="#183">183</a>                             <strong>throw</strong> <strong>new</strong> <a href="../../../../org/archive/crawler/util/SeedUrlNotFoundException.html">SeedUrlNotFoundException</a>(message);<a name="184" href="#184">184</a>                         }<a name="185" href="#185">185</a>                     } <strong>else</strong> {<a name="186" href="#186">186</a>                         theSeedUrlList.add(firstUrl);<a name="187" href="#187">187</a>                     }<a name="188" href="#188">188</a>                 }<a name="189" href="#189">189</a>             } <strong>else</strong> <strong>if</strong> (curLine.startsWith(RecoveryJournal.F_SUCCESS)) {<a name="190" href="#190">190</a>                 <strong>if</strong> (logger.isLoggable(Level.FINE)) {<a name="191" href="#191">191</a>                     logger.fine(<span class="string">"F_SUCCESS for URL "</span> + firstUrl);<a name="192" href="#192">192</a>                 }<a name="193" href="#193">193</a>                 successfullyCrawledUrls.add(firstUrl);<a name="194" href="#194">194</a>             }<a name="195" href="#195">195</a>         }<a name="196" href="#196">196</a>         reader.close();<a name="197" href="#197">197</a>         <strong>if</strong> (seedNotFoundPrintWriter != <strong>null</strong>) {<a name="198" href="#198">198</a>             seedNotFoundPrintWriter.close();<a name="199" href="#199">199</a>         }<a name="200" href="#200">200</a>     }<a name="201" href="#201">201</a> <a name="202" href="#202">202</a>     <em>/**<em>*</em></em><a name="203" href="#203">203</a> <em>     * Returns seed for urlString (null if seed not found).</em><a name="204" href="#204">204</a> <em>     * @param urlString</em><a name="205" href="#205">205</a> <em>     * @return Seed.</em><a name="206" 
href="#206">206</a> <em>     */</em><a name="207" href="#207">207</a>     <strong>public</strong> String getSeedForUrl(String urlString) {<a name="208" href="#208">208</a>         <strong>return</strong> (seedUrlToDiscoveredUrlsMap.get(urlString) != <strong>null</strong>)?<a name="209" href="#209">209</a>                 urlString: crawledUrlToSeedMap.get(urlString);<a name="210" href="#210">210</a>     }<a name="211" href="#211">211</a> <a name="212" href="#212">212</a>     <em>/**<em>*</em></em><a name="213" href="#213">213</a> <em>     * @return Returns the seedUrlToDiscoveredUrlsMap.</em><a name="214" href="#214">214</a> <em>     */</em><a name="215" href="#215">215</a>     <strong>public</strong> Map getSeedUrlToDiscoveredUrlsMap() {<a name="216" href="#216">216</a>         <strong>return</strong> <strong>this</strong>.seedUrlToDiscoveredUrlsMap;<a name="217" href="#217">217</a>     }<a name="218" href="#218">218</a> <a name="219" href="#219">219</a>     <em>/**<em>*</em></em><a name="220" href="#220">220</a> <em>     * @return Returns the successfullyCrawledUrls.</em><a name="221" href="#221">221</a> <em>     */</em><a name="222" href="#222">222</a>     <strong>public</strong> Set getSuccessfullyCrawledUrls() {<a name="223" href="#223">223</a>         <strong>return</strong> <strong>this</strong>.successfullyCrawledUrls;<a name="224" href="#224">224</a>     }<a name="225" href="#225">225</a> <a name="226" href="#226">226</a>     <em>/**<em>*</em></em><a name="227" href="#227">227</a> <em>     * @return Returns the logger.</em><a name="228" href="#228">228</a> <em>     */</em><a name="229" href="#229">229</a>     <strong>public</strong> <strong>static</strong> Logger getLogger() {<a name="230" href="#230">230</a>         <strong>return</strong> logger;<a name="231" href="#231">231</a>     }<a name="232" href="#232">232</a> <a name="233" href="#233">233</a>     <strong>private</strong> <strong>class</strong> SuccessfullyCrawledURLsIterator<a name="234" 
href="#234">234</a>     implements Iterator&lt;String> {<a name="235" href="#235">235</a>         <strong>private</strong> String nextValue = <strong>null</strong>;<a name="236" href="#236">236</a>         <strong>private</strong> Iterator discoveredUrlsIterator;<a name="237" href="#237">237</a> <a name="238" href="#238">238</a>         <strong>public</strong> SuccessfullyCrawledURLsIterator(String seedUrlString)<a name="239" href="#239">239</a>         throws <a href="../../../../org/archive/crawler/util/SeedUrlNotFoundException.html">SeedUrlNotFoundException</a> {<a name="240" href="#240">240</a>             Set discoveredUrlList =<a name="241" href="#241">241</a>                 (Set)getSeedUrlToDiscoveredUrlsMap().get(seedUrlString);<a name="242" href="#242">242</a>             <strong>if</strong> (discoveredUrlList == <strong>null</strong>) {<a name="243" href="#243">243</a>                 <strong>throw</strong> <strong>new</strong> <a href="../../../../org/archive/crawler/util/SeedUrlNotFoundException.html">SeedUrlNotFoundException</a>(<span class="string">"Seed URL "</span> +<a name="244" href="#244">244</a>                     seedUrlString + <span class="string">"  not found in seed list"</span>);<a name="245" href="#245">245</a>             }<a name="246" href="#246">246</a>             discoveredUrlsIterator = discoveredUrlList.iterator();<a name="247" href="#247">247</a>         }<a name="248" href="#248">248</a> <a name="249" href="#249">249</a>         <em>/**<em>*</em></em><a name="250" href="#250">250</a> <em>         * Idempotent method (because of null check on nextValue).</em><a name="251" href="#251">251</a> <em>         */</em><a name="252" href="#252">252</a>         <strong>private</strong> <strong>void</strong> populateNextValue() {<a name="253" href="#253">253</a>             <strong>while</strong> (nextValue == <strong>null</strong> &amp; discoveredUrlsIterator.hasNext()) {<a name="254" href="#254">254</a>                 String 
curDiscoveredUrl =<a name="255" href="#255">255</a>                     (String)discoveredUrlsIterator.next();<a name="256" href="#256">256</a>                 <strong>boolean</strong> succCrawled = getSuccessfullyCrawledUrls().<a name="257" href="#257">257</a>                     contains(curDiscoveredUrl);<a name="258" href="#258">258</a>                 <strong>if</strong> (getLogger().isLoggable(Level.FINE)) {<a name="259" href="#259">259</a>                     getLogger().fine(<span class="string">"populateNextValue: curDiscoveredUrl="</span> +<a name="260" href="#260">260</a>                             curDiscoveredUrl + <span class="string">", succCrawled="</span> +<a name="261" href="#261">261</a>                             succCrawled);<a name="262" href="#262">262</a>                 }<a name="263" href="#263">263</a>                 <strong>if</strong> (succCrawled)<a name="264" href="#264">264</a>                     nextValue = curDiscoveredUrl;<a name="265" href="#265">265</a>             }<a name="266" href="#266">266</a>         }<a name="267" href="#267">267</a> <a name="268" href="#268">268</a>         <strong>public</strong> <strong>boolean</strong> hasNext() {<a name="269" href="#269">269</a>             populateNextValue();<a name="270" href="#270">270</a>             <strong>return</strong> (nextValue != <strong>null</strong>);<a name="271" href="#271">271</a>         }<a name="272" href="#272">272</a> <a name="273" href="#273">273</a>         <strong>public</strong> String next() {<a name="274" href="#274">274</a>             populateNextValue();<a name="275" href="#275">275</a>             String returnValue = nextValue;<a name="276" href="#276">276</a>             nextValue = <strong>null</strong>;<a name="277" href="#277">277</a>             <strong>return</strong> <strong>return</strong>Value;<a name="278" href="#278">278</a>         }<a name="279" href="#279">279</a> <a name="280" href="#280">280</a>         <em>/**<em>*</em></em><a 
name="281" href="#281">281</a> <em>         * Remove operation is unsupported in this Iterator</em><a name="282" href="#282">282</a> <em>         * (will throw UnsupportedOperationException if called).</em><a name="283" href="#283">283</a> <em>         */</em><a name="284" href="#284">284</a>         <strong>public</strong> <strong>void</strong> remove() {<a name="285" href="#285">285</a>             <strong>throw</strong> <strong>new</strong> UnsupportedOperationException(<a name="286" href="#286">286</a>                 <span class="string">"SuccessfullyCrawledURLsIterator.remove: not supported."</span>);<a name="287" href="#287">287</a>         }<a name="288" href="#288">288</a>     }<a name="289" href="#289">289</a> <a name="290" href="#290">290</a>     <strong>public</strong> Iterator&lt;String> getIteratorOfURLsSuccessfullyCrawledFromSeedUrl(<a name="291" href="#291">291</a>             String seedUrlString) throws SeedUrlNotFoundException {<a name="292" href="#292">292</a>         <strong>return</strong> <strong>new</strong> SuccessfullyCrawledURLsIterator(seedUrlString);<a name="293" href="#293">293</a>     }<a name="294" href="#294">294</a> <a name="295" href="#295">295</a>     <strong>public</strong> Collection&lt;String> getSeedCollection() {<a name="296" href="#296">296</a>         <strong>return</strong> seedUrlToDiscoveredUrlsMap.keySet();<a name="297" href="#297">297</a>     }<a name="298" href="#298">298</a> <a name="299" href="#299">299</a>     <strong>public</strong> <strong>static</strong> <strong>void</strong> main(String args[]) {<a name="300" href="#300">300</a>         <strong>if</strong> (args.length &lt; 1) {<a name="301" href="#301">301</a>             System.out.println(<span class="string">"Usage: RecoveryLogMapper recoverLogFileName"</span>);<a name="302" href="#302">302</a>             Runtime.getRuntime().exit(-1);<a name="303" href="#303">303</a>         }<a name="304" href="#304">304</a>         String recoverLogFileName = 
args[0];<a name="305" href="#305">305</a>         <strong>try</strong> {<a name="306" href="#306">306</a>             <a href="../../../../org/archive/crawler/util/RecoveryLogMapper.html">RecoveryLogMapper</a> myRecoveryLogMapper =<a name="307" href="#307">307</a>                 <strong>new</strong> <a href="../../../../org/archive/crawler/util/RecoveryLogMapper.html">RecoveryLogMapper</a>(recoverLogFileName);<a name="308" href="#308">308</a>             <strong>for</strong> (String curSeedUrl: myRecoveryLogMapper.getSeedCollection()) {<a name="309" href="#309">309</a>                 System.out.println(<span class="string">"URLs successfully crawled from seed URL "</span><a name="310" href="#310">310</a>                     + curSeedUrl);<a name="311" href="#311">311</a>                 Iterator iteratorOfUrlsCrawledFromSeedUrl =<a name="312" href="#312">312</a>                     myRecoveryLogMapper.<a name="313" href="#313">313</a>                         getIteratorOfURLsSuccessfullyCrawledFromSeedUrl(<a name="314" href="#314">314</a>                             curSeedUrl);<a name="315" href="#315">315</a>                 <strong>while</strong> (iteratorOfUrlsCrawledFromSeedUrl.hasNext()) {<a name="316" href="#316">316</a>                     String curCrawledUrlString =<a name="317" href="#317">317</a>                         (String)iteratorOfUrlsCrawledFromSeedUrl.next();<a name="318" href="#318">318</a>                     System.out.println(<span class="string">"    -> "</span> + curCrawledUrlString);<a name="319" href="#319">319</a>                 }<a name="320" href="#320">320</a>             }<a name="321" href="#321">321</a>         } <strong>catch</strong> (Exception e) {<a name="322" href="#322">322</a>             e.printStackTrace();<a name="323" href="#323">323</a>         }<a name="324" href="#324">324</a>     }<a name="325" href="#325">325</a> }</pre><hr/><div id="footer">This page was automatically generated by <a 
href="http://maven.apache.org/">Maven</a></div></body></html>

⌨️ 快捷键说明

复制代码Ctrl + C
搜索代码Ctrl + F
全屏模式F11
增大字号Ctrl + =
减小字号Ctrl + -
显示快捷键?