⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 urlfromdatabaseimpl.java

📁 本系统实现了从五个网站上搜索的图书进行整合后
💻 JAVA
📖 第 1 页 / 共 2 页
字号:
package com.booksearch.service.urlservice;

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.StringTokenizer;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadPoolExecutor;

import javax.servlet.http.HttpServletResponse;

import com.booksearch.dao.UrlLoadDao;
import com.booksearch.orm.Url;
import com.booksearch.util.UpdateUtilBean;;
/**
 * Class: UrlfromDatabaseImpl
 * Description: loads URLs from the database, builds the full request paths,
 * and initializes the thread pool.
 * Extends: none
 * Implements: UrlfromDatabase
 * @author  feng guang
 * @since   11/20/08
 */
public class UrlfromDatabaseImpl implements UrlfromDatabase {
	
   /* DAO used to load the per-site URL records from the database (injected via setter). */
	private UrlLoadDao urlLoadDao;
	
	// Map of website name -> fully built search URL, returned by getUrlMap.
	// NOTE(review): this is an instance field, so entries accumulate across
	// calls and access is not synchronized — confirm this is intended.
	private HashMap<String,String> urlMap = new HashMap<String,String>();
	
	/**
	 * Setter-style injection point for the DAO that loads URL records
	 * from the database (presumably wired by the IoC container — confirm
	 * against the bean configuration).
	 *
	 * @param dao the UrlLoadDao implementation to use
	 */
	public void setUrlLoadDao(UrlLoadDao dao) {
		this.urlLoadDao = dao;
	}

	/**
	 * Function: getUrlMap
	 * Description: loads every configured site URL from the database and builds
	 * one complete search-request URL per site for the given keyword and search
	 * kind, keyed by website name.
	 * Calls: UrlLoadDao.loadUrl()
	 * Called By: CrampDataAction.execute()
	 * @param keyword    the raw search keyword; embedded spaces are joined with '+'
	 * @param searchKind one of "all", "name", "author", "publisher"
	 * @return map of website name -> fully constructed request URL
	 */
	public HashMap<String,String> getUrlMap(String keyword,String searchKind) {

		/* Load all configured site URL records from the database. */
		List<Url> list = this.urlLoadDao.loadUrl();

		String temKeyword = normalizeKeyword(keyword);

		/* Build each site's URL and store it in the instance-level map.
		 * NOTE(review): urlMap is a field, so entries persist across calls
		 * and this method is not thread-safe — confirm this is intended. */
		for (Url url : list) {
			String fullUrl = buildFullUrl(url, temKeyword, searchKind);
			if (null != fullUrl)
				urlMap.put(url.getWebname(), fullUrl);
		}
		return urlMap;
	}

	/* Joins the words of a multi-word keyword with '+'; single words pass through. */
	private String normalizeKeyword(String keyword) {
		if (keyword.indexOf(' ') == -1) {
			return keyword;
		}
		StringBuilder joined = new StringBuilder();
		StringTokenizer st = new StringTokenizer(keyword, " ");
		while (st.hasMoreTokens()) {
			if (joined.length() > 0) {
				joined.append('+');
			}
			joined.append(st.nextToken());
		}
		return joined.toString();
	}

	/*
	 * Builds the site-specific search URL for one Url record, or returns null
	 * when the site does not support the requested searchKind.
	 */
	private String buildFullUrl(Url url, String temKeyword, String searchKind) {
		String web = url.getWebname();
		String base = url.getWebsite() + url.getSearchpage() + "?";

		if ("dangdang".equals(web)) {
			/* dangdang uses a different query-parameter name per search kind. */
			if ("all".equals(searchKind) || "name".equals(searchKind))
				return base + url.getKeyword() + "=" + temKeyword;
			if ("author".equals(searchKind))
				return base + "key2=" + temKeyword;
			if ("publisher".equals(searchKind))
				return base + "key3=" + temKeyword;
			return null;
		}
		if ("zhuoyue".equals(web)) {
			/* zhuoyue keeps one parameter name and varies the searchKind value. */
			String kind;
			if ("name".equals(searchKind))           kind = "name";
			else if ("all".equals(searchKind))       kind = "keyword";
			else if ("author".equals(searchKind))    kind = "author";
			else if ("publisher".equals(searchKind)) kind = "pubcomp";
			else return null;
			return base + url.getKeyword() + "=" + temKeyword
			     + "&searchKind=" + kind + "&searchType=1";
		}
		if ("china_pub".equals(web)) {
			/* china_pub takes the same URL regardless of search kind. */
			return base + url.getKeyword() + "=" + temKeyword;
		}
		if ("chinabook".equals(web)) {
			if ("name".equals(searchKind) || "all".equals(searchKind))
				return base + url.getKeyword() + "=" + temKeyword + "&Str_Search=bookname";
			if ("author".equals(searchKind))
				/* BUGFIX: was "&Str_Search=Str_Search=author" — duplicated parameter name. */
				return base + url.getKeyword() + "=" + temKeyword + "&Str_Search=author";
			if ("publisher".equals(searchKind))
				/* BUGFIX: was "&Str_Search=Str_Search=publish" — duplicated parameter name. */
				return base + url.getKeyword() + "=" + temKeyword + "&Str_Search=publish";
			return null;
		}
		if ("tsinghua".equals(web)) {
			/* tsinghua has no publisher search; that kind yields null. */
			if ("name".equals(searchKind) || "all".equals(searchKind))
				return base + url.getKeyword() + "=" + temKeyword + "&type=0";
			if ("author".equals(searchKind))
				return base + url.getKeyword() + "=" + temKeyword + "&type=1";
			return null;
		}
		if ("weilan".equals(web)) {
			/* weilan is the only site whose keyword is URL-encoded (UTF-8). */
			String index;
			if ("name".equals(searchKind))           index = "1";
			else if ("all".equals(searchKind))       index = "0";
			else if ("author".equals(searchKind))    index = "2";
			else if ("publisher".equals(searchKind)) index = "3";
			else return null;
			try {
				return base + url.getKeyword() + "=" + URLEncoder.encode(temKeyword, "utf-8")
				     + "&producttype=1&index=" + index;
			} catch (UnsupportedEncodingException e) {
				/* UTF-8 is always supported by the JVM, so this is unreachable;
				 * preserve the original best-effort behavior (log and skip site). */
				e.printStackTrace();
				return null;
			}
		}
		return null;
	}
	/**
	 * Function: getAdvancedUrlMap 
	 * Description: 从数据库中获得url,并构建请求路径,初始化线程池 
	 * Calls:UrlLoadDao.loadUrl() 
	 * Called By: AdvancedSearchAction.execute()
	 * @param list as ArrayList<String>
	 * @return HashMap<String,String>
	 * @throws UnsupportedEncodingException
	 */
	public HashMap<String, String> getAdvancedUrlMap(String keyword)
			throws Exception {
		
		ArrayList<String> list = new ArrayList<String> ();
		ArrayList<String> strList = new ArrayList<String> ();
		StringTokenizer st = new StringTokenizer(keyword, "+");
		
		while (st.hasMoreElements()) {
			list.add(st.nextToken());
		}
		/* 定义一个完整的接受advURL的变量fullAdvURL */
        HashMap<String,String> advUrlMap = new HashMap<String,String>();
		String fullAdvURL = "";

		/* 定义一些中间变量 */
		String name = "";
		String author = "";
		String publisher = "";
		String ISBN = "";
		String minprice = "";
		String maxprice = "";
		String publishUpYear = "";
		String publishUpmonth = "";
		String publishDoanYear = "";
		String publishDoanMonth = "";

		/* 判断接受的每个参数是否为空 */
		if (!"null".equals(list.get(0))) {
			//System.out.println(list.get(0));
			name = list.get(0).replace("20%", "+");
			if(name.indexOf(" ")!=-1){
				
				StringTokenizer temst = new StringTokenizer(name, " ");
				
				while (temst.hasMoreElements()) {
					strList.add(temst.nextToken());
				}	
			}

		}
		if (!"null".equals(list.get(1))) {
			author = list.get(1).replace("20%", "+");
		} 

		if (!"null".equals(list.get(2))) {
			ISBN = list.get(2).replace("20%", "+");

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -