
📄 url.h

📁 Larbin Internet Spider Indexing System
💻 C/C++ header (.h)
// Larbin
// Sebastien Ailleret
// 15-11-99 -> 02-12-99

/* This class describes an URL */

#ifndef URL_H
#define URL_H

#include <stdlib.h>

#include "types.h"
#include "xutils/string.h"

struct global;

class url {
 private:
  char *host;
  uint port;
  char *file;
  uint depth;
  /* parse the url */
  void parse (char *s);
  /** parse a file with base */
  void parseWithBase (char *u, char *base);
  /* normalize file name */
  bool normalize (char *file);
  /* Does this url start with a protocol name ? */
  bool isProtocol (char *s);

 public:
  /* Constructor : Parses an url (u is deleted) */
  url (char *u, uint depth, char *base = NULL);
  /* Constructor : read the url from a file (cf serialize) */
  url (String *line);
  /* Destructor */
  ~url ();
  /* Is it a valid url ? */
  int isValid ();
  /* print an URL */
  void print ();
  /* return the host */
  char *getHost ();
  /* return the port */
  uint getPort ();
  /* return the file */
  char *getFile ();
  /** Depth in the Site */
  uint getDepth ();
  /** Set depth to one if we are at an entry point in the site */
  void setDepth (char *fromHost);
  /** return the base of the url
   * give means that you have to delete the string yourself
   */
  char *giveBase ();
  /** return a char * representation of the url
   * give means that you have to delete the string yourself
   */
  char *giveUrl ();
  /* return a hashcode for the host of this url */
  uint hostHashCode ();
  /* return a hashcode for this url */
  uint hashCode ();
  /* serialize the url */
  char *serialize (int fds);
};

#endif // URL_H
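
The header above only declares the interface. For orientation, here is a minimal usage sketch, not taken from the Larbin sources: it assumes the matching url.cc is compiled and linked in, that "types.h" defines uint, and that the first constructor takes ownership of (and deletes) the buffer it is given, as its comment states. The example URL and the new[] allocation style are illustrative assumptions.

// Minimal usage sketch (not part of Larbin): exercises the public
// interface declared above. Assumes url.cc is linked in and that
// "types.h" defines uint.
#include <cstring>
#include <iostream>
#include "url.h"

int main () {
  // The first constructor deletes the buffer it is given (see its
  // comment above), so pass a heap copy rather than a string literal.
  // The allocation must match how url.cc frees it; a new[]-allocated
  // copy is assumed here.
  const char *raw = "http://www.example.com:8080/dir/page.html";
  char *s = new char[strlen(raw) + 1];
  strcpy(s, raw);

  url *u = new url(s, 0, NULL);   // depth 0, no base URL
  if (u->isValid()) {
    std::cout << "host: " << u->getHost() << "\n"
              << "port: " << u->getPort() << "\n"
              << "file: " << u->getFile() << "\n";
    // The give* methods hand over ownership: the caller must delete
    // the returned string, per the comments in the header.
    char *full = u->giveUrl();
    std::cout << "url:  " << full << "\n";
    delete [] full;
  }
  delete u;
  return 0;
}

Note the naming convention documented in the header: giveBase and giveUrl return freshly allocated strings that the caller must delete, whereas getHost and getFile return pointers into the object's own storage.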
