package haodong.net.cn.leetcode2github;

import java.util.List;

import us.codecraft.webmagic.Page;
import us.codecraft.webmagic.Site;
import us.codecraft.webmagic.Spider;
import us.codecraft.webmagic.processor.PageProcessor;

/**
 * WebMagic {@link PageProcessor} that walks from the LeetCode problem list to
 * each problem's submissions and from there to the submitted code.
 */
public class App implements PageProcessor {
    protected Site site = Site.me().setRetryTimes(3).setSleepTime(1000)
            .setTimeOut(15000);
    // URL fragment identifying the submissions-list page
    protected String submissionList;
    // URL fragment identifying a problem's code (submission-detail) page
    protected String problemCode;
    // URL fragment identifying a problem's description page
    protected String problemDescription;
    // URL fragment identifying the problem-list page
    protected String problemList;
    // initial page to visit
    protected String url;
    // XPath for navigating to the problem pages
    protected String problemLinkPath;
    // XPath for extracting the problem list (problem names)
    protected String getProblemLinkPath;
    // XPath for navigating to the submissions list
    protected String submissionLinkPath;
    // XPath for navigating to the code page
    protected String codePagePath;
    // XPath for extracting the code
    protected String codePath;
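    /*
     * None of the URL/XPath fields above are assigned anywhere in this class,
     * so process() would fail until they are configured. Purely illustrative
     * (hypothetical) values could look like:
     *
     *   problemList    = "problemset";          // fragment of the problem-list URL
     *   submissionList = "submissions";         // fragment of the submissions-list URL
     *   problemCode    = "submissions/detail";  // fragment of a submission's code URL
     */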

    // process() is the core hook for custom crawler logic; the extraction logic lives here.
    public void process(Page page) {
        // dispatch on the URL of the page currently being processed
        String pageUrl = page.getUrl().toString();
        if (pageUrl.contains(problemList)) {
            getLinks(page);
        } else if (pageUrl.contains(problemCode)) {
            getCode(page);
        } else {
            getSubmission(page);
        }
    }
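    /*
     * WebMagic calls process() once for every downloaded page; each URL queued
     * via addTargetRequests() in forwards() comes back through this same method,
     * which is why the dispatch above keys on the current page's URL. Any URL
     * matching neither problemList nor problemCode falls through to getSubmission().
     */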

    // On the problem-list page: record the problem names and queue the links to each problem.
    public void getLinks(Page page) {
        page.putField("name", page.getHtml().xpath(getProblemLinkPath).all());
        forwards(page, problemLinkPath);
    }

    // On a problem page: queue the links leading towards its submissions list.
    public void getSubmission(Page page) {
        forwards(page, submissionLinkPath);
    }

    // On the code page: extract the submitted code (not implemented yet).
    public void getCode(Page page) {
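        // A minimal sketch of what this could do, assuming codePath is an XPath
        // selecting the code element on the submission-detail page; codePath is
        // never assigned here, so the line stays commented out:
        // page.putField("code", page.getHtml().xpath(codePath).toString());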
    }

    // Select the region matched by the given XPath, collect its links, and queue them for crawling.
    public void forwards(Page page, String xpath) {
        List<String> list = page.getHtml().xpath(xpath).links().all();
        page.addTargetRequests(list);
    }

    public Site getSite() {
        return site;
    }

    public static void main(String[] args) {

        Spider.create(new App())
                // start crawling from the LeetCode algorithms problem list
                .addUrl("https://leetcode.com/problemset/algorithms/")
                // crawl with 5 threads
                .thread(5)
                // start the crawler (blocks until the crawl finishes)
                .run();
    }
}
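
/*
 * Note: no Pipeline is registered on the Spider above, so WebMagic falls back
 * to its default ConsolePipeline and the extracted fields are only printed to
 * the console. Persisting the results (for example pushing them to a GitHub
 * repository, which the project name suggests is the eventual goal) would need
 * a custom Pipeline registered via Spider#addPipeline; that is an assumption
 * about the intended next step, not something implemented here.
 */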