
Getting Started with Java Web Crawling Using jsoup: A Worked Example

Posted: 2021-03-29 23:42:19


1. Import the dependency

<dependency>
    <groupId>org.jsoup</groupId>
    <artifactId>jsoup</artifactId>
    <version>1.14.3</version> <!-- or latest version -->
</dependency>
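To confirm the dependency resolves, a minimal offline parse is enough. This is just a sketch; the HTML string and the class name JsoupSmokeTest are made up for illustration:

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;

public class JsoupSmokeTest {
    public static void main(String[] args) {
        // Parse a literal HTML string; no network access is needed.
        Document doc = Jsoup.parse("<html><body><p>hello jsoup</p></body></html>");
        System.out.println(doc.select("p").text()); // prints "hello jsoup"
    }
}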

2. A static utility class for fetching a proxy IP

import org.jsoup.Jsoup;

import java.io.IOException;

public class ProxyIP {
    // Fetches one proxy address in "host:port" form from a proxy-extraction service.
    public static String getOne() throws IOException {
        // Fill in your extraction URL; the service should return one IP per request.
        String proxyUrl = "your proxy-extraction URL here";
        String ip = Jsoup.connect(proxyUrl).get().body().text();
        return ip;
    }
}
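The returned string is expected in host:port form, which the crawler in step 4 splits before handing it to Jsoup's proxy(host, port). A minimal usage sketch (ProxyDemo is a hypothetical class name, and it assumes the service really does return something like 1.2.3.4:8080):

import java.io.IOException;

public class ProxyDemo {
    public static void main(String[] args) throws IOException {
        // Split "host:port" into the two arguments Jsoup's proxy(host, port) expects.
        String[] parts = ProxyIP.getOne().split(":");
        String host = parts[0];
        int port = Integer.parseInt(parts[1]);
        System.out.println("Using proxy " + host + ":" + port);
    }
}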

3. Crawl the page information

Jsoup.connect(url).get() fetches the page and parses it into a Document, which can then be traversed for the data of interest; the full program in step 4 does exactly this for the index page. A short sketch of the step follows.
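This is a minimal sketch only; the real index URL is redacted as https://xxxxxxxxxx in the original, so https://example.com stands in for it:

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.select.Elements;

import java.io.IOException;

public class FetchPage {
    public static void main(String[] args) throws IOException {
        // Substitute the real index URL here.
        Document document = Jsoup.connect("https://example.com").get();
        // Collect all <article> elements, as the full crawler in step 4 does.
        Elements articles = document.getElementsByTag("article");
        System.out.println("Found " + articles.size() + " articles");
    }
}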

4. Use Jsoup to crawl the content

package com.sgg.main;

import com.sgg.main.proxy.ProxyIP;
import org.jsoup.Connection;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Attributes;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.nodes.Node;
import org.jsoup.select.Elements;

import java.io.IOException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class Index {
    public static void main(String[] args) throws IOException {
        ArrayList<String> urls = new ArrayList<>();
        ArrayList<String> names = new ArrayList<>();

        // Fetch the index page (the real URL is redacted here).
        String indexUrl = "https://xxxxxxxxxx";
        Document document = Jsoup.connect(indexUrl).get();
        Elements article = document.getElementsByTag("article");
        article.forEach(element -> {
            // Walk the node tree to extract each post's title and link.
            Node node = element.childNode(0);
            Node parentNode = node.childNode(0).childNode(0).parentNode();
            String nameUrl = parentNode.childNode(0).childNode(0).toString();
            names.add(nameUrl);
            Attributes attributes = node.attributes();
            // The original wrote `urls.add(href)` with `href` undefined;
            // reading it from the node's attributes is the evident intent.
            urls.add(attributes.get("href"));
        });

        // One worker thread per post URL.
        ExecutorService executorService = Executors.newFixedThreadPool(urls.size());
        for (int i = 0; i < urls.size(); i++) {
            int finalI = i;
            executorService.execute(() -> {
                // Loop forever, revisiting the post through a fresh proxy each pass.
                while (true) {
                    String[] split = null;
                    try {
                        split = ProxyIP.getOne().split(":");
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                    try {
                        // Search for the post title first, to nudge its search ranking.
                        String encodedName = URLEncoder.encode(names.get(finalI), "utf-8");
                        // Search URL (redacted; the encoded title would be embedded in it).
                        String searchUrl = "https:xxxxxxxxxxxx";
                        Connection.Response search = Jsoup.connect(searchUrl)
                                .proxy(split[0], Integer.parseInt(split[1]))
                                .ignoreContentType(true)
                                .execute();
                        // Then visit the post itself through the same proxy.
                        Connection.Response response = Jsoup.connect(urls.get(finalI))
                                .proxy(split[0], Integer.parseInt(split[1]))
                                .ignoreContentType(true)
                                .execute();
                        System.out.println(names.get(finalI).substring(0, 5)
                                + " search status: " + search.statusCode()
                                + " ---> " + urls.get(finalI).split("details/")[1]
                                + " visit status " + response.statusCode());
                        try {
                            // Pause 30-40 seconds between visits.
                            TimeUnit.SECONDS.sleep(30 + new Random().nextInt(10));
                        } catch (InterruptedException e) {
                            Thread.currentThread().interrupt();
                        }
                    } catch (IOException e) {
                        System.out.println(names.get(finalI).substring(0, 5) + " "
                                + urls.get(finalI).split("details/")[1] + " visit failed");
                        System.out.println(e.getMessage());
                    }
                }
            });
        }
    }
}
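The childNode(0) chains in the program above are brittle: any wrapper element added to the markup shifts the indices and breaks the traversal. A hedged alternative sketch using jsoup's CSS selectors instead (it assumes each <article> holds a title link, which is a guess about the redacted site's markup, and https://example.com again stands in for the real URL):

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

public class SelectorIndex {
    public static void main(String[] args) throws IOException {
        List<String> urls = new ArrayList<>();
        List<String> names = new ArrayList<>();
        Document document = Jsoup.connect("https://example.com").get();
        // "article a" matches every link inside an <article>; adjust to the real markup.
        for (Element link : document.select("article a")) {
            names.add(link.text());
            urls.add(link.absUrl("href")); // resolves relative hrefs against the base URI
        }
        System.out.println(names.size() + " posts found");
    }
}

A selector like "article a" keeps working when intermediate wrappers change, and absUrl("href") resolves relative links against the page's base URI, which the index-based traversal does not.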
