// NOTE: removed non-source extraction residue (GitHub page chrome and line-number gutter)
// that was accidentally captured along with this file.
package by.andd3dfx.crawler.engine;
import by.andd3dfx.crawler.dto.CrawlerData;
import by.andd3dfx.crawler.dto.SingleSearchResult;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import java.util.ArrayList;
import java.util.List;
/**
 * Web crawler retrieving a list of data items of type {@code T} by consecutively visiting pages.
 * Crawling starts from a provided starting page URL; the link to the next page is extracted
 * from each visited page by the concrete subclass.
 *
 * <p>Subclasses define how to extract data elements, map them to DTOs and find the next page URL.
 *
 * @param <T> type of the data items produced by this crawler
 */
@Slf4j
public abstract class WebCrawler<T extends CrawlerData> {

    /** User-Agent header sent with every page request (some sites reject requests without one). */
    private static final String USER_AGENT = "Mozilla";
    /** Default maximum amount of pages visited by {@link #batchSearch(String)}. */
    private static final int DEFAULT_MAX_PAGES_CAP = 10;
    /** Default delay between two consequent page requests, milliseconds. */
    private static final long DEFAULT_THROTTLING_DELAY_MS = 20;

    /**
     * Search and extract data from page with provided URL, using the default throttling delay (20ms).
     *
     * @param pageUrl URL of page
     * @return search result (extracted data items plus the next page URL, if any)
     */
    @SneakyThrows
    public SingleSearchResult<T> singleSearch(String pageUrl) {
        return singleSearch(pageUrl, DEFAULT_THROTTLING_DELAY_MS);
    }

    /**
     * Search and extract data from page with provided URL.
     *
     * @param pageUrl           URL of page
     * @param throttlingDelayMs delay between two consequent page requests, milliseconds
     * @return search result (extracted data items plus the next page URL, if any)
     */
    @SneakyThrows
    public SingleSearchResult<T> singleSearch(String pageUrl, long throttlingDelayMs) {
        Document document = retrieveDocument(pageUrl, throttlingDelayMs);
        Elements elements = extractElements(document);
        // Each element mapping may itself fetch pages, hence the delay is propagated.
        List<T> dataItems = elements.stream()
                .map(element -> mapElementToData(element, throttlingDelayMs))
                .toList();
        log.debug("Single search: url={}, items={}", pageUrl, dataItems.size());

        String nextUrl = extractNextUrl(document);
        return new SingleSearchResult<>(dataItems, nextUrl);
    }

    /**
     * Fetch and parse the page with provided URL, sleeping beforehand to throttle request rate.
     *
     * @param pageUrl           URL of page
     * @param throttlingDelayMs delay applied before the request, milliseconds
     * @return parsed Jsoup document
     */
    @SneakyThrows
    protected Document retrieveDocument(String pageUrl, long throttlingDelayMs) {
        // Throttle before every request to avoid hammering the target site.
        Thread.sleep(throttlingDelayMs);
        return Jsoup
                .connect(pageUrl)
                .userAgent(USER_AGENT).get();
    }

    /**
     * Extract elements from parsed Jsoup document
     *
     * @param document Jsoup document
     * @return extracted elements
     */
    protected abstract Elements extractElements(Document document);

    /**
     * Extract next URL from parsed Jsoup document
     *
     * @param document Jsoup document
     * @return next URL, or {@code null} when there are no more pages
     */
    protected abstract String extractNextUrl(Document document);

    /**
     * Map element to result DTO object
     *
     * @param element           Jsoup element to convert
     * @param throttlingDelayMs delay between two consequent page requests, milliseconds
     * @return DTO object of type T
     */
    protected abstract T mapElementToData(Element element, long throttlingDelayMs);

    /**
     * Batch search using provided starting page URL, max pages cap 10 and throttling delay 20ms
     *
     * @param pageUrl starting page URL
     * @return list of retrieved items
     */
    public List<T> batchSearch(String pageUrl) {
        return batchSearch(pageUrl, DEFAULT_MAX_PAGES_CAP);
    }

    /**
     * Batch search using provided starting page URL and max pages cap. Used throttling delay is 20ms.
     * Use value -1 for max pages cap to visit all available pages.
     *
     * @param pageUrl     starting page URL
     * @param maxPagesCap max pages amount (search will be stopped when this amount of pages requested or no more pages available)
     * @return list of retrieved items
     */
    public List<T> batchSearch(String pageUrl, int maxPagesCap) {
        return batchSearch(pageUrl, maxPagesCap, DEFAULT_THROTTLING_DELAY_MS);
    }

    /**
     * Batch search using provided starting page URL, max pages cap and throttling delay.
     * Use value -1 for max pages cap to visit all available pages.
     *
     * @param pageUrl           starting page URL
     * @param maxPagesCap       max pages amount (search will be stopped when this amount of pages requested or no more pages available)
     * @param throttlingDelayMs delay between two consequent page requests, milliseconds; must be positive
     * @return list of retrieved items
     * @throws IllegalArgumentException if {@code throttlingDelayMs} is not positive
     */
    @SneakyThrows
    public List<T> batchSearch(String pageUrl, int maxPagesCap, long throttlingDelayMs) {
        // Explicit validation instead of `assert`: JVM assertions are disabled by default
        // (no -ea flag), so an assert statement would silently skip this check in production.
        if (throttlingDelayMs <= 0) {
            throw new IllegalArgumentException(
                    "throttlingDelayMs must be positive, but got: " + throttlingDelayMs);
        }
        log.info("Batch search. Starting URL={}, maxPagesCap={}, delay={}ms", pageUrl, maxPagesCap, throttlingDelayMs);

        int pagesCounter = 0;
        var nextPage = pageUrl;
        List<T> result = new ArrayList<>();
        // Stop when the subclass reports no next page (null) or the pages cap is reached;
        // cap value -1 means "no cap".
        while (nextPage != null && (maxPagesCap == -1 || pagesCounter < maxPagesCap)) {
            SingleSearchResult<T> searchResult = singleSearch(nextPage, throttlingDelayMs);
            List<T> dataItems = searchResult.dataItems();
            log.info("Hit №{}, {} items retrieved", pagesCounter, dataItems.size());

            pagesCounter++;
            result.addAll(dataItems);
            nextPage = searchResult.nextPageUrl();
        }
        log.info("Total records retrieved: {}", result.size());
        return result;
    }
}