-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathprescrape_queue.py
More file actions
executable file
·90 lines (79 loc) · 2.34 KB
/
prescrape_queue.py
File metadata and controls
executable file
·90 lines (79 loc) · 2.34 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
#!/usr/bin/env python
import sys
import time
import platform
from typing import Optional
import psycopg2
from oil import oil
from weaver import Web, WebScraper, WebQueue
import weaver.enc as enc
def plog(msg: str, fname: str = "./pffn.log") -> None:
	"""Append msg (plus a newline) to the log file fname and echo it to stdout.

	Fix: the file is now opened with an explicit UTF-8 encoding; previously it
	used the platform default, which can mangle non-ASCII log text (scraped
	URLs/titles) on hosts with a non-UTF-8 locale.
	"""
	with open(fname, 'a', encoding='utf-8') as f:
		f.write(msg + '\n')
	print(msg)
def prescrape(scraper: WebScraper, wq: WebQueue) -> Optional[Web]:
	"""Fetch wq.url, preferring the scraper's cached copy.

	Re-scrapes when the cached copy predates the queue entry's musty
	timestamp. Returns the Web record, or None when the response body
	cannot be decoded to text.
	"""
	assert(wq.url is not None)
	print(f"url: {wq.url}")
	# Cheap path first: whatever the scraper already has cached.
	page = scraper.softScrape(wq.url)
	assert(page.created is not None)
	# Cached copy older than the musty cutoff is stale -- fetch fresh.
	if wq.musty is not None and page.created < wq.musty:
		print(f"  musty, rescraping")
		page = scraper.scrape(wq.url)
	assert(page.url is not None and page.response is not None)
	print(f"\tresponse size: {len(page.response)}B")
	decoded = enc.decode(page.response, page.url)
	if decoded is None:
		print("\tunknown encoding")
		return None
	charset, html = decoded
	print(f"\tencoding: {charset}")
	print(f"\tdecoded size: {len(html)}B")
	return page
NODE_PREFIX = 'minerva'

# Derive this worker's id from the host name, which must look like
# "minerva<digits>" (e.g. minerva3 -> workerId 3).
node = platform.node()
nodeSuffix = node[len(NODE_PREFIX):]
if not (node.startswith(NODE_PREFIX) and nodeSuffix.isnumeric()):
	plog(f"err: node {node} is not {NODE_PREFIX}[workerId]")
	raise Exception("expected valid node name")
workerId = int(nodeSuffix)
plog(f"workerId: {workerId}")

# Striping config from argv: stripeCount stripe [baseDelay].
stripeCount = 1
stripe = 0
baseDelay = None
argc = len(sys.argv)
if argc not in (3, 4):
	raise Exception("expected stripeCount stripe extraDelay?")
stripeCount = int(sys.argv[1])
stripe = int(sys.argv[2])
plog(f"stripeCount: {stripeCount}")
plog(f"stripe: {stripe}")
if argc == 4:
	baseDelay = float(sys.argv[3])
	plog(f"baseDelay: {baseDelay}")
# Main worker loop: pull queue entries for this worker/stripe and prescrape
# them until killed. Holds one DB connection for the whole run.
with oil.open() as db:
	scraper = WebScraper(db)
	plog('==========')
	plog(f"source: {scraper.source.__dict__}")
	if baseDelay:
		scraper.baseDelay = baseDelay
	# we handle sleeping in our loop
	loopDelay = scraper.baseDelay
	# Near-zero delay inside the scraper so it never double-sleeps; this loop
	# applies loopDelay itself below.
	scraper.baseDelay = 0.01
	while True:
		wq = WebQueue.next(db, workerId, stripeCount=stripeCount, stripe=stripe)
		if wq is None:
			# Queue empty: short poll interval, no full loopDelay needed.
			time.sleep(.05)
			continue
		assert(wq.url is not None)
		w = prescrape(scraper, wq)
		# Only dequeue once the URL is present in the web cache; otherwise the
		# entry stays queued for a retry. NOTE(review): wcache presumably
		# returns the cached rows for the given urls -- confirm in weaver.
		if len(Web.wcache(db, [wq.url])) == 1:
			wq.dequeue(db)
		if w is not None:
			assert(w.created is not None)
			# w.created looks like epoch millis; only rate-limit when the page
			# was actually fetched recently (within ~30s) rather than served
			# from an old cache entry. TODO confirm the millis assumption.
			if w.created > int((time.time() - 30) * 1000):
				time.sleep(loopDelay)
		else:
			# Decode failure: still back off before the next fetch.
			time.sleep(loopDelay)