app.py
from flask import Flask, request, jsonify
import selectorlib
import requests
from dateutil import parser as dateparser

app = Flask(__name__)
extractor = selectorlib.Extractor.from_yaml_file('selectors.yml')


def scrape(url):
    headers = {
        'authority': 'www.amazon.com',
        'pragma': 'no-cache',
        'cache-control': 'no-cache',
        'dnt': '1',
        'upgrade-insecure-requests': '1',
        'user-agent': 'Mozilla/5.0 (X11; CrOS x86_64 8172.45.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.64 Safari/537.36',
        'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
        'sec-fetch-site': 'none',
        'sec-fetch-mode': 'navigate',
        'sec-fetch-dest': 'document',
        'accept-language': 'en-GB,en-US;q=0.9,en;q=0.8',
    }
    # Download the page using requests
    print("Downloading %s" % url)
    r = requests.get(url, headers=headers)
    # Simple check to see if the page was blocked (usually a 503)
    if r.status_code > 500:
        if "To discuss automated access to Amazon data please contact" in r.text:
            print("Page %s was blocked by Amazon. Please try using better proxies\n" % url)
        else:
            print("Page %s must have been blocked by Amazon as the status code was %d" % (url, r.status_code))
        return None
    # Pass the HTML of the page to the extractor and build the data dict
    data = extractor.extract(r.text, base_url=url)
    reviews = []
    for review in data['reviews']:
        review['product'] = data['product_title']
        review['url'] = url
        # Normalise the "Verified Purchase" badge into a boolean
        if 'verified_purchase' in review:
            review['verified_purchase'] = 'Verified Purchase' in review['verified_purchase']
        # "5.0 out of 5 stars" -> "5.0"
        review['rating'] = review['rating'].split(' out of')[0]
        # Keep only the date portion after "on "
        date_posted = review['date'].split('on ')[-1]
        if review['images']:
            review['images'] = "\n".join(review['images'])
        review['date'] = dateparser.parse(date_posted).strftime('%d %b %Y')
        reviews.append(review)
    # Convert the ratings histogram from a list of {key, value} pairs into a dict
    histogram = {}
    for h in data['histogram']:
        histogram[h['key']] = h['value']
    data['histogram'] = histogram
    # "4.3 out of 5" -> 4.3
    data['average_rating'] = float(data['average_rating'].split(' out')[0])
    data['reviews'] = reviews
    # "1,234 customer reviews" -> 1234 (strip thousands separators before casting)
    data['number_of_reviews'] = int(data['number_of_reviews'].split(' customer')[0].replace(',', ''))
    return data

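The extractor above is configured entirely by selectors.yml, which is not shown on this page. As a rough sketch of what such a file looks like, the snippet below builds an equivalent extractor from a YAML string with selectorlib's Extractor.from_yaml_string. The field names mirror the keys scrape() reads (product_title, reviews, histogram, average_rating, number_of_reviews), but every CSS selector is a placeholder assumption, not the project's real configuration.

# Illustrative sketch only: the CSS selectors are placeholders, not the
# repository's actual selectors.yml. It shows the selectorlib structure
# (field names, multiple/children nesting) that scrape() expects.
import selectorlib

EXAMPLE_SELECTORS = """
product_title:
    css: 'h1 a[data-hook="product-link"]'
    type: Text
average_rating:
    css: 'span[data-hook="rating-out-of-text"]'
    type: Text
number_of_reviews:
    css: 'div[data-hook="cr-filter-info-review-rating-count"]'
    type: Text
histogram:
    css: 'table#histogramTable tr'
    multiple: true
    type: Text
    children:
        key:
            css: 'td.aok-nowrap'
            type: Text
        value:
            css: 'td.a-text-right'
            type: Text
reviews:
    css: 'div[data-hook="review"]'
    multiple: true
    type: Text
    children:
        rating:
            css: 'i[data-hook="review-star-rating"] span'
            type: Text
        date:
            css: 'span[data-hook="review-date"]'
            type: Text
        verified_purchase:
            css: 'span[data-hook="avp-badge"]'
            type: Text
        images:
            css: 'img.review-image-tile'
            multiple: true
            type: Image
"""

example_extractor = selectorlib.Extractor.from_yaml_string(EXAMPLE_SELECTORS)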
@app.route('/')
def api():
    url = request.args.get('url')
    if url:
        data = scrape(url)
        return jsonify(data)
    return jsonify({'error': 'URL to scrape is not provided'}), 400
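To try the endpoint locally, start the app with Flask's development server (for example FLASK_APP=app.py flask run) and pass the review-page URL as the url query parameter. Below is a minimal client sketch; the product-reviews URL is a placeholder, not a real listing.

# Minimal client sketch. Assumes the API is already running locally, e.g. via:
#   FLASK_APP=app.py flask run
# The product-reviews URL below is a placeholder.
import requests

resp = requests.get(
    "http://localhost:5000/",
    params={"url": "https://www.amazon.com/product-reviews/B000000000"},
)
resp.raise_for_status()
data = resp.json()
if data is None:
    # scrape() returns None when Amazon blocks the request, so the API returns null
    print("Scrape failed (page blocked?)")
else:
    print(data["product_title"], data["average_rating"], data["number_of_reviews"])
    for review in data["reviews"]:
        print(review["date"], review["rating"], review["verified_purchase"])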