scrape.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Apr 21 19:37:45 2018
@author: kogito
"""
import csv
import requests
from bs4 import BeautifulSoup
class Scraper:
    @classmethod
    def process(cls, url):
        # Fetch the page and parse it with the lxml parser
        r = requests.get(url)
        r.raise_for_status()
        soup = BeautifulSoup(r.text, 'lxml')
        data = []
        # The original filtered on an empty id (id=''), which matches no
        # table; take the first table on the page instead
        table = soup.find('table')
        for row in table.find_all('tr'):
            cells = row.findChildren('td')
            values = [cell.string for cell in cells]
            try:
                date = values[0]
                open_ = values[1]
                high = values[2]
                low = values[3]
                close = values[4]
                volume = values[5]
                market_cap = values[6]
            except IndexError:
                # Header rows contain <th> cells only, so values is empty
                continue
            data.append([date, open_, high, low, close, volume, market_cap])
        # Print data
        for item in data:
            print(item)
        return data

    @classmethod
    def write_to_csv(cls, data):
        # newline='' avoids blank rows in the CSV on Windows
        with open('ScrapedData.csv', 'w', newline='') as f:
            writer = csv.writer(f)
            writer.writerow(['Date', 'Open', 'High', 'Low',
                             'Close', 'Volume', 'MarketCap'])
            writer.writerows(data)
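
# A minimal usage sketch. The URL below is a hypothetical placeholder:
# the source does not say which page this scraper targets, only that its
# first table carries the seven columns written to the CSV above.
if __name__ == '__main__':
    url = 'https://example.com/historical-data'  # placeholder, an assumption
    rows = Scraper.process(url)
    Scraper.write_to_csv(rows)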