Made a start on the WikipediaParser, but I can't work out from the Scrapy documentation (or elsewhere) what the structure of the file should be, or how to test/run the crawler on a page.
This commit is contained in:
parent
81a93c44bb
commit
add4a13a4d
@ -1,3 +1,30 @@
|
||||
__author__ = 'Bas'
|
||||
__author__ = 'Nout'
|
||||
#new branch
|
||||
import parser
|
||||
from scrapy.selector import Selector
|
||||
from FourmiCrawler.items import Result
|
||||
|
||||
class WikipediaParser:
    """Extract chemical properties (e.g. melting point) from a Wikipedia
    compound page and return them as FourmiCrawler ``Result`` items.

    Exploratory first draft of a Scrapy-based parser; the page is
    hard-coded to Methane for now.
    """

    # Hard-coded test page; the commented-out __init__ below shows the
    # intended parameterization by compound id.
    website = "http://en.wikipedia.org/wiki/Methane"
    __spider = "WikipediaParser"

    # def __init__(self, csid):
    #     self.website = "http://en.wikipedia.org/wiki/{id}".format(id=csid)

    def parse(self, response=None):
        """Scrape property rows from the page and return a list of Result items.

        response: a Scrapy Response for the page. When None, an empty
        Selector is used — Selector cannot fetch a URL by itself, so a
        real crawl must supply the downloaded response.

        Bug fixes vs. the original draft:
          - parse() took no arguments (not even self) yet read self.website;
          - Selector() was given a bare URL string, but the Scrapy Selector
            constructor expects a Response object or text=, and performs
            no HTTP requests of its own;
          - the xpath call referenced an undefined name `site` instead of
            the local `sel`;
          - Python 2 print statements replaced with print() calls.
        """
        if response is not None:
            sel = Selector(response)
        else:
            sel = Selector(text="")

        items = []
        item = Result()
        item['attribute'] = "Melting point"
        item['value'] = sel.xpath('//tr[contains(@href, "/wiki/Melting_point")]/text()').extract()
        item['source'] = self.website
        items.append(item)

        # Debug output retained from the original exploratory version.
        print(item['attribute'])
        print(item['value'])
        print(item['source'])
        print("test")
        return items
|
||||
|
||||
if __name__ == "__main__":
    # BUG FIX: the original called parse() as a bare module-level name,
    # which raises NameError (parse is an attribute of WikipediaParser).
    # Also guarded so a plain import of this module does not trigger a run.
    WikipediaParser().parse()
|
Reference in New Issue
Block a user