pipelines.py
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
# useful for handling different item types with a single interface
#from itemadapter import ItemAdapter
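
# Registering this pipeline in settings.py (illustrative sketch; the module
# path "immoeliza.pipelines" is an assumption based on this file's class name):
#
# ITEM_PIPELINES = {
#     "immoeliza.pipelines.ImmoelizaPipeline": 300,
# }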

import pandas as pd

# from pymongo import MongoClient


class ImmoelizaPipeline:
    def process_item(self, item, spider):
        """
        Clean a single scraped item.

        Applies the item's transform() step, fills any missing fields with
        None, and drops the helper fields that are only needed while parsing.

        Args:
            item: the scraped item to clean
            spider (ImmowebscraperSpider): the spider that produced the item
        """
        item.transform()
        # Make sure every declared field is present, defaulting to None
        for field in item.fields:
            item.setdefault(field, None)
        # Drop fields that were only used during parsing
        item.pop("js", None)
        item.pop("html_elems", None)
        return item

    def close_spider(self, spider):
        """
        Post-processing performed once the crawl is finished.

        Reads the raw feed output, removes unusable entries (no price or no
        postal code) and entries with an invalid postal code, then writes the
        cleaned dataset to data/final_dataset.json.

        Args:
            spider (ImmowebscraperSpider): the spider itself
        """
        spider.logger.info("Spider finished -- post-processing data")
        df = pd.read_json("data/output.json", orient="columns")
        # Entries without a price or postal code are unusable
        df.dropna(subset=["Price", "PostalCode"], inplace=True)
        # Belgian postal codes are four digits (1000-9999)
        df.drop(df[df["PostalCode"] >= 10000].index, inplace=True)
        df.to_json("data/final_dataset.json")
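

# A minimal sketch of the feed export this pipeline assumes (illustrative; the
# project's actual settings.py may differ). close_spider() reads the raw feed
# from data/output.json, so the FEEDS setting would need to write there, e.g.:
#
# FEEDS = {
#     "data/output.json": {"format": "json", "overwrite": True},
# }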