Python Learning Journey, Week 2-1

Week 2-1 Assignment
Second week of learning Python. Completed exercise week 2-1 on May 23: filtering rental listing data in MongoDB.


Code (scraping the data into MongoDB)

from bs4 import BeautifulSoup
import requests
import time
import pymongo
client = pymongo.MongoClient('localhost',27017)
walden = client['walden']
sheet_message = walden['sheet_message']
def get_message(url):

    wb_data = requests.get(url)
    soup = BeautifulSoup(wb_data.text,'lxml')
    titles = soup.select('body > div.wrap.clearfix.con_bg > div.con_l > div.pho_info > h4 > em')  # title
    address2 = soup.select('body > div.wrap.clearfix.con_bg > div.con_l > div.pho_info > p > span.pr5')  # address
    prices = soup.select('#pricePart > div.day_l > span')  # price per night
    landlords = soup.select('#floatRightBox > div.js_box.clearfix > div.member_pic > a > img')  # landlord avatar
    house_images = soup.select('#detailImageBox > div.pho_show_r > div > ul > li:nth-of-type(2) > img')  # first listing photo

    landNames = soup.select('#floatRightBox > div.js_box.clearfix > div.w_240 > h6 > a')  # landlord name
    sexes = soup.select('div.member_pic > div')  # gender icon element
    def print_gender(class_name):
        # map the gender icon's CSS class to a label
        if class_name[0] == 'member_ico1':
            return '女'
        if class_name[0] == 'member_ico':
            return '男'
    for title, address, price, landlord, landName, sex, house_image in zip(titles, address2, prices, landlords, landNames, sexes, house_images):
        data = {
            '标题': title.get_text(),
            '地址': address.get_text().strip(),
            '价格': int(price.get_text()),  # price text is assumed to be a plain number; store as int so $gt compares numerically
            '房东图片': landlord.get('src'),
            '房东姓名': landName.get_text(),
            '性别': print_gender(sex.get('class')),
            '房屋图片': house_image.get('data-src'),
        }
        print(data)
        sheet_message.insert_one(data)


#collect the detail-page links from the current list page
def get_pink_url(url):
    wb_data = requests.get(url)
    soup = BeautifulSoup(wb_data.text, 'lxml')
    links = soup.select('#page_list > ul > li > a ')
    for link in links:
        time.sleep(1)
        get_message(link.get('href'))
    return None
#build the paginated list-page URLs
def get_pink_page():
    full_url = ['http://sh.xiaozhu.com/search-duanzufang-p{}-0/?startDate=2016-05-17&endDate=2016-05-18'.format(str(i)) for i in range(1,4,1)]
    for link in full_url:
        print(link)
        get_pink_url(link)
#kick off the crawl
get_pink_page()
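
After the crawl finishes, a quick sanity check shows whether the data actually landed in the collection. A minimal sketch (count_documents requires pymongo 3.7 or newer; not part of the original script):

# sanity check (sketch): how many listings were stored, plus one sample document
print(sheet_message.count_documents({}))
print(sheet_message.find_one())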

Query code (prices are stored as integers, so $gt performs a numeric comparison)

def get_Db_Message():
    # find all listings priced above 500 per night
    for message in sheet_message.find({'价格': {'$gt': 500}}):
        print(message)
get_Db_Message()
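
The same filter syntax extends to range queries and sorting. A small sketch, assuming the field names stored above and illustrative price bounds:

# listings priced between 300 and 800 per night, cheapest first (illustrative bounds)
for message in sheet_message.find({'价格': {'$gte': 300, '$lte': 800}}).sort('价格', pymongo.ASCENDING):
    print(message['标题'], message['价格'])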

Results

[Screenshots of the run output]

Summary:
- Working through this exercise gave me an initial grasp of basic MongoDB usage.
- Learned how to run simple queries in MongoDB (one more small example follows below).
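
One more small example of a simple query, combining two of the stored fields (the price threshold is illustrative):

# listings by female landlords priced above 500 per night
for message in sheet_message.find({'性别': '女', '价格': {'$gt': 500}}):
    print(message['标题'], message['价格'])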
