python爬虫爬取太原房价和租房价格(安居客)
前段时间家里需要买房,但是没有一个可参考的房价。于是我用自学的 Python 爬虫技术去安居客上爬取了房价:import requests
import csv

import requests
from bs4 import BeautifulSoup

# Scrape second-hand house listings for Taiyuan from Anjuke and save to CSV.
headers = {'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.104 Safari/537.36'}

lss = []  # one row per listing: [description, estate name, address, unit price, total price]
for page in range(23):
    url = 'https://ty.anjuke.com/sale/b253-p{}/?from=esf_list'.format(page)
    html = requests.get(url, headers=headers).text
    soup = BeautifulSoup(html, 'lxml')

    # Listing details: free-text description, estate (小区) name, address.
    details = []
    for div in soup.find_all('div', class_='property-content-detail'):
        # BUG FIX: the original called .text on the ResultSet returned by
        # find_all(), which raises AttributeError; find() returns a single Tag.
        miaoshu = div.find('p', class_='property-content-info-text').text.replace('\n', ' ').strip()
        xiaoqu = div.find('p', class_='property-content-info-comm-name').text
        weizhi = div.find('p', class_='property-content-info-comm-address').text
        details.append([miaoshu, xiaoqu, weizhi])

    # Prices: unit price and total price per listing.
    # BUG FIX: the original called .replace() on the ResultSet itself (another
    # AttributeError); the '元/' suffix lives in the average-price text.
    prices = []
    for jiage in soup.find_all('div', class_='property-price'):
        zongjia = jiage.find('p', class_='property-price-total').text
        danjia = jiage.find('p', class_='property-price-average').text.replace('元/', '')
        prices.append([danjia, zongjia])

    # BUG FIX: the original appended list1 + list2 (the two whole lists
    # concatenated) once per index, instead of pairing detail i with price i.
    # zip also tolerates the two lists being unequal in length.
    for detail, price in zip(details, prices):
        lss.append(detail + price)

print(lss)

# utf-8-sig so Excel on Windows recognises the Chinese text; the original
# relied on the platform default encoding.
with open('G:/pythondemo/安居客/房价.csv', 'w', newline='', encoding='utf-8-sig') as csvfile:
    writer = csv.writer(csvfile)
    writer.writerows(lss)
租房:
import csv
import time

import requests
from bs4 import BeautifulSoup

# Scrape rental listings for Taiyuan from Anjuke and save to CSV.
headers = {'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.104 Safari/537.36'}

lss = []  # one row per listing: [estate name, location text, monthly rent]
for page in range(50):
    # Progress counter; the rental list shows 60 listings per page.
    print(str((page + 1) * 60) + '页')
    # BUG FIX: the original URL template ended in a doubled slash ('-p{}//').
    url = 'https://ty.zu.anjuke.com/fangyuan/fx1-p{}/'.format(page)
    html = requests.get(url, headers=headers).text
    soup = BeautifulSoup(html, 'lxml')

    for div in soup.find_all('div', class_='zu-itemmod'):
        # Hoisted: the original located the same <address> tag twice.
        address = div.find('address', class_='details-item')
        xiaoqu = address.find('a').text
        weizhi = address.text.replace('\n', ' ').strip().replace(' ', '')
        zujin = div.find('div', class_='zu-side').text.replace('\n', ' ').strip().replace(' ', '').replace('元/月', '')
        lss.append([xiaoqu, weizhi, zujin])

    # Be polite to the server: one request per second.
    time.sleep(1)

for row in lss:
    print(row)

# utf-8-sig so Excel on Windows recognises the Chinese text; the original
# relied on the platform default encoding.
with open('G:/pythondemo/安居客/租房.csv', 'w', newline='', encoding='utf-8-sig') as csvfile:
    writer = csv.writer(csvfile)
    writer.writerows(lss)
山西人路过,楼主可以直接公布下爬取结果吗,懒得爬了
好的,只不过这个是小平米的,需要完整的话可以联系我
然后呢??? {:301_997:}不明觉厉 结果呢?
爬虫大佬 然后爬取到好房源了么,公布一下呗 怎么爬大哥 怎么改其它地区的? 难道是种种精彩,尽在下回?