# -*- coding: utf-8 -*-
# @Author : ChenZhaoyuchen
# @Time : 2024/9/26 16:20
# @File : get_message.py
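"""Scrape the Anjuke region hierarchy.

Walks the site's area filters level by level -- province -> city ->
district -> periphery -- and returns parallel lists of names and URLs
for each level, plus the list of available years.
"""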
import time
import random

import requests  # requests.get is called below; imported explicitly rather than relying on the star imports
from bs4 import BeautifulSoup

# url_start, headers and response_origion are not defined in this file and
# are presumably provided by these modules.
from anjuke.utils.anjuke_response import *
from anjuke.utils.setting import *
# Province level
def get_province():
    province_name_list = []
    province_url_list = []
    response_province = requests.get(url=url_start, headers=headers).content.decode('utf8')
    time.sleep(random.uniform(0.5, 1))
    soup = BeautifulSoup(response_province, 'html.parser')
    filter_area_wrap = soup.find(class_="filter-area-wrap")
    for province_ in filter_area_wrap.find_all('a'):
        province_name = province_.text
        province_url = province_.get('href')
        province_name_list.append(province_name)
        province_url_list.append('https://www.anjuke.com' + province_url)
    # Drop the page's leading link, which is not an individual province
    del province_name_list[0], province_url_list[0]
    time.sleep(random.uniform(0.5, 1))
    return province_name_list, province_url_list
# City level
def get_city():
    province_name_list, province_url_list = get_province()
    city_name_list = []
    city_url_list = []
    for i in range(len(province_url_list)):
        province_url = province_url_list[i]
        province_name = province_name_list[i]
        response_city = requests.get(url=province_url, headers=headers).content.decode('utf8')
        time.sleep(random.uniform(3, 4))
        soup = BeautifulSoup(response_city, 'html.parser')
        filter_area_wrap = soup.find(class_="sel-content bank")
        # Per-province buffers, merged into the full lists once the
        # page's leading link has been dropped
        zhongji_name_list = []
        zhongji_url_list = []
        for city_ in filter_area_wrap.find_all('a'):
            city_name = province_name + city_.text
            city_url = city_.get('href')
            zhongji_name_list.append(city_name)
            zhongji_url_list.append(city_url)
        del zhongji_name_list[0], zhongji_url_list[0]
        city_name_list += zhongji_name_list
        city_url_list += zhongji_url_list
        print(f'Finished province-level unit {i}: {province_name_list[i]}')
    return city_name_list, city_url_list
# District level
def get_area():
    city_name_list, city_url_list = get_city()
    area_name_list = []
    area_url_list = []
    for i in range(len(city_url_list)):
        city_url = city_url_list[i]
        city_name = city_name_list[i]
        response_area = requests.get(url=city_url, headers=headers).content.decode('utf8')
        time.sleep(random.uniform(2, 3))
        soup = BeautifulSoup(response_area, 'html.parser')
        filter_area_wrap = soup.find(class_="sel-content bank")
        zhongji_name_list = []
        zhongji_url_list = []
        for area_ in filter_area_wrap.find_all('a'):
            area_name = city_name + area_.text
            area_url = area_.get('href')
            zhongji_name_list.append(area_name)
            zhongji_url_list.append(area_url)
        # Drop the page's leading link before merging, as in get_city
        del zhongji_name_list[0], zhongji_url_list[0]
        area_name_list += zhongji_name_list
        area_url_list += zhongji_url_list
    return area_name_list, area_url_list
# Surrounding areas
def get_periphery():
    area_name_list, area_url_list = get_area()
    periphery_name_list = []
    periphery_url_list = []
    for i in range(len(area_url_list)):
        area_url = area_url_list[i]
        area_name = area_name_list[i]
        response_periphery = requests.get(url=area_url, headers=headers).content.decode('utf8')
        time.sleep(random.uniform(3, 5))
        soup = BeautifulSoup(response_periphery, 'html.parser')
        filter_area_wrap = soup.find(class_="sel-content bank")
        zhongji_name_list = []
        zhongji_url_list = []
        for periphery_ in filter_area_wrap.find_all('a'):
            periphery_name = area_name + periphery_.text
            periphery_url = periphery_.get('href')
            zhongji_name_list.append(periphery_name)
            zhongji_url_list.append(periphery_url)
        del zhongji_name_list[0], zhongji_url_list[0]
        periphery_name_list += zhongji_name_list
        periphery_url_list += zhongji_url_list
        time.sleep(random.uniform(3, 5))
    return periphery_name_list, periphery_url_list
# Available years
def get_Year():
    Years_list = []
    soup = BeautifulSoup(response_origion, 'html.parser')
    years = soup.find(class_="year-list")
    for year_ in years.find_all('a'):
        year = year_.text
        Years_list.append(year)
    return Years_list
# Quick manual checks
if __name__ == '__main__':
    # print('Years:', get_Year())
    # print('Province-level units:', get_province())
    print('City-level units:', get_city())
    # print('District-level units:', get_area())
    # print('Periphery units:', get_periphery())
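# A minimal sketch of consuming the parallel lists returned above (kept as
# a comment, since running it fires live requests against anjuke.com):
#
#   city_names, city_urls = get_city()
#   for name, url in zip(city_names, city_urls):
#       print(name, url)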