This article walks through a working example of using Python's Selenium module to drive a browser and scrape information about Douyu live-stream rooms. It is shared here for reference; the full script is as follows:
import time
from multiprocessing import Pool
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import TimeoutException
from bs4 import BeautifulSoup
from pymongo import MongoClient
from pymongo.errors import PyMongoError

# MongoDB configuration
MONGO_HOST = "localhost"
MONGO_DATABASE = "douyu"
MONGO_TABLE = "zhibo"

client = MongoClient(host=MONGO_HOST)
db = client[MONGO_DATABASE]

# PhantomJS command-line options
# See http://phantomjs.org/api/command-line.html
SERVICE_ARGS = ['--disk-cache=true', '--load-images=false']

# driver = webdriver.Chrome()  # with a visible browser window
driver = webdriver.PhantomJS(service_args=SERVICE_ARGS)  # headless

delay = 10
wait = WebDriverWait(driver, delay)
driver.maximize_window()


def get_total_pages():
    """Open the directory page, read the total page count, and scrape page 1."""
    url = 'https://www.douyu.com/directory/all'
    driver.get(url)
    pages = int(driver.find_element_by_css_selector(
        '.shark-pager-dot + .shark-pager-item').text)
    print("Fetching data for page 1")
    room_list = get_rooms_by_beautifulsoup()
    save_to_mongodb(room_list)
    return pages


# Fetch the data for a given page number and save it to the database
def parse_page(page_num):
    print("Fetching data for page %d" % page_num)
    try:
        page_num_box = wait.until(
            EC.presence_of_element_located(
                (By.CSS_SELECTOR, "input.jumptxt")))
        go_btn = wait.until(EC.element_to_be_clickable(
            (By.CSS_SELECTOR, 'a.shark-pager-submit')))
        page_num_box.clear()
        page_num_box.send_keys(page_num)
        go_btn.click()
        # driver.execute_script("window.scrollTo(0, document.body.scrollHeight);")
        # time.sleep(0.1)
        # Note: By.CLASS_NAME fails here with "invalid selector: Compound class names not permitted"
        wait.until(
            EC.text_to_be_present_in_element(
                (By.CSS_SELECTOR, '.shark-pager-item.current'),
                str(page_num)))
        room_list = get_rooms_by_beautifulsoup()
        save_to_mongodb(room_list)
    except TimeoutException:
        print("Request for page %d failed" % page_num)
        print("Retrying page %d" % page_num)
        return parse_page(page_num)


def get_rooms_by_beautifulsoup():
    '''
    Parse the page with bs4 and extract each live room's name,
    viewer count, tag and host name.
    '''
    wait.until(EC.presence_of_element_located(
        (By.CSS_SELECTOR, "ul#live-list-contentbox > li")))
    html = driver.page_source
    soup = BeautifulSoup(html, 'lxml')
    rooms = soup.select('ul#live-list-contentbox > li')
    for room in rooms:
        room_name = room.find(
            'h3', attrs={'class': 'ellipsis'}).get_text(strip=True)
        view_count = room.find('span', class_='dy-num fr').text
        tag = room.find('span', class_='tag ellipsis').text
        hostname = room.find('span', class_='dy-name ellipsis fl').text
        # print("room: " + room_name + "\tviewers: " + view_count + "\ttag: " + tag + "\thost: " + hostname)
        yield {
            'room_name': room_name,
            'view_count': view_count,
            'tag': tag,
            'hostname': hostname,
        }


def save_to_mongodb(room_list):
    for room in room_list:
        try:
            db[MONGO_TABLE].insert(room)  # insert also accepts a list of documents
            print("Inserted into MongoDB:", room)
        except PyMongoError as e:
            print("MongoDB insert failed:", room, e)


if __name__ == '__main__':
    total_pages = 0  # so the finally block can still print if get_total_pages() fails
    try:
        total_pages = get_total_pages()
        for page_num in range(2, total_pages + 1):
            parse_page(page_num)
    except Exception as e:
        print("Something went wrong:", e)
    finally:
        # make sure the browser is closed properly
        print("%d pages in total" % total_pages)
        driver.close()
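A note on dependencies: PhantomJS is no longer maintained and recent Selenium releases have dropped the PhantomJS driver, the find_element_by_* shortcuts, and (in PyMongo 4) Collection.insert. Below is a minimal sketch, not part of the original script, showing how the driver setup and the insert call could be adapted; it assumes Selenium 4+, a chromedriver available on the PATH, and a local MongoDB instance.

from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from pymongo import MongoClient

# Headless Chrome instead of PhantomJS (assumes Selenium 4+ and chromedriver on PATH)
options = Options()
options.add_argument("--headless")                            # run without a visible window
options.add_argument("--blink-settings=imagesEnabled=false")  # roughly equivalent to --load-images=false
driver = webdriver.Chrome(options=options)

# PyMongo 4 removed Collection.insert; insert_one is the current call
client = MongoClient(host="localhost")
db = client["douyu"]
db["zhibo"].insert_one({"room_name": "demo", "view_count": "0"})  # hypothetical sample document

With these versions, the element lookups in the listing above would also need the find_element(By.CSS_SELECTOR, ...) form, since the find_element_by_css_selector shortcut was removed in Selenium 4.3.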