All of the search examples below work on the following HTML document, saved as search.html:

<html>
<body>
<p class="ecopyramid">
 <ul id="producers">
  <li class="producerlist">
   <p class="name">plants</p>
   <p class="number">100000</p>
  </li>
  <li class="producerlist">
   <p class="name">algae</p>
   <p class="number">100000</p>
  </li>
 </ul>
 <ul id="primaryconsumers">
  <li class="primaryconsumerlist">
   <p class="name">deer</p>
   <p class="number">1000</p>
  </li>
  <li class="primaryconsumerlist">
   <p class="name">rabbit</p>
   <p class="number">2000</p>
  </li>
 </ul>
 <ul id="secondaryconsumers">
  <li class="secondaryconsumerlist">
   <p class="name">fox</p>
   <p class="number">100</p>
  </li>
  <li class="secondaryconsumerlist">
   <p class="name">bear</p>
   <p class="number">100</p>
  </li>
 </ul>
 <ul id="tertiaryconsumers">
  <li class="tertiaryconsumerlist">
   <p class="name">lion</p>
   <p class="number">80</p>
  </li>
  <li class="tertiaryconsumerlist">
   <p class="name">tiger</p>
   <p class="number">50</p>
  </li>
 </ul>
</p>
</body>
</html>
from bs4 import BeautifulSoup

# Load search.html and find the first <ul> tag
with open('search.html', 'r') as html_file:
    soup = BeautifulSoup(html_file, 'lxml')

first_ul_entries = soup.find('ul')
print(first_ul_entries.li.p.string)
tag_li = soup.find('li')
# tag_li = soup.find(name="li")
print(type(tag_li))
print(tag_li.p.string)
search_for_text = soup.find(text='plants')
print(type(search_for_text))
# <class 'bs4.element.NavigableString'>
<p>The below HTML has the information that has email ids.</p>
abc@example.com
<p>xyz@example.com</p>
<span>foo@example.com</span>
import re

email_id_example = """
<p>The below HTML has the information that has email ids.</p>
abc@example.com
<p>xyz@example.com</p>
<span>foo@example.com</span>
"""
email_soup = BeautifulSoup(email_id_example, 'lxml')
print(email_soup)

# pattern = r"\w+@\w+\.\w+"
emailid_regexp = re.compile(r"\w+@\w+\.\w+")
first_email_id = email_soup.find(text=emailid_regexp)
print(first_email_id)
search_for_attribute = soup.find(id='primaryconsumers')
print(search_for_attribute.li.p.string)
customattr = """
<p data-custom="custom">custom attribute example</p>
"""
customsoup = BeautifulSoup(customattr, 'lxml')
customsoup.find(data-custom="custom")
# SyntaxError: keyword can't be an expression
using_attrs = customsoup.find(attrs={'data-custom': 'custom'})
print(using_attrs)
css_class = soup.find(attrs={'class': 'producerlist'})
css_class2 = soup.find(class_="producerlist")
print(css_class)
print(css_class2)
def is_producers(tag):
    return tag.has_attr('id') and tag.get('id') == 'producers'

tag_producers = soup.find(is_producers)
print(tag_producers.li.p.string)
combine_html = """
<p class="identical">
Example of p tag with class identical
</p>
<p class="identical">
Example of p tag with class identical
</p>
"""
combine_soup = BeautifulSoup(combine_html, 'lxml')
identical_p = combine_soup.find("p", class_="identical")
print(identical_p)
# Search for all tags whose class attribute equals tertiaryconsumerlist
all_tertiaryconsumers = soup.find_all(class_='tertiaryconsumerlist')
print(type(all_tertiaryconsumers))
for tertiaryconsumers in all_tertiaryconsumers:
    print(tertiaryconsumers.p.string)
# Search for all p and li tags
p_li_tags = soup.find_all(["p", "li"])
print(p_li_tags)
print()

# Search for all tags whose class attribute is producerlist or primaryconsumerlist
all_css_class = soup.find_all(class_=["producerlist", "primaryconsumerlist"])
print(all_css_class)
print()
# Search for parent tags
primaryconsumers = soup.find_all(class_='primaryconsumerlist')
print(len(primaryconsumers))

# Take the first primary consumer
primaryconsumer = primaryconsumers[0]

# Search for all of its ul parent tags
parent_ul = primaryconsumer.find_parents('ul')
print(len(parent_ul))
# The result contains the full content of each parent tag
print(parent_ul)
print()

# Search for only the closest parent tag; either form works
immediateprimary_consumer_parent = primaryconsumer.find_parent()
# immediateprimary_consumer_parent = primaryconsumer.find_parent('ul')
print(immediateprimary_consumer_parent)
# Search for the sibling tags that come after the producers <ul>
producers = soup.find(id='producers')
next_siblings = producers.find_next_siblings()
print(next_siblings)
# Search for the tags that come after the first p tag in the document
first_p = soup.p
all_li_tags = first_p.find_all_next("li")
print(all_li_tags)
Searching previous tags
Similar to searching for the tags that come after, the find_previous() and find_all_previous() methods search for the tags that come before a given tag.
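A minimal sketch of how those two methods behave, assuming the same ecopyramid soup built above (the commented output is what the sample search.html should produce):

first_tertiary_ul = soup.find(id='tertiaryconsumers')

# find_previous() returns the nearest matching tag that appears earlier in the document
previous_ul = first_tertiary_ul.find_previous('ul')
print(previous_ul['id'])
# secondaryconsumers

# find_all_previous() returns every earlier match, nearest first
all_previous_uls = first_tertiary_ul.find_all_previous('ul')
print([ul['id'] for ul in all_previous_uls])
# ['secondaryconsumers', 'primaryconsumers', 'producers']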
That concludes this detailed walkthrough of searching content with Python's Beautiful Soup module.