爬虫爬取B站文章图片

查看 184|回复 11
作者:kuank   
废话不说,上代码
import requests
import os
from bs4 import BeautifulSoup
def download_images(url):
    """Download every image referenced by the article page at *url*.

    Fetches the page, parses its HTML for ``<img>`` tags (preferring the
    lazy-load ``data-src`` attribute over ``src``), and saves each image
    into a local ``bilibili_images`` directory, creating it if needed.

    Parameters
    ----------
    url : str
        Address of the article page to scrape.

    Raises
    ------
    requests.RequestException
        If the article page itself cannot be fetched. Individual image
        failures are reported and skipped, not raised.
    """
    # Browser-like User-Agent; without it many sites refuse the request.
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.82 Safari/537.36'
    }
    response = requests.get(url, headers=headers, timeout=10)
    # Fail fast on HTTP errors instead of parsing an error page's HTML.
    response.raise_for_status()
    soup = BeautifulSoup(response.text, 'html.parser')
    # Directory for the downloaded images; no-op if it already exists.
    os.makedirs('bilibili_images', exist_ok=True)
    for img in soup.find_all('img'):
        img_url = img.get('data-src') or img.get('src')
        # Bug fix: an <img> with neither attribute yields None, which
        # previously crashed on .startswith().
        if not img_url:
            continue
        # Protocol-relative URLs (//i0.hdslb.com/...) need a scheme.
        if img_url.startswith('//'):
            img_url = 'https:' + img_url
        # Last path segment, with any query string stripped, as the file name.
        img_file = img_url.split('/')[-1].split('?')[0]
        if not img_file:
            continue
        try:
            # Reuse the browser headers — image CDNs often reject bare requests.
            img_response = requests.get(img_url, headers=headers, timeout=10)
            img_response.raise_for_status()
        except requests.RequestException as exc:
            # Best effort: skip a failing image rather than abort the whole run.
            print(f"Failed: {img_url} ({exc})")
            continue
        with open(os.path.join('bilibili_images', img_file), 'wb') as f:
            f.write(img_response.content)
        print(f"Downloaded: {img_file}")
# Run the scraper only when executed as a script, so importing this
# module (e.g. to reuse download_images) does not trigger a network fetch.
if __name__ == '__main__':
    # URL of the article to scrape.
    article_url = 'https://www.bilibili.com/read/cv18302467/'
    download_images(article_url)
https://wwqu.lanzouq.com/i4OyO1l13yij
密码:52pj

图片, 楷体

kuank
OP
  

注意:使用前要先安装依赖库:pip install requests beautifulsoup4(代码中 from bs4 import 对应的 PyPI 包名是 beautifulsoup4)
williamipod   

During handling of the above exception, another exception occurred:
Traceback (most recent call last):
  File "/workspace/1/main.py", line 1, in
    import requests
  File "/usr/local/lib/python3.10/site-packages/requests-2.31.0-py3.10.egg/requests/__init__.py", line 45, in
    from .exceptions import RequestsDependencyWarning
  File "/usr/local/lib/python3.10/site-packages/requests-2.31.0-py3.10.egg/requests/exceptions.py", line 9, in
    from .compat import JSONDecodeError as CompatJSONDecodeError
  File "/usr/local/lib/python3.10/site-packages/requests-2.31.0-py3.10.egg/requests/compat.py", line 13, in
    import charset_normalizer as chardet
ModuleNotFoundError: No module named 'charset_normalizer'
出现这个错误是咋回事?提示缺少 charset_normalizer 模块
三滑稽甲苯   

这就是通用的图片下载器吧
52bojie   

挺不错的,加油!
Wryyy6   

好用,感谢分享
sai609   

微信批量下载原创标签文章,python如何实现
红尘旧梦i   

老婆是越来越多了,我感觉三妻四妾已经满足不了我了
weilai8023   

这个很实用,谢谢楼主
yiliber   

加油!加油!
您需要登录后才可以回帖 登录 | 立即注册

返回顶部