如果您在使用Google Colab中的requests模块发出HTTP请求时遇到问题,可能有以下几个原因:
1. Firewall or Network Restrictions:有时,网络或防火墙限制可能会阻止笔记本访问外部资源.如果您位于代理或防火墙后面,则可能需要在笔记本中配置代理设置.
使用以下代码片段在笔记本中设置代理设置:
import os

# Point both the plain-HTTP and TLS proxy variables at the same endpoint.
proxy = 'http://your_proxy_address:your_proxy_port'
os.environ['HTTP_PROXY'] = proxy
os.environ['HTTPS_PROXY'] = proxy
2. Blocked Sites:如果您试图访问的网站在Colab环境中被阻止,您将无法向其发出请求.
此外,请添加所有可能的请求标头,以避免被目标网站阻止.以下是代码的修订版:
import requests
from bs4 import BeautifulSoup as bs
from urllib.parse import urlparse, parse_qs
import os
# Please add your proxy address and port to use given proxy while making a request.
# Note: I'm using scrapeops proxy here, you can also get a trial plan and replace the api_key with a valid key
# SECURITY NOTE(review): hard-coding an API key in source leaks the credential once
# the file is shared — prefer reading it from an environment variable or secret store.
api_key = "0565b10e-c1b5-418c-b15d-02d4ebd5d6a2"
# Embed the key in the proxy URL so requests authenticates against ScrapeOps.
proxy_value = f"http://scrapeops:{api_key}@proxy.scrapeops.io:5353"
# Setting both variables routes HTTP and HTTPS traffic through the proxy,
# because requests honors HTTP_PROXY / HTTPS_PROXY from the environment.
os.environ['HTTP_PROXY'] = proxy_value
os.environ['HTTPS_PROXY'] = proxy_value
def get_response_by_passing_headers(url):
    """Fetch *url* with browser-like headers (through any configured proxy).

    The URL is split into a base (scheme + host + path) and its query
    string; the query parameters are re-sent via ``params`` so ``requests``
    handles the encoding.

    Parameters:
        url (str): full URL, including any query string.

    Returns:
        requests.Response: the raw response; no status check is performed.
    """
    # We are parsing query parameters from the URL to pass it to the request
    parsed_url = urlparse(url)
    query_params = parse_qs(parsed_url.query)
    # parse_qs yields lists; keep only the first value per key.
    params = {key: value[0] for key, value in query_params.items()}
    headers = {
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
        'Accept-Language': 'en-GB,en;q=0.9',
        'Cache-Control': 'no-cache',
        'Connection': 'keep-alive',
        'Pragma': 'no-cache',
        'Sec-Fetch-Dest': 'document',
        'Sec-Fetch-Mode': 'navigate',
        'Sec-Fetch-Site': 'none',
        'Sec-Fetch-User': '?1',
        'Upgrade-Insecure-Requests': '1',
        'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36',
        'sec-ch-ua': '"Chromium";v="122", "Not(A:Brand";v="24", "Google Chrome";v="122"',
        'sec-ch-ua-mobile': '?0',
        'sec-ch-ua-platform': '"Linux"',
    }
    # BUG FIX: the original hard-coded 'https://gall.dcinside.com/board/view/'
    # and ignored the caller's URL except for its query string. Rebuild the
    # base from the parsed pieces so any target URL works; for the original
    # sample URL this reconstructs the exact same base, so behavior for
    # existing callers is unchanged.
    base_url = f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path}"
    # NOTE(review): verify=False disables TLS certificate verification —
    # presumably required because the proxy intercepts TLS; confirm, and only
    # use with a trusted proxy.
    response = requests.get(base_url, params=params, headers=headers, verify=False)
    return response
# Example: fetch one notice post from the DCInside "piano" board and dump
# the parsed HTML so you can confirm the request was not blocked.
url = 'https://gall.dcinside.com/board/view/?id=piano&no=1&exception_mode=notice&page=1'
response = get_response_by_passing_headers(url)
# Parse the raw response bytes with the stdlib HTML parser backend.
soup = bs(markup=response.content, features="html.parser")
print(soup)