如何在从代码中获得的 csv 文件中打印网页抓取结果

How can I print web scraping result in csv file which I got from the code

from bs4 import BeautifulSoup
import requests
import csv

# Scrape the CoinGecko front page and print parallel lists of coin
# attributes: name, ticker symbol, price, market cap, and liquidity.
url = "https://coingecko.com/en"

page = requests.get(url)
html_doc = page.content
soup = BeautifulSoup(html_doc, "html.parser")

# Raw tag results for each column of the coins table.
# NOTE(review): the trailing spaces in some class strings are intentional —
# they must match the site's class attribute exactly for find_all to hit.
coinname = soup.find_all("div", attrs={"class": "coin-content center"})
coin_sign = soup.find_all("div", attrs={"class": "coin-icon mr-2 center flex-column"})
coinvalue = soup.find_all("td", attrs={"class": "td-price price text-right "})
marketcap = soup.find_all("td", attrs={"class": "td-market_cap cap "})
Liquidity = soup.find_all("td", attrs={"class": "td-liquidity_score lit text-right "})

# Extract the text of the nested span from each matched tag.
coin_name = []
coinsign = []
Coinvalue = []
Marketcap = []
marketliquidity = []
for div in coinname:
    coin_name.append(div.a.span.text)
for sign in coin_sign:
    coinsign.append(sign.span.text)
for Value in coinvalue:
    Coinvalue.append(Value.a.span.text)
for cap in marketcap:
    Marketcap.append(cap.div.span.text)
for liquidity in Liquidity:
    # BUG FIX: this append was not indented under the loop in the
    # original, which made the whole script an IndentationError.
    marketliquidity.append(liquidity.a.span.text)

print(coin_name)
print(coinsign)
print(Coinvalue)
print(Marketcap)
print(marketliquidity)

我想将输出保存到一个包含 5 列的 csv 文件中。第 1 列为 "coin_name",第 2 列为 "coinsign",第 3 列为 "coinvalue",第 4 列为 "Marketcap",第 5 列为 "Marketliquidity" .我该如何解决这个问题?

我也想限制我接收的数据,因为我只想接收 100 个 coin_name 但我收到了 200 个 coin_name。

from bs4 import BeautifulSoup
import requests
import csv
from itertools import islice

# Scrape the CoinGecko front page and write the first 100 coins to
# coins.csv with columns: name, sign, value, market cap, liquidity.
url = "https://coingecko.com/en"
page = requests.get(url)
soup = BeautifulSoup(page.content, "html.parser")

# List comprehensions replace the assign-then-loop-then-append pattern.
names = [div.a.span.text for div in soup.find_all("div", attrs={"class": "coin-content center"})]
signs = [sign.span.text for sign in soup.find_all("div", attrs={"class": "coin-icon mr-2 center flex-column"})]
values = [value.a.span.text for value in soup.find_all("td", attrs={"class": "td-price price text-right "})]
caps = [cap.div.span.text for cap in soup.find_all("td", attrs={"class": "td-market_cap cap "})]
liquidities = [liquidity.a.span.text for liquidity in soup.find_all("td", attrs={"class": "td-liquidity_score lit text-right "})]

# newline='' is required by the csv module so it controls line endings itself.
with open('coins.csv', mode='w', newline='') as coins:
    writer = csv.writer(coins, delimiter=',', quotechar='"')
    # ROBUSTNESS FIX: indexing each list with range(100) raises IndexError
    # whenever any column has fewer than 100 entries (the scraped lists are
    # known to have unequal lengths). zip stops at the shortest column and
    # islice caps the output at 100 rows.
    for row in islice(zip(names, signs, values, caps, liquidities), 100):
        writer.writerow(row)

输出将是

Bitcoin,BTC,",578.62","3,894,498,118",",476,855,331"
Ethereum,ETH,4.49,",995,876,618",",256,303,216"
EOS,EOS,.73,",193,319,905","8,339,006"
XRP,XRP,$0.48,",249,618,341","4,378,978"
Litecoin,LTC,.80,",388,966,637","6,289,650"
NEO,NEO,.11,",177,368,159","0,733,208"
Monero,XMR,3.64,",871,890,512",",235,745"