# Futbin FUT 21 player scraper: collects gold-rare player rows and saves
# them to data.json.
import requests
from bs4 import BeautifulSoup
import csv
import pandas as pd
import json
import re
# Template for the futbin search pages: gold-rare FUT 21 cards, price
# 1100-5000 coins, sorted by likes descending.
BASE_URL = (
    'https://www.futbin.com/21/players?page={page}'
    '&ps_price=1100-5000&version=gold_rare&sort=likes&order=desc'
)

# Player rows in the results table carry classes like "player_tr_1",
# "player_tr_2", ...  Raw string: '\d' is an invalid escape otherwise.
ROW_CLASS_RE = re.compile(r'player_tr_\d+')


def _parse_row(row):
    """Extract one player's fields from a results-table <tr> element.

    Returns a list:
        [name, club badge URL, nation badge URL, league badge URL,
         player image URL, rating, position, card version, price]
    or None when the row lacks the expected sub-elements (e.g. an ad row),
    so the caller can skip it instead of crashing or reusing stale data.
    """
    name = row.find('a', class_='player_name_players_table')
    detail = row.find('span', class_='players_club_nation')
    img_player = row.find('img', class_='player_img')
    if not (name and detail and img_player):
        return None

    # The club, nation and league badges are three consecutive <img> tags
    # inside the club/nation span.
    club = detail.find('img')
    nation = club.find_next('img')
    league = nation.find_next('img')

    # Rating, position, version and price live in the <td> cells that
    # follow the name cell, in that order.
    note = name.find_next('td')
    position = note.find_next('td')
    version = position.find_next('td')
    price = version.find_next('td')

    return [
        name.text,
        club['src'],
        nation['src'],
        league['src'],
        img_player['data-original'],  # lazy-loaded image URL attribute
        note.text,
        position.text,
        version.text,
        price.text,
    ]


def scrape_players(pages=5, out_path='data.json'):
    """Scrape `pages` futbin result pages and write all rows to `out_path`.

    BUG FIX vs. the original script: `data.json` was opened in 'w' mode
    inside the per-player loop, so the file was overwritten on every row
    and only the final player survived.  We now accumulate every parsed
    row and write the complete list once, after scraping finishes.
    """
    players = []
    for page in range(pages):
        response = requests.get(BASE_URL.format(page=page))
        if not response.ok:
            continue  # skip pages that fail to load; best-effort scrape
        soup = BeautifulSoup(response.text, 'lxml')
        tbody = soup.find('tbody')
        if tbody is None:
            continue  # page layout changed or empty result page
        for row in tbody.find_all('tr', {'class': ROW_CLASS_RE}):
            parsed = _parse_row(row)
            if parsed is not None:
                players.append(parsed)

    with open(out_path, 'w') as outfile:
        json.dump(players, outfile)


if __name__ == '__main__':
    scrape_players()