Hello everyone,
I have just ported this code from Python 2 to Python 3, but I get an error: "POST data should be bytes or an iterable of bytes. It cannot be of type str".
Here is my code (open source!). Thank you all in advance.
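For context, here is a minimal sketch of what Python 3's urllib.request expects (the URL and parameters below are placeholders, not the AFNIC ones): urlencode() returns a str, and a Request used as a POST only accepts bytes, so the encoded string has to be converted explicitly.

import urllib.parse
import urllib.request

# Hypothetical URL and parameters, only to illustrate the bytes requirement
url = "http://example.com/form"
params = {"name": "cellus"}

data = urllib.parse.urlencode(params)   # str -> triggers the "should be bytes" error if passed as-is
data = data.encode("ascii")             # bytes -> accepted as POST data in Python 3

request = urllib.request.Request(url, data)  # POST, because data is not None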
--------------
from io import BytesIO
import gzip
import urllib.request
import urllib.parse
import http.cookies
import http.cookiejar
import sys
import os
import time
def login():
    global cp, date1, date2, region
    print("Login & cookie retrieval for afnic.fr")
    url = "http://www.afnic.fr/fr/produits-et-services/services/whois/"
    opts = {}
    headers = {
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
        'Accept-Encoding': 'gzip, deflate',
        'Accept-Language': 'fr,en-US;q=0.8,en;q=0.5,fr-FR;q=0.3',
        'Cache-Control': 'max-age=0',
        'Connection': 'keep-alive',
        'Host': 'www.afnic.fr',
        'Referer': 'http://www.afnic.fr/fr/produits-et-services/services/whois/',
        'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:15.0) Gecko/20100101 Firefox/15.0',
    }
    # Python 3: urlencode() returns a str, Request() only accepts bytes as POST data
    data = urllib.parse.urlencode(opts).encode('ascii')
    request = urllib.request.Request(url, data, headers)
    cookies = http.cookiejar.CookieJar()
    cookie_handler = urllib.request.HTTPCookieProcessor(cookies)
    redirect_handler = urllib.request.HTTPRedirectHandler()
    opener = urllib.request.build_opener(redirect_handler, cookie_handler)
    response = opener.open(request)
    cookie = response.headers.get('Set-Cookie')
    cookies.extract_cookies(response, request)
    answ = response.read()   # bytes in Python 3
    data = BytesIO(answ)     # gzip needs a binary file-like object, not StringIO
    try:
        if response.info()['content-encoding'] == "gzip":
            gzipper = gzip.GzipFile(fileobj=data)
            answ = gzipper.read()
    except:
        pass
    answ = answ.decode('utf-8')  # decode bytes to str before using str operations
    sid = answ.split('input type="hidden" name="whois[_csrf_token]" value="')[1].split('"')[0]
    return sid, cookie
def post_data(cookie, sid, domainname="cellus"):
    opts = {}
    headers = {
        'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:15.0) Gecko/20100101 Firefox/15.0',
        'Accept': 'text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.9,image/png,*/*;q=0.8',
        'Accept-Encoding': 'gzip, deflate',
        'Accept-Language': 'fr,en-US;q=0.8,en;q=0.5,fr-FR;q=0.3',
        'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.7',
        'Connection': 'keep-alive',
        'Host': 'www.afnic.fr',
        'Referer': "http://www.afnic.fr/fr/produits-et-services/services/whois/",
    }
    url = ("http://www.afnic.fr/fr/produits-et-services/services/whois/"
           "?whois%5B_csrf_token%5D=" + sid
           + "&whois%5Bname%5D=" + domainname
           + "&whois%5Bdomains%5D=.fr")
    # Same Python 3 fixes as in login(): urllib.parse / urllib.request / http.cookiejar
    # replace urllib / urllib2 / cookielib, and POST data is encoded to bytes
    data = urllib.parse.urlencode(opts).encode('ascii')
    request = urllib.request.Request(url, data, headers)
    request.add_header('cookie', cookie)
    cookies = http.cookiejar.CookieJar()
    cookie_handler = urllib.request.HTTPCookieProcessor(cookies)
    redirect_handler = urllib.request.HTTPRedirectHandler()
    opener = urllib.request.build_opener(redirect_handler, cookie_handler)
    response = opener.open(request)
    answ = response.read()
    data = BytesIO(answ)
    try:
        if response.info()['content-encoding'] == "gzip":
            gzipper = gzip.GzipFile(fileobj=data)
            answ = gzipper.read()
    except:
        pass
    answ = answ.decode('utf-8')
    #with open('temp_info.html', 'w') as tf:
    #    tf.write(answ)
    # The marker string stays in French because it is matched against the AFNIC page
    if answ.find('<p>Le nom de domaine "<strong>' + domainname + '.fr</strong>" est disponible à l\'enregistrement.</p>') > -1:
        answ = True
    else:
        answ = False
    return answ
if __name__ == "__main__":
    # PUT THE LIST OF DOMAIN NAMES TO CHECK HERE
    domains_list = ["codes-sources", "cellus", "un-domaine-qui-n-est-pas-encore-enregistre", "ce-domaine-par-exemple", "google"]
    sid, cookie = login()
    print("Checking availability of " + str(len(domains_list)) + " domain name(s).")
    for each in domains_list:
        if post_data(cookie, sid, domainname=each):
            print(" - '" + each + ".fr' is available for registration")
        else:
            print(" - '" + each + ".fr' is already registered")
        time.sleep(0.25)