import multiprocessing
import os
import shutil

import img2pdf
import requests
def get_page(page_args):
    """Download a single page image and save it as temp/<page_num>.png.

    Args:
        page_args: ``(page_url, now_page_num)`` tuple — arguments are
            tuple-packed because ``multiprocessing.Pool.map`` delivers
            exactly one argument per task.

    Raises:
        requests.HTTPError: if the server responds with an error status.
    """
    page_url, now_page_num = page_args
    # Timeout so a stalled connection cannot hang a worker process forever.
    pic = requests.get(page_url, timeout=30)
    # Fail loudly instead of silently writing an HTML error page as a .png,
    # which would later break the img2pdf conversion step.
    pic.raise_for_status()
    with open("temp/" + str(now_page_num) + '.png', 'wb') as f:
        f.write(pic.content)
    print(str(now_page_num) + ' done')
def parallel_download(page_num, a_url):
    """Download pages 1..page_num concurrently with 8 worker processes.

    The per-page URL is built from *a_url* by replacing the section between
    ``&width=`` and ``&ServiceType=`` with fixed dimensions and the page id.

    Args:
        page_num: total number of pages to fetch (pages are 1-indexed).
        a_url: template URL containing ``&width=`` and ``&ServiceType=``
            query parameters.

    Raises:
        ValueError: if *a_url* lacks the expected query parameters.
    """
    x1 = a_url.find("&width=")
    x2 = a_url.find("&ServiceType=")
    # Guard against str.find() returning -1, which would otherwise splice
    # the URL at the wrong position and produce garbage requests.
    if x1 == -1 or x2 == -1:
        raise ValueError(
            "URL must contain '&width=' and '&ServiceType=' parameters")
    tasks = [
        (a_url[:x1] + '&width=1500&height=2100&pageid=' + str(i) + a_url[x2:], i)
        for i in range(1, page_num + 1)
    ]
    # Context manager terminates the pool on exit; the original called
    # close() without join(), so it could return before workers finished.
    with multiprocessing.Pool(processes=8) as pool:
        pool.map(get_page, tasks)
        pool.close()
        pool.join()
if __name__ == "__main__":
    # Interactive inputs: book title (output file name), page count, and
    # the template URL of one page.
    print('bookname?')
    bookname = input()
    print('page_num?')
    page_num = int(input())
    print('url?')
    t_url = input()

    # exist_ok lets a re-run proceed after a previous failed attempt left
    # temp/ behind (original makedirs crashed with FileExistsError then).
    os.makedirs("temp", exist_ok=True)
    print("downloading pictures")
    parallel_download(page_num, t_url)

    # Bind the downloaded page images (in page order) into a single PDF.
    print("Changing")
    with open(bookname + ".pdf", "wb") as f:
        f.write(img2pdf.convert(
            ["temp/" + str(i) + '.png' for i in range(1, page_num + 1)]))
    print("done")
    # Clean up the scratch directory only after a fully successful run.
    shutil.rmtree("temp")