# script adapted from https://github.com/phillychi3/nhentai-favorites/

### setting up 'set.yaml'
# open https://nhentai.net/favorites/
# open developer tools (F12)
# switch to network tab
# find favorites/ and click it
# find 'cookie' and 'useragent' in request headers
# copypaste into respective fields
# cookie should look something like this
# cookie: 'cf_clearance=xxxxx; csrftoken=xxxxx; sessionid=xxxxx'

### requirements
# pip install beautifulsoup4 PyYAML requests
# OR
# conda install beautifulsoup4 PyYAML requests


from bs4 import BeautifulSoup
import yaml
import requests
import os
import json

# Bootstrap the config file: create an empty template on first run, then load
# the credentials the scraper needs (Cloudflare cookie + matching user agent).
if not os.path.isfile("set.yaml"):
    with open("set.yaml", "w") as f:
        yaml.dump({"cookie": "", "useragent": ""}, f)
    print("Please edit set.yaml")
    exit()

with open("set.yaml", "r") as f:
    # safe_load avoids arbitrary object construction and, unlike yaml.CLoader,
    # does not require the optional libyaml C extension to be installed.
    data = yaml.safe_load(f) or {}
cookie = data.get("cookie", "")
useragent = data.get("useragent", "")
if cookie == "" or useragent == "":
    # An empty user agent would be rejected by Cloudflare just like an empty
    # cookie, so require both before doing any network work.
    print("Please edit set.yaml")
    exit()

# setting
URL = "https://nhentai.net/favorites/"  # paginated HTML favorites listing
APIURL = "https://nhentai.net/api/gallery/"  # JSON gallery API (not used below)
now = 1  # current favorites page number
allnumbers = []  # collected gallery ids (strings from data-id attributes)


def get_session(url, method="get", data=None):
    """Perform an authenticated HTTP request against nhentai.

    Args:
        url: Target URL.
        method: "get" or "post" (exact, lowercase — as in the original).
        data: Optional form payload for POST requests.

    Returns:
        The requests.Response object, with encoding forced to UTF-8.

    Raises:
        ValueError: If ``method`` is neither "get" nor "post".
    """
    session = requests.Session()
    # Cookie + User-Agent must match the browser session that passed the
    # Cloudflare challenge, otherwise the site answers 403.
    session.headers = {
        "Referer": "https://nhentai.net/login/",
        "User-Agent": useragent,
        "Cookie": cookie,
        "Accept-Language": "zh-TW,zh;q=0.9,en-US;q=0.8,en;q=0.7,zh-CN;q=0.6",
        "Accept-Encoding": "gzip, deflate",
    }
    # The original fell through with `r` unbound for any other method
    # (UnboundLocalError); fail fast with a clear error instead.  A timeout
    # is added because requests otherwise waits forever on a stalled server.
    if method == "get":
        r = session.get(url, timeout=30)
    elif method == "post":
        r = session.post(url, data=data, timeout=30)
    else:
        raise ValueError(f"Unsupported HTTP method: {method!r}")
    r.encoding = "utf-8"
    return r


def check_pass():
    """Probe the site once to verify the Cloudflare clearance cookie works.

    Exits the script with a hint when the server answers 403, which almost
    always means the cf_clearance cookie / user agent pair was rejected.
    """
    res = get_session("https://nhentai.net/")
    if res.status_code == 403:
        # Message typo fixed ("maby block by cloudflare" -> readable English).
        print("403 error, maybe blocked by Cloudflare, please check if the cookie is correct")
        exit()


check_pass()

# Walk the paginated favorites listing until an empty page comes back,
# collecting every gallery's numeric id from its data-id attribute.
while True:
    # Renamed from `data` to avoid shadowing the parsed config above.
    page = get_session(f"{URL}?page={now}")
    if "Abandon all hope, ye who enter here" in page.text:
        print("No login, please login first")
        exit()
    soup = BeautifulSoup(page.text, "html.parser")
    galleries = soup.find_all("div", class_="gallery-favorite")
    if not galleries:
        break  # past the last page of favorites
    # Skip any element missing data-id: collecting None here would crash
    # the downloadme.txt write (None + "\n") at the end of the script.
    allnumbers.extend(
        gid for g in galleries if (gid := g.get("data-id")) is not None
    )
    now += 1
    print(len(allnumbers))  # running total as progress feedback


# Dump the collected gallery ids, one per line, for a downstream downloader.
with open("downloadme.txt", "w", encoding="utf8") as out:
    out.writelines(number + "\n" for number in allnumbers)

print("Done!")
# (rentry.co page footer, kept as comments so the file stays valid Python)
# Edit
# Pub: 14 Sep 2024 18:19 UTC
# Views: 472