Merge pull request #6 from AliAryanTech/master
fixed: .to results not found on .net
LuckyYam authored Jun 23, 2024
2 parents 94e861e + a6f3dbf commit 58acb0d
Showing 4 changed files with 22 additions and 17 deletions.
2 changes: 1 addition & 1 deletion package.json
@@ -1,7 +1,7 @@
 {
     "name": "@shineiichijo/nhentai-ts",
     "description": "A scraper for NHentai with types",
-    "version": "1.0.3",
+    "version": "1.0.4",
     "main": "./dist/index.js",
     "types": "./dist/index.d.ts",
     "license": "MIT",
29 changes: 14 additions & 15 deletions src/Parser/doujin.ts
@@ -1,12 +1,12 @@
 import { CheerioAPI } from 'cheerio'
-import { baseURLS, clean, getExtension, Pages, imageSites } from '../lib'
+import { baseURLS, clean, getExtension, getPageStatus, Pages, imageSites } from '../lib'
 import { TURL, IDoujinInfo } from '../Types'
 
-export const parseDoujinInfo = (
+export const parseDoujinInfo = async (
     $: CheerioAPI,
     site: keyof typeof baseURLS,
     api_pages?: { t: string }[]
-): IDoujinInfo => {
+): Promise<IDoujinInfo> => {
     const pages: string[] = []
     const gallery_id = (
         $('.thumb-container').first().find('a > img').attr('data-src') ||
@@ -22,16 +22,15 @@ export const parseDoujinInfo = (
                 }.${getExtension(page.t)}`
             )
         )
-    else
-        $('.thumb-container').each((i, el) => {
-            const url = $(el).find('a > img').attr('data-src')
-            if (url)
-                pages.push(
-                    url
-                        .replace(`${i + 1}t`, `${i + 1}`)
-                        .replace(imageSites[site], 'i.nhentai.net')
-                )
-        })
+    else
+        for (const el of $('.thumb-container')) {
+            const url = ($(el).find('a > img').attr('data-src') || '').replace(/t(?=\.)/, '')
+            if (url) {
+                const page = url.replace(imageSites[site], 'i.nhentai.net')
+                const status = await getPageStatus(page)
+                pages.push(status === 200 ? page : url)
+            }
+        }
     const cover =
         $('#cover').find('a > img').attr('data-src') ||
         $('#cover').find('a > img').attr('src')
@@ -90,9 +89,9 @@ export const parseDoujinInfo = (
         groups: clean(groups),
         languages: clean(languages),
         categories: clean(categories),
-        cover: cover
+        cover: !pages.includes('cdn.dogehls.xyz')
             ? cover.replace('cdn.dogehls.xyz', 't3.nhentai.net')
-            : null,
+            : cover,
         images,
         url
     }
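For context, the new for-of loop above prefers the i.nhentai.net copy of each page and keeps the mirror URL only when that copy is missing. Below is a minimal sketch of that fallback in isolation; resolvePageUrl and mirrorHost are illustrative names that do not appear in this commit, and getPageStatus is the helper added to src/lib/util.ts further down.

import { getPageStatus } from '../lib' // re-exported from src/lib, as in the import change above

// Illustrative sketch, not part of the diff. In the real code, mirrorHost is imageSites[site].
const resolvePageUrl = async (thumbUrl: string, mirrorHost: string): Promise<string> => {
    const full = thumbUrl.replace(/t(?=\.)/, '')              // '.../3t.jpg' -> '.../3.jpg'
    const primary = full.replace(mirrorHost, 'i.nhentai.net') // try the .net CDN first
    const status = await getPageStatus(primary)               // HEAD request, resolves to a status code
    return status === 200 ? primary : full                    // otherwise fall back to the mirror URL
}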
1 change: 1 addition & 0 deletions src/lib/Classes/Pages.ts
@@ -36,6 +36,7 @@ export class Pages {
         pdf.pipe(stream)
         for (const url of this.pages) {
             const { data } = await axios.get<Buffer>(url, {
+                headers: url.includes('cdn.dogehls.xyz') ? { 'Referer': 'https://nhentai.to' } : {},
                 responseType: 'arraybuffer'
             })
             const img = (pdf as any).openImage(data)
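The single added line above attaches a Referer header only for mirror-hosted images. The same choice written as a standalone helper, purely for illustration; headersFor is not a name used in the repository.

// Illustrative sketch, not part of the diff: cdn.dogehls.xyz appears to require a
// Referer from the .to site, while other hosts are fetched with no extra headers.
const headersFor = (url: string): Record<string, string> =>
    url.includes('cdn.dogehls.xyz') ? { Referer: 'https://nhentai.to' } : {}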
7 changes: 6 additions & 1 deletion src/lib/util.ts
@@ -1,4 +1,4 @@
-import { AxiosInstance } from 'axios'
+import axios, { AxiosInstance } from 'axios'
 import { load } from 'cheerio'
 
 export const clean = (x: string[]): string[] => {
@@ -35,3 +35,8 @@ export const getAPIGalleryPages = async (
         )
     ).data.images.pages
 }
+
+export const getPageStatus = (url: string): Promise<number> =>
+    axios.head(url)
+        .then((res: any) => res.status as number)
+        .catch((err: any) => (err.response?.status || 500) as number)
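getPageStatus wraps axios.head so it always resolves to a number: the real status on success, the response status on an HTTP error, and 500 when there is no response at all. A short usage sketch, with an assumed relative import path and an illustrative return value.

import { getPageStatus } from './util' // assumes a caller sitting next to src/lib/util.ts

// Illustrative sketch, not part of the diff: getPageStatus never rejects,
// so a plain comparison is enough and no try/catch is needed.
const existsOnNet = async (url: string): Promise<boolean> =>
    (await getPageStatus(url)) === 200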
