import os
from zipfile import ZipFile

import pytest

import hircine.enums as enums
from hircine.plugins.scrapers.schale_network import SchaleNetworkScraper
from hircine.scraper.types import (
    Artist,
    Censorship,
    Circle,
    Direction,
    Language,
    Tag,
    Title,
)


@pytest.fixture
def archive_file(tmpdir):
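    """Create a zip archive containing a sample schale.network info.yaml."""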
    file = os.path.join(tmpdir, "archive.zip")

    data = """
source: SchaleNetwork:/g/1/1
title: 'Example Title'
general:
  - example
artist:
  - example
circle:
  - example
magazine:
  - example
male:
  - example
female:
  - example
mixed:
  - example
language:
  - english
  - translated
other:
  - uncensored
  - vanilla
"""

    with ZipFile(file, "x") as ziph:
        ziph.writestr("info.yaml", data)

    yield file


def test_does_scrape(archive_file, gen_comic):
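    """The scraper parses the bundled info.yaml into the expected metadata set."""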
    comic = next(gen_comic)
    comic.archive.path = archive_file

    scraper = SchaleNetworkScraper(comic)

    assert scraper.is_available
    assert scraper.source == SchaleNetworkScraper.source
    assert scraper.name == "schale.network info.yaml"

    assert set(scraper.collect()) == {
        Artist(name="example"),
        Circle(name="example"),
        Direction(value=enums.Direction.RIGHT_TO_LEFT),
        Censorship(value=enums.Censorship.NONE),
        Language(value=enums.Language.EN),
        Tag(namespace="none", tag="example"),
        Tag(namespace="none", tag="vanilla"),
        Tag(namespace="male", tag="example"),
        Tag(namespace="female", tag="example"),
        Tag(namespace="mixed", tag="example"),
        Title(value="Example Title"),
    }


def test_does_not_scrape_on_error(tmpdir, gen_comic):
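    """A missing archive leaves the scraper with no data and marks it unavailable."""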
    comic = next(gen_comic)
    comic.archive.path = os.path.join(tmpdir, "nonexistent.zip")

    scraper = SchaleNetworkScraper(comic)

    assert scraper.data == {}
    assert not scraper.is_available