path: root/tests/plugins/scrapers/test_schale_network.py
import os
from zipfile import ZipFile

import pytest

import hircine.enums as enums
from hircine.plugins.scrapers.schale_network import SchaleNetworkScraper
from hircine.scraper.types import (
    Artist,
    Censorship,
    Circle,
    Direction,
    Language,
    Tag,
    Title,
)


@pytest.fixture
def archive_file(tmpdir):
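    """Create a temporary ZIP archive containing only an info.yaml metadata file."""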
    file = os.path.join(tmpdir, "archive.zip")

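    # A minimal info.yaml payload exercising each metadata field used in the tests below.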
    data = """
source: SchaleNetwork:/g/1/1
title: 'Example Title'
general:
  - example
artist:
  - example
circle:
  - example
magazine:
  - example
male:
  - example
female:
  - example
mixed:
  - example
language:
  - english
  - translated
other:
  - uncensored
  - vanilla
"""

    with ZipFile(file, "x") as ziph:
        ziph.writestr("info.yaml", data)

    yield file


def test_does_scrape(monkeypatch, archive_file, gen_comic):
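    """Scraping a valid archive yields the metadata parsed from its info.yaml."""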
    comic = next(gen_comic)
    comic.archive.path = archive_file

    scraper = SchaleNetworkScraper(comic)

    assert scraper.is_available
    assert scraper.source == SchaleNetworkScraper.source
    assert scraper.name == "schale.network info.yaml"

    assert set(scraper.collect()) == {
        Artist(name="example"),
        Circle(name="example"),
        Direction(value=enums.Direction.RIGHT_TO_LEFT),
        Censorship(value=enums.Censorship.NONE),
        Language(value=enums.Language.EN),
        Tag(namespace="none", tag="example"),
        Tag(namespace="none", tag="vanilla"),
        Tag(namespace="male", tag="example"),
        Tag(namespace="female", tag="example"),
        Tag(namespace="mixed", tag="example"),
        Title(value="Example Title"),
    }


def test_does_not_scrape_on_error(tmpdir, gen_comic):
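    """A missing archive leaves the scraper unavailable with empty data."""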
    comic = next(gen_comic)
    comic.archive.path = os.path.join(tmpdir, "nonexistent.zip")

    scraper = SchaleNetworkScraper(comic)

    assert scraper.data == {}
    assert not scraper.is_available