summaryrefslogtreecommitdiffstatshomepage
path: root/tests/plugins
diff options
context:
space:
mode:
Diffstat (limited to 'tests/plugins')
-rw-r--r--tests/plugins/scrapers/test_schale_network.py88
1 files changed, 88 insertions, 0 deletions
diff --git a/tests/plugins/scrapers/test_schale_network.py b/tests/plugins/scrapers/test_schale_network.py
new file mode 100644
index 0000000..236520b
--- /dev/null
+++ b/tests/plugins/scrapers/test_schale_network.py
@@ -0,0 +1,88 @@
+import os
+from zipfile import ZipFile
+
+import pytest
+
+import hircine.enums as enums
+from hircine.plugins.scrapers.schale_network import SchaleNetworkScraper
+from hircine.scraper.types import (
+ Artist,
+ Censorship,
+ Circle,
+ Direction,
+ Language,
+ Tag,
+ Title,
+)
+
+
@pytest.fixture
def archive_file(tmpdir):
    """Build a one-entry zip archive holding a schale.network ``info.yaml``.

    The YAML payload covers every namespace the scraper understands
    (general/artist/circle/magazine/male/female/mixed/language/other) so
    tests can assert the full mapping in one pass. Yields the path to the
    created archive.
    """
    archive_path = os.path.join(tmpdir, "archive.zip")

    info_yaml = """
source: SchaleNetwork:/g/1/1
title: 'Example Title'
general:
  - example
artist:
  - example
circle:
  - example
magazine:
  - example
male:
  - example
female:
  - example
mixed:
  - example
language:
  - english
  - translated
other:
  - uncensored
  - vanilla
"""

    # "x" mode: fail loudly if the file somehow already exists in tmpdir.
    with ZipFile(archive_path, "x") as archive:
        archive.writestr("info.yaml", info_yaml)

    yield archive_path
+
+
def test_does_scrape(archive_file, gen_comic):
    """Scraping a comic whose archive contains info.yaml yields all metadata.

    Verifies availability, the scraper's source/name identity, and that
    every namespace in the fixture YAML is mapped to the expected scraper
    type. Fix: dropped the ``monkeypatch`` fixture, which was requested
    but never used, and replaced ``set([...])`` with a set literal.
    """
    comic = next(gen_comic)
    comic.archive.path = archive_file

    scraper = SchaleNetworkScraper(comic)

    assert scraper.is_available
    assert scraper.source == SchaleNetworkScraper.source
    assert scraper.name == "schale.network info.yaml"

    # collect() order is not part of the contract; compare as sets.
    assert set(scraper.collect()) == {
        Artist(name="example"),
        Circle(name="example"),
        Direction(value=enums.Direction.RIGHT_TO_LEFT),
        Censorship(value=enums.Censorship.NONE),
        Language(value=enums.Language.EN),
        Tag(namespace="none", tag="example"),
        Tag(namespace="none", tag="vanilla"),
        Tag(namespace="male", tag="example"),
        Tag(namespace="female", tag="example"),
        Tag(namespace="mixed", tag="example"),
        Title(value="Example Title"),
    }
+
+
def test_does_not_scrape_on_error(tmpdir, gen_comic):
    """A comic pointing at a missing archive leaves the scraper unavailable.

    The scraper must not raise; it simply reports no data and
    ``is_available`` False.
    """
    comic = next(gen_comic)
    missing = os.path.join(tmpdir, "nonexistent.zip")
    comic.archive.path = missing

    scraper = SchaleNetworkScraper(comic)

    assert not scraper.is_available
    assert scraper.data == {}