Mirror of https://github.com/guezoloic/millesima-ai-engine.git, synced 2026-03-28 18:03:47 +00:00
ajout: test fonction 7 (add: test function 7)
main.py (49 changed lines)
@@ -6,7 +6,14 @@ from json import JSONDecodeError, loads
 
 
 class _ScraperData:
+    """_summary_
+    """
     def __init__(self, data: dict[str, object]) -> None:
+        """_summary_
+
+        Args:
+            data (dict[str, object]): _description_
+        """
         self._data: dict[str, object] = data
 
     def _getcontent(self) -> dict[str, object] | None:
@@ -285,34 +292,49 @@ class Scraper:
         return _ScraperData(cast(dict[str, object], current_data))
 
     def _geturlproductslist(self, subdir: str):
+        """_summary_
+
+        Args:
+            subdir (str): _description_
+
+        Returns:
+            _type_: _description_
+        """
         try:
             data: dict[str, object] = self.getjsondata(subdir).getdata()
 
             for element in ["initialReduxState", "categ", "content"]:
-                data = cast(dict[str, object], data.get(element))
-                if data is None or not isinstance(data, dict):
+                data: dict[str, object] = cast(dict[str, object], data.get(element))
+                if not isinstance(data, dict):
                     return None
 
-            products = data.get("products")
+            products: list[str] = cast(list[str], data.get("products"))
             if isinstance(products, list):
                 return products
-        except JSONDecodeError | HTTPError:
+        except (JSONDecodeError, HTTPError):
             return None
 
     def getvins(self, subdir: str, filename: str):
-        cache: set[str] = set[str]()
-        page = 0
+        """_summary_
+
+        Args:
+            subdir (str): _description_
+            filename (str): _description_
+        """
+        with open(filename, "a") as f:
+            cache: set[str] = set[str]()
+            page = 0
 
-        with open(filename, 'a') as f:
             while True:
                 page += 1
                 products_list = self._geturlproductslist(f"{subdir}?page={page}")
 
-                print(f"---- {page} ----")
                 if not products_list:
                     break
 
-                for product in products_list:
+                products_list_length = len(products_list)
+                for i, product in enumerate(products_list):
                     if not isinstance(product, dict):
                         continue
 
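The `except JSONDecodeError | HTTPError:` form fixed above is a genuine bug, not a style nit: on current CPython releases (3.10 through at least 3.12) the expression `A | B` builds a `types.UnionType`, which the except clause rejects at match time with "TypeError: catching classes that do not inherit from BaseException is not allowed", and on older versions the `|` expression itself fails, so neither exception was ever caught. A minimal standalone sketch (not from the repo) of the supported tuple form:

    # Sketch: a tuple is the supported way to catch several exception types at once.
    try:
        raise ValueError("boom")
    except (ValueError, KeyError):  # catches either exception
        print("caught")
    # `except ValueError | KeyError:` would instead raise TypeError as soon as the
    # handler is consulted, because ValueError | KeyError is a types.UnionType.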
@@ -322,11 +344,14 @@ class Scraper:
                     try:
                         infos = self.getjsondata(link).informations()
                         _ = f.write(infos + "\n")
-                        print(infos)
+                        print(
+                            f"page: {page} | {i + 1}/{products_list_length} {link}"
+                        )
                         cache.add(link)
-                    except JSONDecodeError | HTTPError as e:
+                    except (JSONDecodeError, HTTPError) as e:
                         print(f"Erreur sur le produit {link}: {e}")
                 f.flush()
 
 
-Scraper().getvins("bordeaux.html", "donnee.csv")
+if __name__ == "__main__":
+    Scraper().getvins("bordeaux.html", "donnee.csv")
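A plausible reason for moving the module-level call under `if __name__ == "__main__":` (the commit message does not say): the new `test_search` in test_main.py imports `main` and drives `getvins` itself under mocks, so an unguarded top-level `Scraper().getvins(...)` would start a real scrape the moment pytest imports the module. A standalone sketch of the idiom with illustrative names:

    # demo.py -- sketch only; scrape() stands in for Scraper().getvins(...)
    def scrape() -> None:
        print("expensive network work")

    if __name__ == "__main__":  # runs for `python demo.py`, not for `import demo`
        scrape()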
test_main.py (116 changed lines)
@@ -1,5 +1,5 @@
 from json import dumps
-from bs4 import Tag
+from unittest.mock import patch, mock_open
 import pytest
 from requests_mock import Mocker
 from main import Scraper
@@ -71,10 +71,10 @@ def mock_site():
             "_id": "J4131/22/C/CC/6-11652",
             "partnumber": "J4131/22/C/CC/6",
             "taxRate": "H",
-            "listPrice": 390,
-            "offerPrice": 390,
-            "seoKeyword": "nino-negri-5-stelle-sfursat-2022-c-cc-6.html",
-            "shortdesc": "Un carton de 6 Bouteilles (75cl)",
+            "listPrice": 842,
+            "offerPrice": 842,
+            "seoKeyword": "vin-de-charazade1867.html",
+            "shortdesc": "Une bouteille du meilleur vin du monde?",
             "attributes": {
                 "promotion_o_n": {
                     "valueId": "0",
@@ -101,9 +101,9 @@ def mock_site():
                     "isSpirit": False,
                 },
                 "nbunit": {
-                    "valueId": "6",
+                    "valueId": "1",
                     "name": "nbunit",
-                    "value": "6",
+                    "value": "1",
                     "isSpirit": False,
                 },
             },
@@ -120,14 +120,14 @@ def mock_site():
             "appellation": {
                 "valueId": "433",
                 "name": "Appellation",
-                "value": "Sforzato di Valtellina",
-                "url": "sforzato-di-valtellina.html",
+                "value": "Madame-Loïk",
+                "url": "Madame-loik.html",
                 "isSpirit": False,
                 "groupIdentifier": "appellation_433",
             },
             "note_rp": {
                 "valueId": "91",
-                "name": "Parker",
+                "name": "Peter Parker",
                 "value": "91",
                 "isSpirit": False,
             },
@@ -139,7 +139,7 @@ def mock_site():
             },
             "note_js": {
                 "valueId": "93-94",
-                "name": "J. Suckling",
+                "name": "J. cherazade",
                 "value": "93-94",
                 "isSpirit": False,
             },
@@ -166,6 +166,79 @@ def mock_site():
             text=html_product,
         )
 
+        html_product = f"""
+        <html>
+            <body>
+                <h1>MILLESIMA</h1>
+                <script id="__NEXT_DATA__" type="application/json">
+                    {dumps(json_data)}
+                </script>
+            </body>
+        </html>
+        """
+
+        list_pleine = f"""
+        <html>
+            <body>
+                <h1>LE WINE</h1>
+                <script id="__NEXT_DATA__" type="application/json">
+                    {dumps({
+                        "props": {
+                            "pageProps": {
+                                "initialReduxState": {
+                                    "categ": {
+                                        "content": {
+                                            "products": [
+                                                {"seoKeyword": "/nino-negri-5-stelle-sfursat-2022.html",},
+                                                {"seoKeyword": "/poubelle",},
+                                                {"seoKeyword": "/",}
+                                            ]
+                                        }
+                                    }
+                                }
+                            }
+                        }
+                    }
+                    )}
+                </script>
+            </body>
+        </html>
+        """
+
+        list_vide = f"""
+        <html>
+            <body>
+                <h1>LE WINE</h1>
+                <script id="__NEXT_DATA__" type="application/json">
+                    {dumps({
+                        "props": {
+                            "pageProps": {
+                                "initialReduxState": {
+                                    "categ": {
+                                        "content": {
+                                            "products": [
+                                            ]
+                                        }
+                                    }
+                                }
+                            }
+                        }
+                    }
+                    )}
+                </script>
+            </body>
+        </html>
+        """
+
+        m.get(
+            "https://www.millesima.fr/wine.html",
+            complete_qs=False,
+            response_list=[
+                {"text": list_pleine},
+                {"text": list_vide},
+            ],
+        )
+
         # on return m sans fermer le server qui simule la page
         yield m
 
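The `response_list=` registration added to the fixture is what lets the pagination loop in `getvins` terminate under test: requests_mock hands out the listed responses in order for successive matching requests (and, per its documentation, keeps returning the last entry once the list is exhausted), so the first request sees a page with products and the next one sees the empty page. A minimal, self-contained sketch of that behaviour (the URL is made up for the example):

    # Sketch of requests_mock's response_list: responses are consumed in order.
    import requests
    import requests_mock

    with requests_mock.Mocker() as m:
        m.get(
            "http://wine.test/list.html",
            response_list=[{"text": "first page"}, {"text": "second page"}],
        )
        print(requests.get("http://wine.test/list.html").text)  # first page
        print(requests.get("http://wine.test/list.html").text)  # second page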
@@ -190,7 +263,7 @@ def test_appellation(scraper: Scraper):
     contenu = scraper.getjsondata("nino-negri-5-stelle-sfursat-2022.html")
     assert vide.appellation() is None
     assert poubelle.appellation() is None
-    assert contenu.appellation() == "Sforzato di Valtellina"
+    assert contenu.appellation() == "Madame-Loïk"
 
 
 def test_fonctionprivee(scraper: Scraper):
@@ -207,7 +280,6 @@ def test_fonctionprivee(scraper: Scraper):
     assert contenu._getattributes() is not None
 
 
-
 def test_critiques(scraper: Scraper):
     vide = scraper.getjsondata("")
     poubelle = scraper.getjsondata("poubelle")
@@ -232,13 +304,23 @@ def test_prix(scraper: Scraper):
     contenu = scraper.getjsondata("nino-negri-5-stelle-sfursat-2022.html")
     assert vide.prix() is None
     assert poubelle.prix() is None
-    assert contenu.prix() == 65.0
+    assert contenu.prix() == 842.0
 
 
 def test_informations(scraper: Scraper):
     contenu = scraper.getjsondata("nino-negri-5-stelle-sfursat-2022.html")
-    assert contenu.informations() == "Sforzato di Valtellina,91,17,93.5,65.0"
+    assert contenu.informations() == "Madame-Loïk,91,17,93.5,842.0"
     vide = scraper.getjsondata("")
     poubelle = scraper.getjsondata("poubelle")
     assert vide.informations() == "None,None,None,None,None"
     assert poubelle.informations() == "None,None,None,None,None"
+
+
+def test_search(scraper: Scraper):
+    m = mock_open()
+    with patch("builtins.open", m):
+        scraper.getvins("wine.html", "fake_file.csv")
+
+    assert m().write.called
+    all_writes = "".join(call.args[0] for call in m().write.call_args_list)
+    assert "Madame-Loïk,91,17,93.5,842.0" in all_writes
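The new `test_search` relies on a useful property of `unittest.mock.mock_open`: every call to the patched `open()` returns the same mock handle, so `m().write.call_args_list` accumulates every line `getvins` wrote during the run and the writes can be joined and inspected afterwards. A self-contained sketch of that inspection pattern (file name and strings are illustrative):

    # Sketch: collecting everything written through a mock_open handle.
    from unittest.mock import mock_open, patch

    m = mock_open()
    with patch("builtins.open", m):
        with open("fake_file.csv", "a") as f:  # returns the shared mock handle
            f.write("ligne 1\n")
            f.write("ligne 2\n")

    all_writes = "".join(call.args[0] for call in m().write.call_args_list)
    assert all_writes == "ligne 1\nligne 2\n"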