19 Commits

Author SHA1 Message Date
0f6eb856c6 ajout: restructuration des fichiers et modifications scraper 2026-03-01 19:39:57 +01:00
d62145e250 ajout: ajout fonction main 2026-02-16 13:56:55 +01:00
829c303e78 ajout: debut question 8 2026-02-16 13:36:17 +01:00
b584f9a301 remplacement: changer le fichiers *main par scraper 2026-02-16 13:19:13 +01:00
547c7ec4c1 ajout: 2e jalon 2026-02-16 13:02:45 +01:00
0aa765d6a0 fix: ajout commentaire en titre et bug sur les scores 2026-02-16 11:11:02 +01:00
8a357abe86 ajout(requirements.txt): ajout lib 2026-02-13 18:14:39 +01:00
DAHMANI chahrazad
2f5af5aabf Merge pull request #9 from guezoloic/exo7-loic
Exo7 loic
2026-02-13 17:58:41 +01:00
a33b484dea ajout: test fonction 7 2026-02-13 17:52:11 +01:00
dd430b9861 ajout(main.py): ajout dans csv 2026-02-11 23:58:51 +01:00
011bb6a689 ajout(main.py): optimisation des fonctions 2026-02-11 23:46:22 +01:00
96dbaaaaf6 ajout: fonctions de recherche 2026-02-11 23:35:56 +01:00
ed86e588f7 merge exo2 et commentaire exo7 2026-02-11 23:20:20 +01:00
Loïc GUEZO
0182bbbf20 Merge pull request #7 from guezoloic/exo7+6
Exo7 sans exo6
2026-02-10 20:12:54 +01:00
Chahrazad650
cd1e266f25 optimisation fonction prix() 2026-02-10 19:57:20 +01:00
Chahrazad650
2aa99453a0 modification fonction prix() return None+tests 2026-02-10 19:39:47 +01:00
9f1ff1ef7b ajout(main.py): initialise la fonction getvin 2026-02-10 19:01:10 +01:00
Chahrazad650
bfc39db652 ajout de la fonction informations 2026-02-10 01:25:00 +01:00
DAHMANI chahrazad
8cae082344 Merge pull request #5 from guezoloic/exo3 (il manque les test !!)
Exo3
2026-02-09 18:57:39 +01:00
8 changed files with 284 additions and 90 deletions

2
.gitignore vendored
View File

@@ -205,3 +205,5 @@ cython_debug/
marimo/_static/ marimo/_static/
marimo/_lsp/ marimo/_lsp/
__marimo__/ __marimo__/
*.csv

15
pyproject.toml Normal file
View File

@@ -0,0 +1,15 @@
[project]
name = "projet-millesima-s6"
version = "0.1.0"
dependencies = [
"requests==2.32.5",
"beautifulsoup4==4.14.3",
"pandas==2.3.3",
]
[project.optional-dependencies]
test = ["pytest==8.4.2", "requests-mock==1.12.1"]
[build-system]
requires = ["setuptools"]
build-backend = "setuptools.build_meta"

View File

@@ -1,3 +0,0 @@
requests>=2.32.5
requests-mock>=1.12.1
beautifulsoup4>=4.14.3

Binary file not shown.

20
src/main.py Executable file
View File

@@ -0,0 +1,20 @@
#!/usr/bin/env python3
from os import getcwd
from os.path import normpath, join
from sys import argv
from pandas import read_csv, DataFrame
def main() -> None:
if len(argv) != 2:
raise ValueError(f"{argv[0]} <filename.csv>")
path: str = normpath(join(getcwd(), argv[1]))
db: DataFrame = read_csv(path)
print(db.all())
if __name__ == "__main__":
try:
main()
except Exception as e:
print(f"ERREUR: {e}")

204
main.py → src/scraper.py Normal file → Executable file
View File

@@ -1,12 +1,22 @@
#!/usr/bin/env python3
from sys import argv
from typing import cast from typing import cast
from requests import Response, Session from requests import HTTPError, Response, Session
from bs4 import BeautifulSoup, Tag from bs4 import BeautifulSoup, Tag
from collections import OrderedDict from collections import OrderedDict
from json import loads from json import JSONDecodeError, loads
class _ScraperData: class _ScraperData:
"""_summary_"""
def __init__(self, data: dict[str, object]) -> None: def __init__(self, data: dict[str, object]) -> None:
"""_summary_
Args:
data (dict[str, object]): _description_
"""
self._data: dict[str, object] = data self._data: dict[str, object] = data
def _getcontent(self) -> dict[str, object] | None: def _getcontent(self) -> dict[str, object] | None:
@@ -35,64 +45,26 @@ class _ScraperData:
return None return None
return cast(dict[str, object], current_data.get("attributes")) return cast(dict[str, object], current_data.get("attributes"))
def prix(self) -> float: def prix(self) -> float | None:
""" """
Retourne le prix unitaire d'une bouteille (75cl). Retourne le prix unitaire d'une bouteille (75cl).
Le JSON contient plusieurs formats de vente dans content["items"] : Si aucun prix n'est disponible, retourne None.
- bouteille seule : nbunit = 1 et equivbtl = 1 -> prix direct """
- caisse de plusieurs bouteilles : nbunit > 1 -> on divise le prix total
- formats spéciaux (magnum etc.) : equivbtl > 1 -> même calcul
Formule générale :
prix_unitaire = offerPrice / (nbunit * equivbtl)
"""
content = self._getcontent() content = self._getcontent()
# si content n'existe pas -> erreur
if content is None: if content is None:
raise ValueError("Contenu introuvable") return None
# On récupère la liste des formats disponibles (bouteille, carton...)
items = content.get("items") items = content.get("items")
# Vérification que items est bien une liste non vide # Vérifie que items existe et n'est pas vide
if not isinstance(items, list) or len(items) == 0: if not isinstance(items, list) or len(items) == 0:
raise ValueError("Aucun prix disponible (items vide)") return None
prix_calcule: float | None = None
# --------------------------
# CAS 1 : bouteille unitaire
# --------------------------
# On cherche un format où nbunit=1 et equivbtl=1 ->bouteille standard 75cl
for item in items: for item in items:
if not isinstance(item, dict):
continue
# On récupère les attributs du format
attrs = item.get("attributes", {})
# On récupère nbunit et equivbtl
nbunit = attrs.get("nbunit", {}).get("value")
equivbtl = attrs.get("equivbtl", {}).get("value")
# Si c'est une bouteille unitaire
if nbunit == "1" and equivbtl == "1":
p = item.get("offerPrice")
# Vérification que c'est bien un nombre
if isinstance(p, (int, float)):
return float(p)
# --------------------------
# CAS 2 : caisse ou autre format
# --------------------------
# On calcule le prix unitaire à partir du prix total
for item in items:
if not isinstance(item, dict): if not isinstance(item, dict):
continue continue
@@ -102,23 +74,21 @@ class _ScraperData:
nbunit = attrs.get("nbunit", {}).get("value") nbunit = attrs.get("nbunit", {}).get("value")
equivbtl = attrs.get("equivbtl", {}).get("value") equivbtl = attrs.get("equivbtl", {}).get("value")
# Vérification que toutes les valeurs existent if not isinstance(p, (int, float)) or not nbunit or not equivbtl:
if isinstance(p, (int, float)) and nbunit and equivbtl: continue
# Calcul du nombre total de bouteilles équivalentes nb = float(nbunit)
denom = float(nbunit) * float(equivbtl) eq = float(equivbtl)
# Évite division par zéro if nb <= 0 or eq <= 0:
if denom > 0: continue
# Calcul du prix unitaire if nb == 1 and eq == 1:
prix_unitaire = float(p) / denom return float(p)
# Arrondi à 2 décimales prix_calcule = round(float(p) / (nb * eq), 2)
return round(prix_unitaire, 2)
# Si aucun prix trouvé return prix_calcule
raise ValueError("Impossible de trouver le prix unitaire.")
def appellation(self) -> str | None: def appellation(self) -> str | None:
"""_summary_ """_summary_
@@ -153,8 +123,8 @@ class _ScraperData:
return None return None
val = cast(str, app_dict.get("value")).rstrip("+").split("-") val = cast(str, app_dict.get("value")).rstrip("+").split("-")
if len(val) > 1: if len(val) > 1 and val[1] != "":
val[0] = str((int(val[0]) + int(val[1])) / 2) val[0] = str(round((float(val[0]) + float(val[1])) / 2, 1))
return val[0] return val[0]
return None return None
@@ -171,6 +141,23 @@ class _ScraperData:
def getdata(self) -> dict[str, object]: def getdata(self) -> dict[str, object]:
return self._data return self._data
def informations(self) -> str:
"""
Retourne toutes les informations sous la forme :
"Appelation,Parker,J.Robinson,J.Suckling,Prix"
"""
appellation = self.appellation()
parker = self.parker()
robinson = self.robinson()
suckling = self.suckling()
try:
prix = self.prix()
except ValueError:
prix = None
return f"{appellation},{parker},{robinson},{suckling},{prix}"
class Scraper: class Scraper:
""" """
@@ -187,6 +174,18 @@ class Scraper:
# Très utile pour éviter de renvoyer toujours les mêmes handshake # Très utile pour éviter de renvoyer toujours les mêmes handshake
# TCP et d'avoir toujours une connexion constante avec le server # TCP et d'avoir toujours une connexion constante avec le server
self._session: Session = Session() self._session: Session = Session()
# Crée une "fausse carte d'identité" pour éviter que le site nous
# bloque car on serait des robots
self._session.headers.update(
{
"User-Agent":
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) \
AppleWebKit/537.36 (KHTML, like Gecko) \
Chrome/122.0.0.0 Safari/537.36",
"Accept-Language":
"fr-FR,fr;q=0.9,en;q=0.8",
}
)
# Système de cache pour éviter de solliciter le serveur inutilement # Système de cache pour éviter de solliciter le serveur inutilement
self._latest_request: tuple[(str, Response)] | None = None self._latest_request: tuple[(str, Response)] | None = None
self._latest_soups: OrderedDict[str, BeautifulSoup] = OrderedDict[ self._latest_soups: OrderedDict[str, BeautifulSoup] = OrderedDict[
@@ -207,7 +206,8 @@ class Scraper:
HTTPError: Si le serveur renvoie un code d'erreur (4xx, 5xx). HTTPError: Si le serveur renvoie un code d'erreur (4xx, 5xx).
""" """
target_url: str = self._url + subdir.lstrip("/") target_url: str = self._url + subdir.lstrip("/")
response: Response = self._session.get(url=target_url, timeout=10) # envoyer une requête GET sur la page si erreur, renvoie un raise
response: Response = self._session.get(url=target_url, timeout=30)
response.raise_for_status() response.raise_for_status()
return response return response
@@ -307,3 +307,79 @@ class Scraper:
return _ScraperData(cast(dict[str, object], current_data)) return _ScraperData(cast(dict[str, object], current_data))
def _geturlproductslist(self, subdir: str) -> list[str] | None:
"""_summary_
Args:
subdir (str): _description_
Returns:
_type_: _description_
"""
try:
data: dict[str, object] = self.getjsondata(subdir).getdata()
for element in ["initialReduxState", "categ", "content"]:
data: dict[str, object] = cast(dict[str, object], data.get(element))
if not isinstance(data, dict):
return None
products: list[str] = cast(list[str], data.get("products"))
if isinstance(products, list):
return products
except (JSONDecodeError, HTTPError):
return None
def getvins(self, subdir: str, filename: str) -> None:
"""_summary_
Args:
subdir (str): _description_
filename (str): _description_
"""
with open(filename, "w") as f:
cache: set[str] = set[str]()
page = 0
_ = f.write("Appellation,Robert,Robinson,Suckling,Prix\n")
while True:
page += 1
products_list: list[str] | None = \
self._geturlproductslist(f"{subdir}?page={page}")
if not products_list:
break
products_list_length = len(products_list)
for i, product in enumerate(products_list):
if not isinstance(product, dict):
continue
link = product.get("seoKeyword")
if link and link not in cache:
try:
infos = self.getjsondata(link).informations()
_ = f.write(infos + "\n")
print(
f"page: {page} | {i + 1}/{products_list_length} {link}"
)
cache.add(link)
except (JSONDecodeError, HTTPError) as e:
print(f"Erreur sur le produit {link}: {e}")
f.flush()
def main() -> None:
if len(argv) != 2:
raise ValueError(f"{argv[0]} <sous-url>")
scraper: Scraper = Scraper()
scraper.getvins(argv[1], "donnee.csv")
if __name__ == "__main__":
try:
main()
except Exception as e:
print(f"ERREUR: {e}")

0
tests/test_main.py Normal file
View File

View File

@@ -1,8 +1,8 @@
from json import dumps from json import dumps
from bs4 import Tag from unittest.mock import patch, mock_open
import pytest import pytest
from requests_mock import Mocker from requests_mock import Mocker
from main import Scraper from scraper import Scraper
@pytest.fixture(autouse=True) @pytest.fixture(autouse=True)
@@ -71,10 +71,10 @@ def mock_site():
"_id": "J4131/22/C/CC/6-11652", "_id": "J4131/22/C/CC/6-11652",
"partnumber": "J4131/22/C/CC/6", "partnumber": "J4131/22/C/CC/6",
"taxRate": "H", "taxRate": "H",
"listPrice": 390, "listPrice": 842,
"offerPrice": 390, "offerPrice": 842,
"seoKeyword": "nino-negri-5-stelle-sfursat-2022-c-cc-6.html", "seoKeyword": "vin-de-charazade1867.html",
"shortdesc": "Un carton de 6 Bouteilles (75cl)", "shortdesc": "Une bouteille du meilleur vin du monde?",
"attributes": { "attributes": {
"promotion_o_n": { "promotion_o_n": {
"valueId": "0", "valueId": "0",
@@ -101,9 +101,9 @@ def mock_site():
"isSpirit": False, "isSpirit": False,
}, },
"nbunit": { "nbunit": {
"valueId": "6", "valueId": "1",
"name": "nbunit", "name": "nbunit",
"value": "6", "value": "1",
"isSpirit": False, "isSpirit": False,
}, },
}, },
@@ -120,14 +120,14 @@ def mock_site():
"appellation": { "appellation": {
"valueId": "433", "valueId": "433",
"name": "Appellation", "name": "Appellation",
"value": "Sforzato di Valtellina", "value": "Madame-Loïk",
"url": "sforzato-di-valtellina.html", "url": "Madame-loik.html",
"isSpirit": False, "isSpirit": False,
"groupIdentifier": "appellation_433", "groupIdentifier": "appellation_433",
}, },
"note_rp": { "note_rp": {
"valueId": "91", "valueId": "91",
"name": "Parker", "name": "Peter Parker",
"value": "91", "value": "91",
"isSpirit": False, "isSpirit": False,
}, },
@@ -138,8 +138,8 @@ def mock_site():
"isSpirit": False, "isSpirit": False,
}, },
"note_js": { "note_js": {
"valueId": "93-94", "valueId": "93-94.5",
"name": "J. Suckling", "name": "J. cherazade",
"value": "93-94", "value": "93-94",
"isSpirit": False, "isSpirit": False,
}, },
@@ -166,6 +166,79 @@ def mock_site():
text=html_product, text=html_product,
) )
html_product = f"""
<html>
<body>
<h1>MILLESIMA</h1>
<script id="__NEXT_DATA__" type="application/json">
{dumps(json_data)}
</script>
</body>
</html>
"""
list_pleine = f"""
<html>
<body>
<h1>LE WINE</h1>
<script id="__NEXT_DATA__" type="application/json">
{dumps({
"props": {
"pageProps": {
"initialReduxState": {
"categ": {
"content": {
"products": [
{"seoKeyword": "/nino-negri-5-stelle-sfursat-2022.html",},
{"seoKeyword": "/poubelle",},
{"seoKeyword": "/",}
]
}
}
}
}
}
}
)}
</script>
</body>
</html>
"""
list_vide = f"""
<html>
<body>
<h1>LE WINE</h1>
<script id="__NEXT_DATA__" type="application/json">
{dumps({
"props": {
"pageProps": {
"initialReduxState": {
"categ": {
"content": {
"products": [
]
}
}
}
}
}
}
)}
</script>
</body>
</html>
"""
m.get(
"https://www.millesima.fr/wine.html",
complete_qs=False,
response_list=[
{"text": list_pleine},
{"text": list_vide},
],
)
# on return m sans fermer le server qui simule la page # on return m sans fermer le server qui simule la page
yield m yield m
@@ -190,7 +263,7 @@ def test_appellation(scraper: Scraper):
contenu = scraper.getjsondata("nino-negri-5-stelle-sfursat-2022.html") contenu = scraper.getjsondata("nino-negri-5-stelle-sfursat-2022.html")
assert vide.appellation() is None assert vide.appellation() is None
assert poubelle.appellation() is None assert poubelle.appellation() is None
assert contenu.appellation() == "Sforzato di Valtellina" assert contenu.appellation() == "Madame-Loïk"
def test_fonctionprivee(scraper: Scraper): def test_fonctionprivee(scraper: Scraper):
@@ -207,7 +280,6 @@ def test_fonctionprivee(scraper: Scraper):
assert contenu._getattributes() is not None assert contenu._getattributes() is not None
def test_critiques(scraper: Scraper): def test_critiques(scraper: Scraper):
vide = scraper.getjsondata("") vide = scraper.getjsondata("")
poubelle = scraper.getjsondata("poubelle") poubelle = scraper.getjsondata("poubelle")
@@ -225,18 +297,30 @@ def test_critiques(scraper: Scraper):
assert contenu.suckling() == "93.5" assert contenu.suckling() == "93.5"
assert contenu._getcritiques("test_ts") is None assert contenu._getcritiques("test_ts") is None
def test_prix(scraper: Scraper): def test_prix(scraper: Scraper):
vide = scraper.getjsondata("") vide = scraper.getjsondata("")
poubelle = scraper.getjsondata("poubelle") poubelle = scraper.getjsondata("poubelle")
contenu = scraper.getjsondata("nino-negri-5-stelle-sfursat-2022.html") contenu = scraper.getjsondata("nino-negri-5-stelle-sfursat-2022.html")
assert vide.prix() is None
assert poubelle.prix() is None
assert contenu.prix() == 842.0
# Cas vide : items == [] -> on ne peut pas calculer -> ValueError
with pytest.raises(ValueError):
_ = vide.prix()
# Cas poubelle : JSON incomplet -> _getcontent() None -> ValueError def test_informations(scraper: Scraper):
with pytest.raises(ValueError): contenu = scraper.getjsondata("nino-negri-5-stelle-sfursat-2022.html")
_ = poubelle.prix() assert contenu.informations() == "Madame-Loïk,91,17,93.5,842.0"
vide = scraper.getjsondata("")
poubelle = scraper.getjsondata("poubelle")
assert vide.informations() == "None,None,None,None,None"
assert poubelle.informations() == "None,None,None,None,None"
assert contenu.prix() == 65.0
def test_search(scraper: Scraper):
m = mock_open()
with patch("builtins.open", m):
scraper.getvins("wine.html", "fake_file.csv")
assert m().write.called
all_writes = "".join(call.args[0] for call in m().write.call_args_list)
assert "Madame-Loïk,91,17,93.5,842.0" in all_writes