mirror of
https://github.com/guezoloic/millesima_projetS6.git
synced 2026-03-28 19:13:42 +00:00
Compare commits
13 Commits
jalon2-loi
...
416cfcbf8b
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
416cfcbf8b | ||
| 32c5310e37 | |||
| 9dfc7457a0 | |||
| f5d5703e49 | |||
| 888defb6b6 | |||
| 734e3898e9 | |||
| 4bb3112dd0 | |||
| 54e4b7860b | |||
| b865a59aba | |||
|
|
fde1f36148 | ||
| 6fbb36ea37 | |||
|
|
bcacd7a915 | ||
|
|
d182e08f9b |
18
.github/dependabot.yml
vendored
Normal file
18
.github/dependabot.yml
vendored
Normal file
@@ -0,0 +1,18 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file

version: 2
updates:
  # Keep Python dependencies (pyproject.toml at the repo root) up to date.
  - package-ecosystem: "pip"
    directory: "/"
    schedule:
      # Batch update PRs once a week, on Saturdays.
      interval: "weekly"
      day: "saturday"
    # Cap the number of simultaneously open Dependabot PRs.
    open-pull-requests-limit: 5
    groups:
      # Group every pip update into a single PR instead of one PR per package.
      python-dependencies:
        patterns:
          - "*"
58
.github/workflows/static.yml
vendored
Normal file
58
.github/workflows/static.yml
vendored
Normal file
@@ -0,0 +1,58 @@
# Simple workflow for deploying static content to GitHub Pages
name: Deploy static content to Pages

on:
  # Runs on pushes targeting the default branch
  push:
    branches: ["main"]

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
permissions:
  contents: read
  pages: write
  id-token: write

# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
concurrency:
  group: "pages"
  cancel-in-progress: false

jobs:
  # Single deploy job since we're just deploying
  deploy:
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Set up Python 3.10
        uses: actions/setup-python@v5
        with:
          python-version: '3.10'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          # Install the project in editable mode with the doc extras
          pip install -e ".[doc]"

      - name: Setup Pages
        uses: actions/configure-pages@v5

      - name: Build Documentation
        run: mkdocs build

      - name: Upload artifact
        uses: actions/upload-pages-artifact@v3
        with:
          # Upload the mkdocs build output (mkdocs writes to ./site by default)
          path: './site'

      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v4
38
README.md
38
README.md
@@ -1 +1,37 @@
|
||||
# millesima_projetS6
|
||||
# Millesima AI Engine 🍷
|
||||
|
||||
> A **University of Paris-Est Créteil (UPEC)** Semester 6 project.
|
||||
|
||||
## Documentation
|
||||
- 🇫🇷 [Version Française](https://guezoloic.github.io/millesima-ai-engine)
|
||||
> Note: only the French version is available for now.
|
||||
---
|
||||
|
||||
## Installation
|
||||
> Make sure you have **Python 3.10+** installed.
|
||||
|
||||
1. **Clone the repository:**
|
||||
```bash
|
||||
git clone https://github.com/guezoloic/millesima-ai-engine.git
|
||||
cd millesima-ai-engine
|
||||
```
|
||||
|
||||
2. **Set up a virtual environment:**
|
||||
```bash
|
||||
python3 -m venv .venv
|
||||
source .venv/bin/activate # Windows: .venv\Scripts\activate
|
||||
```
|
||||
|
||||
3. **Install dependencies:**
|
||||
```bash
|
||||
pip install -e .
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
### 1. Data Extraction (Scraping)
|
||||
To fetch the latest wine data from Millesima:
|
||||
```bash
|
||||
python3 src/scraper.py
|
||||
```
|
||||
> Note: fetching all the data may take a while, depending on the catalog size.
|
||||
17
docs/cleaning.md
Normal file
17
docs/cleaning.md
Normal file
@@ -0,0 +1,17 @@
|
||||
# Cleaning
|
||||
|
||||
## Sommaire
|
||||
[TOC]
|
||||
|
||||
---
|
||||
|
||||
## Classe `Cleaning`
|
||||
::: src.cleaning.Cleaning
|
||||
options:
|
||||
heading_level: 3
|
||||
members:
|
||||
- __init__
|
||||
- getVins
|
||||
- drop_empty_appellation
|
||||
- fill_missing_scores
|
||||
- encode_appellation
|
||||
@@ -1 +1,16 @@
|
||||
# Millesima
|
||||
|
||||
L’objectif de ce projet est d’étudier, en utilisant des méthodes d’apprentissage automatique, l’impact de différents critères (notes des critiques, appellation) sur le prix d’un vin. Pour ce faire, on s’appuiera sur le site Millesima (https://www.millesima.fr/), qui a l’avantage de ne pas posséder de protection contre les bots. Par respect pour l’hébergeur du site, on veillera à limiter au maximum le nombre de requêtes. En particulier, on s’assurera d’avoir un code fonctionnel avant de scraper l’intégralité du site, pour éviter les répétitions.
|
||||
|
||||
## Projet
|
||||
<div style="text-align: center;">
|
||||
<object
|
||||
data="/millesima-ai-engine/projet.pdf"
|
||||
type="application/pdf"
|
||||
width="100%"
|
||||
height="1000px"
|
||||
>
|
||||
<p>Votre navigateur ne peut pas afficher ce PDF.
|
||||
<a href="/millesima-ai-engine/projet.pdf">Cliquez ici pour le télécharger.</a></p>
|
||||
</object>
|
||||
</div>
|
||||
Binary file not shown.
@@ -1,3 +1,31 @@
|
||||
# Scraper
|
||||
|
||||
## Sommaire
|
||||
[TOC]
|
||||
|
||||
---
|
||||
|
||||
## Classe `Scraper`
|
||||
::: scraper.Scraper
|
||||
options:
|
||||
members:
|
||||
- __init__
|
||||
- getvins
|
||||
- getjsondata
|
||||
- getresponse
|
||||
- getsoup
|
||||
heading_level: 4
|
||||
|
||||
## Classe `_ScraperData`
|
||||
::: scraper._ScraperData
|
||||
options:
|
||||
members:
|
||||
- __init__
|
||||
- getdata
|
||||
- appellation
|
||||
- parker
|
||||
- robinson
|
||||
- suckling
|
||||
- prix
|
||||
- informations
|
||||
heading_level: 4
|
||||
@@ -1,4 +0,0 @@
|
||||
|
||||
# _ScraperData
|
||||
|
||||
::: scraper._ScraperData
|
||||
@@ -1,4 +1,5 @@
|
||||
site_name: "Projet Millesima S6"
|
||||
site_url: "https://github.guezoloic.com/millesima-ai-engine/"
|
||||
|
||||
theme:
|
||||
name: "material"
|
||||
@@ -7,6 +8,11 @@ plugins:
|
||||
- search
|
||||
- mkdocstrings
|
||||
|
||||
extra:
|
||||
generator: false
|
||||
|
||||
copyright: "Loïc GUEZO & Chahrazad DAHMANI – UPEC S6 – 2026"
|
||||
|
||||
markdown_extensions:
|
||||
- admonition
|
||||
- pymdownx.details
|
||||
|
||||
@@ -8,6 +8,10 @@ dependencies = [
|
||||
"tqdm==4.67.3",
|
||||
]
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
pythonpath = "src"
|
||||
testpaths = ["tests"]
|
||||
|
||||
[project.optional-dependencies]
|
||||
test = ["pytest==8.4.2", "requests-mock==1.12.1", "flake8==7.3.0"]
|
||||
doc = ["mkdocs<2.0.0", "mkdocs-material==9.6.23", "mkdocstrings[python]"]
|
||||
|
||||
@@ -99,7 +99,11 @@ def main() -> None:
|
||||
|
||||
filename = argv[1]
|
||||
cleaning: Cleaning = Cleaning(filename)
|
||||
_ = cleaning.drop_empty_appellation().fill_missing_scores().encode_appellation()
|
||||
cleaning.drop_empty_appellation() \
|
||||
.fill_missing_scores() \
|
||||
.encode_appellation() \
|
||||
.getVins() \
|
||||
.to_csv("clean.csv", index=False)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
@@ -377,9 +377,6 @@ class Scraper:
|
||||
try:
|
||||
data: dict[str, object] = self.getjsondata(subdir).getdata()
|
||||
|
||||
for element in ["initialReduxState", "categ", "content"]:
|
||||
data = cast(dict[str, object], data.get(element))
|
||||
|
||||
products: list[dict[str, Any]] = cast(
|
||||
list[dict[str, Any]], data.get("products")
|
||||
)
|
||||
|
||||
@@ -185,17 +185,11 @@ def mock_site():
|
||||
{dumps({
|
||||
"props": {
|
||||
"pageProps": {
|
||||
"initialReduxState": {
|
||||
"categ": {
|
||||
"content": {
|
||||
"products": [
|
||||
{"seoKeyword": "/nino-negri-5-stelle-sfursat-2022.html",},
|
||||
{"seoKeyword": "/poubelle",},
|
||||
{"seoKeyword": "/",}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
"products": [
|
||||
{"seoKeyword": "/nino-negri-5-stelle-sfursat-2022.html",},
|
||||
{"seoKeyword": "/poubelle",},
|
||||
{"seoKeyword": "/",}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -213,14 +207,8 @@ def mock_site():
|
||||
{dumps({
|
||||
"props": {
|
||||
"pageProps": {
|
||||
"initialReduxState": {
|
||||
"categ": {
|
||||
"content": {
|
||||
"products": [
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
"products": [
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user