Spaces:
Sleeping
Sleeping
Commit
·
bdfa8a5
0
Parent(s):
FEATURE: Adding the first commit of main
Browse files- .github/workflows/huggingface.yaml +17 -0
- .gitignore +58 -0
- .pre-commit-config.yaml +10 -0
- Dockerfile +17 -0
- README.md +39 -0
- data/historical_simulation_9023624.csv +0 -0
- notebooks/01_benchmark.ipynb +0 -0
- pyproject.toml +31 -0
- requirements.txt +12 -0
- src/chronos_conference/adapters/filter_ts.py +10 -0
- src/chronos_conference/adapters/model_instance.py +52 -0
- src/chronos_conference/adapters/ts_plot.py +16 -0
- src/chronos_conference/domain/inference.py +25 -0
- src/chronos_conference/service_layer/main.py +73 -0
- src/chronos_conference/settings.py +11 -0
.github/workflows/huggingface.yaml
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: Sync to Hugging Face hub
|
| 2 |
+
on:
|
| 3 |
+
push:
|
| 4 |
+
branches: [main]
|
| 5 |
+
|
| 6 |
+
jobs:
|
| 7 |
+
sync-to-hub:
|
| 8 |
+
runs-on: ubuntu-latest
|
| 9 |
+
steps:
|
| 10 |
+
- uses: actions/checkout@v3
|
| 11 |
+
with:
|
| 12 |
+
fetch-depth: 0
|
| 13 |
+
lfs: true
|
| 14 |
+
- name: Push to hub
|
| 15 |
+
env:
|
| 16 |
+
HF_TOKEN: ${{ secrets.HF_TOKEN }}
|
| 17 |
+
run: git push --force https://${{ secrets.HF_USERNAME }}:[email protected]/spaces/${{ secrets.HF_USERNAME }}/${{ secrets.SPACE_NAME }} main
|
.gitignore
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Archivos de Python
|
| 2 |
+
__pycache__/
|
| 3 |
+
*.py[cod]
|
| 4 |
+
*.pyo
|
| 5 |
+
*.pyd
|
| 6 |
+
*.pdb
|
| 7 |
+
*.egg-info/
|
| 8 |
+
*.egg
|
| 9 |
+
*.manifest
|
| 10 |
+
*.spec
|
| 11 |
+
|
| 12 |
+
# Entornos virtuales
|
| 13 |
+
.env
|
| 14 |
+
.venv
|
| 15 |
+
env/
|
| 16 |
+
venv/
|
| 17 |
+
ENV/
|
| 18 |
+
env.bak/
|
| 19 |
+
venv.bak/
|
| 20 |
+
|
| 21 |
+
# Archivos de configuración/editor
|
| 22 |
+
.vscode/
|
| 23 |
+
.idea/
|
| 24 |
+
*.swp
|
| 25 |
+
*.swo
|
| 26 |
+
|
| 27 |
+
# Archivos de compilación
|
| 28 |
+
build/
|
| 29 |
+
dist/
|
| 30 |
+
.eggs/
|
| 31 |
+
*.so
|
| 32 |
+
|
| 33 |
+
# Logs y cachés
|
| 34 |
+
*.log
|
| 35 |
+
*.pot
|
| 36 |
+
*.pyc
|
| 37 |
+
*.tmp
|
| 38 |
+
*.DS_Store
|
| 39 |
+
.cache/
|
| 40 |
+
coverage/
|
| 41 |
+
htmlcov/
|
| 42 |
+
.tox/
|
| 43 |
+
.mypy_cache/
|
| 44 |
+
.pytest_cache/
|
| 45 |
+
|
| 46 |
+
# Jupyter Notebooks
|
| 47 |
+
.ipynb_checkpoints
|
| 48 |
+
*.ipynb_checkpoints
|
| 49 |
+
|
| 50 |
+
# Otros
|
| 51 |
+
*.sqlite3
|
| 52 |
+
*.db
|
| 53 |
+
*.mo
|
| 54 |
+
|
| 55 |
+
*.parquet
|
| 56 |
+
*.xlsx
|
| 57 |
+
*.json
|
| 58 |
+
**/AutogluonModels/**
|
.pre-commit-config.yaml
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
repos:
|
| 2 |
+
- repo: https://github.com/astral-sh/ruff-pre-commit
|
| 3 |
+
rev: v0.1.15
|
| 4 |
+
hooks:
|
| 5 |
+
- id: ruff
|
| 6 |
+
types_or: [python, pyi, jupyter]
|
| 7 |
+
args:
|
| 8 |
+
- --fix
|
| 9 |
+
- id: ruff-format
|
| 10 |
+
types_or: [python, pyi, jupyter]
|
Dockerfile
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
FROM python:3.11-slim
|
| 2 |
+
|
| 3 |
+
WORKDIR /app
|
| 4 |
+
COPY requirements.txt requirements.txt
|
| 5 |
+
COPY pyproject.toml pyproject.toml
|
| 6 |
+
COPY src/ src/
|
| 7 |
+
COPY data/ /data
|
| 8 |
+
|
| 9 |
+
RUN pip install uv
|
| 10 |
+
RUN uv pip install -r requirements.txt
|
| 11 |
+
RUN pip install -e .
|
| 12 |
+
|
| 13 |
+
EXPOSE 8501
|
| 14 |
+
|
| 15 |
+
HEALTHCHECK CMD curl --fail http://localhost:8501/_stcore/health
|
| 16 |
+
|
| 17 |
+
ENTRYPOINT ["streamlit", "run", "src/chronos_conference/service_layer/main.py", "--server.port=8501", "--server.address=0.0.0.0"]
|
README.md
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
title: Amazon Chronos Bolt
|
| 3 |
+
emoji: 👁
|
| 4 |
+
colorFrom: gray
|
| 5 |
+
colorTo: purple
|
| 6 |
+
sdk: docker
|
| 7 |
+
pinned: false
|
| 8 |
+
license: mit
|
| 9 |
+
short_description: Large Time-Series Models Amazon
|
| 10 |
+
---
|
| 11 |
+
|
| 12 |
+
# Modelos Fundacionales para Series de tiempo
|
| 13 |
+
|
| 14 |
+
Conferencia: **Aprendiendo el Lenguaje de las Series de Tiempo: AWS Chronos Bolt.**
|
| 15 |
+
|
| 16 |
+
## Descripción
|
| 17 |
+
|
| 18 |
+
Este repo contiene el benchmark y código de la aplicación con datos del INAMHI de la conferencia `Aprendiendo el Lenguaje de las Series de Tiempo: AWS Chronos Bolt.` dada durante el AWS Community Day Ecuador 2025.
|
| 19 |
+
|
| 20 |
+
AWS Chronos Bolt es un modelo open-source. Cualquier persona interesada, puede acceder en los siguientes links a las versiones disponibles del modelo.
|
| 21 |
+
|
| 22 |
+
- [Tiny](https://huggingface.co/amazon/chronos-bolt-tiny)
|
| 23 |
+
- [Mini](https://huggingface.co/amazon/chronos-bolt-mini)
|
| 24 |
+
- [Small](https://huggingface.co/amazon/chronos-bolt-small)
|
| 25 |
+
- [Base](https://huggingface.co/amazon/chronos-bolt-base)
|
| 26 |
+
|
| 27 |
+
## ☕ Support the Project
|
| 28 |
+
|
| 29 |
+
If you find this demo helpful and would like to support its continued development, consider buying me a coffee. Your support helps maintain and improve this project!
|
| 30 |
+
|
| 31 |
+
[](https://www.paypal.com/paypalme/sebassarasti)
|
| 32 |
+
|
| 33 |
+
### Other Ways to Support
|
| 34 |
+
- ⭐ Star this repository
|
| 35 |
+
- 🍴 Fork it and contribute
|
| 36 |
+
- 📢 Share it with others who might find it useful
|
| 37 |
+
- 🐛 Report issues or suggest new features
|
| 38 |
+
|
| 39 |
+
Your support, in any form, is greatly appreciated! 🙏
|
data/historical_simulation_9023624.csv
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
notebooks/01_benchmark.ipynb
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
pyproject.toml
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[project]
|
| 2 |
+
name = "chronos_conference"
|
| 3 |
+
version = "0.1.0"
|
| 4 |
+
description = "Benchmark and Streamlit demo app for AWS Chronos Bolt time-series forecasting"
|
| 5 |
+
authors = [
|
| 6 |
+
{ name = "Sebastian Sarasti", email = "[email protected]" }
|
| 7 |
+
]
|
| 8 |
+
readme = "README.md"
|
| 9 |
+
|
| 10 |
+
requires-python = ">=3.11"
|
| 11 |
+
dependencies = [
|
| 12 |
+
"autogluon==1.4.0",
|
| 13 |
+
"kagglehub==0.3.13",
|
| 14 |
+
"numpy==2.1.3",
|
| 15 |
+
"pandas==2.3.2",
|
| 16 |
+
"utilsforecast==0.2.11",
|
| 17 |
+
"statsforecast==2.0.1",
|
| 18 |
+
"ipywidgets",
|
| 19 |
+
"ipykernel",
|
| 20 |
+
"pre_commit==4.3.0",
|
| 21 |
+
"streamlit==1.50.0",
|
| 22 |
+
"plotly==6.3.0",
|
| 23 |
+
"nbformat==5.10.4"
|
| 24 |
+
]
|
| 25 |
+
|
| 26 |
+
[project.urls]
|
| 27 |
+
Repository = "https://github.com/sebassaras02/AWS_Community_Day_Ecuador_2025"
|
| 28 |
+
|
| 29 |
+
[build-system]
|
| 30 |
+
requires = ["setuptools>=61.0", "wheel"]
|
| 31 |
+
build-backend = "setuptools.build_meta"
|
requirements.txt
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
autogluon==1.4.0
|
| 2 |
+
kagglehub==0.3.13
|
| 3 |
+
numpy==2.1.3
|
| 4 |
+
pandas==2.3.2
|
| 5 |
+
utilsforecast==0.2.11
|
| 6 |
+
statsforecast==2.0.1
|
| 7 |
+
ipywidgets
|
| 8 |
+
ipykernel
|
| 9 |
+
pre_commit==4.3.0
|
| 10 |
+
streamlit==1.50.0
|
| 11 |
+
plotly==6.3.0
|
| 12 |
+
nbformat==5.10.4
|
src/chronos_conference/adapters/filter_ts.py
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pandas as pd
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
def filter_ts(
    df: pd.DataFrame, date_col: str, min_date: str, max_date: str
) -> pd.DataFrame:
    """Return the rows of *df* whose *date_col* lies in [min_date, max_date].

    The frame is copied first, so the caller's DataFrame is never mutated.
    *date_col* is coerced to datetime when it is not already datetime-typed,
    so ISO-format date strings are accepted for the bounds.
    """
    df = df.copy()
    # Idiomatic truthiness check (was `... is False`); coerce only when needed.
    if not pd.api.types.is_datetime64_any_dtype(df[date_col]):
        df[date_col] = pd.to_datetime(df[date_col])
    # Inclusive on both ends.
    return df[(df[date_col] >= min_date) & (df[date_col] <= max_date)]
|
src/chronos_conference/adapters/model_instance.py
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from abc import ABC
|
| 2 |
+
|
| 3 |
+
import pandas as pd
|
| 4 |
+
from autogluon.timeseries import TimeSeriesDataFrame, TimeSeriesPredictor
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class ForecastingBaseModel(ABC):
    """Abstract interface shared by all forecasting backends.

    Subclasses implement ``fit`` (ingest a long-format time-series frame)
    and ``predict`` (produce an ``n_steps``-long forecast frame).
    """

    def __init__(self, freq: str, n_jobs: int = 1) -> None:
        # Concrete model object; populated by subclasses.
        self.model = None
        # Pandas-style frequency alias (e.g. "D", "H").
        self.freq = freq
        self.n_jobs = n_jobs

    def fit(
        self, df: pd.DataFrame, date_col: str, item_col: str, target_col: str
    ) -> None:
        """Prepare the model on *df*. Overridden by subclasses.

        Fixed ``targe_col`` -> ``target_col`` so the signature matches the
        ``ChronosForecaster`` override and keyword callers.
        """
        pass

    def predict(self, n_steps: int) -> pd.DataFrame:
        """Forecast *n_steps* periods ahead. Overridden by subclasses."""
        pass
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class ChronosForecaster(ForecastingBaseModel):
    """AutoGluon Chronos-Bolt backend for the ``ForecastingBaseModel`` interface."""

    def __init__(self, freq: str = "H"):
        super().__init__(freq=freq)

    def fit(
        self, df: pd.DataFrame, date_col: str, item_col: str, target_col: str
    ) -> None:
        """Store *df* as an AutoGluon ``TimeSeriesDataFrame`` for later prediction."""
        self.item_id = item_col
        # AutoGluon expects the target under the fixed name "target"; work on
        # a copy so the caller's frame is untouched.
        prepared = df.copy().rename(columns={target_col: "target"})
        self.df = TimeSeriesDataFrame.from_data_frame(
            prepared,
            id_column=item_col,
            timestamp_column=date_col,
        )

    def predict(self, n_steps):
        """Build a Chronos-Bolt predictor and forecast *n_steps* periods ahead.

        Returns a frame with columns ``AWSChronosForecast`` (mean forecast),
        ``unique_id`` and ``ds``.
        """
        # NOTE(review): the predictor is (re)built and fitted on every call —
        # presumably acceptable since the "bolt_base" preset is pretrained;
        # confirm if predict() is ever called repeatedly.
        predictor = TimeSeriesPredictor(
            prediction_length=n_steps, freq=self.freq, verbosity=0
        )
        self.model = predictor.fit(self.df, presets="bolt_base")
        forecast = self.model.predict(self.df).to_data_frame().reset_index()
        return forecast[["mean", "item_id", "timestamp"]].rename(
            columns={
                "mean": "AWSChronosForecast",
                "item_id": "unique_id",
                "timestamp": "ds",
            }
        )
|
src/chronos_conference/adapters/ts_plot.py
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import plotly.express as px
|
| 2 |
+
import pandas as pd
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
def get_plot(df_story: pd.DataFrame, df_pred: pd.DataFrame):
    """Plot the history with the forecast overlaid; return a plotly Figure.

    Parameters
    ----------
    df_story : historical frame with "datetime" and "value" columns.
    df_pred : forecast frame with "ds" and "AWSChronosForecast" columns.
    """
    # Work on copies: the original mutated the callers' frames in place when
    # coercing the date columns.
    df_story = df_story.copy()
    df_pred = df_pred.copy()

    if not pd.api.types.is_datetime64_any_dtype(df_story["datetime"]):
        df_story["datetime"] = pd.to_datetime(df_story["datetime"])
    if not pd.api.types.is_datetime64_any_dtype(df_pred["ds"]):
        df_pred["ds"] = pd.to_datetime(df_pred["ds"])

    fig = px.line(df_story, x="datetime", y="value", title="Historical Information")
    fig.add_scatter(
        x=df_pred["ds"], y=df_pred["AWSChronosForecast"], mode="lines", name="Forecast"
    )
    return fig
|
src/chronos_conference/domain/inference.py
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pandas as pd
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
def get_forecast(
    df: pd.DataFrame,
    date_col: str,
    target_col: str,
    item_col: str,
    model_instance,
    horizon: int = 24,
) -> pd.DataFrame:
    """Fit *model_instance* on *df* and return a *horizon*-step forecast.

    Parameters
    ----------
    df : long-format frame with one row per (item, timestamp).
    date_col / target_col / item_col : column names in *df*.
    model_instance : object exposing ``fit(df, date_col, item_col, target_col)``
        and ``predict(n_steps)``, e.g. ``ChronosForecaster``.
    horizon : number of future steps to forecast.
    """
    # FIX: the previous version renamed `target_col` to "target" here AND then
    # passed the stale original name to fit(), whose own rename silently
    # no-opped. The model's fit() owns the rename, so pass the real column
    # name through; copy so the caller's frame is never mutated downstream.
    model_instance.fit(
        df=df.copy(),
        date_col=date_col,
        item_col=item_col,
        target_col=target_col,
    )
    results = model_instance.predict(n_steps=horizon)
    return results
|
src/chronos_conference/service_layer/main.py
ADDED
|
@@ -0,0 +1,73 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ruff: noqa: F403, F405

import streamlit as st
import pandas as pd

from chronos_conference.domain.inference import get_forecast
from chronos_conference.adapters.filter_ts import filter_ts
from chronos_conference.adapters.model_instance import ChronosForecaster
from chronos_conference.adapters.ts_plot import get_plot
from chronos_conference.settings import *

# --- Page header ------------------------------------------------------------
st.title("AWS Community Day Ecuador 2025")
st.header(
    "Conferencia: Aprendiendo el Lenguaje de las series de tiempo con AWS Chronos Bolt"
)
st.subheader("Ponente: Sebastian Sarasti")

st.write(
    """
Esta aplicación demuestra cómo utilizar AWS Chronos Bolt para la predicción del clima mediante
datos abiertos obtenidos del INAMHI.
"""
)

# NOTE(review): path is relative to the process working directory, not this
# file — confirm how `streamlit run` is invoked (the Dockerfile copies data
# to /data, which this path does not match).
df = pd.read_csv("../../../data/historical_simulation_9023624.csv")

# --- User inputs: date window and forecast length ---------------------------
col1, col2, col3 = st.columns(3)

with col1:
    min_date = st.date_input("Fecha mínima", value=MIN_PRED_DATE)

with col2:
    max_date = st.date_input("Fecha máxima", value=MAX_PRED_DATE)

with col3:
    n_steps = st.number_input(
        "Número de pasos a predecir",
        min_value=MIN_PRED_DATE_LIMIT,
        max_value=MAX_PRED_DATE_LIMIT,
        value=N_PRED_STEPS,
    )

execution_button = st.button("Ejecutar modelo")

# Halt the script until the user clicks the button (Streamlit reruns top-down).
if not execution_button:
    st.stop()

with st.spinner("Filtrando datos..."):
    df_useful = filter_ts(
        df,
        date_col=HISTORICAL_DATE_COLUMN,
        min_date=str(min_date),
        max_date=str(max_date),
    )

model = ChronosForecaster(freq=FREQUENCY)

with st.spinner("Modelo en ejecución..."):
    results = get_forecast(
        df=df_useful,
        date_col=HISTORICAL_DATE_COLUMN,
        target_col=HISTORICAL_TARGET_COLUMN,
        item_col=HISTORICAL_ITEM_COLUMN,
        model_instance=model,
        # FIX: the selected step count was previously ignored — the forecast
        # always ran with get_forecast's default horizon of 24.
        horizon=int(n_steps),
    )

st.success("¡Ejecución completada!")

st.write("Resultados de la ejecución:")

fig = get_plot(df_useful, results)

st.plotly_chart(fig, use_container_width=True)
|
src/chronos_conference/settings.py
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Column names of the historical CSV consumed by the Streamlit app
# (data/historical_simulation_9023624.csv).
HISTORICAL_DATE_COLUMN = "datetime"
HISTORICAL_ITEM_COLUMN = "unique_id"
HISTORICAL_TARGET_COLUMN = "value"

# Pandas frequency alias of the series (daily).
FREQUENCY = "D"

# Default date window pre-filled in the UI date pickers.
MIN_PRED_DATE = "2022-01-01"
MAX_PRED_DATE = "2023-06-30"
# Default value and allowed [min, max] range for the forecast-step input.
N_PRED_STEPS = 48
MIN_PRED_DATE_LIMIT = 1
MAX_PRED_DATE_LIMIT = 128
|