Containerized MLflow tracking server with persistent storage.
# Build image
sh scripts/build_image.sh
# Stop and Build image
uv run scripts/stop_container.py && sh scripts/build_image.sh
# Start container (port 5500)
uv run scripts/run_container.py
# Configure shell
source mlflow_env.sh

Direct logging:
uv run scripts/test_mlflow_project.py

Decorator:
from mlflow_utils import mlflow_track
@mlflow_track(experiment_name="My_Experiment")
def train():
    return {"params": {...}, "metrics": {...}}

Run the decorator example:
source mlflow_env.sh && uv run scripts/test_decorator_example.py
OR
source mlflow_env.sh && python scripts/test_decorator_example.py

# Stop container
uv run scripts/stop_container.py
OR
python scripts/stop_container.py
# Check port
lsof -i :5500

Configuration:
- MLflow version: pyproject.toml
- Port/paths: mlflow.env
- Data: ~/mlflow (default)