This commit is contained in:
commit
b2810f3d10
|
@ -0,0 +1,34 @@
|
||||||
|
# Folder virtual environment (bisa beda nama, contoh: venv, env, .venv, dll)
|
||||||
|
venv/
|
||||||
|
env/
|
||||||
|
.venv/
|
||||||
|
|
||||||
|
# File Python cache dan artefak
|
||||||
|
__pycache__/
|
||||||
|
*.py[cod]
|
||||||
|
*$py.class
|
||||||
|
|
||||||
|
# VSCode settings (kalau pakai VSCode)
|
||||||
|
.vscode/
|
||||||
|
|
||||||
|
# File database sementara
|
||||||
|
*.sqlite3
|
||||||
|
|
||||||
|
# Jupyter Notebook checkpoint
|
||||||
|
.ipynb_checkpoints/
|
||||||
|
|
||||||
|
# Log files
|
||||||
|
*.log
|
||||||
|
|
||||||
|
# macOS stuff
|
||||||
|
.DS_Store
|
||||||
|
|
||||||
|
# Python environment config (jika ada)
|
||||||
|
.Python
|
||||||
|
|
||||||
|
# dotenv
|
||||||
|
.env
|
||||||
|
.env.*
|
||||||
|
|
||||||
|
# Compiled Cython files
|
||||||
|
*.so
|
|
@ -0,0 +1,26 @@
|
||||||
|
from fastapi import FastAPI
|
||||||
|
from fastapi.middleware.cors import CORSMiddleware
|
||||||
|
from routes.predict_file import router as predict_file_router
|
||||||
|
from routes.predict_json import router as predict_json_router
|
||||||
|
|
||||||
|
# Application instance; all configuration below runs at import time.
app = FastAPI()

# CORS: wide-open, development-style configuration.
# NOTE(review): allow_origins=["*"] combined with allow_credentials=True is
# disallowed by the CORS spec — browsers reject credentialed responses for a
# wildcard origin. Confirm whether credentials are actually required; if so,
# list explicit origins instead of "*".
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Register API Router
# Both routers are mounted under /api, yielding /api/predict-file and
# /api/predict-json endpoints.
app.include_router(predict_file_router, prefix="/api")
app.include_router(predict_json_router, prefix="/api")
|
||||||
|
|
||||||
|
@app.get("/")
async def root():
    """Landing endpoint; confirms the service is up."""
    greeting = {"message": "Welcome to ARIMA Prediction API (JSON version)"}
    return greeting
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Local/dev entry point. The deferred import keeps uvicorn optional for
    # deployments that serve `app` through an external ASGI server.
    import uvicorn
    # NOTE(review): port 6000 is on some browsers' unsafe-port blocklists
    # (e.g. Chrome's ERR_UNSAFE_PORT), so direct browser access may fail —
    # confirm, or choose a different port.
    uvicorn.run(app, host="0.0.0.0", port=6000)
|
|
@ -0,0 +1,32 @@
|
||||||
|
annotated-types==0.7.0
|
||||||
|
anyio==4.9.0
|
||||||
|
click==8.2.0
|
||||||
|
colorama==0.4.6
|
||||||
|
Cython==3.1.0
|
||||||
|
exceptiongroup==1.3.0
|
||||||
|
fastapi==0.115.12
|
||||||
|
h11==0.16.0
|
||||||
|
idna==3.10
|
||||||
|
joblib==1.5.0
|
||||||
|
numpy==1.23.5
|
||||||
|
packaging==25.0
|
||||||
|
pandas==2.2.3
|
||||||
|
patsy==1.0.1
|
||||||
|
pmdarima==2.0.4
|
||||||
|
pydantic==2.11.4
|
||||||
|
pydantic_core==2.33.2
|
||||||
|
python-dateutil==2.9.0.post0
|
||||||
|
python-multipart==0.0.20
|
||||||
|
pytz==2025.2
|
||||||
|
scikit-learn==1.6.1
|
||||||
|
scipy==1.15.3
|
||||||
|
six==1.17.0
|
||||||
|
sniffio==1.3.1
|
||||||
|
starlette==0.46.2
|
||||||
|
statsmodels==0.14.4
|
||||||
|
threadpoolctl==3.6.0
|
||||||
|
typing-inspection==0.4.0
|
||||||
|
typing_extensions==4.13.2
|
||||||
|
tzdata==2025.2
|
||||||
|
urllib3==2.4.0
|
||||||
|
uvicorn==0.34.2
|
|
@ -0,0 +1,72 @@
|
||||||
|
from fastapi import APIRouter, File, UploadFile, Form, HTTPException
|
||||||
|
from typing import List, Literal
|
||||||
|
import pandas as pd
|
||||||
|
import io
|
||||||
|
|
||||||
|
from services.forecastService import forecast_arima_per_product
|
||||||
|
|
||||||
|
router = APIRouter()


@router.post("/predict-file")
async def predict(
    sheet: UploadFile = File(...),
    # recordPeriod: Literal["daily", "weekly", "monthly"] = Form(...),
    predictionPeriod: Literal["weekly", "monthly"] = Form(...),
    predictionMode: Literal["auto", "optimal", "custom"] = Form(...),
    arimaModel: str = Form("")
):
    """Forecast per-product sales from an uploaded CSV/Excel sheet.

    The sheet must contain 'date' and 'sold(qty)' columns plus either
    'product_name' or 'product_code'. For each product group a 3-period
    forecast is produced via `forecast_arima_per_product`.

    Raises HTTPException 400 for invalid input and 500 for unexpected
    processing failures; per-product model failures are reported inline
    in the response rather than aborting the batch.
    """
    try:
        # Parse the custom ARIMA order "p,d,q" when requested.
        model_values: List[int] = []
        if predictionMode == "custom":
            if not arimaModel:
                raise HTTPException(status_code=400, detail="arimaModel harus diisi saat predictionMode adalah 'custom'")
            try:
                model_values = list(map(int, arimaModel.split(",")))
                if len(model_values) != 3:
                    raise ValueError
            except ValueError:
                raise HTTPException(status_code=400, detail="Format arimaModel harus 'p,d,q'.")

        # Read the upload: CSV by filename extension, otherwise Excel.
        content = await sheet.read()
        df = pd.read_csv(io.BytesIO(content)) if sheet.filename.endswith(".csv") else pd.read_excel(io.BytesIO(content))
        if df.empty:
            raise HTTPException(status_code=400, detail="File tidak berisi data.")

        # Validate required columns.
        if 'product_code' not in df.columns and 'product_name' not in df.columns:
            raise HTTPException(status_code=400, detail="Data harus memiliki kolom 'product_code' atau 'product_name'.")
        if 'date' not in df.columns or 'sold(qty)' not in df.columns:
            raise HTTPException(status_code=400, detail="Data harus memiliki kolom 'date' dan 'sold(qty)'.")

        # Prefer product_name as the grouping key when both are present.
        product_column = 'product_name' if 'product_name' in df.columns else 'product_code'
        df['date'] = pd.to_datetime(df['date'])
        df = df.sort_values(by=[product_column, 'date'])

        freq_map = {"daily": "D", "weekly": "W", "monthly": "M"}
        horizon = 3  # fixed 3-period forecast horizon

        results = []
        for product, group in df.groupby(product_column):
            try:
                result = forecast_arima_per_product(group, freq_map[predictionPeriod], predictionMode, model_values, horizon)
                forecast = result["forecast"]
                results.append({
                    "predictionPeriod": predictionPeriod,
                    "product": product,
                    "order": ",".join(map(str, result["model_params"])),
                    "phase1": forecast[0] if len(forecast) > 0 else None,
                    "phase2": forecast[1] if len(forecast) > 1 else None,
                    "phase3": forecast[2] if len(forecast) > 2 else None,
                })
            except Exception as model_err:
                # One failing product must not abort the whole batch.
                results.append({
                    "product": product,
                    "error": str(model_err)
                })

        return {"status": "success", "data": results}

    except HTTPException:
        # BUG FIX: HTTPException subclasses Exception, so the broad handler
        # below used to swallow the deliberate 400 responses raised above and
        # re-emit them as generic 500s. Re-raise them untouched.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Terjadi kesalahan saat memproses file: {str(e)}")
|
|
@ -0,0 +1,80 @@
|
||||||
|
from fastapi import APIRouter, HTTPException
|
||||||
|
from pydantic import BaseModel
|
||||||
|
from typing import List, Optional, Literal
|
||||||
|
import numpy as np
|
||||||
|
import pandas as pd
|
||||||
|
from statsmodels.tsa.arima.model import ARIMA
|
||||||
|
from statsmodels.tsa.stattools import adfuller, acf, pacf
|
||||||
|
from sklearn.metrics import mean_squared_error
|
||||||
|
|
||||||
|
router = APIRouter()


class TimeSeriesData(BaseModel):
    """A single time series; date[i] pairs with value[i]."""
    date: List[str]  # date strings, parsed later with pd.to_datetime
    value: List[float]  # observed values, must match len(date)
|
||||||
|
|
||||||
|
class PredictionRequest(BaseModel):
    """Request body for POST /predict-json."""
    data: TimeSeriesData  # the series to model
    model: Literal['optimal', 'custom', 'auto'] = "auto"  # order-selection strategy
    forecast_step: int  # number of future periods to forecast
    order: Optional[List[int]] = None  # [p, d, q]; required when model == 'custom'
|
||||||
|
|
||||||
|
def determine_d(series):
    """Choose the differencing order d (0, 1 or 2) via the Augmented
    Dickey-Fuller test: difference until the series tests stationary
    (p-value <= 0.05), capped at d = 2."""
    order = 0
    while order < 2:
        if adfuller(series)[1] <= 0.05:
            break  # stationary — stop differencing
        series = series.diff().dropna()
        order += 1
    return order
|
||||||
|
|
||||||
|
def determine_p_q(series):
    """Pick the AR order p (first significant PACF lag) and MA order q
    (first significant ACF lag), defaulting each to 1."""
    clean = series.dropna()
    correlations = acf(clean, nlags=10)
    partials = pacf(clean, nlags=10)

    def _first_significant(vals):
        # Lag 0 is always 1.0, so scan lags 1..10; |value| > 0.2 counts.
        for lag in range(1, len(vals)):
            if abs(vals[lag]) > 0.2:
                return lag
        return 1

    return _first_significant(partials), _first_significant(correlations)
|
||||||
|
|
||||||
|
@router.post("/predict-json")
async def predict_json(request: PredictionRequest):
    """Fit an ARIMA model on a JSON time series and return a forecast.

    The series is split 70/30 chronologically into train/test; (p, d, q) is
    chosen per request.model, the model is fitted on the training slice,
    RMSE is reported on the held-out slice, and `forecast_step` future
    periods are forecast.

    Raises HTTPException 400 for malformed input and 500 on fitting errors.
    """
    if len(request.data.date) != len(request.data.value):
        raise HTTPException(status_code=400, detail="Date and value lists must have the same length.")

    try:
        # Build a date-indexed frame; dropna discards missing rows before sorting.
        df = pd.DataFrame({"date": pd.to_datetime(request.data.date), "value": request.data.value})
        df = df.dropna().sort_values(by="date").set_index("date")
    except Exception as e:
        raise HTTPException(status_code=400, detail=f"Invalid data format: {str(e)}")

    if len(df) < 60:
        raise HTTPException(status_code=400, detail="Insufficient data: At least 60 records required.")

    # Chronological 70/30 split — no shuffling, order matters for ARIMA.
    train_size = int(len(df) * 0.7)
    train, test = df[:train_size], df[train_size:]

    # Select the ARIMA order (p, d, q).
    if request.model == "auto":
        d = determine_d(train["value"])
        p, q = determine_p_q(train["value"])
    elif request.model == "optimal":
        # Fixed default order.
        p, d, q = 2, 1, 2
    elif request.model == "custom":
        if not request.order or len(request.order) != 3:
            raise HTTPException(status_code=400, detail="Custom model requires an array of [p, d, q].")
        p, d, q = request.order
    else:
        raise HTTPException(status_code=400, detail="Invalid model type. Choose 'auto', 'optimal', or 'custom'.")

    try:
        arima_model = ARIMA(train["value"], order=(p, d, q))
        model_fit = arima_model.fit()
        # RMSE against the held-out 30% gauges model quality.
        predictions = model_fit.forecast(steps=len(test)).tolist()
        rmse = np.sqrt(mean_squared_error(test["value"], predictions))
        # NOTE(review): the future forecast comes from the model fitted on the
        # training slice only — the test slice is never folded back in before
        # forecasting. Confirm whether refitting on the full series is intended.
        future_forecast = model_fit.forecast(steps=request.forecast_step).tolist()
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Model training error: {str(e)}")

    return {
        "arima_order": [p, d, q],
        "rmse": rmse,
        "forecast": future_forecast
    }
|
|
@ -0,0 +1,58 @@
|
||||||
|
from statsmodels.tsa.arima.model import ARIMA
|
||||||
|
from pmdarima import auto_arima
|
||||||
|
from statsmodels.tsa.stattools import adfuller
|
||||||
|
import pandas as pd
|
||||||
|
|
||||||
|
|
||||||
|
def forecast_arima_per_product(group: pd.DataFrame, freq: str, mode: str, arima_order: list[int], horizon: int):
    """Fit an ARIMA model on one product's sales history and forecast ahead.

    Parameters
    ----------
    group : pd.DataFrame
        Rows for a single product; must contain 'date' and 'sold(qty)' columns.
    freq : str
        Pandas resample frequency ('D', 'W' or 'M').
    mode : str
        'auto' (pmdarima order search), 'optimal' (fixed (2,1,2)) or
        'custom' (use `arima_order`).
    arima_order : list[int]
        [p, d, q]; consulted only when mode == 'custom'.
    horizon : int
        Number of future periods to forecast.

    Returns
    -------
    dict
        {'forecast': list of forecast values in original units,
         'model_params': the (p, d, q) order used}.

    Raises
    ------
    RuntimeError
        Wrapping any failure during model construction or fitting.
    """
    group = group.set_index('date')
    df_resampled = group.resample(freq).sum().dropna()
    series = df_resampled['sold(qty)']

    # BUG FIX: previously the series was manually differenced here whenever
    # the ADF test flagged non-stationarity, and the *already differenced*
    # series was then handed to ARIMA/auto_arima, whose own `d` term
    # differences again. That double-differenced the data and — worse —
    # returned forecasts in differenced units instead of actual quantities.
    # ARIMA's integration (and auto_arima's d=None search) handles
    # non-stationarity itself, so the manual differencing step is removed.

    try:
        if mode == "auto":
            # Let pmdarima search p, q up to 5 and choose d automatically.
            model = auto_arima(
                series,
                start_p=0, start_q=0,
                max_p=5, max_q=5,
                d=None,
                seasonal=False,
                stepwise=True,
                suppress_warnings=True,
                error_action="ignore"
            )
            forecast = model.predict(n_periods=horizon)
            return {
                "forecast": forecast.tolist(),
                "model_params": model.order
            }

        elif mode == "optimal":
            # Fixed order used as a general-purpose default.
            model_order = (2, 1, 2)
            model_fit = ARIMA(series, order=model_order).fit()
            forecast = model_fit.forecast(steps=horizon)
            return {
                "forecast": forecast.tolist(),
                "model_params": model_order
            }

        elif mode == "custom":
            if len(arima_order) != 3:
                raise ValueError("Parameter ARIMA harus 3 angka: p,d,q.")
            model_fit = ARIMA(series, order=tuple(arima_order)).fit()
            forecast = model_fit.forecast(steps=horizon)
            return {
                "forecast": forecast.tolist(),
                "model_params": arima_order
            }

        else:
            raise ValueError("Mode prediksi tidak valid.")

    except Exception as e:
        raise RuntimeError(f"Model ARIMA gagal dibentuk: {str(e)}")
|
Loading…
Reference in New Issue