diff --git a/backend/routes/__pycache__/predictTomat.cpython-310.pyc b/backend/routes/__pycache__/predictTomat.cpython-310.pyc
index 326c47ff..bf2d30a4 100644
Binary files a/backend/routes/__pycache__/predictTomat.cpython-310.pyc and b/backend/routes/__pycache__/predictTomat.cpython-310.pyc differ
diff --git a/backend/routes/__pycache__/testingModel.cpython-310.pyc b/backend/routes/__pycache__/testingModel.cpython-310.pyc
index bbd0e70d..71eefe5a 100644
Binary files a/backend/routes/__pycache__/testingModel.cpython-310.pyc and b/backend/routes/__pycache__/testingModel.cpython-310.pyc differ
diff --git a/backend/routes/predictTomat.py b/backend/routes/predictTomat.py
index 132cade7..b3e7431c 100644
--- a/backend/routes/predictTomat.py
+++ b/backend/routes/predictTomat.py
@@ -6,7 +6,7 @@ from sqlalchemy import select, insert, text, join, delete
 from sklearn.svm import SVR
 from sklearn.preprocessing import MinMaxScaler, StandardScaler
 from sklearn.model_selection import train_test_split
-from sklearn.metrics import mean_absolute_error, mean_squared_error
+from sklearn.metrics import mean_absolute_error, mean_squared_error, mean_absolute_percentage_error
 from config.db import get_db, conn
 from models.index import priceTomat, settingPredict, resultPredict
 from datetime import datetime, timedelta
@@ -21,13 +21,28 @@ predict_router = APIRouter(
 @predict_router.get("/date")
 async def read_data(db: Session = Depends(get_db)):
     try:
-        query = text("SELECT tanggal FROM price_tomat ORDER BY tanggal DESC LIMIT 1;")
-        result = db.execute(query).fetchone()
-
-        if result:
-            return {"tanggal": result[0]}
-        else:
-            return {"message": "No data found"}
+        query = text("""
+            SELECT
+                (SELECT pt.tanggal
+                 FROM predict.price_tomat AS pt
+                 JOIN predict.result_predict AS rp ON pt.id = rp.id
+                 ORDER BY rp.id ASC
+                 LIMIT 1 OFFSET 30) AS tanggal_old,
+
+                (SELECT tanggal
+                 FROM predict.price_tomat
+                 ORDER BY tanggal DESC
+                 LIMIT 1) AS tanggal_new;
+        """)
+        result = db.execute(query).fetchone()
+
+        if result:
+            return {
+                "tanggal_old": result[0],
+                "tanggal_new": result[1]
+            }
+        else:
+            return {"message": "No data found"}
     except Exception as e:
         raise HTTPException(status_code=500, detail=str(e))
@@ -76,6 +91,15 @@ def predict_price(db: Session = Depends(get_db)):
         df[['Pasar_Bandung', 'Pasar_Ngunut', 'Pasar_Ngemplak', 'RataRata_Kemarin', 'RataRata_2Hari_Lalu', 'RataRata_Sekarang']]
     )
 
+    X = df[['Pasar_Bandung', 'Pasar_Ngunut', 'Pasar_Ngemplak', 'RataRata_Kemarin', 'RataRata_2Hari_Lalu']].values
+    y = df['RataRata_Sekarang'].values
+    ids = df['id'].values
+    tanggal = df['Tanggal'].values
+
+    X_train, X_test, y_train, y_test, id_train, id_test, tanggal_train, tanggal_test = train_test_split(
+        X, y, ids, tanggal, test_size=0.2, shuffle=False
+    )
+
     # Fetch the model parameters from the database
     kernel = settings.nama_kernel
     C = float(settings.nilai_c) if settings.nilai_c is not None else 1.0
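The hunk above replaces fitting on the entire dataset with a chronological hold-out split, and it threads the row ids and dates through train_test_split so every test prediction can be traced back to its price_tomat row. A minimal sketch of that pattern, using a made-up DataFrame in place of the real table:

    # Chronological 80/20 split that keeps ids and dates aligned with X and y
    # (synthetic data; the route reads the same kind of columns from price_tomat).
    import numpy as np
    import pandas as pd
    from sklearn.model_selection import train_test_split

    df = pd.DataFrame({
        'id': range(1, 101),
        'Tanggal': pd.date_range('2024-01-01', periods=100),
        'Pasar_Bandung': np.random.rand(100),
        'RataRata_Kemarin': np.random.rand(100),
        'RataRata_Sekarang': np.random.rand(100),
    })

    X = df[['Pasar_Bandung', 'RataRata_Kemarin']].values
    y = df['RataRata_Sekarang'].values

    # shuffle=False preserves time order: the most recent 20% of rows become the test set.
    X_train, X_test, y_train, y_test, id_train, id_test, tgl_train, tgl_test = train_test_split(
        X, y, df['id'].values, df['Tanggal'].values, test_size=0.2, shuffle=False
    )
    print(len(X_train), len(X_test))  # 80 20

Keeping the split unshuffled is the usual choice for time-series data, since a shuffled split would let the model train on days that come after the ones it is evaluated on.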
@@ -110,41 +134,58 @@ def predict_price(db: Session = Depends(get_db)):
     X = df[['Pasar_Bandung', 'Pasar_Ngunut', 'Pasar_Ngemplak', 'RataRata_Kemarin', 'RataRata_2Hari_Lalu']].values
     y = df['RataRata_Sekarang'].values
 
-    # Train the model on all available data
-    svr.fit(X, y)
+    # # Train the model on all available data
+    # svr.fit(X, y)
 
-    # Predict prices for every future date
-    for i in range(len(df)):
-        fitur_input = X[i]
-        prediksi = svr.predict([fitur_input])[0]
-        hasil_prediksi.append(prediksi)
+    # # Predict prices for every future date
+    # for i in range(len(df)):
+    #     fitur_input = X[i]
+    #     prediksi = svr.predict([fitur_input])[0]
+    #     hasil_prediksi.append(prediksi)
+
+    # Train the model on the training split
+    svr.fit(X_train, y_train)
+
+    # Predict on the test split
+    y_pred = svr.predict(X_test)
 
     # Evaluate the model
-    mae = mean_absolute_error(y, hasil_prediksi)
-    rmse = np.sqrt(mean_squared_error(y, hasil_prediksi))
-    mape = np.mean(np.abs((y - hasil_prediksi) / y)) * 100
-
-    # Save the results to the database
-    for i in range(len(hasil_prediksi)):
-        id_tomat = df.iloc[i]['id']  # Take the ID from the price_tomat table
-        prediksi_value = float(scaler.inverse_transform([[0, 0, 0, 0, 0, hasil_prediksi[i]]])[0][5])
+    mae = mean_absolute_error(y_test, y_pred)
+    rmse = np.sqrt(mean_squared_error(y_test, y_pred))
+    mape = mean_absolute_percentage_error(y_test, y_pred)
+
+    jumlah_data_dikirim = 0
+
+    # Attach the predictions back to the test rows
+    for i in range(len(y_pred)):
+        id_tomat = id_test[i]
+        tanggal_pred = tanggal_test[i]
+        hasil = y_pred[i]
+        hasil_asli = y_test[i]
+
+        # Inverse-transform the prediction
+        dummy_row = np.zeros((1, 6))  # [0, 0, 0, 0, 0, hasil_prediksi]
+        dummy_row[0][5] = hasil
+        prediksi_asli = float(scaler.inverse_transform(dummy_row)[0][5])
 
-        # Update the existing row or insert a new one
         existing = db.execute(select(resultPredict).where(resultPredict.c.id == id_tomat)).fetchone()
         if existing:
             db.execute(
                 resultPredict.update()
                 .where(resultPredict.c.id == id_tomat)
-                .values(hasil_prediksi=prediksi_value)
+                .values(hasil_prediksi=prediksi_asli)
             )
         else:
-            db.execute(insert(resultPredict).values(id=id_tomat, hasil_prediksi=prediksi_value))
+            db.execute(insert(resultPredict).values(id=id_tomat, hasil_prediksi=prediksi_asli))
+
+        jumlah_data_dikirim += 1
 
     db.commit()
 
     return {
         "Kernel": kernel,
         "Evaluasi": { "MAE": mae, "RMSE": rmse, "MAPE": mape },
+        "Jumlah_data_dikirim": jumlah_data_dikirim,
         "Pesan": "Prediksi seluruh data berhasil disimpan ke database"
     }
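In the replacement loop above, each scaled prediction is mapped back to the original price scale by dropping it into column 5 of a zero row and running it through MinMaxScaler.inverse_transform, since the scaler was fitted on six columns at once. Two side notes: sklearn's mean_absolute_percentage_error returns a fraction rather than a value multiplied by 100 as the old formula did, and MAE/RMSE here are computed on the normalised scale. A small self-contained check of the dummy-row inverse step, on synthetic six-column data standing in for the price columns:

    # Invert a single scaled target column of a six-column MinMaxScaler.
    import numpy as np
    from sklearn.preprocessing import MinMaxScaler

    data = np.random.rand(50, 6) * 10000
    scaler = MinMaxScaler().fit(data)

    scaled_target = scaler.transform(data)[0, 5]   # scaled value of column 5, row 0
    dummy_row = np.zeros((1, 6))
    dummy_row[0, 5] = scaled_target
    recovered = scaler.inverse_transform(dummy_row)[0, 5]

    print(np.isclose(recovered, data[0, 5]))  # True: the zeros in the other columns do not affect column 5

MinMaxScaler works column by column, so the zeros placed in the other five positions only produce throw-away values in those columns and leave the recovered target untouched.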
@@ -256,7 +297,7 @@ def get_price_history(
     # 10. Evaluate the model
     mae = mean_absolute_error(y_test, y_pred)
     rmse = np.sqrt(mean_squared_error(y_test, y_pred))
-    mape = np.mean(np.abs((y_test - y_pred) / y_test)) * 100
+    mape = mean_absolute_percentage_error(y_test, y_pred)
 
     # 11. Predict the next 30 days
@@ -311,7 +352,7 @@ def get_price_history(
 
     else:
         # Fetch historical data when the date is not the latest one
-        start_date = tanggal_input - timedelta(days=30)
+        start_date = tanggal_input - timedelta(days=29)
         end_date = tanggal_input
 
         query = (
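Taken together, the /date endpoint now exposes an oldest and a newest selectable date (tanggal_old is read from the rows that already have predictions, via LIMIT 1 OFFSET 30; tanggal_new is the latest price row), and the history branch above switches from 30 to 29 days so the queried range covers exactly 30 calendar days including the end date. A quick sketch of that off-by-one detail, with a hypothetical end date:

    # A window of "end date minus 29 days" spans 30 calendar days inclusive.
    from datetime import date, timedelta

    end_date = date(2025, 3, 30)                  # hypothetical tanggal_input
    start_date = end_date - timedelta(days=29)
    days_inclusive = (end_date - start_date).days + 1

    print(start_date, end_date, days_inclusive)   # 2025-03-01 2025-03-30 30

Subtracting 30 days, as the old code did, would have returned 31 calendar days when both endpoints are included.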
{"Sebelum Preprocessing": (df == 0).sum().to_dict()} + + # # Konversi ke numerik dan hitung 0 setelah konversi + # df[['Pasar_Bandung', 'Pasar_Ngunut', 'Pasar_Ngemplak', 'Harga_Kemarin', 'Harga_Sekarang']] = df[['Pasar_Bandung', 'Pasar_Ngunut', 'Pasar_Ngemplak', 'Harga_Kemarin', 'Harga_Sekarang']].apply(pd.to_numeric, errors='coerce') + # zero_data_log["Setelah Konversi Numerik"] = (df == 0).sum().to_dict() + + # # Hapus NaN dan hitung 0 setelah drop NaN + # df.dropna(inplace=True) + # zero_data_log["Setelah Drop NaN Pertama"] = (df == 0).sum().to_dict() + + # # Konversi tanggal + # df['Tanggal'] = pd.to_datetime(df['Tanggal'], errors='coerce') + # zero_data_log["Setelah Konversi Tanggal"] = (df == 0).sum().to_dict() + + # # Tambah fitur Harga_2Hari_Lalu + # df['Harga_2Hari_Lalu'] = df['Harga_Kemarin'].shift(1) + # zero_data_log["Setelah Tambah Harga_2Hari_Lalu"] = (df == 0).sum().to_dict() + + # # Drop NaN setelah penambahan fitur dan hitung 0 + # df.dropna(inplace=True) + # zero_data_log["Setelah Drop NaN Kedua"] = (df == 0).sum().to_dict() @@ -164,7 +188,7 @@ def predict_price( "C": C, "Gamma": gamma, "Epsilon": epsilon, - "Zero_Data_Log": zero_data_log, + # "Zero_Data_Log": zero_data_log, "Data_Asli": data_asli, "Hasil_Preprocessing": hasil_preprocessing, "Hasil_Normalisasi": hasil_normalisasi, diff --git a/frontend/src/pages/LandingPage/ViewGrafik.jsx b/frontend/src/pages/LandingPage/ViewGrafik.jsx index 9e198070..0d588b8f 100644 --- a/frontend/src/pages/LandingPage/ViewGrafik.jsx +++ b/frontend/src/pages/LandingPage/ViewGrafik.jsx @@ -1,5 +1,5 @@ import React, { useState, useEffect } from "react"; -import { format, parseISO, isAfter, startOfDay } from "date-fns"; +import { format, parseISO, isBefore, isAfter, startOfDay } from "date-fns"; import { CalendarIcon } from "lucide-react"; import { cn } from "@/lib/utils"; import { Button } from "@/components/ui/button"; @@ -43,6 +43,8 @@ import { useToast } from '@/hooks/use-toast'; const ViewGrafik = ({ date, setDate, dataYAxis, setDataYAxis, priceType, setPriceType, chartData, setChartData, setTabelDataAktual, setTabelDataPredict }) => { const { toast } = useToast(); const [dateTerbaru, setDateTerbaru] = useState(null); + const [dateTerlama, setDateTerlama] = useState(null); + const oldestDate = dateTerlama ? parseISO(dateTerlama) : null; const latestDate = dateTerbaru ? parseISO(dateTerbaru) : null; const handleDateChange = (selectedDate) => setDate(selectedDate); @@ -52,8 +54,9 @@ const ViewGrafik = ({ date, setDate, dataYAxis, setDataYAxis, priceType, setPric try { const response = await axios.get(`${API_URL}/predict/date`); - console.log(response.data.tanggal) - setDateTerbaru(response.data.tanggal) + console.log(response.data.tanggal_old) + setDateTerbaru(response.data.tanggal_new) + setDateTerlama(response.data.tanggal_old) } catch (error) { console.error("Error fetching data", error); } @@ -119,7 +122,12 @@ const ViewGrafik = ({ date, setDate, dataYAxis, setDataYAxis, priceType, setPric onSelect={handleDateChange} initialFocus defaultMonth={latestDate} - disabled={(day) => latestDate && isAfter(startOfDay(day), latestDate)} + disabled={(day) => + // Nonaktifkan tanggal SETELAH tanggal terbaru + (latestDate && isAfter(startOfDay(day), latestDate)) || + // Nonaktifkan tanggal SEBELUM tanggal terlama + (oldestDate && isBefore(startOfDay(day), oldestDate)) + } />