From 6d38369c556e23d93f24e157cd8fad6d28a72c0c Mon Sep 17 00:00:00 2001
From: muhamad fais aizat
Date: Thu, 15 May 2025 13:21:06 +0700
Subject: [PATCH] fix: repair the tomato price prediction; feat: add vercel
 config file

---
 backend/config/__pycache__/db.cpython-310.pyc |  Bin 576 -> 665 bytes
 backend/config/db.py                          |    2 +-
 backend/requirements.txt                      |  Bin 0 -> 1508 bytes
 .../__pycache__/predictTomat.cpython-310.pyc  |  Bin 8840 -> 9103 bytes
 backend/routes/predictTomat.py                |  250 ++++++++++--------
 backend/vercel.json                           |   14 +
 frontend/src/pages/LandingPage/ViewGrafik.jsx |    2 +-
 7 files changed, 162 insertions(+), 106 deletions(-)
 create mode 100644 backend/requirements.txt
 create mode 100644 backend/vercel.json

diff --git a/backend/config/__pycache__/db.cpython-310.pyc b/backend/config/__pycache__/db.cpython-310.pyc
index 5545742327f138684109e3eb8d698cd11b76c35a..659d3ced08a15fac6db50576a75dab3a540ff026 100644
GIT binary patch
[binary delta omitted: compiled __pycache__ artifact, not human-readable]

diff --git a/backend/config/db.py b/backend/config/db.py
index b294a1df..49a507c9 100644
--- a/backend/config/db.py
+++ b/backend/config/db.py
@@ -1,7 +1,7 @@
 from sqlalchemy.orm import sessionmaker
 from sqlalchemy import create_engine, MetaData
 
-engine = create_engine("mysql+pymysql://root@localhost:3306/predict")
+engine = create_engine("mysql+pymysql://uupkixae2jbw2zse:1bFkfd8pfZ6EZhphAPQa@bdcaorplf9ct8apn6n5o-mysql.services.clever-cloud.com:3306/bdcaorplf9ct8apn6n5o")
 
 meta = MetaData()
 conn = engine.connect()

diff --git a/backend/requirements.txt b/backend/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..082788b7c033e439f6664f2cc398507d3453d5ec
GIT binary patch
[binary literal omitted: the new requirements.txt was committed in a non-UTF-8 encoding, so git recorded it as a binary blob]

diff --git a/backend/routes/__pycache__/predictTomat.cpython-310.pyc b/backend/routes/__pycache__/predictTomat.cpython-310.pyc
index 44df38d6d4b42afd910cc38efe59673b1edeea62..ab607ae7436152e03ba8d77c920e977d267044e8 100644
GIT binary patch
[binary delta omitted: compiled __pycache__ artifact, not human-readable]
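[note] The db.py change above hardcodes the Clever Cloud username and password
in source control. Credentials committed like this should be rotated and moved
out of the code. A minimal sketch, assuming the deployment sets a DATABASE_URL
environment variable (the variable name is illustrative, not part of this
patch):

    # backend/config/db.py -- sketch, not the committed version
    import os
    from sqlalchemy import create_engine, MetaData

    # Fall back to the local development database when DATABASE_URL is unset.
    DATABASE_URL = os.environ.get(
        "DATABASE_URL",
        "mysql+pymysql://root@localhost:3306/predict",
    )
    engine = create_engine(DATABASE_URL)
    meta = MetaData()
    conn = engine.connect()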
diff --git a/backend/routes/predictTomat.py b/backend/routes/predictTomat.py
index 0a114d3e..5489d320 100644
--- a/backend/routes/predictTomat.py
+++ b/backend/routes/predictTomat.py
@@ -24,13 +24,13 @@ async def read_data(db: Session = Depends(get_db)):
     query = text("""
         SELECT
             (SELECT pt.tanggal
-            FROM predict.price_tomat AS pt
-            JOIN predict.result_predict AS rp ON pt.id = rp.id
+            FROM price_tomat AS pt
+            JOIN result_predict AS rp ON pt.id = rp.id
             ORDER BY rp.id ASC
             LIMIT 1 OFFSET 29) AS tanggal_old,
             (SELECT tanggal
-            FROM predict.price_tomat
+            FROM price_tomat
             ORDER BY tanggal DESC
             LIMIT 1) AS tanggal_new;
     """)
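[note] Dropping the hardcoded "predict." schema prefix is what makes this
query work against the new hosted database, whose schema name presumably
differs from the local one: unqualified table names resolve against the
database named in the connection URL. A sketch of the same lookup with the
30-row window as a bound parameter instead of the literal OFFSET 29 (the
window_size name is illustrative, not part of this patch):

    from sqlalchemy import text

    window_size = 30  # rows in the evaluation window
    query = text("""
        SELECT
            (SELECT pt.tanggal
             FROM price_tomat AS pt
             JOIN result_predict AS rp ON pt.id = rp.id
             ORDER BY rp.id ASC
             LIMIT 1 OFFSET :off) AS tanggal_old,
            (SELECT tanggal
             FROM price_tomat
             ORDER BY tanggal DESC
             LIMIT 1) AS tanggal_new;
    """)
    row = db.execute(query, {"off": window_size - 1}).fetchone()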
HTTPException(status_code=400, detail="Tidak ada konfigurasi prediksi yang aktif") + + # Konversi data ke DataFrame + df = pd.DataFrame([{ + "id": item.id, # Simpan ID + "Tanggal": item.tanggal, + "Pasar_Bandung": item.pasar_bandung, + "Pasar_Ngunut": item.pasar_ngunut, + "Pasar_Ngemplak": item.pasar_ngemplak, + "RataRata_Kemarin": item.ratarata_kemarin, + "RataRata_Sekarang": item.ratarata_sekarang + } for item in data]) + + if df.shape[0] < 3: + raise HTTPException(status_code=400, detail="Data tidak cukup untuk melakukan prediksi") + + # Preprocessing data + kolom_numerik = ['Pasar_Bandung', 'Pasar_Ngunut', 'Pasar_Ngemplak', 'RataRata_Kemarin', 'RataRata_Sekarang'] + df[kolom_numerik] = df[kolom_numerik].apply(pd.to_numeric, errors='coerce') + df[kolom_numerik] = df[kolom_numerik].replace(0, np.nan) + # Interpolasi nilai kosong (0 yang sudah jadi NaN) + df[kolom_numerik] = df[kolom_numerik].interpolate(method='linear', limit_direction='both') + # Drop jika masih ada NaN (misalnya di ujung data) + df.dropna(inplace=True) - df['Tanggal'] = pd.to_datetime(df['Tanggal']) - # df['Harga_2Hari_Lalu'] = df['Harga_Kemarin'].shift(1) - df.dropna(inplace=True) - - # Normalisasi Data - scaler = StandardScaler() - df[['Pasar_Bandung', 'Pasar_Ngunut', 'Pasar_Ngemplak', 'RataRata_Kemarin', 'RataRata_Sekarang']] = scaler.fit_transform( - df[['Pasar_Bandung', 'Pasar_Ngunut', 'Pasar_Ngemplak', 'RataRata_Kemarin', 'RataRata_Sekarang']] - ) + df['Tanggal'] = pd.to_datetime(df['Tanggal']) + # df['Harga_2Hari_Lalu'] = df['Harga_Kemarin'].shift(1) + df.dropna(inplace=True) + + # Normalisasi Data + scaler = StandardScaler() + df[['Pasar_Bandung', 'Pasar_Ngunut', 'Pasar_Ngemplak', 'RataRata_Kemarin', 'RataRata_Sekarang']] = scaler.fit_transform( + df[['Pasar_Bandung', 'Pasar_Ngunut', 'Pasar_Ngemplak', 'RataRata_Kemarin', 'RataRata_Sekarang']] + ) - X = df[['Pasar_Bandung', 'Pasar_Ngunut', 'Pasar_Ngemplak', 'RataRata_Kemarin', ]].values - y = df['RataRata_Sekarang'].values - ids = df['id'].values - tanggal = df['Tanggal'].values + X = df[['Pasar_Bandung', 'Pasar_Ngunut', 'Pasar_Ngemplak', 'RataRata_Kemarin', ]].values + y = df['RataRata_Sekarang'].values + ids = df['id'].values + tanggal = df['Tanggal'].values - X_train, X_test, y_train, y_test, id_train, id_test, tanggal_train, tanggal_test = train_test_split( - X, y, ids, tanggal, test_size=0.2, shuffle=False - ) + X_train, X_test, y_train, y_test, id_train, id_test, tanggal_train, tanggal_test = train_test_split( + X, y, ids, tanggal, test_size=0.2, shuffle=False + ) - # Ambil parameter model dari database - kernel = settings.nama_kernel - C = float(settings.nilai_c) if settings.nilai_c is not None else 1.0 - gamma = float(settings.nilai_gamma) if settings.nilai_gamma not in [None, "auto", "scale"] else settings.nilai_gamma - epsilon = float(settings.nilai_epsilon) if settings.nilai_epsilon is not None else 0.1 - degree = int(settings.nilai_degree) if settings.nilai_degree is not None else 3 - coef0 = float(settings.nilai_coef) if settings.nilai_coef is not None else 0.0 + # Ambil parameter model dari database + kernel = settings.nama_kernel + C = float(settings.nilai_c) if settings.nilai_c is not None else 1.0 + gamma = float(settings.nilai_gamma) if settings.nilai_gamma not in [None, "auto", "scale"] else settings.nilai_gamma + epsilon = float(settings.nilai_epsilon) if settings.nilai_epsilon is not None else 0.1 + degree = int(settings.nilai_degree) if settings.nilai_degree is not None else 3 + coef0 = float(settings.nilai_coef) if settings.nilai_coef is 
-    # Train the model on the training split
-    svr.fit(X_train, y_train)
+        # Train the model on the training split
+        svr.fit(X_train, y_train)
 
-    # Predict on the test split
-    y_pred = svr.predict(X_test)
-
-    # Evaluate the model
-    mae = mean_absolute_error(y_test, y_pred)
-    rmse = np.sqrt(mean_squared_error(y_test, y_pred))
-    mape = mean_absolute_percentage_error(y_test, y_pred)
+        # Predict on the test split
+        y_pred = svr.predict(X_test)
+
+        # Evaluate the model
+        mae = mean_absolute_error(y_test, y_pred)
+        rmse = np.sqrt(mean_squared_error(y_test, y_pred))
+        mape = mean_absolute_percentage_error(y_test, y_pred)
 
-    jumlah_data_dikirim = 0
-
-    # Merge the predictions back onto the test rows
-    for i in range(len(y_pred)):
-        id_tomat = id_test[i]
-        hasil = y_pred[i]
-
-        # Invert the scaling on the prediction
-        dummy_row = np.zeros((1, 5))  # [0, 0, 0, 0, hasil_prediksi]
-        dummy_row[0][4] = hasil
-        prediksi_asli = float(scaler.inverse_transform(dummy_row)[0][4])
-
-        existing = db.execute(select(resultPredict).where(resultPredict.c.id == id_tomat)).fetchone()
-        if existing:
-            db.execute(
-                resultPredict.update()
-                .where(resultPredict.c.id == id_tomat)
-                .values(hasil_prediksi=prediksi_asli)
-            )
-        else:
-            db.execute(insert(resultPredict).values(id=id_tomat, hasil_prediksi=prediksi_asli))
-
-        jumlah_data_dikirim += 1
+        jumlah_data_dikirim = 0
+
+        # Collect the predictions, then write them in bulk
+        insert_data = []
+        update_data = []
+
+        for i in range(len(y_pred)):
+            id_tomat = id_test[i]
+            hasil = y_pred[i]
+            print("start prediction", id_tomat, hasil)
+
+            # Invert the scaling on the prediction
+            dummy_row = np.zeros((1, 5))  # [0, 0, 0, 0, hasil_prediksi]
+            dummy_row[0][4] = hasil
+            prediksi_asli = float(scaler.inverse_transform(dummy_row)[0][4])
+
+            insert_data.append({
+                "id": id_tomat,
+                "hasil_prediksi": prediksi_asli
+            })
+
+        print("collected prediction rows:", insert_data)
+        # Bulk insert
+        if insert_data:
+            db.execute(insert(resultPredict), insert_data)
+
+        # Bulk update (looped, since SQLAlchemy Core has no direct bulk update).
+        # Note: update_data is never filled in this version; all old rows were
+        # deleted above, so every prediction is a fresh insert.
+        for item in update_data:
+            db.execute(
+                resultPredict.update()
+                .where(resultPredict.c.id == item["id"])
+                .values(hasil_prediksi=item["hasil_prediksi"])
+            )
+
+        jumlah_data_dikirim += len(insert_data) + len(update_data)
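[note] The dummy_row inversion above works because StandardScaler transforms
each column independently, so the four zero-filled columns cannot affect
column 4 (RataRata_Sekarang, the target). The same value can be recovered
directly from the scaler's fitted statistics without building a dummy row
(an equivalent sketch, not the committed code):

    TARGET_COL = 4  # index of RataRata_Sekarang in the fitted scaler

    # StandardScaler: x_scaled = (x - mean_) / scale_, inverted per column.
    prediksi_asli = float(hasil * scaler.scale_[TARGET_COL] + scaler.mean_[TARGET_COL])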
 
-    db.commit()
-
-    return {
-        "Kernel": kernel,
-        "Evaluasi": { "MAE": mae, "RMSE": rmse, "MAPE": mape },
-        "Jumlah_data_dikirim": jumlah_data_dikirim,
-        "Pesan": "Prediksi seluruh data berhasil disimpan ke database"
-    }
+        db.commit()
+
+        return {
+            "Kernel": kernel,
+            "Evaluasi": { "MAE": mae, "RMSE": rmse, "MAPE": mape },
+            "Jumlah_data_dikirim": jumlah_data_dikirim,
+            "Pesan": "Prediksi seluruh data berhasil disimpan ke database"
+        }
+
+    except HTTPException:
+        db.rollback()
+        raise  # re-raise so FastAPI still returns the intended 4xx response
+    except Exception as e:
+        db.rollback()  # important: roll back the failed transaction
+        raise HTTPException(status_code=500, detail=f"Prediksi gagal: {e}")
+    finally:
+        db.close()

diff --git a/backend/vercel.json b/backend/vercel.json
new file mode 100644
index 00000000..34ce48d5
--- /dev/null
+++ b/backend/vercel.json
@@ -0,0 +1,14 @@
+{
+    "builds":[
+        {
+            "src": "index.py",
+            "use": "@vercel/python"
+        }
+    ],
+    "routes": [
+        {
+            "src": "/(.*)",
+            "dest": "index.py"
+        }
+    ]
+}
\ No newline at end of file

diff --git a/frontend/src/pages/LandingPage/ViewGrafik.jsx b/frontend/src/pages/LandingPage/ViewGrafik.jsx
index 5f283ff7..abff078d 100644
--- a/frontend/src/pages/LandingPage/ViewGrafik.jsx
+++ b/frontend/src/pages/LandingPage/ViewGrafik.jsx
@@ -71,7 +71,7 @@ const ViewGrafik = ({ date, setDate, dataYAxis, setDataYAxis, priceType, setPric
 
 
-    if (!!date && !!tempPriceType) {
+    if (!date && !tempPriceType) {
       setTabelDataAktual([]);
       setTabelDataPredict([]);
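[note] vercel.json above builds and routes every request to backend/index.py,
but index.py itself is not included in this patch. A minimal sketch of what
the @vercel/python entry point could look like, assuming the FastAPI instance
is created in main.py under the name `app` (both the module and the variable
name are assumptions, not confirmed by the patch):

    # backend/index.py -- hypothetical entry point for @vercel/python
    from main import app  # the runtime serves any ASGI app exposed as `app`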