# TIF_E41210014/api.py

# --- Earlier iteration 1 (kept commented out): JSON-only API without label mapping ---
# from flask import Flask, request, jsonify
# from flask_cors import CORS
# import joblib
#
# app = Flask(__name__)
# CORS(app)
#
# # Load the model and vectorizer
# model = joblib.load('model_svm.pkl')
# vectorizer = joblib.load('tfidf_vectorizer.pkl')
#
# @app.route('/predict', methods=['POST'])
# def predict():
#     try:
#         data = request.json
#         text = data.get("text", "")
#         if not text:
#             return jsonify({"error": "No text provided"}), 400
#
#         # Transform the text
#         text_tfidf = vectorizer.transform([text])
#         prediction = model.predict(text_tfidf)
#         return jsonify({"sentiment": int(prediction[0])})
#     except Exception as e:
#         return jsonify({"error": str(e)}), 500
#
# if __name__ == '__main__':
#     app.run(host='0.0.0.0', port=5000)
# --- Earlier iteration 2 (kept commented out): adds mapping to sentiment labels ---
# from flask import Flask, request, jsonify
# from flask_cors import CORS
# import joblib
#
# app = Flask(__name__)
# CORS(app)
#
# # Load the model and vectorizer
# model = joblib.load('model_svm.pkl')
# vectorizer = joblib.load('tfidf_vectorizer.pkl')
#
# # Map prediction results to sentiment labels
# sentiment_labels = {
#     0: "netral",
#     1: "positif",
#     2: "negatif"
# }
#
# @app.route('/predict', methods=['POST'])
# def predict():
#     try:
#         data = request.get_json()
#         if not data or "text" not in data:
#             return jsonify({"error": "No text provided"}), 400
#
#         # Transform the text
#         text_tfidf = vectorizer.transform([data["text"]])
#         prediction = model.predict(text_tfidf)
#
#         # Convert the prediction to a sentiment label
#         sentiment = sentiment_labels.get(int(prediction[0]), "unknown")
#         return jsonify({"sentiment": sentiment})
#     except Exception as e:
#         return jsonify({"error": str(e)}), 500
#
# if __name__ == '__main__':
#     app.run(host='0.0.0.0', port=5000, debug=True)
import mysql.connector
from flask import Flask, request, jsonify, render_template, redirect, url_for
from flask_cors import CORS
import joblib

app = Flask(__name__)
CORS(app)

# Load the model and vectorizer
model = joblib.load('model_svm.pkl')
vectorizer = joblib.load('tfidf_vectorizer.pkl')

# Map prediction results to sentiment labels
sentiment_labels = {
    0: "netral",
    1: "positif",
    -1: "negatif"
}
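
# A minimal sketch (never called by the API) of how 'model_svm.pkl' and
# 'tfidf_vectorizer.pkl' could have been produced. The training data source and
# the exact estimator (LinearSVC is assumed here) are not defined in this file.
def _train_and_export_sketch(texts, labels):
    from sklearn.feature_extraction.text import TfidfVectorizer
    from sklearn.svm import LinearSVC
    vec = TfidfVectorizer()
    X = vec.fit_transform(texts)   # fit TF-IDF on the training texts
    clf = LinearSVC()
    clf.fit(X, labels)             # labels as -1 / 0 / 1, matching sentiment_labels above
    joblib.dump(clf, 'model_svm.pkl')
    joblib.dump(vec, 'tfidf_vectorizer.pkl')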
# MySQL connection configuration
db_config = {
    "host": "localhost",
    "user": "root",        # Replace with your MySQL username
    "password": "",        # Replace with your MySQL password
    "database": "db_tweet"
}
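
# A minimal schema sketch (never called by the API) for the two tables this app
# reads and writes. The column names come from the queries below; the column
# types are assumptions and may differ from the actual db_tweet schema.
def _init_db_sketch():
    conn = mysql.connector.connect(**db_config)
    cursor = conn.cursor()
    cursor.execute("""
        CREATE TABLE IF NOT EXISTS tweet (
            id INT AUTO_INCREMENT PRIMARY KEY,
            sentimen TEXT,
            klasifikasi VARCHAR(20)
        )
    """)
    cursor.execute("""
        CREATE TABLE IF NOT EXISTS preprocessing (
            id INT AUTO_INCREMENT PRIMARY KEY,
            clean_text TEXT,
            tokens TEXT,
            normalized_tokens TEXT,
            filtered_tokens TEXT,
            stemmed_tokens TEXT
        )
    """)
    conn.commit()
    cursor.close()
    conn.close()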
@app.route('/predict', methods=['POST'])
def predict():
    try:
        text = request.form.get('text')
        if not text:
            return jsonify({"error": "No text provided"}), 400

        # Transform the text
        text_tfidf = vectorizer.transform([text])
        prediction = model.predict(text_tfidf)

        # Convert the prediction to a sentiment label
        sentiment = sentiment_labels.get(int(prediction[0]), "unknown")

        # Store the text and its classification (parameterized to avoid SQL injection)
        conn = mysql.connector.connect(**db_config)
        cursor = conn.cursor()
        sql = "INSERT INTO `tweet` (`sentimen`, `klasifikasi`) VALUES (%s, %s)"
        cursor.execute(sql, (text, sentiment))
        conn.commit()
        cursor.close()
        conn.close()

        # return render_template('index.html')
        return redirect("/#analisis")
        # return jsonify({"sentiment": sentiment})
    except Exception as e:
        return jsonify({"error": str(e)}), 500
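
# A sketch (never called by the API) of how a client might call /predict. The
# route reads request.form, so the request must send form data rather than
# JSON; the URL assumes the default host and port used in app.run() below, and
# 'requests' is not otherwise used by this app.
def _example_predict_request():
    import requests
    resp = requests.post(
        "http://localhost:5000/predict",
        data={"text": "contoh teks untuk analisis sentimen"},
    )
    print(resp.status_code, resp.text)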
# --- Earlier iteration of the landing view (kept commented out) ---
# def landing():
#     try:
#         conn = mysql.connector.connect(**db_config)
#         cursor = conn.cursor(dictionary=True)
#         cursor.execute("SELECT id, sentimen, klasifikasi FROM tweet ORDER BY id DESC")
#         tweets = cursor.fetchall()
#
#         # Fetch data from the preprocessing table
#         cursor.execute("""
#             SELECT id, clean_text, tokens, normalized_tokens, filtered_tokens, stemmed_tokens
#             FROM preprocessing
#             ORDER BY id DESC
#         """)
#         preprocessing_data = cursor.fetchall()
#
#         cursor.close()
#         conn.close()
#         return render_template('index.html', tweets=tweets)
#     except Exception as e:
#         print("Database Error:", e)
#         return render_template('index.html', tweets=[], error="Failed to fetch data from the database!")

@app.route('/', methods=['GET'])
def landing():
    try:
        conn = mysql.connector.connect(**db_config)
        cursor = conn.cursor(dictionary=True)

        # Fetch data from the tweet table
        cursor.execute("SELECT id, sentimen, klasifikasi FROM tweet ORDER BY id DESC")
        tweets = cursor.fetchall()

        # Fetch data from the preprocessing table
        cursor.execute("""
            SELECT id, clean_text, tokens, normalized_tokens, filtered_tokens, stemmed_tokens
            FROM preprocessing
            ORDER BY id DESC
        """)
        preprocessing_data = cursor.fetchall()

        cursor.close()
        conn.close()
        return render_template('index.html', tweets=tweets, preprocessing_data=preprocessing_data)
    except mysql.connector.Error as err:
        print(f"Error: {err}")
        return "A database error occurred", 500

if __name__ == '__main__':
    app.run(host='0.0.0.0', port=5000, debug=True)