dataCrawl_to_DB.py
import pandas as pd
import mysql.connector
# Connect to the database
mydb = mysql.connector.connect(
    host="localhost",
    user="root",
    password="0705",
    database="FashonApp"
)
mycursor = mydb.cursor()
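# The product_info table is assumed to already exist. A minimal schema sketch,
# kept idempotent with IF NOT EXISTS (column names come from the INSERT below;
# the column types are assumptions, not taken from the original schema):
mycursor.execute("""
CREATE TABLE IF NOT EXISTS product_info (
    id INT AUTO_INCREMENT PRIMARY KEY,
    product_name VARCHAR(255),
    original_price VARCHAR(50),
    discounted_price VARCHAR(50),
    discount_rate VARCHAR(50),
    image_link TEXT,
    search_keyword VARCHAR(100),
    purchase_link TEXT
)
""")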
# Load the CSV file into a pandas DataFrame
df = pd.read_csv("dataCrawl.csv", skiprows=1)
# Get the top 1000 rows and reset the index
df_top1000 = df.head(1000).reset_index(drop=True)
# Prepare the insert statement once
sql = """
INSERT INTO product_info (product_name, original_price, discounted_price, discount_rate, image_link, search_keyword, purchase_link)
VALUES (%s, %s, %s, %s, %s, %s, %s)
"""
# Loop over the rows in the DataFrame and insert each one
for _, row in df_top1000.iterrows():
    val = (row["product_name"], row["original_price"], row["discounted_price"], row["discount_rate"],
           row["image_link"], row["search_keyword"], row["purchase_link"])
    # Execute the insert statement
    mycursor.execute(sql, val)
# Commit the transaction
mydb.commit()
print(len(df_top1000), "records inserted.")
# Close the cursor and connection
mycursor.close()
mydb.close()
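# For larger CSV files, a single executemany() call is usually faster than
# inserting row by row. A minimal sketch of that variant, reusing the same
# column names and SQL as above (the helper name is illustrative and the
# function is not called in this script):
def bulk_insert(dataframe, connection):
    cursor = connection.cursor()
    insert_sql = """
    INSERT INTO product_info (product_name, original_price, discounted_price, discount_rate, image_link, search_keyword, purchase_link)
    VALUES (%s, %s, %s, %s, %s, %s, %s)
    """
    # Build all parameter tuples first, then send them in one batched call
    values = [
        (r["product_name"], r["original_price"], r["discounted_price"], r["discount_rate"],
         r["image_link"], r["search_keyword"], r["purchase_link"])
        for _, r in dataframe.iterrows()
    ]
    cursor.executemany(insert_sql, values)
    connection.commit()
    print(cursor.rowcount, "records inserted.")
    cursor.close()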