Add faircot_automation.py
This commit is contained in:
167
faircot_automation.py
Normal file
167
faircot_automation.py
Normal file
@@ -0,0 +1,167 @@
|
||||
import os

import pandas as pd
import pyodbc
from sqlalchemy import create_engine, inspect, text
from sqlalchemy.types import VARCHAR, Date
|
||||
|
||||
# =========================
# SQL Server connection
# =========================
# NOTE(security): credentials were hardcoded in the repo. They now fall back
# to the original values (behavior unchanged) but can be overridden through
# environment variables; the secrets should be removed from source entirely.
sql_server_conn_str = (
    "DRIVER={ODBC Driver 18 for SQL Server};"
    "SERVER=VPS88.DATACENTER.CSTI;"
    "DATABASE=FAIRCOT-Test;"
    f"UID={os.getenv('FAIRCOT_SQL_UID', 'SINGA_META')};"
    f"PWD={os.getenv('FAIRCOT_SQL_PWD', 'Start.123')};"
    "TrustServerCertificate=yes;"
)

# Single shared connection for all reads below; closed at end of script.
sql_conn = pyodbc.connect(sql_server_conn_str)
|
||||
|
||||
# =========================
# Load the source view
# =========================
query = "SELECT * FROM [SINGA_VW_FREIGHT_BOOKING_INFO]"
df = pd.read_sql(query, sql_conn)

# Quick visibility into what the view actually returned.
loaded_columns = df.columns.tolist()
print("Colonnes chargées depuis la vue :")
print(loaded_columns)
print(f"{len(df)} lignes chargées")
|
||||
|
||||
# =========================
# Date conversion
# =========================

def _parse_yyyymmdd(series):
    """Parse a pandas Series of YYYYMMDD values (int or str) into datetimes.

    Values that do not match the format become NaT instead of raising
    (``errors="coerce"``), matching the original per-column behavior.
    """
    return pd.to_datetime(series.astype(str), format="%Y%m%d", errors="coerce")


# Same conversion was copy-pasted per column; apply it uniformly instead.
for _col in ("BL_Date", "ETD_Date"):
    df[_col] = _parse_yyyymmdd(df[_col])
|
||||
|
||||
# Load the shipped-lots view and normalize its YYYYMMDD date columns.
query2 = "SELECT * FROM [SINGA_VW_LOTS_SHIPPED_BY_BOOKING]"
df2 = pd.read_sql(query2, sql_conn)

# One loop instead of five copy-pasted conversion lines; semantics identical.
_DF2_DATE_COLS = (
    "BOOKING_DATE",
    "LOT_CREATION_DATE",
    "LOT_UPDATE_DATE",
    "LOGDOC_DATE",
    "SALE_INVOICE_DATE",
)
for _col in _DF2_DATE_COLS:
    df2[_col] = pd.to_datetime(df2[_col].astype(str), format="%Y%m%d", errors="coerce")
|
||||
|
||||
# =========================
# Diagnostics
# =========================
bl_dates = df["BL_Date"]
print("\n🔎 DIAGNOSTIC BL_Date")
print("BL_Date type :", bl_dates.dtype)
print("BL_Date min :", bl_dates.min())
print("BL_Date max :", bl_dates.max())

# =========================
# Filter
# =========================
# Keep only bookings strictly after the cutoff; NaT rows are dropped too,
# since NaT compares False against any timestamp.
cutoff = pd.Timestamp("2025-09-01")
df = df[df["BL_Date"] > cutoff]
print(f"{len(df)} lignes après filtre BL_Date > 2025-09-01")
|
||||
|
||||
# =========================
# Text-column safety
# =========================
# Force the column to plain strings so to_sql maps it cleanly to VARCHAR.
si_col = "ShippingInstructionNumber"
if si_col in df.columns:
    df[si_col] = df[si_col].astype(str)

# Reference tables are copied over verbatim.
df3 = pd.read_sql("SELECT * FROM [ALF]", sql_conn)
df4 = pd.read_sql("SELECT * FROM [PLACES]", sql_conn)
|
||||
|
||||
# =========================
# PostgreSQL connection
# =========================
# NOTE(security): the DSN (including the password) was hardcoded. It now
# falls back to the original value so behavior is unchanged, but can be
# overridden via TRADON_PG_URL; keep real credentials out of source control.
pg_engine = create_engine(
    os.getenv("TRADON_PG_URL", "postgresql://postgres:dsproject@localhost:5433/tradon")
)

inspector = inspect(pg_engine)

# Target table names in PostgreSQL.
table_name = "freight_booking_info"
table_name2 = "freight_booking_lots"
table_name3 = "alf"
table_name4 = "places"

# Explicit SQL types so pandas does not have to guess column types on CREATE.
dtype_mapping = {
    "ETD_Date": Date(),
    "BL_Date": Date(),
    "ShippingInstructionNumber": VARCHAR(255),
}

dtype_mapping2 = {
    "BOOKING_DATE": Date(),
    "LOT_CREATION_DATE": Date(),
    "LOT_UPDATE_DATE": Date(),
    "LOGDOC_DATE": Date(),
    "SALE_INVOICE_DATE": Date(),
}
|
||||
|
||||
# =========================
# PostgreSQL synchronization
# =========================

def _sync_table(engine, name, frame, dtype=None):
    """Replace the contents of PostgreSQL table *name* with *frame*.

    If the table already exists it is truncated (schema preserved) and the
    rows are appended; if it does not exist, ``to_sql`` creates it. Both
    steps run in a single transaction (``engine.begin``) so readers never
    observe a half-loaded table.
    """
    with engine.begin() as conn:
        # Re-inspect per call: a module-level inspector could be stale if an
        # earlier sync in this run created a table.
        if name in inspect(engine).get_table_names():
            conn.execute(text(f'TRUNCATE TABLE "{name}"'))
        frame.to_sql(name, conn, if_exists="append", index=False, dtype=dtype)
    print(f"♻️ Table mise à jour : {name} ({len(frame)} lignes)")


# Four copy-pasted sync stanzas collapsed into one helper per table.
_sync_table(pg_engine, table_name, df, dtype_mapping)
_sync_table(pg_engine, table_name2, df2, dtype_mapping2)
_sync_table(pg_engine, table_name3, df3)
_sync_table(pg_engine, table_name4, df4)

# Source connection is no longer needed; release it (it was leaked before).
sql_conn.close()

print("\n🎉 Synchronisation terminée : table filtrée sur BL_Date > 01/09/2025")
|
||||
Reference in New Issue
Block a user