Initial commit
This commit is contained in:
674
Reference Data/Euclid_Queries.sql
Normal file
674
Reference Data/Euclid_Queries.sql
Normal file
@@ -0,0 +1,674 @@
|
||||
-- To be imported
|
||||
--Forward Curves
|
||||
--Mtm formula LINKED TO physical contract
|
||||
|
||||
|
||||
|
||||
-- All counterparties with their country and location names.
SELECT
    CO.Id AS SupplierId,
    CO.Name,
    CO.Description,
    CO.Address,
    CO.City,
    CO.ZipCode,
    CO.Type,
    CTRY.CountryName,
    LOC.LocationName
FROM counterpart.Company AS CO
LEFT JOIN geo.Location AS LOC ON CO.LocationId = LOC.Id
LEFT JOIN geo.Country AS CTRY ON CO.CountryId = CTRY.Id
|
||||
|
||||
|
||||
-- All counterparties, shaped for CSV import (party master data).
SELECT
    C.Name,
    '' AS tax_identifier,
    '' AS vat_code,
    '' AS address_name,
    -- Placeholder addresses ('0', '.', 'a') in the source are exported empty.
    CASE A.CleanAddress
        WHEN '0' THEN ''
        WHEN '.' THEN ''
        WHEN 'a' THEN ''
        ELSE A.CleanAddress
    END AS street,
    ISNULL(L.LocationName, '') AS city,
    C.ZipCode AS zip,
    C2.IsoCode2 AS country_code,
    '' AS subdivision_code
FROM counterpart.Company AS C
LEFT JOIN geo.Location AS L ON C.LocationId = L.Id
LEFT JOIN geo.Country AS C2 ON C.CountryId = C2.Id
-- Compute the cleansed address once instead of repeating the expression
-- in both the CASE selector and its ELSE branch.
CROSS APPLY (
    SELECT REPLACE(LTRIM(RTRIM(C.Address)), '"', '') AS CleanAddress
) AS A
ORDER BY C.Name
|
||||
|
||||
|
||||
-- Suppliers that appear as counterparts on non-cancelled purchase contracts.
SELECT DISTINCT
    CO.Id AS SupplierId,
    CO.Name,
    CO.Description,
    CO.Address,
    CO.City,
    CO.ZipCode,
    CO.Type,
    CTRY.CountryName,
    LOC.LocationName
FROM dbo.Deal AS DL
LEFT JOIN counterpart.Company AS CO ON DL.CounterpartId = CO.Id
LEFT JOIN geo.Location AS LOC ON CO.LocationId = LOC.Id
LEFT JOIN geo.Country AS CTRY ON CO.CountryId = CTRY.Id
WHERE DL.Status <> 'Cancelled'
  AND DL.BuyOrSell = 1 -- 1 = purchase side
|
||||
|
||||
|
||||
|
||||
-- Customers that appear as counterparts on non-cancelled sale contracts.
SELECT DISTINCT
    CO.Id AS CustomerId,
    CO.Name,
    CO.Description,
    CO.Address,
    CO.City,
    CO.ZipCode,
    CO.Type,
    CTRY.CountryName,
    LOC.LocationName
FROM dbo.Deal AS DL
LEFT JOIN counterpart.Company AS CO ON DL.CounterpartId = CO.Id
LEFT JOIN geo.Location AS LOC ON CO.LocationId = LOC.Id
LEFT JOIN geo.Country AS CTRY ON CO.CountryId = CTRY.Id
WHERE DL.Status <> 'Cancelled'
  -- NOTE(review): the supplier query filters BuyOrSell = 1 and the
  -- sale-contract export filters BuyOrSell = -1; '<> 1' would also match
  -- any other value — confirm only 1/-1 exist in dbo.Deal.BuyOrSell.
  AND DL.BuyOrSell <> 1
|
||||
|
||||
|
||||
-- Type de produits tradé -> Desk
|
||||
SELECT
|
||||
D.Id AS DeskId,
|
||||
D.Caption AS Desk,
|
||||
D.Description AS Description,
|
||||
D.DeskType,
|
||||
D.IsDefaultDesk,
|
||||
D.DefaultUnitReference,
|
||||
D.ActualMT,
|
||||
D.QuantityPrecision,
|
||||
D.ActualM3,
|
||||
D.MT2BBLPrecision
|
||||
FROM profiles.Desk AS D
|
||||
|
||||
|
||||
-- Book: utilisé pour classer les deals FY (projection)
|
||||
SELECT
|
||||
B.Id AS BookId,
|
||||
B.Caption AS Book,
|
||||
B.Description AS Description,
|
||||
D.Caption AS Desk,
|
||||
B.Year,
|
||||
B.IsActive
|
||||
FROM Book AS B
|
||||
INNER JOIN profiles.Desk AS D ON B.DeskId = D.Id
|
||||
ORDER BY B.Year
|
||||
|
||||
|
||||
-- Lien entre company (supplier, client...) et le produits tradé
|
||||
SELECT
|
||||
C.Name,
|
||||
D.Caption
|
||||
FROM dbo.CompanyDeskAndTradingUnit AS CDATU
|
||||
INNER JOIN counterpart.Company AS C ON CDATU.CompanyId = C.Id
|
||||
INNER JOIN profiles.Desk AS D ON CDATU.DeskId = D.Id
|
||||
|
||||
|
||||
-- Application users with their desk and security profile.
SELECT
    U.Id AS UserId,
    U.FirstName,
    U.LastName,
    U.Login AS LoginName,
    DSK.Caption AS Desk,
    DSK.Description AS DeskDescription,
    PRF.Caption AS ProfileName,
    PRF.Description AS ProfileDescription,
    U.IsAdmin,
    U.IsActive,
    U.IsSystemAdministrator
FROM profiles.XUser AS U
INNER JOIN profiles.Desk AS DSK ON U.DeskId = DSK.Id
INNER JOIN profiles.Profile AS PRF ON U.ProfileId = PRF.Id
-- NOTE(review): hard-coded exclusion list of specific accounts — presumably
-- departed/technical users; consider driving this from a flag instead.
WHERE U.Login NOT IN ('Sylvain.Duvernay', 'Nicolas.Vignon', 'Walter.Marques', 'Katia.Savtsenko', 'Arnaud.Chevallier')
|
||||
|
||||
|
||||
-- Security Group
|
||||
SELECT *
|
||||
FROM profiles.Profile AS P
|
||||
|
||||
|
||||
-- Strategy and related book.
SELECT
    S.Id AS StrategyId,
    S.Caption AS StrategyName, -- fixed misspelled alias 'StrageyName'
    S.Description AS StrategyDescription,
    B.Caption AS Book,
    B.Year AS YearBook,
    S.IsActive
FROM dbo.Strategy AS S
INNER JOIN dbo.Book AS B ON S.BookId = B.Id
ORDER BY B.Year DESC
|
||||
|
||||
|
||||
-- Product Type
|
||||
SELECT
|
||||
PT.Id AS ProductTypeId,
|
||||
PT.Caption AS ProductType,
|
||||
PT.Description
|
||||
FROM dbo.ProductType AS PT
|
||||
|
||||
|
||||
-- Product Family with its parent product type.
SELECT
    PF.Id AS ProductFamilyId,
    PF.Caption AS ProductFamily,
    PF.Description,
    PT.Caption AS ProductType
FROM dbo.ProductFamily AS PF
-- Schema-qualified for consistency with every other query in this file.
INNER JOIN dbo.ProductType AS PT ON PF.ProductTypeId = PT.Id
|
||||
|
||||
-- Product
|
||||
SELECT
|
||||
P.Id AS ProductId,
|
||||
P.ProductName,
|
||||
PF.Caption AS ProductFamily,
|
||||
PT.Caption AS ProductType,
|
||||
P.DegreeApi,
|
||||
p.Density,
|
||||
P.Mt2BblFactor,
|
||||
P.Density20,
|
||||
P.Basis
|
||||
FROM dbo.Product AS P
|
||||
INNER JOIN dbo.ProductFamily AS PF ON P.FamilyId = PF.Id
|
||||
INNER JOIN dbo.ProductType AS PT ON PF.ProductTypeId = PT.Id
|
||||
|
||||
|
||||
-- Product Polluant
|
||||
SELECT
|
||||
PP.Id AS PolluantId,
|
||||
PP.PollutantName,
|
||||
PP.EmissionFactor,
|
||||
P.ProductName
|
||||
FROM dbo.ProductPollutant AS PP
|
||||
INNER JOIN dbo.Product AS P ON PP.ProductId = P.Id
|
||||
|
||||
-- Continent
|
||||
SELECT
|
||||
C.Id AS ContinentId,
|
||||
C.ContinentName
|
||||
FROM geo.Continent AS C
|
||||
|
||||
-- Country
|
||||
SELECT
|
||||
C.Id AS CountryId,
|
||||
C.CountryName,
|
||||
C.IsoCode2,
|
||||
C.IsoCode3,
|
||||
C2.ContinentName,
|
||||
C.BoundNorth,
|
||||
C.BoundEast,
|
||||
C.BoundSouth,
|
||||
C.BoundWest
|
||||
FROM geo.Country AS C
|
||||
INNER JOIN geo.Continent AS C2 ON C.ContinentId = C2.Id
|
||||
|
||||
|
||||
|
||||
-- Incoterm
|
||||
SELECT
|
||||
BD.Id AS IncotermId,
|
||||
BD.Caption AS IncotermAbbr,
|
||||
Bd.Description AS Incoterm
|
||||
FROM dbo.BaseDefinition AS BD
|
||||
WHERE BD.ClassType = 'Incoterm'
|
||||
|
||||
|
||||
|
||||
-- Payment Term Type
|
||||
SELECT
|
||||
BD.Id AS PayTermSecurityTypeId,
|
||||
BD.Caption AS PayTermSecurityTypeAbbr,
|
||||
Bd.Description AS PayTermSecurityType
|
||||
FROM dbo.BaseDefinition AS BD
|
||||
WHERE BD.ClassType = 'PayTermSecurityType'
|
||||
|
||||
|
||||
-- Distribution of payment-term range sizes per security type
-- (how many deals use each Caption/RangeSize combination).
SELECT
    BD.Caption,
    DPT.PayTermRangeSize,
    COUNT(*) AS DealCount -- was an unnamed column
FROM dbo.DealPaymentTerm AS DPT
INNER JOIN dbo.Deal AS D ON DPT.DealId = D.Id
INNER JOIN dbo.BaseDefinition AS BD ON D.PayTermSecurityTypeId = BD.Id
WHERE DPT.PayTermRangeSize > 0
GROUP BY BD.Caption, DPT.PayTermRangeSize
ORDER BY DealCount DESC -- was positional ORDER BY 3 DESC
|
||||
|
||||
|
||||
|
||||
|
||||
-- Delivery Period (no relation with time dimension)
|
||||
SELECT
|
||||
BD.Id AS DeliveryPeriodId,
|
||||
BD.Caption AS DeliveryPeriodAbbr,
|
||||
Bd.Description AS DeliveryPeriod
|
||||
FROM dbo.BaseDefinition AS BD
|
||||
WHERE BD.ClassType = 'DeliveryPeriod'
|
||||
|
||||
|
||||
-- Basis Document Type
|
||||
SELECT
|
||||
BD.Id AS BasisDocumentId,
|
||||
BD.Caption AS BasisDocumentAbbr,
|
||||
Bd.Description AS BasisDocument
|
||||
FROM dbo.BaseDefinition AS BD
|
||||
WHERE BD.ClassType = 'BasisDocumentType'
|
||||
|
||||
|
||||
|
||||
-- BusinessArea
|
||||
SELECT
|
||||
BD.Id AS BusinessAreaId,
|
||||
BD.Caption AS BusinessAreaAbbr,
|
||||
Bd.Description AS BusinessArea
|
||||
FROM dbo.BaseDefinition AS BD
|
||||
WHERE BD.ClassType = 'BusinessArea'
|
||||
|
||||
|
||||
|
||||
|
||||
-- CostGroup
|
||||
SELECT
|
||||
BD.Id AS CostGroupId,
|
||||
BD.Caption AS CostGroupAbbr,
|
||||
Bd.Description AS CostGroup
|
||||
FROM dbo.BaseDefinition AS BD
|
||||
WHERE BD.ClassType = 'CostGroup'
|
||||
|
||||
-- CostType, shaped for CSV import (service products with zero prices).
SELECT
    BD.Caption AS [code],
    BD.Description AS [name],
    'SERVICES' AS [category],
    'Mt' AS [uom],
    0 AS [sale_price],
    0 AS [cost_price],
    '' AS [description]
FROM dbo.BaseDefinition AS BD
WHERE BD.ClassType = 'CostType'
ORDER BY BD.Caption -- was positional ORDER BY 1
|
||||
|
||||
|
||||
-- Jurisdiction
|
||||
SELECT
|
||||
BD.Id AS JurisdictionId,
|
||||
BD.Caption AS JurisdictionAbbr,
|
||||
Bd.Description AS Jurisdiction
|
||||
FROM dbo.BaseDefinition AS BD
|
||||
WHERE BD.ClassType = 'Jurisdiction'
|
||||
|
||||
|
||||
-- Exchange
|
||||
SELECT
|
||||
BD.Id AS ExchangeId,
|
||||
BD.Caption AS ExchangeAbbr,
|
||||
Bd.Description AS Exchange
|
||||
FROM dbo.BaseDefinition AS BD
|
||||
WHERE BD.ClassType = 'Exchange'
|
||||
|
||||
|
||||
-- Quotation/Price Provider
|
||||
SELECT
|
||||
BD.Id AS SourceId,
|
||||
BD.Caption AS SourceAbbr,
|
||||
Bd.Description AS Source
|
||||
FROM dbo.BaseDefinition AS BD
|
||||
WHERE BD.ClassType = 'QuoteSource'
|
||||
|
||||
|
||||
|
||||
-- Terminal
|
||||
SELECT
|
||||
T.Id AS TerminalId,
|
||||
T.TerminalName,
|
||||
L.LocationName,
|
||||
C.Name AS TerminalOwner
|
||||
FROM dbo.Terminal AS T
|
||||
INNER JOIN geo.Location AS L ON T.LocationId = L.Id
|
||||
INNER JOIN counterpart.Company AS C ON T.OwnerId = C.Id
|
||||
|
||||
|
||||
-- Vessel
|
||||
SELECT
|
||||
T.Id AS VesselId,
|
||||
T.TransporterName,
|
||||
T.TransportType,
|
||||
T.Draught,
|
||||
T.BuildingYear,
|
||||
c.Name AS VesselOwner,
|
||||
C2.CountryName AS Flag,
|
||||
T.ImoNB,
|
||||
T.CharterType,
|
||||
T.DeadWeightMT
|
||||
FROM dbo.Transporter AS T
|
||||
LEFT JOIN counterpart.Company AS C ON T.OwnerId = C.Id
|
||||
LEFT JOIN geo.Country AS C2 ON T.FlagId = C2.Id
|
||||
|
||||
-- Vessel for insert in TradOn (CSV file structure).
SELECT
    -- Title-case each word of the vessel name.
    -- STRING_SPLIT output order is undefined without the ordinal column, so
    -- the original STRING_AGG could reassemble the words out of order;
    -- enable_ordinal + WITHIN GROUP pins the original word order.
    -- NOTE(review): STRING_SPLIT(..., ' ', 1) requires SQL Server 2022+ /
    -- compatibility level 160 — confirm the target server version.
    (
        SELECT STRING_AGG(
                   UPPER(LEFT(value, 1)) + LOWER(SUBSTRING(value, 2, LEN(value))),
                   ' '
               ) WITHIN GROUP (ORDER BY ordinal)
        FROM STRING_SPLIT(T.TransporterName, ' ', 1)
    ) AS vessel_name,
    T.BuildingYear AS vessel_year,
    T.ImoNB AS vessel_imo
FROM dbo.Transporter AS T
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
-- All EUCLID Locations
|
||||
SELECT
|
||||
L.Id AS LocationId,
|
||||
L.LocationName,
|
||||
L.CoordinateX,
|
||||
L.CoordinateY,
|
||||
C.CountryName,
|
||||
C2.ContinentName,
|
||||
BD.ClassType,
|
||||
BD.Caption
|
||||
FROM geo.Location AS L
|
||||
INNER JOIN geo.Country AS C ON L.CountryId = C.Id
|
||||
INNER JOIN geo.Continent AS C2 ON C.ContinentId = C2.Id
|
||||
INNER JOIN dbo.BaseDefinition AS BD ON L.TypeId = BD.Id
|
||||
|
||||
|
||||
-- Stock locations (supplier side) - CSV file structure.
-- One row per distinct load location used by a non-cancelled deal.
SELECT DISTINCT
    LTRIM(RTRIM(LOA.LocationName)) AS [name],
    'supplier' AS [type],
    LOA.CoordinateX AS [lat],
    LOA.CoordinateY AS [lon]
FROM dbo.Deal AS D
LEFT JOIN geo.Location AS LOA ON D.LoadLocationId = LOA.Id
LEFT JOIN geo.Country AS C_L ON LOA.CountryId = C_L.Id -- kept for parity with sibling queries; no columns used
WHERE D.Status <> 'Cancelled'
  AND ISNULL(LOA.LocationName, '') <> ''
  -- Exclude this specific location — reason undocumented; TODO record why.
  AND LOA.Id <> '8855ED73-344C-4F8D-B4D6-DD214D3B7E61'
ORDER BY [name] -- was ORDER BY 2, i.e. the constant 'supplier' (no effect)
|
||||
|
||||
|
||||
-- Stock locations (customer side) - CSV file structure.
-- One row per distinct delivery location used by a non-cancelled deal.
SELECT DISTINCT
    LTRIM(RTRIM(DIS.LocationName)) AS [name],
    'customer' AS [type],
    DIS.CoordinateX AS [lat],
    DIS.CoordinateY AS [lon]
FROM dbo.Deal AS D
LEFT JOIN geo.Location AS DIS ON D.DeliveryLocationId = DIS.Id
LEFT JOIN geo.Country AS C_D ON DIS.CountryId = C_D.Id
WHERE D.Status <> 'Cancelled'
  -- NOTE(review): filtering on DIS.LocationName in WHERE turns the LEFT JOIN
  -- into an effective INNER JOIN (NULL names drop out) — presumably intended.
  AND LTRIM(RTRIM(DIS.LocationName)) <> ''
|
||||
|
||||
|
||||
|
||||
-- Purchase contracts
|
||||
;WITH PaymentTerm AS (
|
||||
SELECT D.Id AS DealId, BD.Caption, DPT.PayTermRangeSize
|
||||
FROM dbo.DealPaymentTerm AS DPT
|
||||
INNER JOIN dbo.Deal AS D ON DPT.DealId = D.Id
|
||||
LEFT JOIN dbo.BaseDefinition AS BD ON D.PayTermSecurityTypeId = BD.Id
|
||||
WHERE 1=1
|
||||
AND DPT.PayTermRangeSize > 0
|
||||
GROUP BY D.Id , BD.Caption, DPT.PayTermRangeSize
|
||||
)
|
||||
SELECT
|
||||
D.Id AS source_id
|
||||
, D.Id AS source_line_id
|
||||
, D.Reference AS number
|
||||
, ISNULL(D.OtherReference, '') AS reference
|
||||
, ISNULL(D.ContractNumber, '') AS our_reference
|
||||
, C.name AS party_name
|
||||
, 'USD' AS currency_code
|
||||
, CAST(D.Date AS DATE) AS purchase_date
|
||||
|
||||
, CASE
|
||||
WHEN PT.Caption = 'Open account' THEN CONCAT( 'NET ' , PT.PayTermRangeSize )
|
||||
WHEN PT.Caption = 'LC Doc' THEN CONCAT( 'LC ', PT.PayTermRangeSize, ' DAYS')
|
||||
ELSE CONCAT( 'NET ' , PT.PayTermRangeSize )
|
||||
END AS payment_term
|
||||
, '' AS warehouse_code
|
||||
, 'NCSW' AS weight_basis --'NCSW'
|
||||
|
||||
, ROUND(D.QuantityToleranceMinusPercent,2) AS tol_min_pct
|
||||
, ROUND(D.QuantityTolerancePlusPercent,2) AS tol_max_pct
|
||||
, D.QuantityToleranceMin AS tol_min_qty
|
||||
, D.QuantityToleranceMax AS tol_max_qty
|
||||
, ISNULL(LOA.LocationName, '') AS from_location_name
|
||||
, ISNULL(DIS.LocationName, '') AS to_location_name
|
||||
, ISNULL(INCO.Caption, '') AS incoterm_name
|
||||
, 'manual' AS invoice_method
|
||||
,'' AS [description]
|
||||
, CONCAT( D.Reference , ' / ' , B.Description , ' / ' , S.Caption) AS [comment]
|
||||
, 'line' AS line_type
|
||||
, CASE
|
||||
WHEN ISNULL(C2.CountryName,'') <> '' THEN CONCAT('H2SO4','-',C2.CountryName)
|
||||
ELSE 'H2SO4'
|
||||
END AS line_product_code
|
||||
, ISNULL(C2.CountryName,'') AS origin
|
||||
, ROUND(D.Quantity , 2) AS line_quantity
|
||||
, 'Mt' AS line_unit_code
|
||||
, ISNULL(PRICE.AvgPrice,0) AS line_price
|
||||
, CONCAT(
|
||||
D.Quantity , ' Mt of sulphuric acid - ',
|
||||
CASE
|
||||
WHEN ISNULL(D.CommodityBasis, 0) = 0 THEN 'Tel-quel'
|
||||
ELSE CAST(D.CommodityBasis AS VARCHAR(MAX)) + '%'
|
||||
END) AS line_description
|
||||
, cast(D.DeliveryDateFrom AS DATE) AS line_from_del
|
||||
, cast(D.DeliveryDateTo AS DATE) AS line_to_del
|
||||
, 'bldate' AS pricing_trigger
|
||||
, CAST(D.EstimatedDateOfBL AS DATE) AS pricing_estimated_date
|
||||
|
||||
, ISNULL( CONCAT( TRD.FirstName, ' ', TRD.LastName) , '') AS trader
|
||||
, ISNULL( CONCAT( OP.FirstName, ' ', OP.LastName) , '') AS operator
|
||||
|
||||
, ISNULL(D.CommodityBasis, 0) AS concentration
|
||||
, B.Description AS book
|
||||
, S.Caption AS strategy
|
||||
, ISNULL(BD.Caption , 'Laycan') AS period_at
|
||||
, ISNULL(D.Demurrage, '') AS demurrage
|
||||
, D.LaytimeHours AS laytime_hours
|
||||
, D.NoticeOfReadinessExtraHours AS nor_extra_hours
|
||||
, ISNULL(D.PumpingHourlyMTRate,0) AS pumping_rate
|
||||
, D.UseOnlyMinAndMax AS use_only_min_max
|
||||
, D.DropRemainingQuantity AS drop_remaining_quantity
|
||||
FROM dbo.Deal AS D
|
||||
LEFT JOIN dbo.Book AS B ON D.BookId = B.Id
|
||||
LEFT JOIN dbo.ProductContainer AS PC ON D.Id = PC.Id
|
||||
LEFT JOIN geo.Location AS LOA ON D.LoadLocationId = LOA.Id
|
||||
LEFT JOIN geo.Country AS C_L ON LOA.CountryId = C_L.Id
|
||||
LEFT JOIN geo.Location AS DIS ON D.DeliveryLocationId = DIS.Id
|
||||
LEFT JOIN geo.Country AS C_D ON DIS.CountryId = C_D.Id
|
||||
LEFT JOIN counterpart.Company AS C ON D.CounterpartId = C.Id
|
||||
LEFT JOIN geo.Country AS C_CP ON C.CountryId = C_CP.Id
|
||||
LEFT JOIN dbo.Product AS P ON D.ProductId = P.Id
|
||||
LEFT JOIN dbo.BaseDefinition AS INCO ON D.IncotermId = INCO.Id
|
||||
LEFT JOIN dbo.Strategy AS S ON D.StrategyId = S.Id
|
||||
LEFT JOIN PaymentTerm AS PT ON D.Id = PT.DealId
|
||||
LEFT JOIN (
|
||||
SELECT DealId, AVG(MvTPrice) AS AvgPrice
|
||||
FROM [singa].[VW_MVT_PRICING]
|
||||
GROUP BY DealId) AS PRICE ON D.Id = PRICE.DealId
|
||||
LEFT JOIN profiles.XUser AS TRD ON D.TraderId = TRD.Id
|
||||
LEFT JOIN profiles.XUser AS OP ON D.OperatorId = OP.Id
|
||||
LEFT JOIN dbo.BaseDefinition AS BD ON D.DeliveryPeriodId = BD.Id
|
||||
LEFT JOIN geo.Country AS C2 ON D.CountryOfOriginId = C2.Id
|
||||
WHERE 1=1
|
||||
AND D.Status <> 'Cancelled'
|
||||
AND D.BuyOrSell = 1 -- Purchase contracts
|
||||
AND ISNULL(D.OtherReference, '') NOT LIKE '%ACCT Matching%'
|
||||
AND B.Description LIKE '%2025%'
|
||||
--AND D.Reference = 2093
|
||||
ORDER BY 3,1
|
||||
|
||||
|
||||
|
||||
-- Purchase contract costs
|
||||
SELECT
|
||||
|
||||
D.Reference AS contract_number,
|
||||
LTRIM(RTRIM( ISNULL(D.OtherReference, '') )) AS contract_ref,
|
||||
1 AS line_sequence,
|
||||
CASE
|
||||
WHEN CT.Caption = 'Commision' THEN 'Commission'
|
||||
WHEN CT.Caption = 'Freight' THEN 'Maritime Freight'
|
||||
ELSE CT.Caption
|
||||
END AS product,
|
||||
LTRIM(RTRIM( ISNULL(C.Name , 'TBD Supplier') )) AS supplier,
|
||||
ER.CurrencyCode AS currency,
|
||||
CASE ER.IsRevenue
|
||||
WHEN 1 THEN 'REC'
|
||||
ELSE 'PAY'
|
||||
END AS p_r,
|
||||
'Per qt' AS mode,
|
||||
ROUND (
|
||||
(IIF(PFE.Quantity IS NULL, PFE.WeightBalance / 100, 1) * PFE.OutputPriceValue),
|
||||
2 ) AS price,
|
||||
ER.UnitReference AS unit
|
||||
|
||||
FROM dbo.Deal AS D
|
||||
LEFT JOIN dbo.Book AS B ON D.BookId = B.Id
|
||||
INNER JOIN dbo.ExpenseRevenue AS ER ON D.Id = ER.DealId
|
||||
INNER JOIN dbo.PriceFormulaElement AS PFE ON PFE.ExpenseRevenueId = ER.Id
|
||||
INNER JOIN dbo.BaseDefinition AS CT ON ER.CostTypeId = CT.Id
|
||||
LEFT JOIN counterpart.Company AS C ON ER.CounterpartId = c.Id
|
||||
WHERE 1=1
|
||||
AND D.Status <> 'Cancelled'
|
||||
AND D.BuyOrSell = 1 -- Purchase contracts
|
||||
AND ISNULL(D.OtherReference, '') NOT LIKE '%ACCT Matching%'
|
||||
AND B.Description LIKE '%2025%'
|
||||
AND ER.FormulaGroup NOT IN (1,2) -- Not Price or MtM
|
||||
ORDER BY 2
|
||||
|
||||
|
||||
|
||||
-- Sale contracts - Copy/Paste results of this query in CSV file
|
||||
;WITH PaymentTerm AS (
|
||||
SELECT D.Id AS DealId, BD.Caption, DPT.PayTermRangeSize
|
||||
FROM dbo.DealPaymentTerm AS DPT
|
||||
INNER JOIN dbo.Deal AS D ON DPT.DealId = D.Id
|
||||
LEFT JOIN dbo.BaseDefinition AS BD ON D.PayTermSecurityTypeId = BD.Id
|
||||
WHERE 1=1
|
||||
AND DPT.PayTermRangeSize > 0
|
||||
GROUP BY D.Id , BD.Caption, DPT.PayTermRangeSize
|
||||
)
|
||||
|
||||
SELECT
|
||||
D.Id AS source_id
|
||||
, D.Id AS source_line_id
|
||||
, D.Reference AS number
|
||||
, ISNULL(D.OtherReference, '') AS reference
|
||||
, ISNULL(D.ContractNumber, '') AS our_reference
|
||||
, C.name AS party_name
|
||||
, 'USD' AS currency_code
|
||||
, CAST(D.Date AS DATE) AS sale_date
|
||||
|
||||
, CASE
|
||||
WHEN PT.Caption = 'Open account' THEN CONCAT( 'NET ' , PT.PayTermRangeSize )
|
||||
WHEN PT.Caption = 'LC Doc' THEN CONCAT( 'LC ', PT.PayTermRangeSize, ' DAYS')
|
||||
ELSE CONCAT( 'NET ' , PT.PayTermRangeSize )
|
||||
END AS payment_term
|
||||
|
||||
, '' AS warehouse_code
|
||||
, 'NCSW' AS weight_basis
|
||||
, ROUND(D.QuantityToleranceMinusPercent,2) AS tol_min_pct
|
||||
, ROUND(D.QuantityTolerancePlusPercent,2) AS tol_max_pct
|
||||
, D.QuantityToleranceMin AS tol_min_qty
|
||||
, D.QuantityToleranceMax AS tol_max_qty
|
||||
|
||||
, ISNULL(LOA.LocationName, '') AS from_location_name
|
||||
, ISNULL(DIS.LocationName, '') AS to_location_name
|
||||
, ISNULL(INCO.Caption, '') AS incoterm_name
|
||||
, 'manual' AS invoice_method
|
||||
,'' AS [description]
|
||||
, CONCAT( D.Reference , ' / ' , B.Description , ' / ' , S.Caption) AS [comment]
|
||||
, 'line' AS line_type
|
||||
, CASE
|
||||
WHEN ISNULL(C2.CountryName,'') <> '' THEN CONCAT('H2SO4','-',C2.CountryName)
|
||||
ELSE 'H2SO4'
|
||||
END AS line_product_code
|
||||
, ISNULL(C2.CountryName,'') AS origin
|
||||
, ROUND(D.Quantity , 2) AS line_quantity
|
||||
, 'Mt' AS line_unit_code
|
||||
, ISNULL(PRICE.AvgPrice,0) AS line_price
|
||||
, CONCAT(
|
||||
D.Quantity , ' Mt of sulphuric acid - ',
|
||||
CASE
|
||||
WHEN ISNULL(D.CommodityBasis, 0) = 0 THEN 'Tel-quel'
|
||||
ELSE CAST(D.CommodityBasis AS VARCHAR(MAX)) + '%'
|
||||
END) AS line_description
|
||||
, cast(D.DeliveryDateFrom AS DATE) AS line_from_del
|
||||
, cast(D.DeliveryDateTo AS DATE) AS line_to_del
|
||||
, 'bldate' AS pricing_trigger
|
||||
, CAST(D.EstimatedDateOfBL AS DATE) AS pricing_estimated_date
|
||||
|
||||
, ISNULL( CONCAT( TRD.FirstName, ' ', TRD.LastName) , '') AS trader
|
||||
, ISNULL( CONCAT( OP.FirstName, ' ', OP.LastName) , '') AS operator
|
||||
|
||||
, ISNULL(D.CommodityBasis, 0) AS concentration
|
||||
, B.Description AS book
|
||||
, S.Caption AS strategy
|
||||
, ISNULL(BD.Caption , 'Laycan') AS period_at
|
||||
, ISNULL(D.Demurrage, '') AS demurrage
|
||||
, D.LaytimeHours AS laytime_hours
|
||||
, D.NoticeOfReadinessExtraHours AS nor_extra_hours
|
||||
, ISNULL(D.PumpingHourlyMTRate,0) AS pumping_rate
|
||||
, D.UseOnlyMinAndMax AS use_only_min_max
|
||||
, D.DropRemainingQuantity AS drop_remaining_quantity
|
||||
FROM dbo.Deal AS D
|
||||
LEFT JOIN dbo.Book AS B ON D.BookId = B.Id
|
||||
LEFT JOIN dbo.ProductContainer AS PC ON D.Id = PC.Id
|
||||
LEFT JOIN geo.Location AS LOA ON D.LoadLocationId = LOA.Id
|
||||
LEFT JOIN geo.Country AS C_L ON LOA.CountryId = C_L.Id
|
||||
LEFT JOIN geo.Location AS DIS ON D.DeliveryLocationId = DIS.Id
|
||||
LEFT JOIN geo.Country AS C_D ON DIS.CountryId = C_D.Id
|
||||
LEFT JOIN counterpart.Company AS C ON D.CounterpartId = C.Id
|
||||
LEFT JOIN geo.Country AS C_CP ON C.CountryId = C_CP.Id
|
||||
LEFT JOIN dbo.Product AS P ON D.ProductId = P.Id
|
||||
LEFT JOIN dbo.BaseDefinition AS INCO ON D.IncotermId = INCO.Id
|
||||
LEFT JOIN dbo.Strategy AS S ON D.StrategyId = S.Id
|
||||
LEFT JOIN PaymentTerm AS PT ON D.Id = PT.DealId
|
||||
LEFT JOIN (
|
||||
SELECT DealId, AVG(MvTPrice) AS AvgPrice
|
||||
FROM [singa].[VW_MVT_PRICING]
|
||||
GROUP BY DealId) AS PRICE ON D.Id = PRICE.DealId
|
||||
LEFT JOIN profiles.XUser AS TRD ON D.TraderId = TRD.Id
|
||||
LEFT JOIN profiles.XUser AS OP ON D.OperatorId = OP.Id
|
||||
LEFT JOIN dbo.BaseDefinition AS BD ON D.DeliveryPeriodId = BD.Id
|
||||
LEFT JOIN geo.Country AS C2 ON D.CountryOfOriginId = C2.Id
|
||||
WHERE 1=1
|
||||
AND D.Status <> 'Cancelled'
|
||||
AND D.BuyOrSell = -1 -- Sale contracts
|
||||
AND ISNULL(D.OtherReference, '') NOT LIKE '%ACCT Matching%'
|
||||
AND B.Description LIKE '%2025%'
|
||||
ORDER BY 2,1
|
||||
|
||||
|
||||
BIN
Reference Data/Shipped Quantities.xlsx
Normal file
BIN
Reference Data/Shipped Quantities.xlsx
Normal file
Binary file not shown.
@@ -0,0 +1,361 @@
|
||||
<!doctype html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta name="viewport" content="width=device-width,initial-scale=1">
|
||||
<title>tryton_helpers.py — Docstrings</title>
|
||||
<style>
|
||||
body { font-family: Arial, Helvetica, sans-serif; line-height: 1.5; margin: 32px; color: #222 }
|
||||
h1 { color: #0b4f6c }
|
||||
h2 { color: #0b6d3b }
|
||||
pre { background:#f6f8fa; padding:12px; border-radius:6px; overflow:auto }
|
||||
code { background:#eef; padding:2px 4px; border-radius:4px }
|
||||
.signature { font-family: Consolas, monospace; background:#fff8e6; padding:6px; display:inline-block; border-radius:4px }
|
||||
nav ul { list-style:none; padding-left:0 }
|
||||
nav li { margin:6px 0 }
|
||||
footer { margin-top:40px; color:#666; font-size:0.9em }
|
||||
.note { color:#8a6d3b; font-size:0.95em }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<h1>tryton_helpers.py — Docstrings</h1>
|
||||
<p class="note">Generated: 2026-02-05 — Summary of Google-style docstrings for helper functions.</p>
|
||||
|
||||
<nav>
|
||||
<h2>Contents</h2>
|
||||
<ul>
|
||||
<li><a href="#find_party_by_name">find_party_by_name</a></li>
|
||||
<li><a href="#find_product_by_code">find_product_by_code</a></li>
|
||||
<li><a href="#find_currency_by_code">find_currency_by_code</a></li>
|
||||
<li><a href="#find_contract_line_by_sequence">find_contract_line_by_sequence</a></li>
|
||||
<li><a href="#find_purchase_contract_by_ref">find_purchase_contract_by_ref</a></li>
|
||||
<li><a href="#find_supplier_category">find_supplier_category</a></li>
|
||||
<li><a href="#ensure_party_is_supplier">ensure_party_is_supplier</a></li>
|
||||
<li><a href="#find_fee_mode_by_name">find_fee_mode_by_name</a></li>
|
||||
<li><a href="#find_payable_receivable_by_name">find_payable_receivable_by_name</a></li>
|
||||
<li><a href="#get_existing_fees_for_line">get_existing_fees_for_line</a></li>
|
||||
<li><a href="#fee_already_exists">fee_already_exists</a></li>
|
||||
<li><a href="#parse_decimal">parse_decimal</a></li>
|
||||
</ul>
|
||||
</nav>
|
||||
|
||||
<hr>
|
||||
|
||||
<section id="find_party_by_name">
|
||||
<h2>find_party_by_name</h2>
|
||||
<div class="signature">def find_party_by_name(party_name)</div>
|
||||
<pre>
|
||||
Find a party (typically a supplier, client, service provider...) by its name.
|
||||
|
||||
Attempts an exact name match first. If no exact match is found, performs a
|
||||
case-insensitive 'ilike' lookup. If still not found, a warning is printed
|
||||
and the function returns None.
|
||||
|
||||
Args:
|
||||
party_name (str): The name of the party to search for. Leading/trailing
|
||||
whitespace is ignored and empty values return None.
|
||||
|
||||
Returns:
|
||||
object or None: The first matching Tryton `party.party` record if found,
|
||||
otherwise `None` when no match exists or the input is invalid.
|
||||
|
||||
Notes:
|
||||
- If multiple matches exist the first record returned by Tryton is used.
|
||||
- This function prints a warning when no party is found.
|
||||
</pre>
|
||||
</section>
|
||||
|
||||
<section id="find_product_by_code">
|
||||
<h2>find_product_by_code</h2>
|
||||
<div class="signature">def find_product_by_code(product_code)</div>
|
||||
<pre>
|
||||
Find a product by its code.
|
||||
|
||||
Attempts an exact code match first. If no exact match is found, performs a
|
||||
case-insensitive 'ilike' lookup. If still not found, a warning is printed
|
||||
and the function returns None.
|
||||
|
||||
Args:
|
||||
product_code (str): The code of the product to search for. Leading and
|
||||
trailing whitespace is ignored and empty values return None.
|
||||
|
||||
Returns:
|
||||
object or None: The first matching Tryton `product.product` record if
|
||||
found; otherwise `None` when no match exists or the input is invalid.
|
||||
|
||||
Notes:
|
||||
- If multiple matches exist the first record returned by Tryton is used.
|
||||
- This function prints a warning when no product is found.
|
||||
</pre>
|
||||
</section>
|
||||
|
||||
<section id="find_currency_by_code">
|
||||
<h2>find_currency_by_code</h2>
|
||||
<div class="signature">def find_currency_by_code(currency_code)</div>
|
||||
<pre>
|
||||
Find a currency by its ISO code or name.
|
||||
|
||||
Performs a case-sensitive search by currency name after uppercasing the
|
||||
input. Returns the first matching `currency.currency` record. If the input
|
||||
is empty or no match is found, a warning is printed and the function
|
||||
returns `None`.
|
||||
|
||||
Args:
|
||||
currency_code (str): The currency code or name to search for. Leading
|
||||
and trailing whitespace is ignored; empty values return `None`.
|
||||
|
||||
Returns:
|
||||
object or None: The first matching Tryton `currency.currency` record
|
||||
if found; otherwise `None` when no match exists or the input is
|
||||
invalid.
|
||||
|
||||
Notes:
|
||||
- This function searches by the `name` field using an uppercased exact
|
||||
match. Consider expanding to include ISO code or `ilike` searches if
|
||||
needed in the future.
|
||||
- If multiple matches exist the first record returned by Tryton is used.
|
||||
</pre>
|
||||
</section>
|
||||
|
||||
<section id="find_contract_line_by_sequence">
|
||||
<h2>find_contract_line_by_sequence</h2>
|
||||
<div class="signature">def find_contract_line_by_sequence(contract, line_sequence)</div>
|
||||
<pre>
|
||||
Find a contract line within a purchase contract by its sequence number.
|
||||
|
||||
Validates the provided contract and attempts to convert the provided
|
||||
`line_sequence` to an integer. Searches the `lines` iterable on the
|
||||
contract for a line object whose `sequence` attribute matches the
|
||||
integer sequence number. Returns the first matching line, or `None` if the
|
||||
contract is invalid, the sequence cannot be parsed, or no matching line is
|
||||
found.
|
||||
|
||||
Args:
|
||||
contract (object): A `purchase.purchase` record (or similar) expected
|
||||
to have a `lines` iterable attribute containing line objects.
|
||||
line_sequence (int | str): Sequence number to search for. Can be an
|
||||
integer or a string representation of an integer.
|
||||
|
||||
Returns:
|
||||
object or None: The matching contract line object if found; otherwise
|
||||
`None`.
|
||||
|
||||
Notes:
|
||||
- Prints a warning when the provided `line_sequence` is invalid or when
|
||||
no matching line is found.
|
||||
</pre>
|
||||
</section>
|
||||
|
||||
<section id="find_purchase_contract_by_ref">
|
||||
<h2>find_purchase_contract_by_ref</h2>
|
||||
<div class="signature">def find_purchase_contract_by_ref(contract_ref)</div>
|
||||
<pre>
|
||||
Find a purchase contract by its reference identifier.
|
||||
|
||||
Performs an exact match lookup on the `reference` field of the
|
||||
`purchase.purchase` model. If the input is empty or no contract is found,
|
||||
the function prints a warning and returns `None`.
|
||||
|
||||
Args:
|
||||
contract_ref (str): The reference string of the purchase contract.
|
||||
Leading/trailing whitespace is ignored and empty values return None.
|
||||
|
||||
Returns:
|
||||
object or None: The first matching Tryton `purchase.purchase` record if
|
||||
found; otherwise `None` when no match exists or the input is invalid.
|
||||
|
||||
Notes:
|
||||
- If multiple matches exist the first record returned by Tryton is used.
|
||||
- This function prints a warning when no contract is found.
|
||||
</pre>
|
||||
</section>
|
||||
|
||||
<section id="find_supplier_category">
|
||||
<h2>find_supplier_category</h2>
|
||||
<div class="signature">def find_supplier_category()</div>
|
||||
<pre>
|
||||
Retrieve the 'SUPPLIER' party category from the system.
|
||||
|
||||
First attempts an exact match on the `name` field for 'SUPPLIER'. If an
|
||||
exact match is not found, the function falls back to iterating all party
|
||||
categories and returns the first one whose uppercased `name` equals
|
||||
'SUPPLIER'. If no matching category is found, a warning is printed and
|
||||
`None` is returned.
|
||||
|
||||
Args:
|
||||
None
|
||||
|
||||
Returns:
|
||||
object or None: The matching `party.category` record if found; otherwise
|
||||
`None`.
|
||||
|
||||
Notes:
|
||||
- This helper helps ensure that parties can be categorized as suppliers
|
||||
without relying on exact case-sensitive persistence of the category
|
||||
name.
|
||||
</pre>
|
||||
</section>
|
||||
|
||||
<section id="ensure_party_is_supplier">
|
||||
<h2>ensure_party_is_supplier</h2>
|
||||
<div class="signature">def ensure_party_is_supplier(party, auto_enable=True)</div>
|
||||
<pre>
|
||||
Ensure a party has the SUPPLIER category, optionally adding it.
|
||||
|
||||
Checks whether the provided `party` record contains the SUPPLIER
|
||||
category. If the category is missing and `auto_enable` is True, the
|
||||
function attempts to append the category to the party and save the
|
||||
record. On success it returns the updated party and True. If
|
||||
`auto_enable` is False the function leaves the party unchanged and
|
||||
returns (party, False), printing guidance for manual action.
|
||||
|
||||
Args:
|
||||
party (object): A `party.party` record expected to have a
|
||||
`categories` collection attribute.
|
||||
auto_enable (bool): If True (default) attempt to add the SUPPLIER
|
||||
category when missing; if False do not modify the party and
|
||||
prompt the user to add the category manually.
|
||||
|
||||
Returns:
|
||||
tuple: (party, bool) where bool is True if the party has the
|
||||
SUPPLIER category after the call, otherwise False.
|
||||
|
||||
Notes:
|
||||
- Prints informative messages for missing category, permission
|
||||
issues, and other exceptions. Use `find_supplier_category()` to
|
||||
retrieve the category object directly if needed.
|
||||
</pre>
|
||||
</section>
|
||||
|
||||
<section id="find_fee_mode_by_name">
|
||||
<h2>find_fee_mode_by_name</h2>
|
||||
<div class="signature">def find_fee_mode_by_name(mode_name)</div>
|
||||
<pre>
|
||||
Map a human-readable fee mode name to the system's internal mode code.
|
||||
|
||||
Normalizes the input (trims whitespace and uppercases) and returns a
|
||||
short code used internally. Known mappings are:
|
||||
- 'PER QT' -> 'perqt'
|
||||
- '% COST PRICE' -> 'pcost'
|
||||
- '% PRICE' -> 'pprice'
|
||||
- 'LUMP SUM' -> 'lumpsum'
|
||||
|
||||
Args:
|
||||
mode_name (str): Fee mode display name. Leading/trailing whitespace is
|
||||
ignored and comparison is case-insensitive.
|
||||
|
||||
Returns:
|
||||
str or None: The mapped internal mode string if recognized, otherwise
|
||||
`None` for unknown or empty inputs.
|
||||
|
||||
Notes:
|
||||
- Prints a warning when an unknown mode is encountered.
|
||||
</pre>
|
||||
</section>
|
||||
|
||||
<section id="find_payable_receivable_by_name">
|
||||
<h2>find_payable_receivable_by_name</h2>
|
||||
<div class="signature">def find_payable_receivable_by_name(p_r_value)</div>
|
||||
<pre>
|
||||
Determine whether a fee is payable or receivable from a P_R-style value.
|
||||
|
||||
Normalizes the input by trimming whitespace and uppercasing it, then maps
|
||||
common variants to either 'pay' or 'rec'. Recognised payable values include
|
||||
'PAY', 'PAYABLE', and 'P'; recognised receivable values include 'REC',
|
||||
'RECEIVABLE', and 'R'. An empty or falsy input returns `None`. Unknown
|
||||
values print a warning and default to 'pay'.
|
||||
|
||||
Args:
|
||||
p_r_value (str): Raw value from the P_R column (e.g., 'PAY', 'REC').
|
||||
|
||||
Returns:
|
||||
str or None: 'pay' for payable, 'rec' for receivable, or `None` for
|
||||
empty/invalid inputs.
|
||||
|
||||
Notes:
|
||||
- Prints a warning when encountering an unrecognised value and
|
||||
defaults to 'pay' to maintain backward compatibility.
|
||||
</pre>
|
||||
</section>
|
||||
|
||||
<section id="get_existing_fees_for_line">
|
||||
<h2>get_existing_fees_for_line</h2>
|
||||
<div class="signature">def get_existing_fees_for_line(contract_line)</div>
|
||||
<pre>
|
||||
Retrieve the existing fees associated with a contract line.
|
||||
|
||||
Validates the provided `contract_line` and returns its `fees` collection
|
||||
if present. If the contract line is missing or does not expose a `fees`
|
||||
attribute, the function returns an empty list to simplify downstream
|
||||
duplicate checks and iteration.
|
||||
|
||||
Args:
|
||||
contract_line (object): A contract line object expected to have a
|
||||
`fees` iterable attribute (may be None or empty).
|
||||
|
||||
Returns:
|
||||
list: The fees associated with the contract line, or an empty list if
|
||||
none exist or the input is invalid.
|
||||
</pre>
|
||||
</section>
|
||||
|
||||
<section id="fee_already_exists">
|
||||
<h2>fee_already_exists</h2>
|
||||
<div class="signature">def fee_already_exists(existing_fees, product, supplier, price)</div>
|
||||
<pre>
|
||||
Check whether a fee with the same product, supplier and price already exists.
|
||||
|
||||
Iterates `existing_fees` and compares each fee's `product.id`, `party.id`,
|
||||
and `price` to the provided `product`, `supplier`, and `price` respectively.
|
||||
The function performs attribute presence checks to avoid AttributeError and
|
||||
uses exact equality for price comparison.
|
||||
|
||||
Args:
|
||||
existing_fees (iterable): Iterable of fee objects (may be a list or
|
||||
None). Each fee is expected to expose `product`, `party`, and
|
||||
`price` attributes.
|
||||
product (object): Product record with an `id` attribute.
|
||||
supplier (object): Supplier/party record with an `id` attribute.
|
||||
price (Decimal | number): Price value to match against fee.price.
|
||||
|
||||
Returns:
|
||||
bool: True if a matching fee exists; False otherwise.
|
||||
|
||||
Notes:
|
||||
- Exact equality is used for price comparison; consider tolerances when
|
||||
comparing floating point values.
|
||||
</pre>
|
||||
</section>
|
||||
|
||||
<section id="parse_decimal">
|
||||
<h2>parse_decimal</h2>
|
||||
<div class="signature">def parse_decimal(value, field_name)</div>
|
||||
<pre>
|
||||
Parse and validate a numeric value into a Decimal.
|
||||
|
||||
Converts `value` to a Decimal using `Decimal(str(value))`. Returns `None`
|
||||
for empty inputs, common string null markers (e.g. 'NULL', 'NONE', 'N/A'),
|
||||
or when the value cannot be parsed as a decimal (in which case a warning
|
||||
is printed referencing `field_name`).
|
||||
|
||||
Args:
|
||||
value (str|int|Decimal|None): The raw value to parse into a Decimal.
|
||||
field_name (str): Name of the field (used to provide contextual
|
||||
information in warning messages).
|
||||
|
||||
Returns:
|
||||
Decimal or None: A Decimal instance when parsing succeeds; otherwise
|
||||
`None` for empty/invalid inputs.
|
||||
|
||||
Notes:
|
||||
- Uses `Decimal(str(value))` to avoid floating-point precision issues.
|
||||
- Catching `ValueError` and `TypeError` ensures the function is safe to
|
||||
call on arbitrary input values encountered while importing data.
|
||||
</pre>
|
||||
</section>
|
||||
|
||||
<footer>
|
||||
<p>Generated from <code>helpers/tryton_helpers.py</code>.</p>
|
||||
</footer>
|
||||
</body>
|
||||
</html>
|
||||
0
Reference Data/python_project/helpers/__init__.py
Normal file
0
Reference Data/python_project/helpers/__init__.py
Normal file
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
78
Reference Data/python_project/helpers/config.py
Normal file
78
Reference Data/python_project/helpers/config.py
Normal file
@@ -0,0 +1,78 @@
|
||||
import psycopg2
|
||||
from proteus import config
|
||||
|
||||
# ---------------------------------------------------------------------------
# Loader input files (relative CSV paths)
# ---------------------------------------------------------------------------
EMPLOYEE_CSV = 'loaders/employees.csv'
PARTIES_CSV = 'loaders/parties.csv'
CUSTOMER_STOCK_LOCATIONS_CSV = 'loaders/customer_stock_locations.csv'
SUPPLIER_STOCK_LOCATIONS_CSV = 'loaders/supplier_stock_locations.csv'
SERVICES_CSV = 'loaders/services.csv'
VESSELS_CSV = 'loaders/vessels.csv'

PURCHASE_CONTRACTS_CSV = 'loaders/Purchase_Contracts_with_mapping.csv'
PURCHASE_FEES_CSV = 'loaders/purchase_fees.csv'

SALE_CONTRACTS_CSV = 'loaders/Sale_Contracts_with_mapping.csv'


# ---------------------------------------------------------------------------
# Tryton XML-RPC endpoint (consumed by proteus in connect_to_tryton)
# NOTE(review): credentials are committed in source control; consider moving
# them to environment variables or an untracked local settings file.
# ---------------------------------------------------------------------------
HTTPS = 'https://'
SERVER_URL = 'itsa.open-squared.tech'
DATABASE_NAME = 'tradon'
USERNAME = 'admin'
PASSWORD = 'dsproject'


# ---------------------------------------------------------------------------
# Direct PostgreSQL access (migration mapper, custom field updates)
# ---------------------------------------------------------------------------
DB_HOST = '72.61.163.139'
DB_PORT = 5433
DB_USER = 'postgres'
DB_PASSWORD = 'dsproject'


# Bundled connection parameters, suitable for psycopg2.connect(**DB_CONFIG).
DB_CONFIG = {
    'host': DB_HOST,            # PostgreSQL host
    'port': DB_PORT,            # PostgreSQL port
    'database': DATABASE_NAME,  # Tryton database name
    'user': DB_USER,            # database user
    'password': DB_PASSWORD,    # database password
}
|
||||
|
||||
|
||||
# Function to connect to Tryton via XML-RPC
|
||||
def connect_to_tryton():
    """Open an XML-RPC connection to the Tryton server via proteus.

    Prints the target server/database/user, then calls
    ``config.set_xmlrpc`` with a credentialed URL built from the module
    constants. On failure, prints troubleshooting hints and re-raises the
    underlying exception.

    Returns:
        The proteus configuration object returned by ``config.set_xmlrpc``.
    """
    print(f"Server: {HTTPS}{SERVER_URL}")
    print(f"Database: {DATABASE_NAME}")
    print(f"Username: {USERNAME}")

    # Credentials are embedded in the URL, as proteus expects.
    url = f'{HTTPS}{USERNAME}:{PASSWORD}@{SERVER_URL}/{DATABASE_NAME}/'
    try:
        tryton_config = config.set_xmlrpc(url)
        print("✓ Connected successfully!\n")
        return tryton_config
    except Exception as err:
        print(f"✗ Connection failed: {err}")
        print("\nTroubleshooting:")
        for hint in (
            " - Verify the server URL is correct and accessible",
            " - Check that the Tryton server is running",
            " - Verify username and password are correct",
            " - Make sure you can access the server in a browser",
        ):
            print(hint)
        raise
|
||||
|
||||
|
||||
|
||||
def get_db_connection():
    """Open and return a direct psycopg2 connection to the Tryton database.

    Uses the module-level DB_* / DATABASE_NAME constants. On failure the
    error is printed and the original exception is re-raised.

    Returns:
        psycopg2 connection object.
    """
    params = {
        'host': DB_HOST,
        'port': DB_PORT,
        'database': DATABASE_NAME,
        'user': DB_USER,
        'password': DB_PASSWORD,
    }
    try:
        return psycopg2.connect(**params)
    except Exception as exc:
        print(f"✗ Database connection failed: {exc}")
        raise
|
||||
332
Reference Data/python_project/helpers/migration_mapping.py
Normal file
332
Reference Data/python_project/helpers/migration_mapping.py
Normal file
@@ -0,0 +1,332 @@
|
||||
# helpers/migration_mapping.py
|
||||
|
||||
from datetime import date
|
||||
import psycopg2
|
||||
from psycopg2.extras import execute_values
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class MigrationMapper:
    """Handle migration mapping records between source system and Tryton.

    Each mapping links one migrated object (``object_type`` + ``source_id``
    in the legacy system) to the Tryton record created for it
    (``tryton_model`` / ``tryton_id``), plus a ``recon_key`` used for
    reconciliation (e.g. a contract number).

    NOTE(review): the queries treat object_type, source_id, tryton_model and
    recon_key in ``public.os_migration_mapping`` as PostgreSQL array columns
    (reads use ``= ANY(col)`` and ``col[1]``), so writes wrap each scalar in
    a one-element list. Confirm against the actual table DDL.

    Use as a context manager; the connection is committed on clean exit and
    rolled back when the ``with`` body raises::

        with MigrationMapper(db_config) as mapper:
            mapper.save_mapping(...)
    """

    # Shared projection for queries that return full mapping records; the
    # array columns are unwrapped to scalars via their first element.
    _MAPPING_SELECT = """
        SELECT
            object_type[1] as object_type,
            source_id[1] as source_id,
            tryton_model[1] as tryton_model,
            tryton_id,
            recon_key[1] as recon_key,
            write_date
        FROM public.os_migration_mapping
    """

    def __init__(self, db_config):
        """
        Initialize with database configuration.

        Args:
            db_config: dict with keys: host, port, database, user, password
        """
        self.db_config = db_config
        self.connection = None  # opened lazily by __enter__

    def __enter__(self):
        """Context manager entry - establish database connection."""
        self.connection = psycopg2.connect(**self.db_config)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context manager exit - commit on success, roll back on error, close.

        Returns None (falsy) so exceptions from the ``with`` body propagate.
        """
        if self.connection:
            if exc_type is None:
                self.connection.commit()
            else:
                self.connection.rollback()
            self.connection.close()

    @staticmethod
    def _row_to_mapping(row):
        """Convert one row produced by ``_MAPPING_SELECT`` into a plain dict."""
        return {
            'object_type': row[0],
            'source_id': row[1],
            'tryton_model': row[2],
            'tryton_id': row[3],
            'recon_key': row[4],
            'write_date': row[5],
        }

    def save_mapping(self, object_type, source_id, tryton_model, tryton_id, recon_key):
        """
        Save a single migration mapping record.

        Thin wrapper delegating to :meth:`save_mappings_batch`.

        Args:
            object_type: Type of object being migrated (e.g., 'sale_contract')
            source_id: ID from source system
            tryton_model: Tryton model name (e.g., 'sale.sale')
            tryton_id: Tryton record ID
            recon_key: Reconciliation key for matching (e.g., contract number)
        """
        self.save_mappings_batch([{
            'object_type': object_type,
            'source_id': source_id,
            'tryton_model': tryton_model,
            'tryton_id': tryton_id,
            'recon_key': recon_key,
        }])

    def save_mappings_batch(self, mappings):
        """
        Save multiple migration mapping records in batch.

        Args:
            mappings: List of dicts with keys: object_type, source_id,
                tryton_model, tryton_id, recon_key.
        """
        if not mappings:
            logger.warning("No mappings to save")
            return

        cursor = self.connection.cursor()

        # Scalar fields are wrapped in one-element lists because the target
        # columns are arrays; tryton_id stays scalar. write_date records the
        # day the mapping was written.
        values = [
            (
                [mapping['object_type']],
                [mapping['source_id']],
                [mapping['tryton_model']],
                mapping['tryton_id'],
                [mapping['recon_key']],
                date.today()
            )
            for mapping in mappings
        ]

        try:
            # execute_values performs an efficient multi-row insert;
            # ON CONFLICT DO NOTHING makes re-imports idempotent.
            execute_values(
                cursor,
                """
                INSERT INTO public.os_migration_mapping
                (object_type, source_id, tryton_model, tryton_id, recon_key, write_date)
                VALUES %s
                ON CONFLICT DO NOTHING
                """,
                values,
                template="(%s, %s, %s, %s, %s, %s)"
            )
            logger.info(f"Saved {len(mappings)} migration mapping records")
        except Exception as e:
            logger.error(f"Error saving migration mappings: {e}")
            raise
        finally:
            cursor.close()

    def get_tryton_id(self, object_type, source_id):
        """
        Retrieve Tryton ID for a given source system ID.

        Args:
            object_type: Type of object
            source_id: ID from source system

        Returns:
            int: Tryton ID of the most recent matching mapping, or None.
        """
        cursor = self.connection.cursor()
        try:
            cursor.execute(
                """
                SELECT tryton_id
                FROM public.os_migration_mapping
                WHERE %s = ANY(object_type)
                AND %s = ANY(source_id)
                ORDER BY write_date DESC
                LIMIT 1
                """,
                (object_type, source_id)
            )
            result = cursor.fetchone()
            return result[0] if result else None
        finally:
            cursor.close()

    def get_source_id(self, object_type, tryton_id):
        """
        Retrieve source system ID for a given Tryton ID.

        Args:
            object_type: Type of object
            tryton_id: Tryton record ID

        Returns:
            str: Source ID of the most recent matching mapping, or None.
        """
        cursor = self.connection.cursor()
        try:
            cursor.execute(
                """
                SELECT source_id[1]
                FROM public.os_migration_mapping
                WHERE %s = ANY(object_type)
                AND tryton_id = %s
                ORDER BY write_date DESC
                LIMIT 1
                """,
                (object_type, tryton_id)
            )
            result = cursor.fetchone()
            return result[0] if result else None
        finally:
            cursor.close()

    def get_mapping_by_recon_key(self, object_type, recon_key):
        """
        Retrieve the most recent mapping for a reconciliation key.

        Args:
            object_type: Type of object
            recon_key: Reconciliation key

        Returns:
            dict: Mapping record (see ``_row_to_mapping``) or None.
        """
        cursor = self.connection.cursor()
        try:
            cursor.execute(
                self._MAPPING_SELECT + """
                WHERE %s = ANY(object_type)
                AND %s = ANY(recon_key)
                ORDER BY write_date DESC
                LIMIT 1
                """,
                (object_type, recon_key)
            )
            result = cursor.fetchone()
            return self._row_to_mapping(result) if result else None
        finally:
            cursor.close()

    def delete_mappings_by_source(self, object_type, source_ids):
        """
        Delete mappings by source IDs (useful for re-import).

        Args:
            object_type: Type of object
            source_ids: List of source IDs to delete

        Returns:
            int: Number of records deleted (0 for an empty input list).
        """
        if not source_ids:
            return 0

        cursor = self.connection.cursor()
        try:
            # psycopg2 adapts the Python list to a PostgreSQL array for ANY().
            cursor.execute(
                """
                DELETE FROM public.os_migration_mapping
                WHERE %s = ANY(object_type)
                AND source_id[1] = ANY(%s)
                """,
                (object_type, source_ids)
            )
            deleted_count = cursor.rowcount
            logger.info(f"Deleted {deleted_count} migration mapping records")
            return deleted_count
        finally:
            cursor.close()

    def get_all_mappings(self, object_type=None):
        """
        Retrieve all mappings, newest first, optionally filtered by type.

        Args:
            object_type: Optional object type filter

        Returns:
            list: List of mapping dicts (see ``_row_to_mapping``).
        """
        cursor = self.connection.cursor()
        try:
            # Build the query once instead of duplicating the full projection
            # for the filtered and unfiltered cases.
            query = self._MAPPING_SELECT
            params = ()
            if object_type:
                query += " WHERE %s = ANY(object_type)"
                params = (object_type,)
            query += " ORDER BY write_date DESC"
            cursor.execute(query, params)
            return [self._row_to_mapping(row) for row in cursor.fetchall()]
        finally:
            cursor.close()
|
||||
|
||||
|
||||
# Simplified standalone function for quick integration
|
||||
def save_migration_mapping(db_config, object_type, source_id, tryton_model,
|
||||
tryton_id, recon_key):
|
||||
"""
|
||||
Standalone function to save a single migration mapping
|
||||
|
||||
Args:
|
||||
db_config: dict with database connection parameters
|
||||
object_type: Type of object being migrated
|
||||
source_id: ID from source system
|
||||
tryton_model: Tryton model name
|
||||
tryton_id: Tryton record ID
|
||||
recon_key: Reconciliation key for matching
|
||||
"""
|
||||
with MigrationMapper(db_config) as mapper:
|
||||
mapper.save_mapping(object_type, source_id, tryton_model, tryton_id, recon_key)
|
||||
1158
Reference Data/python_project/helpers/tryton_helpers.py
Normal file
1158
Reference Data/python_project/helpers/tryton_helpers.py
Normal file
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,43 @@
|
||||
name,type,lat,lon
|
||||
Adelaide,customer,0,0
|
||||
BAHODOPI-POSO,customer,0,0
|
||||
Bandirma,customer,40.369697136983,27.9607200622559
|
||||
Bangkok,customer,13.75398,100.50144
|
||||
Barquito,customer,-26.35,-70.65
|
||||
Beaumont,customer,30.077338673234,-94.0840816497804
|
||||
Budge Budge,customer,22.47514,88.17767
|
||||
Fangcheng,customer,21.6140258994842,108.322792053223
|
||||
Gladstone,customer,-23.81603,151.26279
|
||||
Go Dau,customer,15.9742225,105.806431
|
||||
Gresik,customer,-7.14994933032079,112.60986328125
|
||||
Hai Phong,customer,20.86774,106.69179
|
||||
Hibi,customer,34.39572,133.78713
|
||||
Huelva,customer,37.26638,-6.94004
|
||||
Isabel,customer,6.70748678037316,121.965022087097
|
||||
Japan,customer,37.3002752813444,138.515625
|
||||
Jorf Lasfar,customer,-18.7771925,46.854328
|
||||
Kakinada,customer,16.9957254352666,82.4434661865236
|
||||
Kandla,customer,22.80078,69.70705
|
||||
Kemaman,customer,4.23333,103.45
|
||||
LHOKSEUMAWE,customer,0,0
|
||||
Limas,customer,40.7389332411361,29.6246337890626
|
||||
Map Ta Phut,customer,12.648713210033,101.143569946289
|
||||
Mejillones,customer,-22.8215665806022,-70.5869030289666
|
||||
Naoshima,customer,34.4612772884371,134.009170532227
|
||||
New Mangalore,customer,0,0
|
||||
New Orleans,customer,29.95465,-90.07507
|
||||
Onahama,customer,36.9268411923102,140.907897949219
|
||||
Onsan,customer,35.4601106672107,129.37671661377
|
||||
Paradip,customer,20.2644516477765,86.7077064514162
|
||||
Pori,customer,61.6450645681561,21.3828277587891
|
||||
Port Hedland,customer,0,0
|
||||
Ras Al Khair,customer,0,0
|
||||
Rio Grande,customer,-32.082574559546,-52.0916748046876
|
||||
Saganoseki,customer,33.2411282959313,131.885418891907
|
||||
Samut Prakan,customer,13.5051524908849,100.599746704102
|
||||
San Juan,customer,18.46633,-66.10572
|
||||
Stockton,customer,37.9527255601159,-121.316356658936
|
||||
Tampa,customer,27.897652653541,-82.4285316467287
|
||||
Tema,customer,-0.828097,11.598909
|
||||
Tuticorin,customer,8.75436,78.20429
|
||||
Visakhapatnam,customer,17.4764321971955,83.5180664062501
|
||||
|
17
Reference Data/python_project/loaders/Employees.csv
Normal file
17
Reference Data/python_project/loaders/Employees.csv
Normal file
@@ -0,0 +1,17 @@
|
||||
name,code
|
||||
Laura Girardot,EMP1
|
||||
Jean-Francois Muse,EMP2
|
||||
ShiHan Tay,EMP3
|
||||
Max Baiwir,EMP4
|
||||
Rodrigue Thomas,EMP5
|
||||
Angela Arbenz,EMP6
|
||||
Javier Miraba,EMP7
|
||||
Stephane Monnard,EMP8
|
||||
Oliver Gysler,EMP9
|
||||
Mario Isely,EMP10
|
||||
Gregory Gondeau,EMP11
|
||||
Sylviane Dubuis,EMP12
|
||||
Steve Zaccarini,EMP13
|
||||
Smult Kouane,EMP14
|
||||
David Susanto,EMP15
|
||||
Jeremie Collot,EMP16
|
||||
|
372
Reference Data/python_project/loaders/Parties.csv
Normal file
372
Reference Data/python_project/loaders/Parties.csv
Normal file
@@ -0,0 +1,372 @@
|
||||
name,tax_identifier,vat_code,address_name,street,city,zip,country_code,subdivision_code
|
||||
"HEUNG-A SHIPPING CO., LTD.",,,,"3F, HEUNG-A BLDG, 21, SAEMAL-RO 5-GIL, SONGPA-GU",Seoul,0,KR,
|
||||
SUMITOMO CORPORATION TOKYO,,,,"INORGANIC CHEMICALS TEAM, INORGANIC AND PERFORMANCE CHEMICALS DEPT. 3-2, OTEMACHI 2-CHROME, CHIYODA-KU",Tokyo,100-8601,JP,
|
||||
TRANSAMMONIA AG,,,,"PRIME TOWER, HARDSTRASSE 201, 8005 ZURICH, SWITZERLAND",Zurich,8005,CH,
|
||||
AMEROPA,,,,"ONE TEMASEK AVENUE, MILLENIA TOWER HEX 31-02",Singapore,039192,SG,
|
||||
NISSHIN SHIPPING CO. LTD,,,,,Monaco,0,MC,
|
||||
KORINDO PELAYARAN PT,,,,"12th Floor, Wisma Korindo, Jalan Letjen MT Haryono 62, Kel Pancoran",Jakarta,NULL,ID,
|
||||
PARANAPANEMA S/A (C),,,,"VIA DO COBRE N° 3700, AREA INDUSTRIAL OESTE (AIO) - COPEC, CEP 42850-00 DIAS D'AVILA, BAHIA, BRAZIL",Bahia,CEP 42850-00,BR,
|
||||
AZTEC MARINE LTD,,,,,Monaco,NULL,MC,
|
||||
MALAYSIAN PHOSPHATE ADDITIVES SB.,,,,"SUITE 609, LEVEL 6, BLOCK F, PHILEO DAMANSARA 1, N° 9, JALAN 16/11, 46350 PETALING JAYA, SELANGOR DE",Petaling Jaya,46350,MY,
|
||||
BSI INSPEC. SPAIN (BUREAU VERITAS),,,,,Monaco,NULL,MC,
|
||||
MH PROGRESS LINE S.A.,,,,,Monaco,0,MC,
|
||||
NAVIERA PETRAL SA,,,,"Oficina 102, Calle Alcanflores 393, Miraflores",Lima,15074,PE,
|
||||
NAVIG8 LIMITED,,,,,Monaco,0,MC,
|
||||
INTEROCEANIC (C),,,,7 RENAISSANCE SQUARE 7TH FLOOR,New York,10601,US,
|
||||
LYNX SHIPPING PTE. LTD,,,,"16 Raffles Quay #41-02, Hong Leong Building, Singapore ",,048581,SG,
|
||||
ABO SINGAPORE PTE LTD,,,,,Monaco,0,MC,
|
||||
FERTIPAR FERTILIZANTES,,,,"RUA COMENDADOR CORREA JUNIOR, 1178, BAIRRO 29 DE JULHO, CEP 83.203-762 PARANAGUA, PR BRASIL",Paranagua,CEP 83.203-762,BR,
|
||||
NATIONAL CHEMICAL CARRIERS LTD,,,,,Monaco,0,MC,
|
||||
BALLANCE AGRI-NUTRIENTS,,,,"HEWLETT'S ROAD, MOUNT MAUNGANUI, NEW ZEALAND",Mount Maunganui,NULL,NZ,
|
||||
JX OCEAN CO. LTD,,,,"THE LANDMARK TOWER YOKOHAMA 48TH FLOOR 2-2-1, MINATOMIRAI, NISHI-KU",Yokohama,220-8148,JP,
|
||||
SUKSES OSEAN KHATULISTIWA LINE,,,,"Blok J, Jalan Mangga Dua Dalam 5&8, Jakarta Pusat",Jakarta,10730,ID,
|
||||
PARADEEP PHOSPHATES LIMITED,,,,"BAYAN BHAWAN, PANDIT J. N. MARG, BHUBANESWAR 751001, ORISSA, INDIA",Orissa,751001,IN,
|
||||
NEW EAGLE SHIPPING S.A.,,,,,Monaco,0,MC,
|
||||
KGHM POLSKA MIEDZ S.A.,,,,"UL. M. SKLODOWSKIEJ-CURIE 48, 59-301 LUBIN, POLAND",Lubin,59-301,PL,
|
||||
SUMITOMO CORPORATION - TOHO,,,,"3-2 OTEMACHI 2-CHOME, CHIYODA-KU",Tokyo,100-8601,JP,
|
||||
INTERTEK CALEB BRETT INDIA,,,,,Monaco,0,MC,
|
||||
PTT ASAHI CHEMICAL CO. LTD.,,,,"8 PHANGMUANG CHAPOH 3-1 ROAD, HUAYPONG SUB-DISTRICT, MUANG DISTRICT, RAYONG 21150, THAILAND",Rayong,21150,TH,
|
||||
NANJING MEDIATOR PETROCHEMICAL TECHNOLOGY CO. LTD.,,,,"NANJING MEDIATOR PETROCHEMICAL TECHNOLOGY CO., LTD, Shuanglong road in Nanjing city of Jiangning Development Zone No 1700, 7th floor, A block, P.R. China China. ",,NULL,CN,
|
||||
BOLIDEN HARJAVALTA OY,,,,TEOLLISUUSKATU 1,Harjavalta,29200,FI,
|
||||
TEAM TANKERS INTERNATIONAL LTD.,,,,,Monaco,0,MC,
|
||||
SUMITOMO CORPORATION INDONESIA (SSRI),,,,Jl. Jend. Sudirman Kav. 61-62,Jakarta,12190,ID,
|
||||
P&F MARINE CO LTD,,,,,Monaco,0,MC,
|
||||
LA POSTE,,,,"WANKDORFALLEE 4, 3030 BERNE, SWITZERLAND",Bern,3030,CH,
|
||||
AURUBIS AG,,,,HOVESTRASSE 50,Hamburg,20539,DE,
|
||||
ERSHIP SAU,,,,CALLE MARINA 26,Huelva,21001,ES,
|
||||
"KAWASAKI KISEN KAISHA, LTD.",,,,,Monaco,0,MC,
|
||||
SUMITOMO CORPORATION - MMC,,,,"3-2 OTEMACHI 2-CHOME, CHIYODA-KU",Tokyo,100-8601,JP,
|
||||
UBE CHEMICALS ASIA PCL (Ex TCL),,,,"18TH FLOOR, SATHORN SQUARE OFFICE TOWER, N° 98 NORTH SATHORN ROAD, SILON SUB-DISTRICT, BANGKOK 10500, THAILAND",Bangkok,10500,TH,
|
||||
KOREA ZINC COMPANY LTD,,,,"ONSAN COMPLEX, 505 DAEJONG-RI, ONSAN-EUP, ULJU-GUN ULSAN, KOREA",Ulsan,NULL,KR,
|
||||
MTM TRADING LLC OF MARSHALL ISLANDS,,,,"2960 POST ROAD, CU",Southport,06890,US,
|
||||
"HOPECHEM CO., LTD (S)",,,,"ROOM 302, NO.233 KENGBEI VILLAGE, WUTONG-SHAN ART TOWN, LUOHU DISTRICT,",Shenzhen,518114,CN,
|
||||
CS MARINE CO LTD,,,,,Monaco,0,MC,
|
||||
"HAFNIA POOLS PTE LTD ",,,,"18-01, MapleTree Business City, 10, Pasir Panjang Road, Singapore 117438",,117438,SG,
|
||||
SAEHAN MARINE CO LTD. (ULSAN),,,,"502, KYONG DONG E&S #304 JANGSENGPO-DONG, NAM-KU, ULSAN 44780, KOREA",Ulsan,44780,KR,
|
||||
CIBRAFERTIL COMP. BRASILIERA DE FERTILIZANTES,,,,"#19-01 SUNTEC TOWER THREE,",Camacari,42810-290,BR,
|
||||
FAIRFIELD CHEMICAL CARRIERS BV,,,,"8 TEMASEK BOULEVARD, #19-01 SUNTEC TOWER THREE,",Singapore,038988,SG,
|
||||
"ASHAPURA PERFOCLAY LTS. ",,,,"JEEVAN ADYOG BUILDING, 3RD FLOOR, 278 D. N. ROAD, FORT, MUMBAI 400 001",Mumbai,400 001,IN,
|
||||
TATA CHEMICALS LIMITED,,,,"DURGACHAK, HALDIA, WEST BENGAL 721 602, INDIA",West Bengal,721 602,IN,
|
||||
DOE RUN PERU S.R.L.,,,,"#14-04 SOUTHPOINT,",Lima,27,PE,
|
||||
INTEROCEANIC (S),,,,7 RENAISSANCE SQUARE 7TH FLOOR,New York,10601,US,
|
||||
"SAEHAN MARINE CO., LTD",,,,,Monaco,NULL,MC,
|
||||
MAROC-PHOSPHORE S.A.,,,,"2, RUE AL ABTAL, HAY ERRAHA, 20200 CASABLANCA, MOROCCO",Casablanca,20200,MA,
|
||||
ACE SULFERT (LIANYUNGANG) CO. LTD,,,,,Monaco,NULL,MC,
|
||||
INCHCAPE SHIPPING SERVICES,,,,,Monaco,0,MC,
|
||||
AONO MARINE CO LTD,,,,"1-1-17, SHINDEN-CHO",Niihama,792-0003,JP,
|
||||
BANDIRMA GÜBRE FABRIKALARI A.S. (S),,,,SUSAM SOK. NO:22,Cihangir-Istanbul,34433,TR,
|
||||
HEXAGON GROUP AG,,,,Bleicherweg 33,Zurich,8002,CH,
|
||||
INNOPHOS,,,,"DOMICILIO CONOCIDO SN, S.T., PAJARITOS, CP 96384, COATZACOALCOS, VER. MEXICO",Coatzacoalcos,CP 96384,MX,
|
||||
TODA KISEN KK,,,,,,NULL,MC,
|
||||
FARMHANNONG,,,,"#523 Maeam-dong Nam-ku, Ulsan 680-050 South Korea",Ulsan,680-050,KR,
|
||||
"AGROINDUSTRIAS DEL BALSAS ",,,,"ISLA DE EN MEDIO SIN NUMERO, INTERIOR RECINTO PORTUARIO, LAZARO CARDENAS, MICH. MEXICO",Mexico City,NULL,MX,
|
||||
"A.R. SULPHONATES PVT. LIMITED ",,,,"21, PRINCEP STREET, KOLKATA 700072, INDIA",Kolkata,700072,IN,
|
||||
WINA TECHNOLOGIES,,,,"9 KOFI PORTUPHY STREET, WESTLANDS, WEST LEGON, ACCRA, GHANA",Accra,0,GH,
|
||||
ARTHUR J. GALLAGHER (UK) LTD,,,,,Monaco,NULL,MC,
|
||||
STOLT TANKERS B.V.,,,,"460 ALEXANDRA ROAD, # 10-01 PSA BUILDING",Singapore,119963,SG,
|
||||
SAS International LLC,,,,"26865 Interstate 45 South - Suite 200 The Woodlands,",,TX 77380,US,
|
||||
TUFTON OCEANIC LTD.,,,,,Monaco,0,MC,
|
||||
"D3 CHEMIE ",,,,,Monaco,NULL,MC,
|
||||
Interacid Trading SA (accounting),,,,En Budron H14,Le Mont-sur-Lausanne,CH-1052,CH,
|
||||
"SHANGHAI DINGHENG SHIPPING CO.,LTD.",,,,,Monaco,0,MC,
|
||||
MCC RAMU NICO LTD,,,,"PO BOX 1229, MADANG SECTION 95, LOT 18-19, MODILON RD PAPUA, NEW GUINEA",Modilon RD Papua,0,PG,
|
||||
FERTIMAC,,,,"AV. PLINIO BRASIL MILANO, 289 CONJ. 301, PORTO ALEGRE - RS - BRASIL, CNPJ : 92194026/0001-36",Porto Alegre,0,BR,
|
||||
THE PHOSPHATE CO. LTD. .,,,,"14, NETAJI SUBHAS ROAD, KOLKATA 700001, INDIA",Kolkata,700001,IN,
|
||||
CORP. NACIONAL DEL COBRE,,,,HUÉRFANOS 1270,Santiago,8320000,CL,
|
||||
"KIMYA MARKET SANAYI.TIC.LTD.STI. ",,,,600 Evler Mah.Balıkesir Asfaltı Sağ Taraf No:72 BANDIRMA BALIKESIR Bandırma Tax Office,Bandirma,NULL,TR,
|
||||
Marsh / Navigators Insurance Company,,,,1 Penn PLZ FL 55,New York,10119-5500,US,
|
||||
DE POLI TANKERS BV,,,,TRONDHEIM 20,Barendrecht,2993LE,NL,
|
||||
"SUNWOO SHIPPING CO., LTD.",,,,"KEB HANA BANK, MAPO-YEOK BRANCH 041-68 52, MAPO-DAERO, MAPO-GU, SEOUL, SOUTH KOREA ACCOUNT NO. 176-910020-20638 SWIFT : KOEXKRSE IN FAVOUR OF SUNWOO SHIPPING CO., LTD. ",Seoul,NULL,KR,
|
||||
NYRSTAR PORT PIRIE PTY LTD.,,,,HOOFDSTRAAT 1,DORPLEIN,6024 AA BUDEL,NL,
|
||||
INSPECTORATE (SUISSE) SA,,,,,Monaco,0,MC,
|
||||
GANSU YONGQI INDUSTRY AND TRADE. CO. (C),,,,"NORTH OF YANAN ROAD, WEST OF HEYA ROAD, JINCHUAN DISTRICT, JINCHANG CITY, GANSU PROVINCE, CHINA",Gansu Province,0,CN,
|
||||
"ASOCIACION DE COOPERATIVAS ARGENTINAS COOP. ",,,,"AV. EDUARDO MADERO 942, 4° - 5° - 6° - 7° PISO/FLOOR, (1106) BUENOS AIRES - ARGENTINA, C.U.I.T. 30-50012088-2",Buenos Aires,1106,AR,
|
||||
DM SHIPPING CO LTD,,,,,Monaco,0,MC,
|
||||
SUMITOMO CORPORATION - PPC,,,,"3-2 OTEMACHI 2-CHOME, CHIYODA-KU",Tokyo,100-8601,JP,
|
||||
FILHET - ALLARD MARITIME,,,,,Monaco,0,MC,
|
||||
IPSL / PEQUIVEN,,,,,Monaco,0,MC,
|
||||
VENUS SHIPPING CO LTD,,,,"1830, Lotte Gold Rose2Cha 890-59 Daechi 4-sa-dong Gangnam-gu",Seoul,NULL,KR,
|
||||
"TOHO ZINC CO., LTD.",,,,"8-2,MARUNOUCHI 1-CHOME,CHIYODA-KU,",Tokyo,100-8207,JP,
|
||||
"PEIFENG TECHNOLOGY & FERTILIZERS CO., LTD.",,,,"8F., NO 88, SEC. 2, NANJING E. RD, ZHONGSHAN DIST., ",Taipei,10457,TW,
|
||||
INDEVCO TRADING,,,,"35 AMBER ROAD #09-14, THE SEA VIEW, SINGAPORE 439945",Singapore,439945,SG,
|
||||
"GREATHORSE INTERNATIONAL SHIP MANAGEMENT CO., LTD.",,,,,Monaco,0,MC,
|
||||
JINCHUAN,,,,"JINCHUAN INDUSTRIAL PARK, QISHA INDUSTRIAL ZONE, GANGKOU DISTRICT, FANGCHENGGANG CITY GUANGXI PROVINCE",Fangchenggang City,538002,CN,
|
||||
MC FOR LLC,,,," Room 4, Premise 8N, prospekt Nevskiy 20A, St Petersburg, 191186, Russia.",Saint Petersburg,191186,RU,
|
||||
GRUPO MEXICO,,,,"EDIFICIO PARQUE REFORMA CAMPOS ELISEOS NO. 400, LOMAS DE CHAPULTEPEC",Mexico City,11000,MX,
|
||||
UBS SWITZERLAND AG,,,,Rue des Noirettes 35 PO Box 2600,Geneva,1211,CH,
|
||||
WLR/TRF SHIPPING LTD.,,,,,Monaco,0,MC,
|
||||
TRANSMARINE NAVIGATION CORPORATION,,,,2321 W WASHINGTON ST # K,Stockton,95203,US,
|
||||
NATIONAL CHEMICAL CARRIERS LTD. CO.,,,,"9/F, ONE JLT, JUMEIRAH LAKE TOWERS",Dubai,,AE,
|
||||
SORIN CORPORATION,,,,"YOUNG-POONG BLDG, 542, GANGNAM-DAERO, GANGNAM-GU",Seoul,06110,KR,
|
||||
"JX METALS SMELTING CO.,LTD.",,,,"1-2 OHTEMACHI 1-CHOME, CHIYODA-KU,",Tokyo,100-0004,JP,
|
||||
CHENGTAI GROUP PTE LTD,,,,TBD,,NULL,CN,
|
||||
FERFERMEX S.A.,,,,"PREDIO ENCINO GORDO S/N, C.P. 96340",Veracruz,96340,MX,
|
||||
"SHOKUYU NAVIGATION CO.,SA",,,,,,NULL,MA,
|
||||
BERLIAN LAJU TANKER TBK PT,,,,"Jalan Abdul Mu'is 40, Kec Gambir",Jakarta,10160,ID,
|
||||
"INTERACID NORTH AMERICA, INC (Stockton)",,,,"10210 HIGHLAND MANOR DRIVE - SUITE 140, TAMPA - FL",Tampa,33610,US,
|
||||
META NICKEL,,,,"CEYHUN ATIF KANSU CADDESI 114 BAYRAKTAR CENTER, D BLOK 1. KAT DAIRE NO: 1-",Ankara,2,TR,
|
||||
OTAMAY SHIPPING INC.,,,,,Monaco,0,MC,
|
||||
NFC PUBLIC COMPANY LTD,,,,"88 SC GROUP BUILDING, 3RD FLOOR, THE PARKLAND ROAD, BANGNA NUEA SUB-DISTRICT, BANGNA, BANGKOK 10260, THAILAND",Bangkok,10260,TH,
|
||||
NEW WAY VYAPAAR PVT LTD,,,,"249/B, G.T. ROAD (NORTH), LILUAH, FIRST FLOOR, ROOM N° 17, HOWRAH 711 204, WB INDIA",Howrah,711 204,IN,
|
||||
SGS NORTH AMERICA INC.,,,,,Tampa,NULL,US,
|
||||
KOYO KAIUN ASIA PTE. LTD,,,,,Monaco,0,MC,
|
||||
KUTCH CHEMICAL INDUSTRIES LTD,,,,"20-21, SARA NIWAS, HARI NAGAR, GOTRI ROAD, GUJARAT",Vadodara,390 007,IN,
|
||||
FAIRFIELD CHEMICAL CARRIERS INC,,,,"21 RIVER ROAD,2ND FLOOR, CU",Wilton,06897,US,
|
||||
EGRAIN AG,,,,HESSENSTRASSE 18,Hofheim,65719,DE,
|
||||
CHEMBULK OCEAN TRANSPORT LLC,,,,"THE DELAMAR 175 RENNELL DRIVE , CU",Southport,06890,US,
|
||||
FEFERMEX SA DE CV,,,,"PREDIO ENCINO GORDO S/N, COSOLEACAQUE, VERACRUZ, CP 96340, MEXICO",Veracruz,CP 96340,MX,
|
||||
SOUTHERN PERU COPPER CORP,,,,"AVENIDA, CAMINOS DEL LNCA 171, CHACARILLA DEL ESTANQUE, SURCO",Lima,33,PE,
|
||||
K.N.D. LINE S.A.,,,,,Monaco,0,MC,
|
||||
JUPITER DYECHEM PVT LTD.,,,,"MITTAL COURT, A WING, OFFICE N° 92 & 93, 9TH FLOOR, NARIMAN POINT, MUMBAI 400 021, BRANCH OFFICE 138/2 VANMALA COMPOUND, VALEGAON, KHANDAGALE ESTATE, PURNA, BHIWANDI, DISTRICT THANE",Mumbai,400-021,IN,
|
||||
TIMAC AGRO INDUSTRIA BRASIL,,,,"AV. ALMIRANTE MAXIMIANO FONSECA 1550 KM 02, DIST. INDUSTRIAL 96204-040, RIO GRANDE, RS, BRASIL",Rio Grande,96204-040,BR,
|
||||
SATCO SULPHURIC ACID,,,,"10210 HIGHLAND MANOR DRIVE - SUITE 140, TAMPA - FL",Tampa,33610,US,
|
||||
YAOKI SHIPPING S.A.,,,,,,NULL,MC,
|
||||
"HOPECHEM CO., LTD. (AGENT)",,,,"ROOM 302, N° 233 KENGBEI VILLAGE, WUTONG-SHAN ART TOWN, LUOHU DISTRICT, SHENZHEN, P. R. CHINA 518114",Shenzhen,518114,CN,
|
||||
CONS. DOMINICAN REPUBLIC,,,,,Monaco,NULL,MC,
|
||||
BUNGE ARGENTINA S.A.,,,,"25 DE MAYO 501, 4TH FLOOR, C1002ABK BUENOS AIRES, ARGENTINA",Buenos Aires,C1002ABK,AR,
|
||||
PISCES LINE SHIPPING S.A.,,,,,Monaco,0,MC,
|
||||
TONGLING NONFERROUS METALS GROUP CO. LTD,,,,"Room 308, 3rd Floor, Nonferrous Courtyard Business Building, Changjiang West Road, Tongling, Anhui Province",Tongling,24400,CN,
|
||||
NAVQUIM SHIP MANAGEMENT,,,," 10th Floor, Tower C, Weena 242,",Rotterdam,3012 NJ,NL,
|
||||
ETI BAKIR A.S.,,,,"ALTUNIZADE, KISIKLI CD. NO:37",Üsküdar-Istanbul,34662,TR,
|
||||
NEW SEAGULL SHIPPING S.A.,,,,,Monaco,0,MC,
|
||||
NFC PUBLIC COMPANY LTD - SUNTAI,,,,"88 SC GROUP BUILDING, 3RD FLOOR, THE PARKLAND ROAD, BANGNA NUEA SUB-DISTRICT, BANGNA, BANGKOK 10260, THAILAND",Bangkok,10260,TH,
|
||||
"Asahi Tanker Co.,Ltd",,,,"Hibiya-Daibiru 3F 2-2, Uchisaiwaicho 1-Chome, Chiyoda-Ku",Tokyo,100-0011,JP,
|
||||
SGS ITALIA S.R.L.,,,,,Monaco,0,MC,
|
||||
"TATSUMI MARINE CO., LTD",,,,"TATSUMI BLDG. 3RD FLOOR 3-8-7, ITABASHI, CHIYODA-KU, TOYKO 102-0072",Tokyo,102-0072,JP,
|
||||
S5 ASIA LIMITED (THAILAND),,,,"21ST FLOOR, TIMES SQUARE BUILDING, 246 SUKHUMVIT ROAD, KLONGTOEY",Bangkok,10110,TH,
|
||||
OCEAN STAR,,,,,Monaco,0,MC,
|
||||
HICRI ERCILI DENIZCILIK AS,,,,"Balikesir Asfalti Sag Taraf Caddesi, 72, 600 Evler Mah",Bandirma,10200,TR,
|
||||
SUMITOMO CORPORATION ASIA,,,,"KUALA LUMPUR BRANCH, UBN TOWER, 35TH FLOOR, 10 JALAN P. RAMLEE, P.O. BOX 10297",Kuala Lumpur,50710,MY,
|
||||
YARA SWITZERLAND,,,,"ROUTE DU FLORISSANT 13, 1206 GENEVA, SWITZERLAND",Geneva,1206,CH,
|
||||
NEW GLORY SHIPPING S.A.,,,,,Monaco,0,MC,
|
||||
UNI-TANKERS A/S,,,,Turbinevej 10,Middelfart,5500,DK,
|
||||
SACONIX LLC,,,,"560 WEST CROSSVILLE ROAD, SUITE N° 204, ROSWELL, GA 30075, USA",Rosswell,GA 30075,US,
|
||||
"PAN PACIFIC COPPER CO., LTD.",,,,"1-2 OHTEMACHI 1-CHOME, CHIYODA-KU,",Tokyo,100-8147,JP,
|
||||
ZODIAC MARITIME LIMITED OF LONDON,,,,"PORTMAN HOUSE, 2 PORTMAN STREET",London,W1H 6DU,GB,
|
||||
UTKILEN A.S.,,,,,Monaco,0,MC,
|
||||
ULTRATANK,,,,,Monaco,0,MC,
|
||||
"TIANJIN JIAHENGYUAN INTL TRADE CO.,LTD",,,,Monaco,,NULL,CN,
|
||||
DUKSAN P&V CO LTD,,,,"2nd Floor, Dongailbo Building, 87, Jungang-daero, Jung-gu",Busan,48929,KR,
|
||||
GOLDEN MERCURY MARITIME S.A.,,,,,Monaco,0,MC,
|
||||
SUMITOMO CORPORATION THAILAND LTD.,,,,"20TH, 21ST & 22ND FLOOR UNIT 1, M. THAI TOWER, ALL SEASONS PLACE, 87 WIRELESS ROAD, LUMPINI, PHATUMWAN",Bangkok,10330,TH,
|
||||
SUMITOMO CORPORATION INDIA PVT LTD,,,,"OFFICE N° 1, UNIT B, 5TH FLOOR S-14, SOLITAIRE CORPORATE PARK 167, GURU HARGOBINDJI ROAD, CHAKALA, ANDHERI (EAST)",Mumbai,400 093,IN,
|
||||
YOUNG POONG CORPORATION,,,,"555, SUKPO-RI, SUKPO-MYUN, BONGHWA-GUN",Kyoungbuk,0,KR,
|
||||
BTS TANKERS PTE LTD,,,,,Monaco,0,MC,
|
||||
STELLA TANKER PTE LTD,,,,10 ANSON ROAD # 23-03,Singapore,079903,SG,
|
||||
GOLDEN DREAM MARITIME SA,,,,,Monaco,0,MC,
|
||||
JAL KUMUD SHIPPING IFSC PVT,,,,"Unit GA-20, Ground Floor, Pragya Accelerator Savvy Ats Group, Gift City, Gandhinagar,",Gujarat,382355,IN,
|
||||
MANGALORE CHEMICALS,,,,"UB TOWER, LEVEL 11, UB CITY, 24, VITTAL MALLYA ROAD, BENGALURU 560 001, INDIA",Bengaluru,560 001,IN,
|
||||
TATSUMI SHOKAI CO. LTD (ONAHAMA),,,,"312, ONAHAMA AZATAKAYAMA",Iwaki City,0,JP,
|
||||
CHEMSEA SHIPPING COMPANY LIMITED,,,,Rm 1510 Wing Tuck CommCtr 177-183 Wing Lok St Hong Kong,,NULL,HK,
|
||||
JEIL INTL CO LTD,,,,"RM 301, 3F SAMJUNG ENTERPRISE B/D, 5, JUNGANG-DAERO 775 BEON-GIL, BUNSANJIN-GU",Busan,0,KR,
|
||||
GLENCORE INTERNATIONAL AG,,,,"BAARERMATTSTRASSE 3, P.O. BOX 1363, 6341 BAAR, SWITZERLAND",Baar,6341,CH,
|
||||
MOL NORDIC TANKERS A/S,,,,,Monaco,0,MC,
|
||||
ESSEX SHIPPING,,,,"JUBILEE HOUSE, 3 THE DRIVE, GREAT WARLEY, BRENTWOOD ESSEX",Brentwood,CM13 3FR,GB,
|
||||
BANDIRMA GÜBRE FABRIKALARI A.S (C ),,,,SUSAM SOK. NO:22,Cihangir-Istanbul,34433,TR,
|
||||
SGS SWITZERLAND SA,,,,,Monaco,0,MC,
|
||||
"TOROS AGRI INDUSTRY AND TRADE CO., INC. (S)",,,,"TEKFEN TOWER,4. LEVENT",Istanbul,34394,TR,
|
||||
"WOOJIN SHIPPING CO.,LTD",,,,"KUNSHIN BLDG.NEW BLDGO #501, 16, SAMGAE-RO (250-4,DOHWA-DONG) MAPO-GU",Seoul,0,KR,
|
||||
TRF SHIP MANAGEMENT AS,,,,,Monaco,0,MC,
|
||||
AMERICAS EXPORT CORP.,,,,"P.O. BOX 3067, WEST PALM BEACH, FL 33402, FLORIDA, UNITED STATES",West Palm Beach,FL 33402,US,
|
||||
QUANG BINH IMPORT AND EXPORT JOINT STOCK,,,,"N° 23, BLOCK 01, AREA 97 BACH DANG, HA LY, HONG BANG DISTRICT, HAI PHONG CITY, VIETNAM",Hai Phong,0,VN,
|
||||
GROUPE CHIMIQUE TUNISIEN,,,,"7, RUE DU ROYAUME D'ARABIE-SAOUDITE, 1002 TUNIS BELVEDERE, TUNISIA",Tunis Belvedere,1002,TN,
|
||||
PARANAPANEMA S/A. (S),,,,"VIA DO COBRE N° 3700, AREA INDUSTRIAL OESTE (AIO) - COPEC, CEP 42850-00 DIAS D'AVILA",Bahia,CEP 42850-00,BR,
|
||||
NYRSTAR SALES & MARKETING AG,,,,TESSINERPLATZ 7,Zurich,8002,CH,
|
||||
IPRUDENTIAL SHIPPING AGENCY SERVICES INC,,,,"4TH FLOOR, KALAYAAN BLDG, SALCEDO CORNER DELA ROSA ST., LEGASPI VILLAGE, MAKATI VILLAGE, PHILIPPINES 1229",Makati Village,1229,PH,
|
||||
DAEHO SHIPPING CO LTD,,,,"Room 203(woolim bldg) 19, Daepyeong-ro 28beon-gil, Yeongdo-gu, Busan, Korea",,NULL,KR,
|
||||
SIETEMAR SA,,,,,Monaco,0,MC,
|
||||
WALLEM,,,,,Monaco,0,MC,
|
||||
BACONCO J-V COMPANY,,,,,Monaco,NULL,MC,
|
||||
"CORAL BAY NICKEL CORP. ",,,,"RIO TUBA EXPORT PROCESSING, ZONE RIO TUBA, BATARAZA, PALAWAN, PHILIPPINES 5306, TIN 005-961-540-00000",Palawan,5306,PH,
|
||||
ROSSING URANIUM,,,,"PRIVATE BAG 5005, SWAKOPMUNG, NAMIBIA",Swakopmung,0,NA,
|
||||
INCITEC LTD,,,,"LEVEL 8, 28 SOUTHBANK BOULEVARD, SOUTHBANK, VICTORIA 3006, AUSTRALIA",Victoria,3006,AU,
|
||||
TBD Supplier,,,,TBD,Monaco,TBD,MC,
|
||||
SATCO - Tampa Terminal,,,,,Tampa,NULL,US,
|
||||
NORFALCO LLC,,,,"100 KING STREET WEST, SUITE 6900, PO BOX 403, TORONTO, ON",Toronto,M5X 1E3,CA,
|
||||
INDIAN FARMERS FERTILIZER COOPERATIVE LTD,,,,"PARADEEP UNIT, VILLAGE MUSADIA, P. O. PARADEEP, JAGATSINGHPUR, ORISSA - 754142, INDIA, GST N° 21AAAAI0050M2Z6",Orissa,754142,IN,
|
||||
SUMITOMO CORPORATION AFRICA (GHANA),,,,"SILVER STAR TOWER, 8TH FLOOR, AIRPORT CITY, 8TH FLOOR",Accra,0,GH,
|
||||
SUMITOMO CORPORATION THAILAND,,,,"20TH, 21ST & 22ND FLOOR UNIT 1, M. THAI TOWER, ALL SEASONS PLACE, 87 WIRELESS ROAD, LUMPINI, PHATUMWAN",Bangkok,10330,TH,
|
||||
ORION REEDEREI GMBH & CO KG,,,,,Monaco,0,MC,
|
||||
SOCIEDAD CONTRACTUAL EL ABRA,,,,"CAMINO CONCHI VIEJO S/N, KM 75, CALAMA, CHILE",Calama,0,CL,
|
||||
POLYSERVE,,,,"22 SYRIA ST., MOHANDESIEEN,",Giza,0,EG,
|
||||
MT MARITIME MANAGEMENT USA LLC,,,,,Monaco,0,MC,
|
||||
SUN METALS CORPORATION PTY LTD,,,,1 ZINC AVENUE STUART QLD,STUART,4811,AU,
|
||||
"SINOTRANS DONGWAN WAREHOUSE LOGISTICS CO.,LTD.",,,,"ZHONGHUA RD, GANGKOU DISTRICT, FANGCHENGGANG",Guangxi,0,CN,
|
||||
INTERNATIONAL COBALT CO,,,,"P.O. BOX N° 7539, E. BAY STREET, NASSAU, BAHAMAS",Nassau,0,BS,
|
||||
GULF AGENCY COMPANY LIMITED,,,,,Monaco,0,MC,
|
||||
NIPPON MARINE CO. LTD,,,,,Monaco,0,MC,
|
||||
"Interacid Trading Chile ",,,,"Av. Isidora Goyenechea 3600, Of. 301",Santiago,NULL,CL,
|
||||
DORVAL SC TANKERS INC.,,,,"3RD FL KDX MONZENNAKACHO BLDG, 1-14-1, BOTAN, KOTO-KU",Tokyo,NULL,JP,
|
||||
Interacid Trading SA,,,,Av. des Baumettes 5,Renens,1020,CH,
|
||||
BHP BILLITON,,,,"CERRO EL PLOMO 6000, 18TH FLOOR, LAS CONDES, SANTIAGO, CHILE",Santiago,NULL,CL,
|
||||
ENEOS OCEAN CORPORATION,,,,"THE LANDMARK TOWER YOKOHAMA 48TH FLOOR 2-2-1, MINATOMIRAI, NISHI-KU",Yokohama,220-8148,JP,
|
||||
SEATRANS MARINE PRIVATE LIMITED,,,,"ABIR KUNJ, 158 RAJDANGA NABAPALLY",Kolkata,700107,IN,
|
||||
MAC SHIPPING MANAGEMENT PTE,,,,hk,Hong Kong,NULL,HK,
|
||||
NEXA RESOURCES CAJAMARQUILLA S.A.,,,,"CAR. CENTRAL N° 9.5 CAJAMARQUILLA (CARR. CENTRAL KM 9.5 DESVIO A HUACHIPA), LIMA, LURIGANCHO",Lurigancho,0,PE,
|
||||
SUMITOMO CORPORATION - ACIDS,,,,"3-2 OTEMACHI 2-CHOME, CHIYODA-KU",Tokyo,100-8601,JP,
|
||||
OCEAN FAVOR SHIPPING (SHANGHAI) LTD,,,,"ROOM 517 A BUILDING 3 NO 2588, SOUTH LIANHUA ROAD MINHANG DISTRICT",Shanghai,0,CN,
|
||||
FUJI LAVA MARITIME LTD,,,,,Monaco,0,MC,
|
||||
"CENTRAL ROMANA CORP. LTD. ",,,,"C/O AMERICAS EXPORT COPR, P.O. BOX 3067, WEST PALM BEACH, FLORIDA 33401-3067",West Palm Beach,33401-3067,US,
|
||||
THE BTTCO OVERSEAS,,,,"B-92, OM TOWERS, GHODA CAMP ROAD, SHAHIBAUG",Ahmedabad,380 004,IN,
|
||||
BEYKIM DENIZCILIK GEMI,,,,"IC Kapi 142, Blok 2, Maslak A Blok, AOS 55 Sokak, Maslak Mah, 42, SariyeR",Istanbul,NULL,TR,
|
||||
YARA BRAZIL,,,,"AV. ALMIRANTE MAXIMIANO FONSECA 2001, CEP 96204-040, RIO GRANDE, RS, BRASIL",Rio Grande,CEP 96204-040,BR,
|
||||
SAEHAN MARINE CO LTD (BUSAN),,,,"12TH FLOOR, ANNJAY TOWER 208, TEHERAN-RO, GANGNAM-GU, SEOUL, 06220, KOREA",Seoul,6220,KR,
|
||||
GOLDEN STENA BAYCREST TANKERS PTE LTD,,,,"108 PASIR PANJANG ROAD, #04-15 GOLDEN AGRI PLAZA",Singapore,118535,SG,
|
||||
DUCGIANG CHEMICAL AND DETERGENT,,,,"TANG LOONG INDUSTRIAL ZONE, TANG LOONG, BAO THANG, LAO CAI, VIETNAM",Lao Cai,NULL,VN,
|
||||
VISIONCHEM,,,,"#1515 SAMKOO BUILDING, 40 CHEONGPA-RO, YONGSAN-GU",Seoul,4373,KR,
|
||||
"Shandong Xiangying Chemical Import and Export CO.,LTD.",,,,"715, Tower A, world trade center, Yantai, Handong province,",,NULL,CN,
|
||||
SUMITOMO MYANMAR,,,,"#208~213, 2ND FLOOR, PRIME HILL BUSINESS SQUARE, NO.60 SHWE DAGON PAGODA ROAD, DAGON TOWNSHIP",Yangon,0,MM,
|
||||
WILSON INTERNATIONAL TRADING PRIVATE LTD,,,,"8 Temasek Boulevard 17-02/03 Suntec Tower 3 ",Singapore,038988,SG,
|
||||
PANOLI INTERMEDIATES (INDIA) PVT. LTD.,,,,"'Sara Niwas', 20-21, Harinagar Co.Op. Society, Gotri Road",Vadodara,39007,IN,
|
||||
DHL EXPESS,,,,,Monaco,NULL,MC,
|
||||
"HIBI KYODO SMELTING CO.,LTD.",,,,"1-11-1 OSAKI , SHINAGAWA-KU",Tokyo,141-8584,JP,
|
||||
STERLITE INDUSTRIES,,,,"SIPCOT INDUSTRIAL COMPLEX, MADURAI BYPASS ROAD, THOOTHUKUDI (TAMIL NADU)",Tuticorin,628002,IN,
|
||||
TAIHEI KAIUN KK,,,,,Monaco,0,MC,
|
||||
PT QMB NEW ENERGY MATERIALS,,,,"Sopo Del Office Tower Lantai 22, Unit A, Jalan Mega Kuningan Barat III Lot 10.1-6 Kawasan Mega Kuningan, Kota Adm. Jakarta Selatan, Provinsi DKI Jakarta, Kode Pos: 12950, Indonesia",Jakarta,12950,ID,
|
||||
GLENCORE HOLDING AG,,,,"BAARERMATTSTRASSE 3, P.O. BOX 1363",Baar,6340,CH,
|
||||
"SHANGHAI SUMITOMO CORPORATION CO.,LTD.",,,,"10F, SHANGHAI WORLD FINANCIAL CENTER, 100 CENTURY AVENUE, PUDONG NEW AREA",Shanghai,200120,CN,
|
||||
"INEOS SULPHUR CHEMICALS SPAIN, S.L.U.",,,,"DIQUE DE ZIERBENA, MUELLE AZ-1",Zierbena-Bizkaia,48508,ES,
|
||||
CIECH S.A,,,,UL. WSPOLNA,Warsaw,00-684,PL,
|
||||
SUMITOMO AUSTRALIA,,,,"LEVEL 21, 101 COLLINS STREET, MELBOURNE VIC 3000, AUSTRALIA",Melbourne,VIC 3000,AU,
|
||||
GOLDEN JUPITER NAVIGATION S.A.,,,,,Monaco,0,MC,
|
||||
"Agrifields DMCC, Dubai",,,,"3201, JBC4, Cluster N, Jumeirah Lake Towers",Dubai,NULL,AE,
|
||||
"CVCI ",,,,"AVENUE D'OUCHY 47, 1006 LAUSANNE, SWITZERLAND",Lausanne,CH-1006,CH,
|
||||
"PAN OCEAN CO., LTD",,,,"Tower 8, 7, Jong-ro 5-Gil, Jongro-Gu, Seoul, Korea (Rep of) Korea, Republic Of ",Seoul,NULL,KR,
|
||||
SGS-CSTC STANDARDS TECNICAL SERVICES CO. LTD,,,,,Monaco,0,MC,
|
||||
SINOCHEM INTERNATIONAL CORP.,,,,,Monaco,0,MC,
|
||||
"INFICESS SHIPPING CO., LTD.",,,,,Monaco,0,MC,
|
||||
SUMITOMO CORPORATION VIETNAM LLC (HA),,,,"9TH FLOOR, CORNERSTONE BLDG, 16 PHAN CHU TRINH STREET, HOAN, KIEM DISTRICT",Hanoi,0,VN,
|
||||
SUMITOMO CORPORATION - NO OR LIMITED COMMISSION,,,,"3-2 OTEMACHI 2-CHOME, CHIYODA-KU",Tokyo,100-8601,JP,
|
||||
SEASTAR MARITIME CO. LTD,,,,"Room 2112, Techno-Mart 21 Bldg, 85 Gwangnaru-Ro 56 Gil, Gwangjin-Gu",Seoul,NULL,KR,
|
||||
NFC PUBLIC COMPANY LTD - MAHACHAI,,,,"88 SC GROUP BUILDING, 3RD FLOOR, THE PARKLAND ROAD, BANGNA NUEA SUB-DISTRICT, BANGNA, BANGKOK 10260, THAILAND",Bangkok,10260,TH,
|
||||
INTERACID TRADING (CHILE) S.A.,,,,ISIDORA GOYENECHEA NR. 3600 - OFFICE 301 LAS CONDES,Santiago,,CL,
|
||||
J.M. BAXI & CO,,,,"GODREJ COLISEUM, 8TH FLOOR, 801-C WING, EVERARD NAGAR, SION (EAST),",Mumbai,400022,IN,
|
||||
KONA MARITIME S.A.,,,,,Monaco,0,MC,
|
||||
NewChem Consulting AG,,,,Waldeggweg 6A 6318 Walchwil Switzerland,Walchwil,6318,CH,
|
||||
TAGANITO HPAL NICKEL CORPORATION,,,,"TAGANITO SPECIAL ECONOMIC ZONE (TSEZ) BARANGAY TAGANITO, CLAVER, SURIGAO DEL NORTE, PHILIPPINES 8410",Surigao Del Norte,8410,PH,
|
||||
KHANG TRUNG HIEU CO. LTD,,,,"N° 12/17, VO THI SAU STREET, QUYET THANG WARD, BIEN HOA CITY, DONG NAI PROVINCE, VIETNAM",Dong Nai Province,0,VN,
|
||||
INFICESS SHIPPING CO. LTD.,,,,,Monaco,0,MC,
|
||||
SRF LIMITED,,,,"D II/I, GIDC PCPIR, GIDC PHASE II, TAL VAGRA, VILLAGE DAHEJ, BHARUCH, GUJARAT 392130, INDIA",Gujarat,392130,IN,
|
||||
FLYTE YANGON SA,,,,"23rd Floor, MMG Tower, Paseo del Mar y Boulevard Pacific, Costa del Este",Panama City,NULL,PA,
|
||||
ASIA CHEMICAL TANKER ALLIANCE PTE LTD,,,,"6 Temasek Boulevard #44-01 Suntec Tower Four ",Singapore,038986,SG,
|
||||
TATSUMI SHOKAI (NAOSHIMA/HIBI),,,,"TATSUMI SHOKAI BLDG 3FL 4-1-1, CHIKKO, MINATO-KU",Osaka,552-0021,JP,
|
||||
COROMANDEL INTERNATIONAL LTD,,,,"COROMANDEL HOUSE, 1-2-10, SARDAR PATEL ROAD, SECUNDERABAD, 500003, INDIA",Secunderabad,50003,IN,
|
||||
GLENCORE CHILE SPA,,,,"AV. COSTANERA SUR 2730 OF. 1701, PISO 17, LAS CONDES, SANTIAGO, CHILE",Santiago,0,CL,
|
||||
NIPPON MARINE CO LTD,,,,966-15 OITA CITY,Saganoseki,879-2201,JP,
|
||||
SGS ESPANOLA DE CONTROL,,,,,Monaco,0,MC,
|
||||
LISBON SHIPPING LTD,,,,"Room 550, 5th Floor, Zonghe Lou, 385, Chaoyangshan Lu, Huangdao Qu, Qingdao",Qingdao,266400,CN,
|
||||
FEDERAL EXPRESS (FEDEX/TNT),,,,,Monaco,NULL,MC,
|
||||
SAI SULPHONATES PCT LTD,,,,"21, C. R. AVENUE, WHITE HOUSE, 2ND FLOOR, 700 072 KOLKATA, INDIA",Kolkata,700 072,IN,
|
||||
MOL CHEMICAL TANKERS PTE. LTD. OF SINGAPORE,,,,"5 SHENTON WAY, #15-01",Singapore,068808,SG,
|
||||
SONGA SHIPMANAGMENT LTD,,,,,Monaco,0,MC,
|
||||
NORTON LILLY INTERNATIONAL,,,,433 CALIFORNIA ST,San Francisco,94104,US,
|
||||
Xingtong Shipping (Singapore) Pte Ltd,,,,TEMASEK BOULEVARD #24-05 SUNTEC TOWER FOUR SINGAPORE Tel: +65 9298398 Email: chartering@xtshipping.com,Singapore,038986,SG,
|
||||
GUJARAT FLUOROCHEMICALS LIMITED,,,,"ABS TOWERS, 2ND FLOOR, OLD PADRA ROAD, VADODARA 390007, GUJARAT, INDIA",Vadodara,390007,IN,
|
||||
CELSIUS SHIPPING APS,,,,,Monaco,0,MC,
|
||||
JEIL INTERNATIONAL CO. LTD.,,,,,Monaco,0,MC,
|
||||
TIMAC AGRO IRELAND LIMITED,,,,"4 & 5, PRIORITY COURT THE QUAY, NEW ROSS, CO WEXFORD, IRELAND",Wexford,Y34 HV25,IE,
|
||||
SUMITOMO CORPORATION ASIA & OCEANIA,,,,"35TH FLOOR UBN TOWER, 10 JALAN P RAMLEE",Kuala Lumpur,50250,MY,
|
||||
"INTERACID NORTH AMERICA, INC (Beaumont)",,,,"560 WEST CROSSVILLE ROAD, SUITE N° 204, ROSWELL, GA 30075, USA",Roswell,30075,US,
|
||||
NFC PUBLIC COMPANY LTD - UCHA,,,,"88 SC GROUP BUILDING, 3RD FLOOR, THE PARKLAND ROAD, BANGNA NUEA SUB-DISTRICT, BANGNA, BANGKOK 10260, THAILAND",Bangkok,10260,TH,
|
||||
NUOVA SOLMINE S.P.A.,,,,VIA NUOVA VIGNOLE 38,Serravalle Scrivia AL,15069,IT,
|
||||
LUMUT VENTURE SDN BHD,,,,,Monaco,0,MC,
|
||||
CONE SUL AGENCIA DE NAVEGACAO LTDA,,,,,Monaco,NULL,MC,
|
||||
SUMITOMO CORPORATION (SHANGHAI) LTD,,,,"10F,Shanghai World Financial Center,100 Century Avenue, Pudong New Area, Shanghai ",Shanghai,NULL,CN,
|
||||
MITSUBISHI MATERIALS CORPORATION METALS COMPANY,,,,"11F KEIDANRENKAIKAN, 1-3-2,OHTEMACHI,CHIYODA-KU",Tokyo,100-8117,JP,
|
||||
CREDIT SUISSE (SWITZERLAND) SA,,,,1201 Geneva,Geneva,NULL,CH,
|
||||
"INTERACID NORTH AMERICA, INC (Tampa)",,,,"10210 HIGHLAND MANOR DRIVE - SUITE 140, TAMPA - FL",Tampa,33610,US,
|
||||
PT PUPUK ISKANDAR MUDA (PIM),,,,JL. MEDAN - BANDA ACEH PO BOX 21,,NULL,ID,
|
||||
"FURUKAWA METALS & RESOURCES CO., LTD.",,,,"2-3 MARUNOUCHI, 2 - CHOME, CHIYODA-KU",Tokyo,0,JP,
|
||||
"SUNWOO TANKER CO. LTD, KOREA",,,,"MI-WON BLDG ROOM 1702, 70 GUKJEGEUMYUNG-RO, YEONGDEUNGPO-GU",Seoul,NULL,KR,
|
||||
SATCO SULPHURIC ACID WC,,,,"10210 HIGHLAND MANOR DRIVE - SUITE 140, TAMPA - FL ",Stockton,33610,US,
|
||||
SUMITOMO CORPORATION VIETNAM LLC (HM),,,,"LANDMARK BUILDING, 6TH FLOOR, 5B TON DUC THANG, BEN NGHE WARD, DISTRICT 1,",Ho Chi Minh,0,VN,
|
||||
ODFJELL TANKERS AS,,,,CONRAD MOHRS VEG 29,Bergen,5072,NO,
|
||||
"SUMITOMO METAL MINING CO., LTD.",,,,"3-5-3, NISHIBARA-CHO, NIIHAMA",EHIME,792-8555,JP,
|
||||
GANSU YONGQI INDUSTRY AND TRADE. CO. (S),,,,"NORTH OF YANAN ROAD, WEST OF HEYA ROAD, JINCHUAN DISTRICT, GANSU PROVINCE,",Jinchang City,0,CN,
|
||||
UPS UNITED PARCEL SERVICE (SCHWEIZ) AG,,,,,Monaco,0,MC,
|
||||
HANSA TANKERS AS OF BERGEN,,,,KALFARVEIEN 57A,Bergen,5022,NO,
|
||||
SC SHIPPING SINGAPORE PTE LTD,,,,,Monaco,0,MC,
|
||||
FOSPHORY SP Z.O.O,,,,IL. KUJAWSKA 2,Gdansk,80-550,PL,
|
||||
VELOCITE SARL,,,,"RUE DR CESAR-ROUX 29, 1003 LAUSANNE, SWITZERLAND",Lausanne,1003,CH,
|
||||
TACHIBANAYA CO. LTD.,,,,,Monaco,0,MC,
|
||||
SUMITOMO CORPORATION TAIWAN LTD,,,,"SUMITOMO-FLYSUN BLDG. 8TH FL., NO. 260, TUN HWA N. ROAD",Taipei,NULL,TW,
|
||||
TAIKO MARKETING SDN,,,,"B2-6-01, BLOK B2, MERITUS @ OASIS CORPORATE N° 2, JALAN PJU 1A/2, ARA DAMANSARA, 47301 PETALING JAYA, SELANGOR, MALAYSIA",Petaling Jaya,47301,MY,
|
||||
"SAEHAN MARINE CO, LTD (MARINE)",,,,10 ANSON ROAD #26-02 INTERNATIONAL PLAZA,Singapore,79903,SG,
|
||||
DILMAS CO LTD,,,,"ul Pogranichnaya 72, Nakhodka, Primorskiy kray, 692910, Russia.",Primorsk,692910,RU,
|
||||
SAYBOLT POLAND SP. Z O.O.,,,,1 PODLASKA STR.,Gdynia,81-325,PL,
|
||||
AKR SEA TRANSPORT PT,,,,"Lot 7, Wisma AKR, Jalan Panjang 5, Kec Kebon Jeruk",Jakarta,11530,ID,
|
||||
NAVIG8 CHEMICAL POOLS INC.,,,,"3 TEMASEK AVENUE, #25-01 CENTINNIAL TOWER",Singapore,039190,SG,
|
||||
RAETSMARINE INSURANCE B.V.,,,,,Monaco,0,MC,
|
||||
AR SAVAGE AND SON,,,,"202 SOUTH ROME AVE, SUITE 200",Tampa,33606,US,
|
||||
MARCOBRE S.A.C.,,,,"Giovanni Batista Lorenzo Bernini 149, Piso 3, Oficina 301, San Borja, Lima, Peru",Lima,NULL,PE,
|
||||
LS-NIKKO COPPER INC.,,,,"15FI., (LS YONGSAN TOWER), 92, HANGANG-DAERO, YONSAN-GU",Seoul,4386,KR,
|
||||
JLS MONKEY CO LTD,,,,"Workshop B, 3rd Floor, Manning Industrial Building, 116-118, How Ming Street, Kwun Tong, Kowloon",Hong Kong,NULL,CN,
|
||||
SUMITOMO CORPORATION - FURUKAWA,,,,"3-2 OTEMACHI 2-CHOME, CHIYODA-KU",Tokyo,100-8601,JP,
|
||||
CHEMMASTER TANKERS COMPANY LIMITED,,,,"Room 1510,wing Tuck Commerical Centre, 177-183 Wing Lok Street ",Hong Kong,NULL,HK,
|
||||
BTM MUFG Bank Ltd,,,,"Ropemaker Place, 25 Ropemaker Street London EC2Y 9AN, United Kingdom",London,EC2Y 9AN,GB,
|
||||
PT PETROKIMIA (PKG),,,,"JI. JEND. AHMAD YANI, GRESIK 61119",Gresik,61119,ID,
|
||||
"Sulfuric Acid Trading Company , Inc (SATCO)",,,,3710 Corporex Park Drive - Suite 205,Tampa,NULL,US,
|
||||
TWO LIONS ZHANGJIAGANG,,,,DONGHUA ROAD YANGTZE RIVER INTERNATIONAL CHEMICAL INDUSTRIAL PARK,Zhangjiagang,0,CN,
|
||||
ACE TANKERS MANAGEMENT B.V.,,,,STRAWINSKYLAAN 1057,Amsterdam,STRAWINSKYLAAN 1057,NL,
|
||||
PHILIPPINE ASSOCIATED SMELTING AND REFINING CORP.,,,,"LEYTE INDUSTRIAL DEVELOPMENT ESTATE, ISABEL",Leyte,0,PH,
|
||||
SC SHIPPING SINGAPORE PTE LTD,,,,"6 Temasek Blvd #22/04-05 Suntec Tower 4 ",Singapore,038986,SG,
|
||||
TBD Shipowner,,,,TBD,Monaco,TBD,MC,
|
||||
HICRI ERCILI,,,,600 EVLER MAHALLESI BALIKESIR ASFALTI SAĞ TARAF NO: 72,Bandirma,10200,TR,
|
||||
BARWIL AGENCIES LTD,,,,,Monaco,NULL,MC,
|
||||
FERTILORE SAS,,,,"40 TER AVENUE DE SUFFREN, 75015 PARIS, FRANCE",Paris,75015,FR,
|
||||
FLYTE MANILA SA,,,,"23rd Floor, MMG Tower, Paseo del Mar y Boulevard Pacific, Costa del Este",Panama City,NULL,PA,
|
||||
BOLIDEN MINERAL AB,,,,,Skelleftehamn,932 81,SE,
|
||||
SHOKUYU TANKER CO. LTD.,,,,,Monaco,0,MC,
|
||||
"ATLANTIC COPPER ",,,,"P° DE LA CASTELLANA, 95 - 21ST FLOOR",Madrid,28046,ES,
|
||||
MOL CHEMICAL TANKERS PTE. LTD. OF SINGAPORE,,,,"5 SHENTON WAY, #15-01",Singapore,068808,SG,
|
||||
TATSUMI MARINE (S) PTE LTD,,,,,Singapore,NULL,SG,
|
||||
SUMITOMO CORPORATION DIS TICARET A.S.,,,,"Ferko Plaza, Esentepe district, Büyükdere Street No.175, A Blok 20th Floor, Şişli",Istanbul,NULL,TR,
|
||||
Scrap Co. Ltd,,,,Blvd des Moulins,Monaco,NULL,MC,
|
||||
"FOCUS SHIPPING CO.,LTD",,,,12/F 3 LOCKHART RD. WANCHAI,Hong Kong,0,HK,
|
||||
NAMHAE CHEMICAL CORP,,,,"1506, 17, Seobinggo-ro, Yongsan-gu",Seoul,04387,KR,
|
||||
SHANGHAI ORIENT INTERTEK TESTING SERVICES CO.,,,,,Monaco,0,MC,
|
||||
MARNAVI SHIPPING MANAGEMENT PVT LTD,,,,,Monaco,0,MC,
|
||||
SGS TESTING KOREA CO. LTD.,,,,,Monaco,0,MC,
|
||||
FERALCO (UK) LTD,,,,"DITTON ROAD, WIDNES, CHESHIRE WA 8 OPH, ENGLAND",Cheshire,WA 8 OPH,GB,
|
||||
ACE QUANTUM CHEMICAL TANKERS CV,,,,,Monaco,0,MC,
|
||||
HONGKONG ZHOUSHAN YIHAI SHIPPING CO. LTD.,,,,,Monaco,0,MC,
|
||||
AET SHIP MANAGEMENT (M) SDN. BHD.,,,,,Monaco,0,MC,
|
||||
NFC PUBLIC COMPANY LTD - PROMMITR,,,,"88 SC GROUP BUILDING, 3RD FLOOR, THE PARKLAND ROAD, BANGNA NUEA SUB-DISTRICT, BANGNA, BANGKOK 10260, THAILAND",Bangkok,10260,TH,
|
||||
SUMIMA SDN BHD,,,,,Monaco,0,MC,
|
||||
"TOROS AGRI INDUSTRY AND TRADE CO, INC. (C )",,,,"TEKFEN TOWER,4. LEVENT",Istanbul,34394,TR,
|
||||
"TRICON ENERGY, LTD",,,,"1500 Post Oak Blvd., 18th Floor, Houston, Texas 77056 USA",Houston,77056,US,
|
||||
HUTAMA TRANS KONTINENTAL PT,,,,"Lantai 26, Mangkuluhur City Tower One, Jalan Jendral Gatot Subroto Kav 1-3, Kel Karet Semanggi",Jakarta,12930,ID,
|
||||
GRACEFUL STAR SHIPPING CO LTD,,,,"Room 2109, 21st Floor, C C Wu Building, 302-308, Hennessy Road, Wan Chai, Hong Kong, China.",Hong Kong,NULL,CN,
|
||||
MAADEN PHOSPHATE COMPANY,,,,"P.O. Box 11110, Ras Al Khair, Industrial City",Jubail,31961,SA,
|
||||
SUMITOMO CORP. ASIA,,,,"KUALA LUMPUR BRANCH, UBN TOWER, 35TH FLOOR, 10 JALAN P. RAMLEE, P.O. BOX 10297, 50710 KUALA LUMPUR, MALAYSIA",Kuala Lumpur,50710,MY,
|
||||
"COOGEE CHEMICALS PTY LTD ",,,,"CNR PATTERSON & KIWANA BEACH ROADS, KWINANA, P.O. BOX 5051 ROCKINGHAM BEACH, WA 6969, AUSTRALIA",Brand,6969,AU,
|
||||
MAKINO KAIUN CO LTD & MAKI OCEAN SHIPPING SA,,,,,Monaco,0,MC,
|
||||
PROCYON SHIPPING S.A.,,,,,Monaco,0,MC,
|
||||
VENATOR ASIA SDN BHD (EX HUNTSMAN),,,,"KAWASAN INDUSTRI TELUK KALUNG, P.O. BOX 29, KEMAMA 24007, TERENGGANU, MALAYSIA",Kemaman,24000,MY,
|
||||
WOMAR LOGISTICS PTE LTD,,,,"8 TEAMASEK BOULEVARD, #22-06, SUNTEC TOWER 3",Singapore,038988,SG,
|
||||
EASTERN TANKERS CO. LTD.,,,,,Monaco,0,MC,
|
||||
AS INVENTOR SHIPPING,,,,,Monaco,0,MC,
|
||||
HICRI ERCILI DENIZCILIK AS,,,,,Monaco,0,MC,
|
||||
BAO PHUNG COMPANY,,,,,Monaco,NULL,MC,
|
||||
CIE MARITIME BELGE SA,,,,,Monaco,0,MC,
|
||||
Accounting Matching,,,,internal,,NULL,CH,
|
||||
IND-AUST MARITIME PVT LTD,,,,"715, JK Chambers, Sector 17, Vashi, Navi Mumbai, Maharashtra",,400703,IN,
|
||||
SAG Surveyors Alliance Group,,,,"Calle Bolívar 472 Of 705, Miraflores – Lima 18, Perú",Lima,0,PE,
|
||||
STAINLESS TANKERS INC C/O WOMAR LOGISTICS PTE LTD,,,,,Monaco,0,MC,
|
||||
HINDALCO INDUSTRIES LTD,,,,"P.O. DAHEJ, DIST : BHARUCH, PIN CODE",Gujarat,392 130,IN,
|
||||
INCHCAPE SHIPPING SERVICES (JAPAN) LTD.,,,,"HAMAMATSUCHO BLDG. 6F, 1-1-1, SHIBAURA, MINATO-KU",Tokyo,105-0023,JP,
|
||||
GOLDEN FORTUNE SHIPHOLDING S.A.,,,,,Monaco,0,MC,
|
||||
SU NAVIGATION PTE. LTD,,,,"200 CANTONMENT ROAD, #14-04 SOUTHPOINT,",Singapore,089763,SG,
|
||||
PEREZ Y CIA MADRID,,,,FORTUNY 9,Madrid,28010,ES,
|
||||
INTER QUIMICA S.A.,,,,"AV. SAN MARTIN 209, EDIF. JARABA IMPORT, SANTO DOMINGO, DOMINICAN REPUBLIC",Santo Domingo,0,DO,
|
||||
"B&M AGENCIA MARITIMA S.A. ",,,,AVENIDA ANDALICAN 881,Mejillones,NULL,CL,
|
||||
PROQUIGEL QUIMICA S.A.,,,,"FAZENDA CAROBA S/N, 43.813-300 CANDEIAS, BA, BRASIL",Candeias,43.813-300,BR,
|
||||
SUMITOMO CORPORATION MANILA,,,,,Manila,0,PH,
|
||||
TIOXIDE (MALAYSIA) SDN BHD,,,,"KAWASAN INDUSTRI TELUK KALUNG, P.O. BOX 29, KEMAMA 24007, TERENGGANU, MALAYSIA",Terengganu,24007,MY,
|
||||
DAITOH TRADING CO. LTD.,,,,,Monaco,0,MC,
|
||||
TBD Customer,,,,TBD,Monaco,TBD,MC,
|
||||
BETTY MILD MARITIME S.A,,,,,Monaco,0,MC,
|
||||
IINO KAIUN KAISHA LTD OF TOKYO.,,,,"IINO BUILDING, 2-1-1 UCHISAIWAICHO, CHIYODA-KU",Tokyo,100-0011,JP,
|
||||
IINO SINGAPORE PTE. LTD,,,,168 ROBINSON ROAD #13-02 CAPITAL TOWER,Singapore,068912,SG,
|
||||
"JINLONG COPPER CO., LTD",,,,"1, Jinshan West Road Tongling Anhui",,244021,CN,
|
||||
MOSAIC FERTILIZANTES BRAZIL,,,,"AV. ROQUE PETRONI JUNIOR 999, 14° ANDAR BROOKLYN, CEP 04707-910 SAO PAULO, BRAZIL",Sao Paulo,CEP 04707-910,BR,
|
||||
|
@@ -0,0 +1,77 @@
|
||||
source_id,source_line_id,number,reference,our_reference,party_name,currency_code,purchase_date,payment_term,warehouse_code,weight_basis,tol_min_pct,tol_max_pct,tol_min_qty,tol_max_qty,from_location_name,to_location_name,incoterm_name,invoice_method,description,comment,line_type,line_product_code,origin,line_quantity,line_unit_code,line_price,line_description,line_from_del,line_to_del,pricing_trigger,pricing_estimated_date,trader,operator,concentration,book,strategy,period_at,demurrage,laytime_hours,nor_extra_hours,pumping_rate,use_only_min_max,drop_remaining_quantity
|
||||
C00E4A1E-4743-4C73-8F92-FEA123D2DCCE,C00E4A1E-4743-4C73-8F92-FEA123D2DCCE,1761,24972A,S24.025,SUMITOMO CORPORATION - MMC,USD,5/15/2024,NET 30,,NCSW,0,0,5000,5000,Onahama,Map Ta Phut,FOB,manual,,1761 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4-Japan,Japan,5000,Mt,33.36,5000 Mt of sulphuric acid - Tel-quel,3/20/2025,4/2/2025,bldate,4/2/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Thailand FY25,Laycan,,0,6,300,1,1
|
||||
A65BA960-E7EC-44FD-A945-8554C8D9B89B,A65BA960-E7EC-44FD-A945-8554C8D9B89B,1849,24976,P25.001,ATLANTIC COPPER ,USD,11/1/2024,NET ,,NCSW,5,5,18050,19950,Huelva,Tampa,FOB,manual,,1849 / H2SO4 FY 2025 / Default,line,H2SO4-Spain,Spain,19000,Mt,95,19000 Mt of sulphuric acid - Tel-quel,4/15/2025,4/21/2025,bldate,4/15/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,350,0,1
|
||||
D890D3AF-EB6C-4B06-9F20-E8C92D2514D1,D890D3AF-EB6C-4B06-9F20-E8C92D2514D1,1866,24977,P25.004/1 & P25.012,NEXA RESOURCES CAJAMARQUILLA S.A.,USD,12/2/2024,NET 30,,NCSW,5.08,5.08,14000,15500,Callao,Mejillones,CIF,manual,,1866 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-Peru,Peru,14750,Mt,148.17,14750 Mt of sulphuric acid - Tel-quel,4/18/2025,4/28/2025,bldate,4/18/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,400,1,1
|
||||
0ED2A6A8-16DD-4CD5-8114-EEE96F711225,0ED2A6A8-16DD-4CD5-8114-EEE96F711225,1867,24991,P25.004/2,NEXA RESOURCES CAJAMARQUILLA S.A.,USD,12/2/2024,NET 30,,NCSW,5.08,5.08,14000,15500,Callao,Mejillones,CIF,manual,,1867 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-Peru,Peru,14750,Mt,155,14750 Mt of sulphuric acid - Tel-quel,7/1/2025,7/10/2025,bldate,7/1/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,400,1,1
|
||||
AA9BF6DB-3C96-491D-B0E8-3C22C3FE13BB,AA9BF6DB-3C96-491D-B0E8-3C22C3FE13BB,1868,25000,P25.004/3,NEXA RESOURCES CAJAMARQUILLA S.A.,USD,12/2/2024,NET 15,,NCSW,5.08,5.08,14000,15500,Callao,Mejillones,CIF,manual,,1868 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-Peru,Peru,14750,Mt,155,14750 Mt of sulphuric acid - Tel-quel,8/20/2025,8/30/2025,bldate,8/20/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,400,1,1
|
||||
7F86EA2E-2409-4A59-A09E-0B3B0B3C0FFE,7F86EA2E-2409-4A59-A09E-0B3B0B3C0FFE,1869,25016,P25.004/4,NEXA RESOURCES CAJAMARQUILLA S.A.,USD,12/2/2024,NET 30,,NCSW,5.08,5.08,14000,15500,Callao,Mejillones,CIP,manual,,1869 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-Peru,Peru,14750,Mt,155,14750 Mt of sulphuric acid - Tel-quel,12/3/2025,12/13/2025,bldate,12/3/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,400,1,1
|
||||
3B181AD5-AEF1-4DD1-B3BF-835DCAA3B460,3B181AD5-AEF1-4DD1-B3BF-835DCAA3B460,1876,24971AB,P25.005 - 120kt,JINCHUAN,USD,12/5/2024,NET ,,NCSW,10,10,27000,33000,Fangcheng,Mejillones,FOB,manual,,1876 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-China,China,30000,Mt,40,30000 Mt of sulphuric acid - Tel-quel,4/13/2025,5/2/2025,bldate,4/13/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,9,350,0,1
|
||||
87D65A8B-E366-4F61-A684-F16A9E380D6D,87D65A8B-E366-4F61-A684-F16A9E380D6D,1877,24986ABC,P25.005 - 120kt,JINCHUAN,USD,12/5/2024,LC 30 DAYS,,NCSW,10,10,27000,33000,Fangcheng,Mejillones,FOB,manual,,1877 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-China,China,30000,Mt,40,30000 Mt of sulphuric acid - Tel-quel,7/5/2025,7/16/2025,bldate,7/5/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,9,350,0,1
|
||||
F1B16A79-23DE-4BC3-AA05-98FF85120CE2,F1B16A79-23DE-4BC3-AA05-98FF85120CE2,1878,25020ABC,P25.005 - 120kt,JINCHUAN,USD,12/5/2024,NET ,,NCSW,0,0,27000,27000,Fangcheng,Mejillones,FOB,manual,,1878 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-China,China,27000,Mt,40,27000 Mt of sulphuric acid - Tel-quel,12/21/2025,1/4/2026,bldate,1/5/2026,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,9,350,1,1
|
||||
0079657A-2247-4D04-98F5-B111191D935A,0079657A-2247-4D04-98F5-B111191D935A,1881,24970,P25.006,SUMITOMO CORPORATION - PPC,USD,12/5/2024,NET 7,,NCSW,10,10,18000,22000,Saganoseki,Stockton,FOB,manual,,1881 / H2SO4 FY 2025 / Default,line,H2SO4-Japan,Japan,20000,Mt,37,20000 Mt of sulphuric acid - Tel-quel,4/1/2025,4/15/2025,bldate,4/1/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,300,0,1
|
||||
FCB43E88-A643-422B-95B6-B68CE61C4015,FCB43E88-A643-422B-95B6-B68CE61C4015,1883,24979,P25.006,SUMITOMO CORPORATION - PPC,USD,12/5/2024,NET 7,,NCSW,10,10,18000,22000,Saganoseki,Stockton,FOB,manual,,1883 / H2SO4 FY 2025 / Default,line,H2SO4-Japan,Japan,20000,Mt,37,20000 Mt of sulphuric acid - Tel-quel,5/18/2025,5/31/2025,bldate,5/18/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,300,0,1
|
||||
E5E96218-48BA-46E5-99C8-9E4086AF12A5,E5E96218-48BA-46E5-99C8-9E4086AF12A5,1885,24985,P25.006,SUMITOMO CORPORATION - PPC,USD,12/5/2024,NET 7,,NCSW,10,10,18000,22000,Saganoseki,Stockton,FOB,manual,,1885 / H2SO4 FY 2025 / Default,line,H2SO4-Japan,Japan,20000,Mt,37,20000 Mt of sulphuric acid - Tel-quel,6/15/2025,6/30/2025,bldate,6/17/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,300,0,1
|
||||
AA912607-7B83-4DAC-A4EE-E9F24F55B58E,AA912607-7B83-4DAC-A4EE-E9F24F55B58E,1887,24996,P25.006,SUMITOMO CORPORATION - PPC,USD,12/5/2024,NET 7,,NCSW,10,10,18000,22000,Hibi,Stockton,FOB,manual,,1887 / H2SO4 FY 2025 / Default,line,H2SO4-Japan,Japan,20000,Mt,34,20000 Mt of sulphuric acid - Tel-quel,8/15/2025,8/29/2025,bldate,8/15/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,300,0,1
|
||||
EC24CFB1-7260-4963-A452-F9B4FA60CC0C,EC24CFB1-7260-4963-A452-F9B4FA60CC0C,1889,24989,P25.006,SUMITOMO CORPORATION - PPC,USD,12/5/2024,NET 30,,NCSW,10,10,18000,22000,Hibi,Stockton,FOB,manual,,1889 / H2SO4 FY 2025 / Default,line,H2SO4-Japan,Japan,20000,Mt,34,20000 Mt of sulphuric acid - Tel-quel,7/12/2025,7/25/2025,bldate,7/12/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,300,0,1
|
||||
E7B7D5E1-72E1-490B-85BE-13BC3849ECC2,E7B7D5E1-72E1-490B-85BE-13BC3849ECC2,1891,24999,P25.006,SUMITOMO CORPORATION - PPC,USD,12/5/2024,NET 7,,NCSW,10,10,18000,22000,Hibi,Stockton,FOB,manual,,1891 / H2SO4 FY 2025 / Default,line,H2SO4-Japan,Japan,20000,Mt,34,20000 Mt of sulphuric acid - Tel-quel,9/7/2025,9/22/2025,bldate,9/7/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,300,0,1
|
||||
A547D4C5-F163-4B27-9A88-F66175AEF3E6,A547D4C5-F163-4B27-9A88-F66175AEF3E6,1893,25003,P25.006,SUMITOMO CORPORATION - PPC,USD,12/5/2024,NET 7,,NCSW,10,10,18000,22000,Saganoseki,Stockton,FOB,manual,,1893 / H2SO4 FY 2025 / Default,line,H2SO4-Japan,Japan,20000,Mt,72,20000 Mt of sulphuric acid - Tel-quel,10/1/2025,10/15/2025,bldate,10/1/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,300,0,1
|
||||
14292C24-948C-4E7A-8E9C-85F9A0890877,14292C24-948C-4E7A-8E9C-85F9A0890877,1895,25008,P25.006,SUMITOMO CORPORATION - PPC,USD,12/5/2024,NET ,,NCSW,10,10,18000,22000,Hibi,Stockton,FOB,manual,,1895 / H2SO4 FY 2025 / Default,line,H2SO4-Japan,Japan,20000,Mt,64,20000 Mt of sulphuric acid - Tel-quel,11/1/2025,11/30/2025,bldate,11/1/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,300,0,1
|
||||
694E3179-4DC2-4C9B-B3D3-B436BA304F28,694E3179-4DC2-4C9B-B3D3-B436BA304F28,1904,24990AB,P25.009 - 60kt,JINCHUAN,USD,12/16/2024,NET ,,NCSW,10,10,27000,33000,Fangcheng,Mejillones,FOB,manual,,1904 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-China,China,30000,Mt,35,30000 Mt of sulphuric acid - Tel-quel,9/10/2025,9/30/2025,bldate,9/10/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,9,350,0,1
|
||||
EA973053-2D21-4D6C-915D-0212F9C92E94,EA973053-2D21-4D6C-915D-0212F9C92E94,1912,24975,P25.007 - 100kt,"JINLONG COPPER CO., LTD",USD,12/17/2024,NET ,,NCSW,10,10,9000,11000,Rugao,Isabel,FOB,manual,,1912 / H2SO4 FY 2025 / Default,line,H2SO4-China,China,10000,Mt,65.63,10000 Mt of sulphuric acid - Tel-quel,4/1/2025,4/15/2025,bldate,4/7/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,350,0,1
|
||||
24306B99-A9C3-4E9D-9961-2E2B98C515AB,24306B99-A9C3-4E9D-9961-2E2B98C515AB,1913,24987,P25.007 - 100kt,"JINLONG COPPER CO., LTD",USD,12/17/2024,NET ,,NCSW,0,10,10000,11000,YIZHENG,Gladstone,FOB,manual,,1913 / H2SO4 FY 2025 / Default,line,H2SO4-China,China,10000,Mt,73.5,10000 Mt of sulphuric acid - Tel-quel,6/8/2025,6/18/2025,bldate,6/8/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,350,0,1
|
||||
446ABDCD-C09A-4911-88D3-4CD8BDECBBF9,446ABDCD-C09A-4911-88D3-4CD8BDECBBF9,1914,24992,P25.007 - 100kt,"JINLONG COPPER CO., LTD",USD,12/17/2024,NET 30,,NCSW,10,10,9000,11000,Rugao,Tuticorin,FOB,manual,,1914 / H2SO4 FY 2025 / Default,line,H2SO4-China,China,10000,Mt,86.5,10000 Mt of sulphuric acid - Tel-quel,7/18/2025,7/30/2025,bldate,7/18/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,350,0,1
|
||||
38B61B69-789C-4D9A-8A37-1B475BE69C50,38B61B69-789C-4D9A-8A37-1B475BE69C50,1915,24992,P25.007 - 100kt,"JINLONG COPPER CO., LTD",USD,12/17/2024,LC 30 DAYS,,NCSW,10,10,9000,11000,Rugao,Tuticorin,FOB,manual,,1915 / H2SO4 FY 2025 / Default,line,H2SO4-China,China,10000,Mt,91.19,10000 Mt of sulphuric acid - Tel-quel,7/18/2025,7/30/2025,bldate,7/18/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,350,0,1
|
||||
169FEC9E-34DD-488E-9323-7EBE720A641F,169FEC9E-34DD-488E-9323-7EBE720A641F,1916,25006,P25.007 - 100kt,"JINLONG COPPER CO., LTD",USD,12/17/2024,NET ,,NCSW,5,5,9500,10500,Rugao,Isabel,FOB,manual,,1916 / H2SO4 FY 2025 / Default,line,H2SO4-China,China,10000,Mt,91.81,10000 Mt of sulphuric acid - Tel-quel,10/1/2025,10/15/2025,bldate,10/1/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,350,0,1
|
||||
F14935F1-2D50-4167-9823-2DA248C23D7C,F14935F1-2D50-4167-9823-2DA248C23D7C,1917,25022A,P25.007 - 100kt,"JINLONG COPPER CO., LTD",USD,12/17/2024,NET ,,NCSW,22.58,22.58,6000,9500,YIZHENG,Visakhapatnam,FOB,manual,,1917 / H2SO4 FY 2025 / Default,line,H2SO4-China,China,7750,Mt,77.31,7750 Mt of sulphuric acid - Tel-quel,12/29/2025,1/5/2026,bldate,12/29/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,350,1,1
|
||||
2A9D4883-8822-467D-8C1E-30ECC64C7000,2A9D4883-8822-467D-8C1E-30ECC64C7000,1919,24980ABC,P25.008,TONGLING NONFERROUS METALS GROUP CO. LTD,USD,12/17/2024,NET ,,NCSW,10,10,27000,33000,YIZHENG,Mejillones,FOB,manual,,1919 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-China,China,30000,Mt,73.63,30000 Mt of sulphuric acid - Tel-quel,5/30/2025,6/15/2025,bldate,6/2/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,0,1
|
||||
A7285649-0E96-4640-A23B-1876E403F95E,A7285649-0E96-4640-A23B-1876E403F95E,1920,24994,P25.008,TONGLING NONFERROUS METALS GROUP CO. LTD,USD,12/17/2024,NET ,,NCSW,10,10,18000,22000,YIZHENG,Tuticorin,FOB,manual,,1920 / H2SO4 FY 2025 / Default,line,H2SO4-China,China,20000,Mt,92.94,20000 Mt of sulphuric acid - Tel-quel,8/16/2025,8/28/2025,bldate,8/16/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,350,0,1
|
||||
66B2E095-336E-416C-AA63-55BDAAFAEE2C,66B2E095-336E-416C-AA63-55BDAAFAEE2C,1921,25001ABCD,P25.008,TONGLING NONFERROUS METALS GROUP CO. LTD,USD,12/17/2024,NET ,,NCSW,10,10,27000,33000,Rugao,Mejillones,FOB,manual,,1921 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-China,China,30000,Mt,73.125,30000 Mt of sulphuric acid - Tel-quel,10/1/2025,10/20/2025,bldate,10/1/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,0,1
|
||||
ADB3789C-0844-46D8-B24C-B92A5DCEF5D0,ADB3789C-0844-46D8-B24C-B92A5DCEF5D0,1925,24988,P25.010,BOLIDEN HARJAVALTA OY,USD,12/20/2024,NET 30,,NCSW,5.26,5.26,18000,20000,Pori,,FOB,manual,,1925 / H2SO4 FY 2025 / Default,line,H2SO4-Finland,Finland,19000,Mt,84.57,19000 Mt of sulphuric acid - 100%,6/25/2025,7/4/2025,bldate,6/25/2025,Jeremie Collot, ,100,H2SO4 FY 2025,Default,Laycan,,0,6,550,1,1
|
||||
C7B91036-E67E-49FA-AD5B-BBC5DA64E89F,C7B91036-E67E-49FA-AD5B-BBC5DA64E89F,1926,24997AB,P25.010,BOLIDEN HARJAVALTA OY,USD,12/20/2024,NET 30,,NCSW,5.26,5.26,18000,20000,Ronnskar,Beaumont,FOB,manual,,1926 / H2SO4 FY 2025 / Default,line,H2SO4-Finland,Finland,19000,Mt,88.91,19000 Mt of sulphuric acid - Tel-quel,8/22/2025,9/4/2025,bldate,9/3/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,550,1,1
|
||||
8C22A21C-C3D8-4C09-9440-274E0546196D,8C22A21C-C3D8-4C09-9440-274E0546196D,1928,25011ABCDE,P25.010,BOLIDEN HARJAVALTA OY,USD,12/20/2024,NET 30,,NCSW,5.26,5.26,18000,20000,Pori,Mejillones,FOB,manual,,1928 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-Finland,Finland,19000,Mt,63.49,19000 Mt of sulphuric acid - Tel-quel,11/16/2025,11/25/2025,bldate,11/16/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,550,1,1
|
||||
23734B0E-5AC0-4364-8CBA-58505E3B33A1,23734B0E-5AC0-4364-8CBA-58505E3B33A1,1929,25005,P25.010,BOLIDEN HARJAVALTA OY,USD,12/20/2024,NET ,,NCSW,3.9,3.9,18500,20000,Ronnskar,Beaumont,FOB,manual,,1929 / H2SO4 FY 2025 / Default,line,H2SO4-Finland,Finland,19250,Mt,74.56,19250 Mt of sulphuric acid - Tel-quel,10/11/2025,10/20/2025,bldate,10/11/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,550,1,1
|
||||
EB9CBF03-3B8E-4453-B6DB-1A234C1C8D4E,EB9CBF03-3B8E-4453-B6DB-1A234C1C8D4E,1933,24984,,SUMITOMO CORPORATION - PPC,USD,1/6/2025,NET 7,,NCSW,10,10,18000,22000,Saganoseki,Tuticorin,FOB,manual,,1933 / H2SO4 FY 2025 / Default,line,H2SO4-Japan,Japan,20000,Mt,42,20000 Mt of sulphuric acid - Tel-quel,6/9/2025,6/19/2025,bldate,6/12/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,300,0,1
|
||||
E139786E-55C2-46AF-B4FA-0FB613FD14CD,E139786E-55C2-46AF-B4FA-0FB613FD14CD,1937,24983,P25.008,TONGLING NONFERROUS METALS GROUP CO. LTD,USD,1/10/2025,NET ,,NCSW,3.3,3.3,11000,11750,YIZHENG,,FOB,manual,,1937 / H2SO4 FY 2025 / Default,line,H2SO4-China,China,11375,Mt,72.19,11375 Mt of sulphuric acid - Tel-quel,5/15/2025,5/25/2025,bldate,5/15/2025,Stephane Monnard,Oliver Gysler,0,H2SO4 FY 2025,Default,Laycan,,0,6,300,1,1
|
||||
C5047512-8D3A-4D0C-9187-51F361E8031F,C5047512-8D3A-4D0C-9187-51F361E8031F,1944,24974AB,P25.013,"TRICON ENERGY, LTD",USD,1/31/2025,NET 30,,NCSW,10.71,10.71,12500,15500,Ilo,Mejillones,CFR,manual,,1944 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-Peru,Peru,14000,Mt,145.75,14000 Mt of sulphuric acid - Tel-quel,5/5/2025,5/11/2025,bldate,5/5/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,1
|
||||
A78EF4D1-3579-4F0E-ABF6-484550F4361F,A78EF4D1-3579-4F0E-ABF6-484550F4361F,1949,24972B,S24.025,SUMITOMO CORPORATION - MMC,USD,2/26/2025,NET ,,NCSW,0,0,2700,2700,Onahama,Samut Prakan,FOB,manual,,1949 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4-Japan,Japan,2700,Mt,33.36,2700 Mt of sulphuric acid - Tel-quel,3/20/2025,4/2/2025,bldate,4/2/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Thailand FY25,Laycan,,0,6,300,1,1
|
||||
730B0B84-3980-4A56-B259-BC24C3BE0766,730B0B84-3980-4A56-B259-BC24C3BE0766,1956,25002,P25.009 - 60kt,JINCHUAN,USD,3/19/2025,NET ,,NCSW,10,10,27000,33000,Fangcheng,Tuticorin,FOB,manual,,1956 / H2SO4 FY 2025 / Default,line,H2SO4-China,China,30000,Mt,35,30000 Mt of sulphuric acid - Tel-quel,10/10/2025,10/30/2025,bldate,10/26/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,9,350,0,1
|
||||
4B20B434-6B9D-4A57-BFC2-2FD4EBB71919,4B20B434-6B9D-4A57-BFC2-2FD4EBB71919,1957,24981,P25.014,HEXAGON GROUP AG,USD,3/21/2025,NET ,,NCSW,22.33,22.33,10000,15750,Rugao,Mejillones,CFR,manual,,1957 / H2SO4 FY 2025 / Chile FY25,line,H2SO4,,12875,Mt,156,12875 Mt of sulphuric acid - Tel-quel,4/28/2025,5/7/2025,bldate,5/1/2025,Jeremie Collot,Gregory Gondeau,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,1
|
||||
1B11F28C-7AC5-4C89-8444-8E38D04A07C6,1B11F28C-7AC5-4C89-8444-8E38D04A07C6,1959,24978,P25.015,"TRICON ENERGY, LTD",USD,3/28/2025,NET ,,NCSW,10,10,17100,20900,Bandirma,Beaumont,CFR,manual,,1959 / H2SO4 FY 2025 / Default,line,H2SO4-Turkey,Turkey,19000,Mt,138.9,19000 Mt of sulphuric acid - Tel-quel,4/28/2025,5/8/2025,bldate,5/1/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,300,0,1
|
||||
C7E0CFA0-05C9-441C-975F-EFABDDD93198,C7E0CFA0-05C9-441C-975F-EFABDDD93198,1962,24973B,P25.004/1,NEXA RESOURCES CAJAMARQUILLA S.A.,USD,4/2/2025,NET ,,NCSW,0,0,20,20,Callao,Mejillones,CIF,manual,,1962 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-Peru,Peru,20,Mt,155,20 Mt of sulphuric acid - Tel-quel,3/20/2025,4/4/2025,bldate,4/3/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,400,1,1
|
||||
4412C861-90D3-43D2-B37A-A4BD54DB7416,4412C861-90D3-43D2-B37A-A4BD54DB7416,1970,24982/B,S24.025,SUMITOMO CORPORATION - MMC,USD,4/17/2025,NET 7,,NCSW,0,0,2350,2350,Onahama,Samut Prakan,FOB,manual,,1970 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4-Japan,Japan,2350,Mt,45.25,2350 Mt of sulphuric acid - Tel-quel,5/9/2025,5/16/2025,bldate,5/9/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Thailand FY25,Laycan,,0,0,300,1,1
|
||||
23B0BD08-70B2-476F-B9E2-A272FAE41199,23B0BD08-70B2-476F-B9E2-A272FAE41199,1972,24982/A,S24.025,SUMITOMO CORPORATION - MMC,USD,4/17/2025,NET ,,NCSW,0,0,5000,5000,Onahama,Map Ta Phut,FOB,manual,,1972 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4-Japan,Japan,5000,Mt,45.25,5000 Mt of sulphuric acid - Tel-quel,5/9/2025,5/16/2025,bldate,5/9/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Thailand FY25,Laycan,,0,0,300,1,1
|
||||
9B44DBC7-6AC0-4D07-A413-A0FB29DF03ED,9B44DBC7-6AC0-4D07-A413-A0FB29DF03ED,1986,24993,P25.017,"TRICON ENERGY, LTD",USD,6/19/2025,NET ,,NCSW,10,10,4950,6050,Onsan,Mejillones,CFR,manual,,1986 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-South Korea,South Korea,5500,Mt,169,5500 Mt of sulphuric acid - Tel-quel,8/7/2025,8/15/2025,bldate,8/7/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,0,1
|
||||
85206D6D-4FDD-4D89-BF1F-301EC6CD73F9,85206D6D-4FDD-4D89-BF1F-301EC6CD73F9,1987,25009,P25.008,TONGLING NONFERROUS METALS GROUP CO. LTD,USD,6/24/2025,NET ,,NCSW,10,10,7200,8800,Rugao,LHOKSEUMAWE,FOB,manual,,1987 / H2SO4 FY 2025 / Default,line,H2SO4-China,China,8000,Mt,77.31,8000 Mt of sulphuric acid - Tel-quel,11/5/2025,11/11/2025,bldate,11/5/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,350,0,1
|
||||
D49AC946-9F56-4B82-A3A1-2D49F66A5E31,D49AC946-9F56-4B82-A3A1-2D49F66A5E31,1989,24998,P25.018,SUMITOMO CORPORATION - MMC,USD,7/2/2025,NET 7,,NCSW,0,10,7000,7700,Onahama,LHOKSEUMAWE,FOB,manual,,1989 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4-Japan,Japan,7000,Mt,64,7000 Mt of sulphuric acid - Tel-quel,8/21/2025,8/30/2025,bldate,8/21/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Thailand FY25,Laycan,,0,6,300,0,1
|
||||
1C27B4DA-D386-40EC-8A18-C8BE1E5AB673,1C27B4DA-D386-40EC-8A18-C8BE1E5AB673,1991,25007A,P25.018,SUMITOMO CORPORATION - MMC,USD,7/2/2025,NET 7,,NCSW,0,0,5000,5000,Naoshima,Map Ta Phut,FOB,manual,,1991 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4-Japan,Japan,5000,Mt,51,5000 Mt of sulphuric acid - Tel-quel,10/1/2025,10/7/2025,bldate,10/1/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Thailand FY25,Laycan,,0,6,300,1,1
|
||||
6453C83B-FF65-4EFA-B123-341FA4B393B5,6453C83B-FF65-4EFA-B123-341FA4B393B5,1993,25013A,P25.018,SUMITOMO CORPORATION - MMC,USD,7/2/2025,NET ,,NCSW,0,0,5000,5000,Naoshima,Map Ta Phut,FOB,manual,,1993 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4-Japan,Japan,5000,Mt,55.2,5000 Mt of sulphuric acid - Tel-quel,11/21/2025,12/3/2025,bldate,11/21/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Thailand FY25,Laycan,,0,6,300,1,1
|
||||
64D5642D-2B3B-48FB-AE6C-EF7109D0000C,64D5642D-2B3B-48FB-AE6C-EF7109D0000C,1995,25024A,P25.018,SUMITOMO CORPORATION - MMC,USD,7/2/2025,NET 7,,NCSW,0,0,5000,5000,Naoshima,Map Ta Phut,FOB,manual,,1995 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4-Japan,Japan,5000,Mt,92,5000 Mt of sulphuric acid - Tel-quel,1/15/2026,1/24/2026,bldate,1/15/2026,Stephane Monnard, ,0,H2SO4 FY 2025,Thailand FY25,Laycan,,0,6,300,1,1
|
||||
1B4518DD-0C0E-43DC-AF81-D36A71801CA8,1B4518DD-0C0E-43DC-AF81-D36A71801CA8,2001,24995,P25.020,"TRICON ENERGY, LTD",USD,7/11/2025,NET 30,,NCSW,22.22,22.22,3500,5500,Zhangjiagang,Mejillones,CFR,manual,,2001 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-China,China,4500,Mt,171,4500 Mt of sulphuric acid - Tel-quel,8/19/2025,8/26/2025,bldate,8/19/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,1
|
||||
13C5150B-0D4A-49E4-9903-98DD1DA22336,13C5150B-0D4A-49E4-9903-98DD1DA22336,2028,25004,P25.021,SAS International LLC,USD,8/29/2025,NET ,,NCSW,5,5,6650,7350,Rugao,Mejillones,CFR,manual,,2028 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-China,China,7000,Mt,150,7000 Mt of sulphuric acid - Tel-quel,10/5/2025,10/14/2025,bldate,10/5/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,0,1
|
||||
88C62D2D-19AB-49B7-8284-E8C641C26F5A,88C62D2D-19AB-49B7-8284-E8C641C26F5A,2034,25022B,P25.007 - 100kt,"JINLONG COPPER CO., LTD",USD,9/5/2025,NET ,,NCSW,10,10,8550,10450,YIZHENG,Kakinada,FOB,manual,,2034 / H2SO4 FY 2025 / Default,line,H2SO4-China,China,9500,Mt,114.29,9500 Mt of sulphuric acid - Tel-quel,12/29/2025,1/5/2026,bldate,12/29/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,350,0,1
|
||||
CF3B673D-5B5E-4DA9-98BD-91CCDD586B34,CF3B673D-5B5E-4DA9-98BD-91CCDD586B34,2039,25015,P25.022,BOLIDEN HARJAVALTA OY,USD,9/26/2025,NET 30,,NCSW,5.26,5.26,18000,20000,Ronnskar,Beaumont,FOB,manual,,2039 / H2SO4 FY 2025 / Default,line,H2SO4-Finland,Finland,19000,Mt,65.83,19000 Mt of sulphuric acid - 100%,11/25/2025,12/4/2025,bldate,11/25/2025,Jeremie Collot, ,100,H2SO4 FY 2025,Default,Laycan,,0,6,550,1,1
|
||||
1E2CD211-032B-4746-A9BE-0CAB195653A3,1E2CD211-032B-4746-A9BE-0CAB195653A3,2040,25010,P25.023,"TRICON ENERGY, LTD",USD,9/26/2025,NET 30,,NCSW,5,5,18050,19950,Bandirma,Tampa,CFR,manual,,2040 / H2SO4 FY 2025 / Default,line,H2SO4-Turkey,Turkey,19000,Mt,102,19000 Mt of sulphuric acid - Tel-quel,10/15/2025,10/25/2025,bldate,10/15/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,350,0,1
|
||||
9AD52EA8-B7A3-42FE-B6EA-AF8F9624514F,9AD52EA8-B7A3-42FE-B6EA-AF8F9624514F,2043,25007B,P25.018,SUMITOMO CORPORATION - MMC,USD,9/26/2025,NET 7,,NCSW,20,20,1600,2400,Naoshima,Samut Prakan,FOB,manual,,2043 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4-Japan,Japan,2000,Mt,51,2000 Mt of sulphuric acid - Tel-quel,10/1/2025,10/7/2025,bldate,10/1/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Thailand FY25,Laycan,,0,6,300,1,1
|
||||
F1121A5B-1CD9-4F69-9B1C-16AB21AE6566,F1121A5B-1CD9-4F69-9B1C-16AB21AE6566,2044,25007C,P25.018,SUMITOMO CORPORATION - MMC,USD,9/26/2025,NET 7,,NCSW,0,0,1000,1000,Naoshima,Samut Prakan,FOB,manual,,2044 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4-Japan,Japan,1000,Mt,51,1000 Mt of sulphuric acid - Tel-quel,10/1/2025,10/7/2025,bldate,10/1/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Thailand FY25,Laycan,,0,6,300,1,1
|
||||
74CE5927-E06C-4EA6-81C3-FF56C560DA68,74CE5927-E06C-4EA6-81C3-FF56C560DA68,2047,25014,P25.024,HEXAGON GROUP AG,USD,9/26/2025,NET 20,,NCSW,5,5,18050,19950,Huelva,Tampa,CFR,manual,,2047 / H2SO4 FY 2025 / Default,line,H2SO4-Spain,Spain,19000,Mt,105.9,19000 Mt of sulphuric acid - Tel-quel,11/15/2025,11/20/2025,bldate,11/15/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,300,0,1
|
||||
4CF28C0F-36D6-4C97-866B-C3132F69F3E4,4CF28C0F-36D6-4C97-866B-C3132F69F3E4,2050,25023,P26.001,ATLANTIC COPPER ,USD,10/3/2025,NET 15,,NCSW,5,5,18050,19950,Huelva,Beaumont,FOB,manual,,2050 / H2SO4 FY 2025 / Default,line,H2SO4-Spain,Spain,19000,Mt,84.5,19000 Mt of sulphuric acid - Tel-quel,1/13/2026,1/19/2026,bldate,1/13/2026,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,350,0,1
|
||||
627F6B99-510F-41D6-B5C1-CA37024A50E0,627F6B99-510F-41D6-B5C1-CA37024A50E0,2053,25018,P25.025,SUMITOMO CORPORATION - MMC,USD,10/16/2025,NET 7,,NCSW,10,10,17100,20900,Naoshima,Stockton,FOB,manual,,2053 / H2SO4 FY 2025 / Default,line,H2SO4-Japan,Japan,19000,Mt,63,19000 Mt of sulphuric acid - Tel-quel,12/15/2025,12/24/2025,bldate,12/20/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,300,0,1
|
||||
16F9F10A-D40D-42E9-BCA7-62DB34BFDDC3,16F9F10A-D40D-42E9-BCA7-62DB34BFDDC3,2057,25012,P25.026,HEXAGON GROUP AG,USD,10/27/2025,NET 5,,NCSW,0,0,6000,6000,Antwerpen,Mejillones,CFR,manual,,2057 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-Switzerland,Switzerland,6000,Mt,159.59,6000 Mt of sulphuric acid - Tel-quel,11/10/2025,11/20/2025,bldate,11/10/2025,Jeremie Collot,Gregory Gondeau,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,0
|
||||
DD7221A8-0922-463D-9D0E-BA89E41882B2,DD7221A8-0922-463D-9D0E-BA89E41882B2,2058,25013B,P25.018,SUMITOMO CORPORATION - MMC,USD,10/29/2025,NET 7,,NCSW,0,0,3600,3600,Naoshima,Samut Prakan,FOB,manual,,2058 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4-Japan,Japan,3600,Mt,55.2,3600 Mt of sulphuric acid - Tel-quel,11/21/2025,12/3/2025,bldate,11/21/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Thailand FY25,Laycan,,0,6,300,1,1
|
||||
9616EB93-26B4-47D6-8D21-5B3A645496EE,9616EB93-26B4-47D6-8D21-5B3A645496EE,2060,25017,P25.027,SAS International LLC,USD,10/31/2025,NET ,,NCSW,5.26,5.26,9000,10000,Pori,Mejillones,CFR,manual,,2060 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-Finland,Finland,9500,Mt,165,9500 Mt of sulphuric acid - Tel-quel,12/15/2025,12/31/2025,bldate,12/15/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,1
|
||||
EB771CB4-9D09-47EA-922B-54DA025BD67C,EB771CB4-9D09-47EA-922B-54DA025BD67C,2084,25019A,P25.028,"PEIFENG TECHNOLOGY & FERTILIZERS CO., LTD.",USD,11/17/2025,NET ,,NCSW,10,10,6750,8250,Taichung,Gladstone,FOB,manual,,2084 / H2SO4 FY 2025 / Default,line,H2SO4-Taiwan,Taiwan,7500,Mt,101,7500 Mt of sulphuric acid - Tel-quel,12/24/2025,1/2/2026,bldate,12/24/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,200,0,1
|
||||
02C94974-BCB9-4C4F-803D-CAB76EFED668,02C94974-BCB9-4C4F-803D-CAB76EFED668,2085,25026ABC,P26.003,NEXA RESOURCES CAJAMARQUILLA S.A.,USD,11/18/2025,NET 30,,NCSW,5,5,16150,17850,Callao,Mejillones,CIF,manual,,2085 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-Peru,Peru,17000,Mt,171,17000 Mt of sulphuric acid - Tel-quel,1/29/2026,2/9/2026,bldate,1/29/2026,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,400,0,1
|
||||
E0611902-DD4D-436C-8AA1-53E59FF7FA6C,E0611902-DD4D-436C-8AA1-53E59FF7FA6C,2093,25028ABC,P26.004,"Shandong Xiangying Chemical Import and Export CO.,LTD.",USD,12/1/2025,NET ,,NCSW,10,10,27000,33000,Laizhou,Mejillones,FOB,manual,,2093 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-China,China,30000,Mt,100,30000 Mt of sulphuric acid - Tel-quel,2/19/2026,2/24/2026,bldate,2/19/2026,Stephane Monnard, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,300,0,0
|
||||
D5B7A728-7CB8-4FF7-B946-EBB9970C52F5,D5B7A728-7CB8-4FF7-B946-EBB9970C52F5,2097,25024B,P25.018,SUMITOMO CORPORATION - MMC,USD,12/3/2025,NET 7,,NCSW,0,0,1899.669,1899.669,Naoshima,Samut Prakan,FOB,manual,,2097 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4-Japan,Japan,1899.67,Mt,55.2,1899.67 Mt of sulphuric acid - Tel-quel,1/15/2026,1/24/2026,bldate,1/15/2026,Stephane Monnard, ,0,H2SO4 FY 2025,Thailand FY25,Laycan,,0,6,300,1,1
|
||||
E5CB510A-ACCB-4BC2-B657-7504DCDAC98D,E5CB510A-ACCB-4BC2-B657-7504DCDAC98D,2105,25024C,P26.011,SUMITOMO CORPORATION - MMC,USD,12/4/2025,NET 7,,NCSW,0,0,3600,3600,Naoshima,Samut Prakan,FOB,manual,,2105 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4-Japan,Japan,3600,Mt,45,3600 Mt of sulphuric acid - Tel-quel,1/15/2026,1/24/2026,bldate,1/15/2026,Stephane Monnard, ,0,H2SO4 FY 2025,Thailand FY25,Laycan,,0,6,300,1,1
|
||||
674FB2F0-A66B-4066-A80C-EAC3E4EDEBBE,674FB2F0-A66B-4066-A80C-EAC3E4EDEBBE,2107,MMC FEB LXML ,P26.011,SUMITOMO CORPORATION - MMC,USD,12/5/2025,NET ,,NCSW,0,0,5000,5000,Naoshima,Samut Prakan,FOB,manual,,2107 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4-Japan,Japan,5000,Mt,0,5000 Mt of sulphuric acid - Tel-quel,3/1/2026,3/31/2026,bldate,3/1/2026,Stephane Monnard, ,0,H2SO4 FY 2025,Thailand FY25,Laycan,,0,6,300,1,0
|
||||
DD5A61C0-ABC7-48A6-BFD9-7BD2337779A1,DD5A61C0-ABC7-48A6-BFD9-7BD2337779A1,2109,MMC MAR MWA buy price for NFC,P26.011,SUMITOMO CORPORATION - MMC,USD,12/5/2025,NET ,,NCSW,0,0,2400,2400,,Samut Prakan,FOB,manual,,2109 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4-Japan,Japan,2400,Mt,0,2400 Mt of sulphuric acid - Tel-quel,3/1/2026,3/31/2026,bldate,3/1/2026,Stephane Monnard, ,0,H2SO4 FY 2025,Thailand FY25,Laycan,,0,6,300,1,0
|
||||
C0B06F33-B62C-4935-B129-E77F9A223F9A,C0B06F33-B62C-4935-B129-E77F9A223F9A,2122,25021,P26.006,SUMITOMO CORPORATION - PPC,USD,12/10/2025,NET ,,NCSW,10,10,18000,22000,Saganoseki,Stockton,FOB,manual,,2122 / H2SO4 FY 2025 / Default,line,H2SO4-Japan,Japan,20000,Mt,71,20000 Mt of sulphuric acid - Tel-quel,1/7/2026,1/21/2026,bldate,1/7/2026,Stephane Monnard, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,300,0,1
|
||||
0DA01A1B-5718-42A1-8C2B-8FB010DFDC53,0DA01A1B-5718-42A1-8C2B-8FB010DFDC53,2124,25025,P26.006,SUMITOMO CORPORATION - PPC,USD,12/18/2025,NET 7,,NCSW,10,10,18000,22000,Hibi,Stockton,FOB,manual,,2124 / H2SO4 FY 2025 / Default,line,H2SO4-Japan,Japan,20000,Mt,69,20000 Mt of sulphuric acid - Tel-quel,1/30/2026,2/13/2026,bldate,2/3/2026,Stephane Monnard, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,300,0,1
|
||||
D2481F55-7696-4D2D-95BB-49F13CE400A5,D2481F55-7696-4D2D-95BB-49F13CE400A5,2126,25029,P26.006,SUMITOMO CORPORATION - PPC,USD,12/18/2025,NET 7,,NCSW,10,10,18000,22000,Saganoseki,Stockton,FOB,manual,,2126 / H2SO4 FY 2025 / Default,line,H2SO4-Japan,Japan,20000,Mt,71,20000 Mt of sulphuric acid - Tel-quel,2/26/2026,3/12/2026,bldate,2/26/2026,Stephane Monnard, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,300,0,0
|
||||
1EE52682-41EF-4A02-BB55-B49B611110D1,1EE52682-41EF-4A02-BB55-B49B611110D1,2130,25027,P26.008,SAS International LLC,USD,12/22/2025,NET 15,,NCSW,5,5,12825,14175,Ilo,Mejillones,CFR,manual,,2130 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-Peru,Peru,13500,Mt,184,13500 Mt of sulphuric acid - Tel-quel,2/15/2026,2/26/2026,bldate,2/15/2026,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,0,0
|
||||
6EA11257-D4B3-4D82-9AE2-D759A0FC0495,6EA11257-D4B3-4D82-9AE2-D759A0FC0495,2131,MMC - Feb shipment ,P25.018,SUMITOMO CORPORATION - MMC,USD,12/29/2025,NET 7,,NCSW,0,0,2100,2100,Naoshima,Map Ta Phut,FOB,manual,,2131 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4-Japan,Japan,2100,Mt,0,2100 Mt of sulphuric acid - Tel-quel,3/1/2026,3/31/2026,bldate,3/1/2026,Stephane Monnard, ,0,H2SO4 FY 2025,Thailand FY25,Laycan,,0,6,300,1,0
|
||||
9789E145-8EEB-4E86-9805-64B158464B49,9789E145-8EEB-4E86-9805-64B158464B49,2151,25020E,P26.009 - 6kt,JINCHUAN,USD,1/5/2026,NET ,,NCSW,33.36,33.36,2996.46,5996.46,Fangcheng,Mejillones,FOB,manual,,2151 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-China,China,4496.46,Mt,160,4496.46 Mt of sulphuric acid - Tel-quel,12/21/2025,1/4/2026,bldate,1/5/2026,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,9,350,1,1
|
||||
79E29B5D-57B9-4723-977A-771D581994CA,79E29B5D-57B9-4723-977A-771D581994CA,2153,25022C,P25.007 - 100kt,"JINLONG COPPER CO., LTD",USD,1/5/2026,NET ,,NCSW,4.76,4.76,3500,3849.787,YIZHENG,Kakinada,FOB,manual,,2153 / H2SO4 FY 2025 / Default,line,H2SO4-China,China,3674.89,Mt,77.31,3674.89 Mt of sulphuric acid - Tel-quel,12/29/2025,1/5/2026,bldate,12/29/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,350,1,1
|
||||
EE1092B5-A611-4C21-BE22-FB08A91E9976,EE1092B5-A611-4C21-BE22-FB08A91E9976,2155,25019B,P25.028,"PEIFENG TECHNOLOGY & FERTILIZERS CO., LTD.",USD,1/6/2026,NET 5,,NCSW,10,10,2250,2750,Taichung,Gladstone,FOB,manual,,2155 / H2SO4 FY 2025 / Default,line,H2SO4-Taiwan,Taiwan,2500,Mt,101,2500 Mt of sulphuric acid - Tel-quel,12/24/2025,1/2/2026,bldate,12/24/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,200,0,1
|
||||
9B3D44F0-1322-471E-ADBA-D74C134F9842,9B3D44F0-1322-471E-ADBA-D74C134F9842,2157,25020D,P26.009 - 6kt,JINCHUAN,USD,1/7/2026,NET ,,NCSW,0,0,3000,3000,Fangcheng,Mejillones,FOB,manual,,2157 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-China,China,3000,Mt,160,3000 Mt of sulphuric acid - Tel-quel,12/21/2025,1/4/2026,bldate,1/5/2026,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,9,350,1,1
|
||||
|
6
Reference Data/python_project/loaders/Purchase_Fees.csv
Normal file
6
Reference Data/python_project/loaders/Purchase_Fees.csv
Normal file
@@ -0,0 +1,6 @@
|
||||
contract_number,contract_ref,line_sequence,product,supplier,currency,p_r,mode,price,unit
|
||||
1881,24970,1,Maritime Freight,MOL CHEMICAL TANKERS PTE. LTD. OF SINGAPORE,USD,PAY,Per qt,74.5,MT
|
||||
1881,24970,1,Profit sharing,SUMITOMO CORPORATION - PPC,USD,PAY,Per qt,0.5,MT
|
||||
1881,24970,1,BAF,MOL CHEMICAL TANKERS PTE. LTD. OF SINGAPORE,USD,REC,Per qt,2.43,MT
|
||||
1881,24970,1,Finance,TBD Supplier,USD,PAY,Per qt,0.25,MT
|
||||
1881,24970,1,P&I charterer's liability,FILHET - ALLARD MARITIME,USD,PAY,Per qt,0.06,MT
|
||||
|
@@ -0,0 +1,97 @@
|
||||
source_id,source_line_id,number,reference,our_reference,party_name,currency_code,sale_date,payment_term,warehouse_code,weight_basis,tol_min_pct,tol_max_pct,tol_min_qty,tol_max_qty,from_location_name,to_location_name,incoterm_name,invoice_method,description,comment,line_type,line_product_code,origin,line_quantity,line_unit_code,line_price,line_description,line_from_del,line_to_del,pricing_trigger,pricing_estimated_date,trader,operator,concentration,book,strategy,period_at,demurrage,laytime_hours,nor_extra_hours,pumping_rate,use_only_min_max,drop_remaining_quantity
|
||||
66D5F597-97BD-4EFD-A2A9-038BF6C73226,66D5F597-97BD-4EFD-A2A9-038BF6C73226,2059,25013B,S25.022,SUMITOMO CORPORATION THAILAND LTD.,USD,10/29/2025,NET 30,,NCSW,0,0,3600,3600,Naoshima,Samut Prakan,CFR,manual,,2059 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4-Japan,Japan,3600,Mt,92.63,3600 Mt of sulphuric acid - Tel-quel,11/21/2025,12/3/2025,bldate,11/21/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Thailand FY25,Laycan,,0,6,250,1,1
|
||||
54D2F300-3171-44F0-AED3-06CE02EB52CD,54D2F300-3171-44F0-AED3-06CE02EB52CD,1979,24980C,S25.009,INTERACID TRADING (CHILE) S.A.,USD,5/15/2025,NET ,,NCSW,0.15,0.15,6800,6820.844,YIZHENG,Mejillones,CFR,manual,,1979 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-China,China,6810.42,Mt,151,6810.42 Mt of sulphuric acid - Tel-quel,5/30/2025,6/15/2025,bldate,6/2/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,1
|
||||
E26CDF3E-2D71-4BA7-9F44-0754E467AEE4,E26CDF3E-2D71-4BA7-9F44-0754E467AEE4,1990,24998,S25.022,PT PUPUK ISKANDAR MUDA (PIM),USD,7/2/2025,NET ,,NCSW,0,10,7000,7700,Onahama,LHOKSEUMAWE,CFR,manual,,1990 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4-Japan,Japan,7000,Mt,122,7000 Mt of sulphuric acid - Tel-quel,8/21/2025,8/30/2025,bldate,8/21/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Thailand FY25,Laycan,,0,6,150,0,1
|
||||
A81C611E-4FB4-4424-AF6F-09288C5DA260,A81C611E-4FB4-4424-AF6F-09288C5DA260,1865,24993,S25.007 - 5'400MT,INTERACID TRADING (CHILE) S.A.,USD,12/2/2024,NET ,,NCSW,3.8,3.8,5004.264,5400,Onsan,Mejillones,CFR,manual,,1865 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-South Korea,South Korea,5202.13,Mt,155.25,5202.13 Mt of sulphuric acid - Tel-quel,8/7/2025,8/15/2025,bldate,8/7/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,1
|
||||
1337C892-B044-4D49-A407-10A48642248B,1337C892-B044-4D49-A407-10A48642248B,2029,25004,S25.009,INTERACID TRADING (CHILE) S.A.,USD,8/29/2025,NET ,,NCSW,0,0,7000,7000,,Mejillones,CFR,manual,,2029 / H2SO4 FY 2025 / Chile FY25,line,H2SO4,,7000,Mt,151,7000 Mt of sulphuric acid - Tel-quel,10/5/2025,10/14/2025,bldate,10/5/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,1
|
||||
E8649BF7-509E-4331-B68E-11ADCD4D47BD,E8649BF7-509E-4331-B68E-11ADCD4D47BD,1857,24984,S25.002 - 2/4,WILSON INTERNATIONAL TRADING PRIVATE LTD,USD,11/18/2024,NET ,,NCSW,10,10,18000,22000,Saganoseki,Tuticorin,CFR,manual,,1857 / H2SO4 FY 2025 / Default,line,H2SO4-Japan,Japan,20000,Mt,89.5,20000 Mt of sulphuric acid - Tel-quel,6/9/2025,6/19/2025,bldate,6/12/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,250,0,1
|
||||
A0212DCA-F052-41B9-8E65-12C02F7DAD93,A0212DCA-F052-41B9-8E65-12C02F7DAD93,2024,25002B,S25.030,WILSON INTERNATIONAL TRADING PRIVATE LTD,USD,8/14/2025,NET ,,NCSW,0,0,10000,10000,Fangcheng,Tuticorin,CFR,manual,,2024 / H2SO4 FY 2025 / Default,line,H2SO4-China,China,10000,Mt,109,10000 Mt of sulphuric acid - Tel-quel,10/10/2025,10/30/2025,bldate,10/26/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,250,1,1
|
||||
C37652E5-6428-4FA0-A96F-15B4FD202D8C,C37652E5-6428-4FA0-A96F-15B4FD202D8C,2154,25022BC,,COROMANDEL INTERNATIONAL LTD,USD,1/5/2026,NET ,,NCSW,2.62,2.62,13000,13699.574,YIZHENG,Kakinada,CFR,manual,,2154 / H2SO4 FY 2025 / Default,line,H2SO4-China,China,13349.79,Mt,150,13349.8 Mt of sulphuric acid - Tel-quel,11/20/2025,1/5/2026,bldate,11/29/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,300,1,1
|
||||
1878BFAA-DD1E-4DDE-AFD6-16A140779834,1878BFAA-DD1E-4DDE-AFD6-16A140779834,1872,24990B,S25.009,INTERACID TRADING (CHILE) S.A.,USD,12/5/2024,NET ,,NCSW,0,0,18000,18000,Fangcheng,Mejillones,CFR,manual,,1872 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-China,China,18000,Mt,151,18000 Mt of sulphuric acid - Tel-quel,9/1/2025,9/30/2025,bldate,9/1/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,1
|
||||
6479690A-1F96-40C6-9BF5-18EF976F1A23,6479690A-1F96-40C6-9BF5-18EF976F1A23,1981,24987,,SUMITOMO AUSTRALIA,USD,5/15/2025,NET 30,,NCSW,0,10,10000,11000,YIZHENG,Gladstone,CFR,manual,,1981 / H2SO4 FY 2025 / Default,line,H2SO4-China,China,10000,Mt,156,10000 Mt of sulphuric acid - Tel-quel,6/8/2025,6/18/2025,bldate,6/8/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,350,0,1
|
||||
5CF1D8F3-B422-4A75-BDAE-1906180D0686,5CF1D8F3-B422-4A75-BDAE-1906180D0686,1863,25001B,S25.006,INTERACID TRADING (CHILE) S.A.,USD,12/2/2024,NET ,,NCSW,0,0,3013.861,3013.861,Rugao,Mejillones,CFR,manual,,1863 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-China,China,3013.86,Mt,153.75,3013.86 Mt of sulphuric acid - Tel-quel,10/1/2025,10/20/2025,bldate,10/1/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,1
|
||||
8DFF92DE-7487-464C-8AFC-19968680C1A6,8DFF92DE-7487-464C-8AFC-19968680C1A6,2035,25020B,S25.027,INTERACID TRADING (CHILE) S.A.,USD,9/5/2025,NET 30,,NCSW,0,0,1000,1000,Fangcheng,Mejillones,CFR,manual,,2035 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-China,China,1000,Mt,166.5,1000 Mt of sulphuric acid - Tel-quel,12/24/2025,1/5/2026,bldate,1/5/2026,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,1
|
||||
98E12607-43B1-4CDE-B90D-1CE85520DC5B,98E12607-43B1-4CDE-B90D-1CE85520DC5B,2010,25019A,,SUMITOMO AUSTRALIA,USD,7/23/2025,NET ,,NCSW,10,10,6750,8250,Taichung,Gladstone,CFR,manual,,2010 / H2SO4 FY 2025 / Default,line,H2SO4,,7500,Mt,188,7500 Mt of sulphuric acid - Tel-quel,12/24/2025,1/2/2026,bldate,12/24/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,0,350,0,1
|
||||
62DF7DF0-B346-4D51-B68B-1F60D3D156FF,62DF7DF0-B346-4D51-B68B-1F60D3D156FF,1860,24991,S25.003,INTERACID TRADING (CHILE) S.A.,USD,11/18/2024,NET 30,,NCSW,10,10,13500,16500,Callao,Mejillones,CFR,manual,,1860 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-Peru,Peru,15000,Mt,154,15000 Mt of sulphuric acid - Tel-quel,7/1/2025,7/10/2025,bldate,7/1/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,400,0,1
|
||||
7AE95778-8A2E-4C29-9A7F-20D97E96135D,7AE95778-8A2E-4C29-9A7F-20D97E96135D,1864,24986B,S25.005,INTERACID TRADING (CHILE) S.A.,USD,12/2/2024,NET 30,,NCSW,0,0,7002.904,7002.904,Fangcheng,Mejillones,CFR,manual,,1864 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-China,China,7002.9,Mt,151.5,7002.9 Mt of sulphuric acid - Tel-quel,7/5/2025,7/20/2025,bldate,7/5/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,1
|
||||
B235588D-27C5-4BA0-BEBE-2118CE9E4066,B235588D-27C5-4BA0-BEBE-2118CE9E4066,2055,25012,S25.028,INTERACID TRADING (CHILE) S.A.,USD,10/27/2025,NET ,,NCSW,0,0,6000,6000,Antwerpen,Mejillones,CFR,manual,,2055 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-Switzerland,Switzerland,6000,Mt,166.5,6000 Mt of sulphuric acid - Tel-quel,11/1/2025,11/30/2025,bldate,11/1/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,0
|
||||
6E2B3633-FD54-48E1-AB85-21CA60985DB5,6E2B3633-FD54-48E1-AB85-21CA60985DB5,2022,25011E,S25.028,INTERACID TRADING (CHILE) S.A.,USD,8/13/2025,NET 30,,NCSW,0,0,3761.818,3761.818,Pori,Mejillones,CFR,manual,,2022 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-Finland,Finland,3761.82,Mt,166.5,3761.82 Mt of sulphuric acid - Tel-quel,11/16/2025,11/25/2025,bldate,11/16/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,1
|
||||
8A8829E9-FDFC-4313-A2ED-2247AE21DF9E,8A8829E9-FDFC-4313-A2ED-2247AE21DF9E,1882,24970,S25.008,"INTERACID NORTH AMERICA, INC (Stockton)",USD,12/5/2024,NET 30,,NCSW,10,10,18000,22000,Saganoseki,Stockton,CFR,manual,,1882 / H2SO4 FY 2025 / Default,line,H2SO4-Japan,Japan,20000,Mt,112.07,20000 Mt of sulphuric acid - Tel-quel,4/1/2025,4/15/2025,bldate,4/1/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,12,350,0,1
|
||||
E3939A75-BA84-4372-95F6-229A1B93E0FD,E3939A75-BA84-4372-95F6-229A1B93E0FD,2036,24997B,S25.013,"INTERACID NORTH AMERICA, INC (Beaumont)",USD,9/8/2025,NET 30,,NCSW,0,0,16991.693,16991.693,Pori,Beaumont,CFR,manual,,2036 / H2SO4 FY 2025 / Default,line,H2SO4-Finland,Finland,16991.69,Mt,134.16,16991.7 Mt of sulphuric acid - Tel-quel,8/22/2025,9/4/2025,bldate,9/4/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,300,1,1
|
||||
95054417-F7E7-4B2E-8C30-2326D09A2A7F,95054417-F7E7-4B2E-8C30-2326D09A2A7F,1969,24988,S25.020,"TRICON ENERGY, LTD",USD,4/16/2025,NET 30,,NCSW,5,5,18050,19950,Pori,Pori,FOB,manual,,1969 / H2SO4 FY 2025 / Default,line,H2SO4-Finland,Finland,19000,Mt,100.5,19000 Mt of sulphuric acid - Tel-quel,6/25/2025,7/1/2025,bldate,6/25/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,450,0,1
|
||||
D1D5873F-AEE8-4FC3-8E3E-246C0690E421,D1D5873F-AEE8-4FC3-8E3E-246C0690E421,2041,25010,S25.032,"INTERACID NORTH AMERICA, INC (Tampa)",USD,9/26/2025,NET 30,,NCSW,5,5,18050,19950,Bandirma,Tampa,CFR,manual,,2041 / H2SO4 FY 2025 / Default,line,H2SO4-Turkey,Turkey,19000,Mt,105,19000 Mt of sulphuric acid - Tel-quel,10/15/2025,10/25/2025,bldate,10/15/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,350,0,1
|
||||
750C37B2-488A-4649-A246-2B1F75D1E973,750C37B2-488A-4649-A246-2B1F75D1E973,1900,24980A,S25.011,INTERACID TRADING (CHILE) S.A.,USD,12/5/2024,NET 30,,NCSW,5,5,19000,21000,YIZHENG,Mejillones,CFR,manual,,1900 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-China,China,20000,Mt,179,20000 Mt of sulphuric acid - Tel-quel,5/30/2025,6/15/2025,bldate,6/2/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,0,1
|
||||
1E35F517-989C-41A2-9A31-2B22AD6CC378,1E35F517-989C-41A2-9A31-2B22AD6CC378,2011,25022A,,COROMANDEL INTERNATIONAL LTD,USD,7/23/2025,NET ,,NCSW,0,0,6000,6000,YIZHENG,Visakhapatnam,CFR,manual,,2011 / H2SO4 FY 2025 / Default,line,H2SO4-China,China,6000,Mt,150,6000 Mt of sulphuric acid - Tel-quel,11/20/2025,1/5/2026,bldate,11/29/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,300,1,1
|
||||
8C859564-DBBD-4B8B-8D05-2B545E56759F,8C859564-DBBD-4B8B-8D05-2B545E56759F,2108,LXML FEB 2026,S25.010,SUMITOMO CORPORATION THAILAND LTD.,USD,12/5/2025,NET ,,NCSW,0,0,5000,5000,Naoshima,Samut Prakan,CFR,manual,,2108 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4-Japan,Japan,5000,Mt,0,5000 Mt of sulphuric acid - Tel-quel,3/1/2026,3/31/2026,bldate,3/1/2026,Stephane Monnard, ,0,H2SO4 FY 2025,Thailand FY25,Laycan,,0,6,250,1,0
|
||||
F98601FE-6702-45D5-8162-2EC487DFB70F,F98601FE-6702-45D5-8162-2EC487DFB70F,1899,24971A,S25.011,INTERACID TRADING (CHILE) S.A.,USD,12/5/2024,NET 30,,NCSW,5,5,19000,21000,Fangcheng,Mejillones,CFR,manual,,1899 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-China,China,20000,Mt,168.5367,20000 Mt of sulphuric acid - Tel-quel,4/13/2025,5/2/2025,bldate,4/13/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,0,1
|
||||
0CD8121A-C444-4A99-B628-325C172F8BCE,0CD8121A-C444-4A99-B628-325C172F8BCE,1886,24985,S25.008,"INTERACID NORTH AMERICA, INC (Stockton)",USD,12/5/2024,NET 30,,NCSW,10,10,18000,22000,Saganoseki,Stockton,CFR,manual,,1886 / H2SO4 FY 2025 / Default,line,H2SO4-Japan,Japan,20000,Mt,113.6,20000 Mt of sulphuric acid - Tel-quel,6/15/2025,6/30/2025,bldate,6/17/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,12,350,0,1
|
||||
1D38048B-3374-47B2-BF0D-33107388AB66,1D38048B-3374-47B2-BF0D-33107388AB66,1974,24983,,SAS International LLC,USD,4/22/2025,NET 20,,NCSW,3.3,3.3,11000,11750,YIZHENG,Jorf Lasfar,FOB,manual,,1974 / H2SO4 FY 2025 / Default,line,H2SO4-China,China,11375,Mt,74,11375 Mt of sulphuric acid - Tel-quel,5/15/2025,5/24/2025,bldate,5/15/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Default,Laycan,,0,0,300,1,1
|
||||
B68DEB06-187A-4254-A4DF-334F1BCB36AC,B68DEB06-187A-4254-A4DF-334F1BCB36AC,2054,25018,S25.008,"INTERACID NORTH AMERICA, INC (Stockton)",USD,10/16/2025,NET 30,,NCSW,10,10,17100,20900,Naoshima,Stockton,CFR,manual,,2054 / H2SO4 FY 2025 / Default,line,H2SO4-Japan,Japan,19000,Mt,141.11,19000 Mt of sulphuric acid - Tel-quel,12/15/2025,12/24/2025,bldate,12/20/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Default,Laycan,,0,12,350,0,1
|
||||
80856C25-A3C3-48A7-A4F3-3539EF7361C1,80856C25-A3C3-48A7-A4F3-3539EF7361C1,2075,25020C,S26.003,INTERACID TRADING (CHILE) S.A.,USD,11/17/2025,NET 30,,NCSW,0,0,6000,6000,Fangcheng,Mejillones,CFR,manual,,2075 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-China,China,6000,Mt,173,6000 Mt of sulphuric acid - Tel-quel,12/25/2025,1/5/2026,bldate,1/5/2026,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,1
|
||||
EE5C012C-90A6-482A-8A2F-354016B9F912,EE5C012C-90A6-482A-8A2F-354016B9F912,2163,25026B,S26.010,INTERACID TRADING (CHILE) S.A.,USD,1/19/2026,NET ,,NCSW,0,0,2761.818,2761.818,Callao,Mejillones,CFR,manual,,2163 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-Peru,Peru,2761.82,Mt,172,2761.82 Mt of sulphuric acid - Tel-quel,1/29/2026,2/9/2026,bldate,1/29/2026,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,400,1,1
|
||||
2C2E3879-D5F1-4039-A2D1-35C1F243DCDC,2C2E3879-D5F1-4039-A2D1-35C1F243DCDC,2026,25011C,S25.003,INTERACID TRADING (CHILE) S.A.,USD,8/25/2025,NET 30,,NCSW,0,0,989.659,989.659,Callao,Mejillones,CFR,manual,,2026 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-Finland,Finland,989.66,Mt,154,989.659 Mt of sulphuric acid - Tel-quel,11/16/2025,11/25/2025,bldate,11/16/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,1
|
||||
DB8FA818-430A-4783-91A7-41B4A9D82C7B,DB8FA818-430A-4783-91A7-41B4A9D82C7B,1982,25001C,S25.023,INTERACID TRADING (CHILE) S.A.,USD,6/2/2025,NET ,,NCSW,0,0,5000,5000,Rugao,Mejillones,CFR,manual,,1982 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-China,China,5000,Mt,161.5,5000 Mt of sulphuric acid - Tel-quel,10/1/2025,10/20/2025,bldate,10/1/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,1
|
||||
956A2622-D5C4-4EB0-9780-4227829661DD,956A2622-D5C4-4EB0-9780-4227829661DD,1952,24977,S25.006,INTERACID TRADING (CHILE) S.A.,USD,3/3/2025,NET 30,,NCSW,1.69,1.69,14500,15000,Callao,Mejillones,CIF,manual,,1952 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-Peru,Peru,14750,Mt,153.75,14750 Mt of sulphuric acid - Tel-quel,4/18/2025,4/28/2025,bldate,4/18/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,400,1,1
|
||||
92423C06-CBEB-4032-B848-4634C7640B46,92423C06-CBEB-4032-B848-4634C7640B46,2037,25011D,S25.031,INTERACID TRADING (CHILE) S.A.,USD,9/23/2025,NET 30,,NCSW,0,0,5000,5000,Pori,Mejillones,CFR,manual,,2037 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-Finland,Finland,5000,Mt,150,5000 Mt of sulphuric acid - Tel-quel,11/16/2025,11/25/2025,bldate,11/16/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,1
|
||||
5B7A5B00-78A5-48E4-BB20-467AC75FA58C,5B7A5B00-78A5-48E4-BB20-467AC75FA58C,2152,25020E,S26.012,INTERACID TRADING (CHILE) S.A.,USD,1/5/2026,NET 30,,NCSW,33.39,33.39,2996.46,6000,Fangcheng,Mejillones,CFR,manual,,2152 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-China,China,4498.23,Mt,172,4498.23 Mt of sulphuric acid - Tel-quel,12/25/2025,1/5/2026,bldate,1/5/2026,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,1
|
||||
3746BE78-C40E-44C9-9435-4997BED9B2E5,3746BE78-C40E-44C9-9435-4997BED9B2E5,1923,24974A,S25.012,INTERACID TRADING (CHILE) S.A.,USD,12/19/2024,NET 30,,NCSW,0,0,12000,12000,Ilo,Mejillones,CFR,manual,,1923 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-Mexico,Mexico,12000,Mt,145,12000 Mt of sulphuric acid - Tel-quel,5/5/2025,5/11/2025,bldate,5/5/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,1
|
||||
B0D34AB6-6D63-48A7-9A44-4D70E76DC9DF,B0D34AB6-6D63-48A7-9A44-4D70E76DC9DF,2169,NFC - PROMMITR Feb/mar,S25.022,NFC PUBLIC COMPANY LTD - PROMMITR,USD,2/11/2026,NET ,,NCSW,0,0,2100,2100,Naoshima,Map Ta Phut,CFR,manual,,2169 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4-Japan,Japan,2100,Mt,0,2100 Mt of sulphuric acid - Tel-quel,3/1/2026,3/31/2026,bldate,3/1/2026,Stephane Monnard, ,0,H2SO4 FY 2025,Thailand FY25,Laycan,,0,6,250,1,0
|
||||
0BDB0387-1BD5-4089-997E-4E65C3159FC9,0BDB0387-1BD5-4089-997E-4E65C3159FC9,2089,25015,S26.007,"INTERACID NORTH AMERICA, INC (Tampa)",USD,11/21/2025,NET 30,,NCSW,5,5,18050,19950,Ronnskar,Beaumont,CFR,manual,,2089 / H2SO4 FY 2025 / Default,line,H2SO4-Finland,Finland,19000,Mt,133,19000 Mt of sulphuric acid - Tel-quel,11/25/2025,12/4/2025,bldate,11/25/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,350,0,1
|
||||
F1443208-2B7D-4608-97F5-54E286DED73B,F1443208-2B7D-4608-97F5-54E286DED73B,2164,25027,S26.010,INTERACID TRADING (CHILE) S.A.,USD,1/19/2026,NET 30,,NCSW,0,0,13500,13500,Laizhou,Mejillones,CFR,manual,,2164 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-China,China,13500,Mt,172,13500 Mt of sulphuric acid - Tel-quel,2/15/2026,2/26/2026,bldate,2/15/2026,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,0
|
||||
8504C8B4-FA3D-472F-A480-598E8603025B,8504C8B4-FA3D-472F-A480-598E8603025B,2023,25006,S25.016,"Agrifields DMCC, Dubai",USD,8/14/2025,NET ,,NCSW,5,5,9500,10500,Rugao,Isabel,CIF,manual,,2023 / H2SO4 FY 2025 / Default,line,H2SO4-China,China,10000,Mt,110,10000 Mt of sulphuric acid - Tel-quel,10/1/2025,10/15/2025,bldate,10/1/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,200,0,1
|
||||
F7599096-F3E6-4C9B-8BBF-5AE632CD5BC7,F7599096-F3E6-4C9B-8BBF-5AE632CD5BC7,2119,25020D,S26.003,INTERACID TRADING (CHILE) S.A.,USD,12/9/2025,NET 30,,NCSW,0,0,3000,3000,Fangcheng,Mejillones,CFR,manual,,2119 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-China,China,3000,Mt,173,3000 Mt of sulphuric acid - Tel-quel,12/25/2025,1/6/2026,bldate,1/5/2026,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,1
|
||||
64BC5C49-3E1C-4539-883D-61B88878FD68,64BC5C49-3E1C-4539-883D-61B88878FD68,2074,25026,S26.004,INTERACID TRADING (CHILE) S.A.,USD,11/12/2025,NET 30,,NCSW,0,0,14000,14000,,Mejillones,CFR,manual,,2074 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-Peru,Peru,14000,Mt,168.75,14000 Mt of sulphuric acid - Tel-quel,1/29/2026,2/9/2026,bldate,1/29/2026,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,400,1,1
|
||||
24B02E69-1AEA-43D8-B61B-61D530C93990,24B02E69-1AEA-43D8-B61B-61D530C93990,1669,24986A,S25.009,INTERACID TRADING (CHILE) S.A.,USD,2/29/2024,NET 30,,NCSW,0,0,20997.096,20997.096,Fangcheng,Mejillones,CFR,manual,,1669 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-China,China,20997.1,Mt,151,20997.1 Mt of sulphuric acid - Tel-quel,7/5/2025,7/20/2025,bldate,7/5/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,1
|
||||
DA1CF8A6-AF4B-4CA8-AD7F-626E8CA9BAB2,DA1CF8A6-AF4B-4CA8-AD7F-626E8CA9BAB2,1961,24973B,S25.009 - 120kt,INTERACID TRADING (CHILE) S.A.,USD,4/2/2025,NET 30,,NCSW,0,0,20,20,Callao,Mejillones,CFR,manual,,1961 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-Peru,Peru,20,Mt,151,20 Mt of sulphuric acid - Tel-quel,3/20/2025,4/4/2025,bldate,4/3/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,400,1,1
|
||||
A146D2F0-B3BD-46D1-860F-6372963A9E34,A146D2F0-B3BD-46D1-860F-6372963A9E34,1996,25024A,S25.022,NFC PUBLIC COMPANY LTD - PROMMITR,USD,7/2/2025,NET 30,,NCSW,0,0,5000,5000,Naoshima,Map Ta Phut,CFR,manual,,1996 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4-Japan,Japan,5000,Mt,132.91,5000 Mt of sulphuric acid - Tel-quel,1/15/2026,1/24/2026,bldate,1/15/2026,Stephane Monnard, ,0,H2SO4 FY 2025,Thailand FY25,Laycan,,0,6,250,1,1
|
||||
C5B9F1F5-9A11-429B-8D6F-63BA7EFB01C1,C5B9F1F5-9A11-429B-8D6F-63BA7EFB01C1,1874,25011A,S25.009,INTERACID TRADING (CHILE) S.A.,USD,12/5/2024,NET 30,,NCSW,0,0,8090.858,8090.858,Pori,Mejillones,CFR,manual,,1874 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-Finland,Finland,8090.86,Mt,151,8090.86 Mt of sulphuric acid - Tel-quel,11/16/2025,11/25/2025,bldate,11/16/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,1
|
||||
95E2978F-EE5F-4DA8-A68C-63CE130FDB3C,95E2978F-EE5F-4DA8-A68C-63CE130FDB3C,2021,25016,S25.027,INTERACID TRADING (CHILE) S.A.,USD,8/12/2025,NET 30,,NCSW,3.33,3.33,14500,15500,Callao,Mejillones,CFR,manual,,2021 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-Peru,Peru,15000,Mt,166.5,15000 Mt of sulphuric acid - Tel-quel,12/3/2025,12/13/2025,bldate,12/3/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,400,1,1
|
||||
58F2BAC4-54FE-4FB7-A986-67490F1F2362,58F2BAC4-54FE-4FB7-A986-67490F1F2362,2004,25020A,S26.005,INTERACID TRADING (CHILE) S.A.,USD,11/17/2025,NET 30,,NCSW,5.26,5.26,18000,20000,Fangcheng,Mejillones,CFR,manual,,2004 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-China,China,19000,Mt,178,19000 Mt of sulphuric acid - Tel-quel,12/24/2025,1/5/2026,bldate,1/5/2026,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,1
|
||||
2F8EBC2B-DA6C-4317-9709-6A6E8DA0E7D7,2F8EBC2B-DA6C-4317-9709-6A6E8DA0E7D7,2020,25001D,S25.009,INTERACID TRADING (CHILE) S.A.,USD,8/11/2025,NET ,,NCSW,18.64,18.64,2400,3500,Rugao,Mejillones,CFR,manual,,2020 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-China,China,2950,Mt,151,2950 Mt of sulphuric acid - Tel-quel,10/1/2025,10/20/2025,bldate,10/1/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,1
|
||||
6720D794-1509-42A2-8C30-6BFC83E30CC8,6720D794-1509-42A2-8C30-6BFC83E30CC8,1994,25013A,S25.022,NFC PUBLIC COMPANY LTD - PROMMITR,USD,7/2/2025,NET ,,NCSW,0,0,5000,5000,Naoshima,Map Ta Phut,CFR,manual,,1994 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4-Japan,Japan,5000,Mt,89.62,5000 Mt of sulphuric acid - Tel-quel,11/21/2025,12/3/2025,bldate,11/21/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Thailand FY25,Laycan,,0,6,250,1,1
|
||||
F858EE88-3AF3-45AB-99F4-6CE3A3B5C388,F858EE88-3AF3-45AB-99F4-6CE3A3B5C388,2165,25028C,S26.006,INTERACID TRADING (CHILE) S.A.,USD,1/19/2026,NET 30,,NCSW,0,0,6770.364,6770.364,,Mejillones,CFR,manual,,2165 / H2SO4 FY 2025 / Chile FY25,line,H2SO4,,6770.36,Mt,172.5,6770.36 Mt of sulphuric acid - Tel-quel,2/19/2026,2/28/2026,bldate,2/19/2026,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,400,1,0
|
||||
4915E268-8C1A-4D13-A41C-6D7727F888DF,4915E268-8C1A-4D13-A41C-6D7727F888DF,2092,25028B,S26.008 - 5'100MT,INTERACID TRADING (CHILE) S.A.,USD,12/1/2025,NET 30,,NCSW,0,0,5100,5100,,Mejillones,CFR,manual,,2092 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-Finland,Finland,5100,Mt,174.75,5100 Mt of sulphuric acid - Tel-quel,2/20/2026,2/28/2026,bldate,2/20/2026,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,400,1,0
|
||||
8133993D-4139-489E-BCC7-724984D2E9A8,8133993D-4139-489E-BCC7-724984D2E9A8,1960,24978,S25.018,"INTERACID NORTH AMERICA, INC (Beaumont)",USD,3/28/2025,NET 30,,NCSW,10,10,17100,20900,Bandirma,Beaumont,CFR,manual,,1960 / H2SO4 FY 2025 / Default,line,H2SO4-Turkey,Turkey,19000,Mt,141.9,19000 Mt of sulphuric acid - Tel-quel,4/28/2025,5/8/2025,bldate,5/1/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,300,0,1
|
||||
E254C586-E637-4ED8-B518-74555103DBB1,E254C586-E637-4ED8-B518-74555103DBB1,1948,24972B,S24.025,SUMITOMO CORPORATION THAILAND LTD.,USD,2/26/2025,NET ,,NCSW,0,0,2700,2700,Onahama,Samut Prakan,CFR,manual,,1948 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4-Japan,Japan,2700,Mt,82.85,2700 Mt of sulphuric acid - Tel-quel,3/20/2025,4/2/2025,bldate,4/2/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Thailand FY25,Laycan,,0,6,250,1,1
|
||||
4A840D36-9667-4627-87BB-774990BEA3EA,4A840D36-9667-4627-87BB-774990BEA3EA,1950,24971B,S25.005,INTERACID TRADING (CHILE) S.A.,USD,3/3/2025,NET 30,,NCSW,13.04,13.04,10000,13000,Fangcheng,Mejillones,CFR,manual,,1950 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-China,China,11500,Mt,151.5,11500 Mt of sulphuric acid - Tel-quel,4/13/2025,5/2/2025,bldate,4/13/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,1
|
||||
D3624F6F-03B4-4C34-A059-790D82BEDF14,D3624F6F-03B4-4C34-A059-790D82BEDF14,1951,24974B,S25.006,INTERACID TRADING (CHILE) S.A.,USD,3/3/2025,NET ,,NCSW,0,0,2479.256,2479.256,Ilo,Mejillones,CFR,manual,,1951 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-Mexico,Mexico,2479.26,Mt,153.75,2479.26 Mt of sulphuric acid - Tel-quel,5/5/2025,5/11/2025,bldate,5/5/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,1
|
||||
A644740E-F2FA-4057-862A-7A109E2C24B7,A644740E-F2FA-4057-862A-7A109E2C24B7,1858,24992,S25.002 - 3/4,WILSON INTERNATIONAL TRADING PRIVATE LTD,USD,11/18/2024,NET 30,,NCSW,10,10,18000,22000,Rugao,Tuticorin,CFR,manual,,1858 / H2SO4 FY 2025 / Default,line,H2SO4-China,China,20000,Mt,89.5,20000 Mt of sulphuric acid - Tel-quel,7/18/2025,7/30/2025,bldate,7/18/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,250,0,1
|
||||
C3FA8621-31AA-4F00-A27F-7A1E856AC0E4,C3FA8621-31AA-4F00-A27F-7A1E856AC0E4,1762,24972A,S24.025,NFC PUBLIC COMPANY LTD - PROMMITR,USD,5/15/2024,NET ,,NCSW,0,0,5000,5000,,Map Ta Phut,CFR,manual,,1762 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4-Japan,Japan,5000,Mt,86.3,5000 Mt of sulphuric acid - Tel-quel,3/20/2025,4/2/2025,bldate,4/2/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Thailand FY25,Laycan,,0,6,250,1,1
|
||||
1BE70D36-8272-405A-9820-7AF8F7B70D95,1BE70D36-8272-405A-9820-7AF8F7B70D95,2132,NFC - PROMMITR Feb,S25.022,NFC PUBLIC COMPANY LTD - PROMMITR,USD,12/29/2025,NET 30,,NCSW,0,0,2400,2400,,Map Ta Phut,CFR,manual,,2132 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4-Japan,Japan,2400,Mt,0,2400 Mt of sulphuric acid - Tel-quel,3/1/2026,3/31/2026,bldate,3/1/2026,Stephane Monnard, ,0,H2SO4 FY 2025,Thailand FY25,Laycan,,0,6,250,1,0
|
||||
B3A78D90-FADD-4004-A50C-7CCA6FB8A785,B3A78D90-FADD-4004-A50C-7CCA6FB8A785,2046,25007C,S25.022,SUMITOMO CORPORATION THAILAND LTD.,USD,9/26/2025,NET 30,,NCSW,0,0,1000,1000,Naoshima,Samut Prakan,CFR,manual,,2046 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4-Japan,Japan,1000,Mt,91.93,1000 Mt of sulphuric acid - Tel-quel,9/30/2025,10/5/2025,bldate,10/1/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Thailand FY25,Laycan,,0,6,250,1,1
|
||||
5A0514CB-E74F-4149-A4CE-82367A3F75C8,5A0514CB-E74F-4149-A4CE-82367A3F75C8,2025,25011B,S25.007 - 5'400MT,INTERACID TRADING (CHILE) S.A.,USD,8/18/2025,NET ,,NCSW,0,0,395.746,395.746,Pori,Mejillones,CFR,manual,,2025 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-Finland,Finland,395.75,Mt,155.25,395.746 Mt of sulphuric acid - Tel-quel,11/16/2025,11/25/2025,bldate,11/16/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,1
|
||||
22557B77-FE65-455C-B6D0-8764AD98F44F,22557B77-FE65-455C-B6D0-8764AD98F44F,1892,24999,S25.008,"INTERACID NORTH AMERICA, INC (Stockton)",USD,12/5/2024,NET 30,,NCSW,10,10,18000,22000,Hibi,Stockton,CFR,manual,,1892 / H2SO4 FY 2025 / Default,line,H2SO4-Japan,Japan,20000,Mt,111.25,20000 Mt of sulphuric acid - Tel-quel,9/7/2025,9/22/2025,bldate,9/7/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,12,350,0,1
|
||||
5D7BCD40-5B9B-4916-BCFC-879853C515A2,5D7BCD40-5B9B-4916-BCFC-879853C515A2,2048,25014,S25.034,"INTERACID NORTH AMERICA, INC (Tampa)",USD,9/26/2025,NET 30,,NCSW,5,5,18050,19950,Huelva,Tampa,CFR,manual,,2048 / H2SO4 FY 2025 / Default,line,H2SO4-Spain,Spain,19000,Mt,108.9,19000 Mt of sulphuric acid - Tel-quel,11/14/2025,11/20/2025,bldate,11/14/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,300,0,1
|
||||
476625FC-678E-4C66-A624-8E19B7CD62B1,476625FC-678E-4C66-A624-8E19B7CD62B1,2002,24995,S25.023,INTERACID TRADING (CHILE) S.A.,USD,7/21/2025,NET 30,,NCSW,0,0,5500,5500,YIZHENG,Mejillones,CFR,manual,,2002 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-China,China,5500,Mt,161.5,5500 Mt of sulphuric acid - Tel-quel,8/19/2025,8/26/2025,bldate,8/19/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,1
|
||||
B71AD060-A147-4C3E-8862-99F6074CA991,B71AD060-A147-4C3E-8862-99F6074CA991,2056,25026C,S25.028,INTERACID TRADING (CHILE) S.A.,USD,10/27/2025,NET 20,,NCSW,0,0,238.182,238.182,Pori,Mejillones,CFR,manual,,2056 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-Finland,Finland,238.18,Mt,166.5,238.182 Mt of sulphuric acid - Tel-quel,1/1/2026,1/31/2026,bldate,1/1/2026,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,1
|
||||
F667050F-DCF4-469D-A929-9A29664F47E8,F667050F-DCF4-469D-A929-9A29664F47E8,1927,24997A,S25.013,"INTERACID NORTH AMERICA, INC (Beaumont)",USD,12/20/2024,NET ,,NCSW,0,0,7000,7000,Pori,Tampa,CFR,manual,,1927 / H2SO4 FY 2025 / Default,line,H2SO4-Finland,Finland,7000,Mt,134.16,7000 Mt of sulphuric acid - Tel-quel,8/22/2025,9/4/2025,bldate,9/4/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,300,1,1
|
||||
D810DBBB-905E-4CEB-B51A-9FF1B74E9E8B,D810DBBB-905E-4CEB-B51A-9FF1B74E9E8B,2106,25024C,S25.010,SUMITOMO CORPORATION THAILAND LTD.,USD,12/4/2025,NET 30,,NCSW,0,0,3600,3600.2,Naoshima,Samut Prakan,CFR,manual,,2106 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4-Japan,Japan,3600.1,Mt,82.4,3600.1 Mt of sulphuric acid - Tel-quel,1/15/2026,1/24/2026,bldate,1/15/2026,Stephane Monnard, ,0,H2SO4 FY 2025,Thailand FY25,Laycan,,0,6,250,1,1
|
||||
CA5FAFBE-D721-4475-BE14-A8CAE2F722FF,CA5FAFBE-D721-4475-BE14-A8CAE2F722FF,1992,25007A,S25.022,NFC PUBLIC COMPANY LTD - PROMMITR,USD,7/2/2025,NET 30,,NCSW,0,0,5000,5000,Naoshima,Map Ta Phut,CFR,manual,,1992 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4-Japan,Japan,5000,Mt,92.29,5000 Mt of sulphuric acid - Tel-quel,9/30/2025,10/5/2025,bldate,10/1/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Thailand FY25,Laycan,,0,6,250,1,1
|
||||
561448D8-F135-4120-AE13-A92FEC264793,561448D8-F135-4120-AE13-A92FEC264793,1985,24994,S25.025,WILSON INTERNATIONAL TRADING PRIVATE LTD,USD,6/10/2025,NET ,,NCSW,16.28,16.28,18000,25000,YIZHENG,Tuticorin,CFR,manual,,1985 / H2SO4 FY 2025 / Default,line,H2SO4-China,China,21500,Mt,127.5,21500 Mt of sulphuric acid - Tel-quel,8/16/2025,8/28/2025,bldate,8/16/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,250,1,1
|
||||
2CDC3D15-AA94-4E61-A0E3-B099028D3820,2CDC3D15-AA94-4E61-A0E3-B099028D3820,1890,24989,S25.008,"INTERACID NORTH AMERICA, INC (Stockton)",USD,12/5/2024,NET 30,,NCSW,10,10,18000,22000,Hibi,Stockton,CFR,manual,,1890 / H2SO4 FY 2025 / Default,line,H2SO4-Japan,Japan,20000,Mt,111,20000 Mt of sulphuric acid - Tel-quel,7/12/2025,7/25/2025,bldate,7/12/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,12,350,0,1
|
||||
B2E3C472-214E-44AA-90C8-B3F724BCECCC,B2E3C472-214E-44AA-90C8-B3F724BCECCC,1901,25001A,S25.011,INTERACID TRADING (CHILE) S.A.,USD,12/5/2024,NET ,,NCSW,5,5,19000,21000,Rugao,Mejillones,CFR,manual,,1901 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-China,China,20000,Mt,144.5,20000 Mt of sulphuric acid - Tel-quel,10/1/2025,10/20/2025,bldate,10/1/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,0,1
|
||||
A1F4F994-9973-4C9B-8388-B73C867BBA4E,A1F4F994-9973-4C9B-8388-B73C867BBA4E,1859,25002A,S25.002 - 4/4,WILSON INTERNATIONAL TRADING PRIVATE LTD,USD,11/18/2024,NET ,,NCSW,10,10,18000,22000,Fangcheng,Tuticorin,CFR,manual,,1859 / H2SO4 FY 2025 / Default,line,H2SO4-China,China,20000,Mt,89.5,20000 Mt of sulphuric acid - Tel-quel,10/10/2025,10/30/2025,bldate,10/26/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,250,0,1
|
||||
535991DB-A1C1-468A-A0FD-BBDC2A472D10,535991DB-A1C1-468A-A0FD-BBDC2A472D10,1930,25005,S25.013,"INTERACID NORTH AMERICA, INC (Beaumont)",USD,12/20/2024,NET 30,,NCSW,3.9,3.9,18500,20000,Ronnskar,Beaumont,CFR,manual,,1930 / H2SO4 FY 2025 / Default,line,H2SO4-Finland,Finland,19250,Mt,115.31,19250 Mt of sulphuric acid - Tel-quel,10/11/2025,10/20/2025,bldate,10/11/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,350,1,1
|
||||
31E490C4-9FDC-49FA-8FC6-BE2492F40E8F,31E490C4-9FDC-49FA-8FC6-BE2492F40E8F,1955,24975,S25.016,"Agrifields DMCC, Dubai",USD,3/19/2025,NET ,,NCSW,10,10,9000,11000,Rugao,Isabel,CIF,manual,,1955 / H2SO4 FY 2025 / Default,line,H2SO4-China,China,10000,Mt,98.5,10000 Mt of sulphuric acid - Tel-quel,4/1/2025,4/15/2025,bldate,4/7/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,200,0,1
|
||||
596AB953-7CF3-4D54-A1BB-C166D124DA0D,596AB953-7CF3-4D54-A1BB-C166D124DA0D,2098,25024B,S25.022,SUMITOMO CORPORATION THAILAND LTD.,USD,12/3/2025,NET 30,,NCSW,0,0,1899.669,1899.669,Naoshima,Samut Prakan,CFR,manual,,2098 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4-Japan,Japan,1899.67,Mt,97.63,1899.67 Mt of sulphuric acid - Tel-quel,1/15/2026,1/24/2026,bldate,1/15/2026,Stephane Monnard, ,0,H2SO4 FY 2025,Thailand FY25,Laycan,,0,6,250,1,1
|
||||
D8EC09D9-A4D6-454B-A28F-C2D4EE6DA43B,D8EC09D9-A4D6-454B-A28F-C2D4EE6DA43B,2045,25007B,S25.022,SUMITOMO CORPORATION THAILAND LTD.,USD,9/26/2025,NET 30,,NCSW,20,20,1600,2400,Naoshima,Samut Prakan,CFR,manual,,2045 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4-Japan,Japan,2000,Mt,91.56,2000 Mt of sulphuric acid - Tel-quel,9/30/2025,10/5/2025,bldate,10/1/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Thailand FY25,Laycan,,0,6,250,1,1
|
||||
2EE91F22-B849-45EA-B730-C46E5ADF0A42,2EE91F22-B849-45EA-B730-C46E5ADF0A42,1968,24980B,S25.019,INTERACID TRADING (CHILE) S.A.,USD,4/6/2025,NET ,,NCSW,0,0,4000,4000,YIZHENG,Mejillones,CFR,manual,,1968 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-China,China,4000,Mt,166.5,4000 Mt of sulphuric acid - Tel-quel,5/30/2025,6/15/2025,bldate,6/2/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,1
|
||||
AD02F6AD-7BC9-4C74-A0DB-C522F8849811,AD02F6AD-7BC9-4C74-A0DB-C522F8849811,1940,24986C,S25.015,INTERACID TRADING (CHILE) S.A.,USD,1/23/2025,NET ,,NCSW,0,0,5000,5000,Fangcheng,Mejillones,CFR,manual,,1940 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-China,China,5000,Mt,151,5000 Mt of sulphuric acid - Tel-quel,7/5/2025,7/20/2025,bldate,7/5/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,1
|
||||
2866889F-60E9-4F99-A683-C61DFC477384,2866889F-60E9-4F99-A683-C61DFC477384,2123,25021,S26.014,"INTERACID NORTH AMERICA, INC (Stockton)",USD,12/10/2025,NET ,,NCSW,10,10,18000,22000,Saganoseki,Stockton,CFR,manual,,2123 / H2SO4 FY 2025 / Default,line,H2SO4-Japan,Japan,20000,Mt,137.22,20000 Mt of sulphuric acid - Tel-quel,1/7/2026,1/21/2026,bldate,1/7/2026,Stephane Monnard, ,0,H2SO4 FY 2025,Default,Laycan,,0,12,350,0,1
|
||||
55170A07-EF66-444B-BA8B-C8DBEC2B798C,55170A07-EF66-444B-BA8B-C8DBEC2B798C,1888,24996,S25.008,"INTERACID NORTH AMERICA, INC (Stockton)",USD,12/5/2024,NET 30,,NCSW,10,10,18000,22000,Hibi,Stockton,CFR,manual,,1888 / H2SO4 FY 2025 / Default,line,H2SO4-Japan,Japan,20000,Mt,110.65,20000 Mt of sulphuric acid - Tel-quel,8/15/2025,8/29/2025,bldate,8/15/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,12,350,0,1
|
||||
514D2631-3EE9-4823-AFF9-D35FD04FA837,514D2631-3EE9-4823-AFF9-D35FD04FA837,1973,24982/A,,NFC PUBLIC COMPANY LTD - PROMMITR,USD,4/17/2025,NET ,,NCSW,0,0,5000,5000,Onahama,Map Ta Phut,CFR,manual,,1973 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4-Japan,Japan,5000,Mt,81.55,5000 Mt of sulphuric acid - Tel-quel,5/8/2025,5/16/2025,bldate,5/9/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Thailand FY25,Laycan,,0,6,250,1,0
|
||||
38807FED-CFCB-46A4-92CB-D486FF03A0E9,38807FED-CFCB-46A4-92CB-D486FF03A0E9,2127,25029,S26.014,"INTERACID NORTH AMERICA, INC (Stockton)",USD,12/18/2025,NET 30,,NCSW,10,10,18000,22000,Saganoseki,Stockton,CFR,manual,,2127 / H2SO4 FY 2025 / Default,line,H2SO4-Japan,Japan,20000,Mt,138.5,20000 Mt of sulphuric acid - Tel-quel,2/26/2026,3/12/2026,bldate,2/26/2026,Stephane Monnard, ,0,H2SO4 FY 2025,Default,Laycan,,0,12,350,0,0
|
||||
AA26D240-DE88-4CFB-8F10-D87906F8F06D,AA26D240-DE88-4CFB-8F10-D87906F8F06D,1884,24979,S25.008,"INTERACID NORTH AMERICA, INC (Stockton)",USD,12/5/2024,NET 30,,NCSW,10,10,18000,22000,Saganoseki,Stockton,CFR,manual,,1884 / H2SO4 FY 2025 / Default,line,H2SO4-Japan,Japan,20000,Mt,115,20000 Mt of sulphuric acid - Tel-quel,5/18/2025,5/31/2025,bldate,5/18/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,12,350,0,1
|
||||
2AD80800-2F9D-4385-A450-DAFAC03161DC,2AD80800-2F9D-4385-A450-DAFAC03161DC,2078,25028A,S26.005,INTERACID TRADING (CHILE) S.A.,USD,11/17/2025,NET 30,,NCSW,0,0,20000,20000,Longkou,Mejillones,CFR,manual,,2078 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-Finland,Finland,20000,Mt,185,20000 Mt of sulphuric acid - 100%,2/18/2026,2/28/2026,bldate,2/18/2026,Jeremie Collot, ,100,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,400,1,0
|
||||
B8E115F4-A2F0-4A69-9321-DBF8BEC5D4D8,B8E115F4-A2F0-4A69-9321-DBF8BEC5D4D8,2042,25009,S25.033,PT PUPUK ISKANDAR MUDA (PIM),USD,9/26/2025,NET ,,NCSW,10,10,7200,8800,Onahama,LHOKSEUMAWE,CFR,manual,,2042 / H2SO4 FY 2025 / Default,line,H2SO4-China,China,8000,Mt,99,8000 Mt of sulphuric acid - Tel-quel,11/5/2025,11/15/2025,bldate,11/5/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,150,0,1
|
||||
49A55704-7DD9-4E46-8CAF-DC5CDF378831,49A55704-7DD9-4E46-8CAF-DC5CDF378831,2049,25023,S26.001,"INTERACID NORTH AMERICA, INC (Tampa)",USD,10/3/2025,NET 30,,NCSW,5,5,18050,19950,Huelva,Beaumont,CFR,manual,,2049 / H2SO4 FY 2025 / Default,line,H2SO4-Spain,Spain,19000,Mt,119.5,19000 Mt of sulphuric acid - Tel-quel,1/12/2026,1/20/2026,bldate,1/12/2026,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,350,0,1
|
||||
06ABB5D2-FBE7-40C8-9AB1-DE08949B4AFB,06ABB5D2-FBE7-40C8-9AB1-DE08949B4AFB,1861,25000,S25.003,INTERACID TRADING (CHILE) S.A.,USD,11/18/2024,NET ,,NCSW,10,10,13500,16500,Callao,Mejillones,CFR,manual,,1861 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-China,China,15000,Mt,154,15000 Mt of sulphuric acid - Tel-quel,8/20/2025,8/30/2025,bldate,8/20/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,400,0,1
|
||||
E657EC0E-1B75-4358-BE49-DE5E92D1C2EE,E657EC0E-1B75-4358-BE49-DE5E92D1C2EE,1850,24976,S25.001,"INTERACID NORTH AMERICA, INC (Tampa)",USD,11/1/2024,NET 30,,NCSW,5,5,18050,19950,Huelva,Tampa,CFR,manual,,1850 / H2SO4 FY 2025 / Default,line,H2SO4-Spain,Spain,19000,Mt,130.25,19000 Mt of sulphuric acid - Tel-quel,4/15/2025,4/21/2025,bldate,4/15/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,6,350,0,1
|
||||
1F3BE955-ECEF-4EA9-A017-DF317957E602,1F3BE955-ECEF-4EA9-A017-DF317957E602,1971,24982/B,,SUMITOMO CORPORATION THAILAND LTD.,USD,4/17/2025,NET 30,,NCSW,0,0,2350,2350,Onahama,Samut Prakan,CFR,manual,,1971 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4-Japan,Japan,2350,Mt,86.13,2350 Mt of sulphuric acid - Tel-quel,5/8/2025,5/16/2025,bldate,5/9/2025,Stephane Monnard, ,0,H2SO4 FY 2025,Thailand FY25,Laycan,,0,6,250,1,1
|
||||
A733030D-A266-4551-BF33-E6364A4EDA36,A733030D-A266-4551-BF33-E6364A4EDA36,2156,25019B,,SUMITOMO AUSTRALIA,USD,1/6/2026,NET 30,,NCSW,10,10,2250,2750,Taichung,Gladstone,CFR,manual,,2156 / H2SO4 FY 2025 / Default,line,H2SO4,,2500,Mt,179,2500 Mt of sulphuric acid - Tel-quel,12/24/2025,1/2/2026,bldate,12/24/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,0,350,0,0
|
||||
1E8C4039-E74B-4728-B2F6-E8A42FFACEC5,1E8C4039-E74B-4728-B2F6-E8A42FFACEC5,1924,24981,S25.005,INTERACID TRADING (CHILE) S.A.,USD,12/19/2024,NET 30,,NCSW,22.33,22.33,10000,15750,Rugao,Mejillones,CFR,manual,,1924 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-China,China,12875,Mt,151.5,12875 Mt of sulphuric acid - Tel-quel,4/28/2025,5/7/2025,bldate,5/1/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,1
|
||||
68013CF3-A4A1-4BE7-96FA-EFF922BDBF62,68013CF3-A4A1-4BE7-96FA-EFF922BDBF62,1958,24990A,S25.017,INTERACID TRADING (CHILE) S.A.,USD,3/21/2025,NET ,,NCSW,0,0,15000,15000,Fangcheng,Mejillones,CFR,manual,,1958 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-China,China,15000,Mt,159,15000 Mt of sulphuric acid - Tel-quel,9/1/2025,9/30/2025,bldate,9/1/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,1
|
||||
10994DA8-B0C7-4847-874D-F2E049D618CA,10994DA8-B0C7-4847-874D-F2E049D618CA,2091,25017,S26.009,"TRICON ENERGY, LTD",USD,12/1/2025,NET 30,,NCSW,5.26,5.26,9000,10000,Pori,Mejillones,CFR,manual,,2091 / H2SO4 FY 2025 / Chile FY25,line,H2SO4-Finland,Finland,9500,Mt,172.5,9500 Mt of sulphuric acid - Tel-quel,12/1/2025,12/31/2025,bldate,12/1/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Chile FY25,Laycan,,0,6,350,1,1
|
||||
2BF69450-9BF0-4EEB-A235-F518F4B300DB,2BF69450-9BF0-4EEB-A235-F518F4B300DB,1894,25003,S25.008,"INTERACID NORTH AMERICA, INC (Stockton)",USD,12/5/2024,NET 30,,NCSW,10,10,18000,22000,Saganoseki,Stockton,CFR,manual,,1894 / H2SO4 FY 2025 / Default,line,H2SO4-Japan,Japan,20000,Mt,146.23,20000 Mt of sulphuric acid - Tel-quel,10/1/2025,10/15/2025,bldate,10/1/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,12,350,0,1
|
||||
5C4CA64F-F7A1-4599-8B69-F699B4605669,5C4CA64F-F7A1-4599-8B69-F699B4605669,2125,25025,S26.014,"INTERACID NORTH AMERICA, INC (Stockton)",USD,12/18/2025,NET 30,,NCSW,10,10,18000,22000,Saganoseki,Stockton,CFR,manual,,2125 / H2SO4 FY 2025 / Default,line,H2SO4-Japan,Japan,20000,Mt,138.5,20000 Mt of sulphuric acid - Tel-quel,1/30/2026,2/13/2026,bldate,2/3/2026,Stephane Monnard, ,0,H2SO4 FY 2025,Default,Laycan,,0,12,350,0,1
|
||||
03D21F99-0AA1-4690-8C51-FC6F6FA25BE3,03D21F99-0AA1-4690-8C51-FC6F6FA25BE3,1897,25008,S25.008,"INTERACID NORTH AMERICA, INC (Stockton)",USD,12/5/2024,NET ,,NCSW,10,10,18000,22000,Hibi,Stockton,CFR,manual,,1897 / H2SO4 FY 2025 / Default,line,H2SO4-Japan,Japan,20000,Mt,140.32,20000 Mt of sulphuric acid - Tel-quel,11/1/2025,11/30/2025,bldate,11/1/2025,Jeremie Collot, ,0,H2SO4 FY 2025,Default,Laycan,,0,12,350,0,1
|
||||
|
41
Reference Data/python_project/loaders/Services.csv
Normal file
41
Reference Data/python_project/loaders/Services.csv
Normal file
@@ -0,0 +1,41 @@
|
||||
code,name,category,uom,sale_price,cost_price,description
|
||||
SWAP,SWAP,SERVICES,Mt,0,0,
|
||||
Time Charter Hire,Time Charter Hire,SERVICES,Mt,0,0,
|
||||
Paper allocation,Paper allocation,SERVICES,Mt,0,0,
|
||||
FX Difference,FX Difference,SERVICES,Mt,0,0,
|
||||
Freight Commission,Freight Commission,SERVICES,Mt,0,0,
|
||||
CFD,CFD,SERVICES,Mt,0,0,
|
||||
Deal Expense,Invoice Validation Deal Expense,SERVICES,Mt,0,0,
|
||||
Spot Owner Demurrage,Spot Owner Demurrage,SERVICES,Mt,0,0,
|
||||
Product Commission,Commission,SERVICES,Mt,0,0,
|
||||
Claim Amount,Claim Amount,SERVICES,Mt,0,0,
|
||||
Import Tax,Import Tax,SERVICES,Mt,0,0,
|
||||
Freight,Freight,SERVICES,Mt,0,0,
|
||||
Other,Other,SERVICES,Mt,0,0,
|
||||
Port Costs,Port Costs,SERVICES,Mt,0,0,
|
||||
Demurrage,Demurrage,SERVICES,Mt,0,0,
|
||||
Finance,Finance,SERVICES,Mt,0,0,
|
||||
Hedging,Hedging,SERVICES,Mt,0,0,
|
||||
Agency Fees,Agency Fees,SERVICES,Mt,0,0,
|
||||
Bunker Costs,Bunker Costs,SERVICES,Mt,0,0,
|
||||
EU ETS,EU ETS,SERVICES,Mt,0,0,
|
||||
Inspection,Inspection,SERVICES,Mt,0,0,
|
||||
Banking Charges,Banking Charges,SERVICES,Mt,0,0,
|
||||
Insurance,Insurance,SERVICES,Mt,0,0,
|
||||
Broker Fees,Broker Fees,SERVICES,Mt,0,0,
|
||||
Commision,Commision,SERVICES,Mt,0,0,
|
||||
LC Fees,LC Fees,SERVICES,Mt,0,0,
|
||||
Address Commission,Address Commission,SERVICES,Mt,0,0,
|
||||
Additional berth,Additional berth,SERVICES,Mt,0,0,
|
||||
P&I charterer's liability,P&I charterer's liability,SERVICES,Mt,0,0,
|
||||
BAF,Bunker adjustment factor,SERVICES,Mt,0,0,
|
||||
Negative ACCT,Negative ACCT,SERVICES,Mt,0,0,
|
||||
Positive ACCT,Positive ACCT,SERVICES,Mt,0,0,
|
||||
METI freight,METI,SERVICES,Mt,0,0,
|
||||
COAnalysis,COAnalysis,SERVICES,Mt,0,0,
|
||||
Expected Revenue,Expected Revenue,SERVICES,Mt,0,0,
|
||||
Premium + 5% Tax,Premium + 5% Tax,SERVICES,Mt,0,0,
|
||||
Future allocation,Future allocation,SERVICES,Mt,0,0,
|
||||
Profit sharing,Profit sharing,SERVICES,Mt,0,0,
|
||||
Deal Revenue,Invoice Validation Deal Revenue,SERVICES,Mt,0,0,
|
||||
Contract discount / premium,Contract discount / premium,SERVICES,Mt,0,0,
|
||||
|
@@ -0,0 +1,34 @@
|
||||
name,type,lat,lon
|
||||
Antwerpen,supplier,51.2309677570105,4.37805175781251
|
||||
Bandirma,supplier,40.369697136983,27.9607200622559
|
||||
Callao,supplier,-12.0144723358403,-77.1260833740236
|
||||
Donghae,supplier,0,0
|
||||
Fangcheng,supplier,21.6140258994842,108.322792053223
|
||||
Hamburg,supplier,53.57532,10.01534
|
||||
Hazira,supplier,21.125681,82.794998
|
||||
Hibi,supplier,34.39572,133.78713
|
||||
Huelva,supplier,37.26638,-6.94004
|
||||
Ilo,supplier,-17.6361697242517,-71.4935302734376
|
||||
Japan,supplier,37.3002752813444,138.515625
|
||||
Laizhou,supplier,37.2882577829837,119.795265197754
|
||||
Lazaro Cardenas,supplier,17.9073656185063,-102.127704620362
|
||||
Longkou,supplier,34.668138,104.165802
|
||||
Naoshima,supplier,34.4612772884371,134.009170532227
|
||||
Niihama,supplier,34.0407116420786,133.319778442383
|
||||
Ningde,supplier,26.6965451115852,119.752349853516
|
||||
Odda,supplier,52.133057,5.29525
|
||||
Onahama,supplier,36.9268411923102,140.907897949219
|
||||
Onsan,supplier,35.4601106672107,129.37671661377
|
||||
Pori,supplier,61.6450645681561,21.3828277587891
|
||||
Putian,supplier,0,0
|
||||
Qingdao,supplier,36.06605,120.36939
|
||||
Rizhao,supplier,0,0
|
||||
Ronnskar,supplier,64.6574030409482,21.2832641601563
|
||||
Rugao,supplier,32.0645374947014,120.495300292969
|
||||
Saganoseki,supplier,33.2411282959313,131.885418891907
|
||||
Taichung,supplier,0,0
|
||||
Tampa,supplier,27.897652653541,-82.4285316467287
|
||||
Ulsan,supplier,35.5366963783951,129.458084106446
|
||||
Yeosu,supplier,34.7416124988318,127.705078125
|
||||
YIZHENG,supplier,32.2244855374297,119.227289967737
|
||||
Zhangjiagang,supplier,31.96815,120.40019
|
||||
|
796
Reference Data/python_project/loaders/Vessels.csv
Normal file
796
Reference Data/python_project/loaders/Vessels.csv
Normal file
@@ -0,0 +1,796 @@
|
||||
vessel_name,vessel_year,vessel_imo
|
||||
Fairchem Blue Shark,2019,9804837
|
||||
Woojin Evelyn,2002,9269594
|
||||
Sigaia Theresa,2015,NULL
|
||||
Sc Hongkong,2001,9187904
|
||||
Chem Sol,2017,9739276
|
||||
Southern Vulture,2018,9852274
|
||||
Chembulk Yokohama,2003,9276248
|
||||
Fuji Galaxy,2010,9490301
|
||||
Ctg Bismuth,2016,9739290
|
||||
Navig8 Violette,2015,9690626
|
||||
Stolt Sun,2000,9149512
|
||||
Houyoshi Park,2016,9725847
|
||||
Beatrice,2013,9674763
|
||||
Yc Azalea,2004,9272682
|
||||
Argent Hibiscus,2010,9414266
|
||||
Fairchem Success,2017,9798648
|
||||
Pvt Sunrise,2011,9565742
|
||||
Stolt Inspiration,1997,9102083
|
||||
Tiger Reliance,2018,9800776
|
||||
Bow Hercules,2017,9752046
|
||||
Stolt Strength,2005,9311024
|
||||
Lila Vancouver,2008,9407067
|
||||
Southern Puma,2016,9792008
|
||||
Niseko Galaxy,2020,9804930
|
||||
Sun Freesia,2019,9570591
|
||||
Navig8 Ammolite,2015,9727534
|
||||
Qikiqtaaluk W.,2011,9421221
|
||||
Ginga Lynx,2009,9442550
|
||||
Lime Galaxy,2008,9380972
|
||||
Skarven,2009,9400394
|
||||
Davide B,2016,9721750
|
||||
Ginga Leopard,2008,9425992
|
||||
Eastern Quest,2009,9472749
|
||||
Halcon Trader,2016,9742053
|
||||
Bow Orion,2019,9818515
|
||||
Navig8 Azotic,2016,9719757
|
||||
Chemstar Sapphire,2019,9804904
|
||||
Chem Altamira,2015,9705744
|
||||
Chemroad Queen,2015,9737151
|
||||
Chem Sea 1,2016,9731729
|
||||
Dongyang Chemi,2001,9255969
|
||||
Shamrock Jupiter,2009,9416082
|
||||
Bow Faith,1997,9114232
|
||||
Brillante,2017,9743825
|
||||
Bow Summer,2005,9215270
|
||||
Team Sapphire,2004,9312406
|
||||
Mtm Santos,2015,9712606
|
||||
Asl Orchid,2011,9594157
|
||||
Sichem Mississipi,2008,9376658
|
||||
Chemroute Brilliant,2009,9442562
|
||||
Chemstar Jewel,2012,9624782
|
||||
Pacific Endeavor,2011,9490325
|
||||
Sky Ploeg,2015,9724441
|
||||
Chemroute Pegasus,2012,9566162
|
||||
Stolt Ajisai,2011,9477555
|
||||
Stolt Greenshank,2011,9518799
|
||||
Golden Hachi,2020,9874583
|
||||
Mtm Tortola,2016,9742065
|
||||
Sg Pegasus,2011,9494876
|
||||
Bow Sirius,2006,9215294
|
||||
Chemroad Echo,2004,9284685
|
||||
No.2 Asian Pioneer,2016,9730983
|
||||
Condor Trader,2016,9742077
|
||||
Sichem Palace,2004,9304318
|
||||
Mtm Colorado,2004,9278052
|
||||
Alessandro Dp,2007,9384162
|
||||
Kitikmeot W.,2010,9421219
|
||||
Nordic Ami,2019,9800051
|
||||
Golden Resolution,2014,9710074
|
||||
Stolt Kashi,2003,9266243
|
||||
Chem Wolverine,2006,9340439
|
||||
Golden Prelude,2021,9881079
|
||||
Cs Onsan,2013,9659684
|
||||
Mtm Amsterdam,2018,9776444
|
||||
Stolt Endurance,2004,9284697
|
||||
Celsius Mumbai,2005,9304332
|
||||
Stolt Invention,1997,9102100
|
||||
Chem Barcelona,2016,9725835
|
||||
Komodo Park,2024,9981465
|
||||
Eva Tokyo,2019,9865520
|
||||
Heung-a Pioneer,2008,9415478
|
||||
Golden Sky,2017,9792151
|
||||
Nordic Copenhagen,2019,9796810
|
||||
Stolt Pondo,2007,9374521
|
||||
Emanuele S,2007,9298363
|
||||
Giancarlo D,2016,9721748
|
||||
Chemraod Hawk,2018,9790622
|
||||
Sichem Marseill,2007,9378199
|
||||
Navig8 Victoria,2015,9690614
|
||||
Stolt Kiri,2003,9266231
|
||||
Royal Crystal 7,2007,9381330
|
||||
Stolt Groenland,2009,9414072
|
||||
Ensemble,2017,9749453
|
||||
Golden Yosa,2008,9407081
|
||||
Golden Wave,2019,9819911
|
||||
Sichem Iris,2008,9392183
|
||||
Fairchem Katana,2016,9749685
|
||||
Chembulk Virgin Gorda,2004,9294288
|
||||
Ginga Puma,2006,9343780
|
||||
Arpeggio,2017,9749441
|
||||
Southern Bull,2007,9378785
|
||||
Mtm New York,2016,9749386
|
||||
Golden Ace,2015,9736626
|
||||
Chemroute Sky,2010,9508160
|
||||
Zy Galaxy,1997,9143221
|
||||
Daesan Chemi,2004,9303273
|
||||
Marquette,2016,9732785
|
||||
Fairchem Thresher,2019,9829746
|
||||
Sun Diana,2009,9409508
|
||||
Xena,2007,9360958
|
||||
Chem Stream,2010,9479979
|
||||
Navig8 Axinite,2016,9719771
|
||||
Bay Pride,2017,9806665
|
||||
Mtm Penang,2015,9712591
|
||||
Chem Tiger,2003,9287297
|
||||
Argent Daisy,2009,9382061
|
||||
Navig8 Andesine,2015,9711559
|
||||
Chem Mia,2008,9407093
|
||||
Stolt Hagi,2016,9750206
|
||||
Gwen,2008,9407067
|
||||
Bow Saga,2007,9215309
|
||||
Amagi Galaxy,2010,9490313
|
||||
Chem Mercury,2018,9815276
|
||||
Bow Precision,2018,9790646
|
||||
Chem Spica,2017,9739264
|
||||
Jal Kisan,2002,9223851
|
||||
Stolt Loyalty,2017,9680114
|
||||
Sc Golden Fortune Lx,2000,9233870
|
||||
Bow Sea,2006,9215282
|
||||
Chembulk Columbus,2011,9515319
|
||||
Stolt Creativity,1997,9102095
|
||||
Chem Lithium,2017,9815252
|
||||
Tiger Integrity,2018,9760574
|
||||
Stolt Innovation,1996,9102069
|
||||
Chem New York,2014,9705732
|
||||
Navig8 Amber,2015,9714056
|
||||
Golden Taka,2004,9305544
|
||||
Chembulk Singapore,2007,9330587
|
||||
Stolt Bobcat,2009,9511167
|
||||
Easterly As Omaria,2007,9363819
|
||||
Southern Mermaid,2014,9724166
|
||||
Golden Australis,2020,9882748
|
||||
Giovanni Dp,2003,9261516
|
||||
Navig8 Almandine,2015,9714068
|
||||
Genuine Venus,2013,9613965
|
||||
Trf Miami,2008,9416056
|
||||
Stolt Vestland,1992,8911669
|
||||
Hafnia Azurite,2016,9727560
|
||||
Chemroad Sea,2011,9565730
|
||||
Stolt Acer,2004,9272668
|
||||
"Chem Spark ",2016,9731743
|
||||
Naeba Galaxy,2018,9791169
|
||||
Southern Turkey,2018,9749740
|
||||
Sg Friendship,2003,9288576
|
||||
Bow Santos,2004,9303651
|
||||
Dreggen,2008,9416070
|
||||
Golden Pioneer,2010,9421594
|
||||
Fsl London,2006,9340465
|
||||
Stolt Yuri,2016,9750218
|
||||
Roseanne,2003,9300544
|
||||
Golden Grace,2005,9317030
|
||||
Mtm Gibraltar,2003,9282924
|
||||
Southern Owl,2016,9773143
|
||||
Sichem Melbourne,2007,9376921
|
||||
Argent Aster,2007,9379959
|
||||
Golden Orion,2014,9712333
|
||||
Stolt Jaeger,1997,9114775
|
||||
Bow Olympus,2019,9818527
|
||||
Sc Chongqing,2010,9425045
|
||||
Ocean Hope,2006,9340453
|
||||
Celsius Middelfart,2015,9733349
|
||||
Easterly As Olivia,2007,9340489
|
||||
Woojin Kelly,2006,9330408
|
||||
Stolt Effort,1999,9178202
|
||||
Stolt Virtue,2004,9274317
|
||||
Eva Usuki,2020,9865532
|
||||
Golden Jupiter,2019,9837573
|
||||
Dh Diligency,2019,9813060
|
||||
Singapore Pioneer,2009,9478262
|
||||
Bow Flower,1994,9047491
|
||||
Alden,2016,9733363
|
||||
Bunga Lily,2011,9542178
|
||||
Stolt Megami,2008,9425980
|
||||
Bochem Singapura,2011,9565625
|
||||
Tiger Perseverance,2019,9800788
|
||||
Dm Emerald,2010,9412763
|
||||
Celsius Miami,2005,9304320
|
||||
Bochem Bucephalas,2023,9760550
|
||||
Akra 103,2000,9187538
|
||||
Stream Baltic,2019,9838668
|
||||
Bow Clipper,1995,9047518
|
||||
Saehan Nuria,2010,NULL
|
||||
Golden Betelgeuse,2019,9458315
|
||||
Chem New Orleans,2015,9705756
|
||||
Yangon,2003,9250165
|
||||
Chembulk Tortola,2007,9342786
|
||||
Chem Sirius,2011,9558397
|
||||
Bow Compass,2009,9412737
|
||||
Stellar Lilac,2008,9499943
|
||||
Stream Pacific,2019,9838670
|
||||
Fairchem Fortitude,2020,9805910
|
||||
Orchid Sylt,2009,9367413
|
||||
Manila I,2003,9242326
|
||||
Chem Star,1997,9156541
|
||||
Stolt Courage,2004,9296731
|
||||
Sichem Beijing,2007,9397042
|
||||
Greenwich Park,2011,9505998
|
||||
Stolt Stream,2000,9169940
|
||||
Jutlandia Swan,2015,9736638
|
||||
Hafnia Topaz,2016,9753686
|
||||
No2. Heung-a Pioneer,2008,9415480
|
||||
Bow Dalian,2012,9504205
|
||||
Chemocean Orion,2018,9777412
|
||||
Albatross Trader,2015,9724063
|
||||
Yc Pansy,2005,9311256
|
||||
Bow Palladium,2017,9777371
|
||||
Sc Chengdu,2010,9572185
|
||||
Ulriken,2006,9325843
|
||||
Mtm Hamburg,2008,9379844
|
||||
Jipro Isis,2008,9370719
|
||||
"Graceful Star ",1995,9102928
|
||||
Chem Venus,2004,9324215
|
||||
Awasan Pioneer,2009,9438925
|
||||
Stolt Larix,2015,9617650
|
||||
Stolt Achievement,1999,9124469
|
||||
Rudolf Schulte,2011,9576765
|
||||
Jipro Neftis,2011,9459292
|
||||
Bow Gemini,2017,9752034
|
||||
Guanaco,2003,9256834
|
||||
Stolt Satsuki,2017,9781114
|
||||
Mtm Dublin,2008,9335824
|
||||
Stolt Flamenco,2010,9391995
|
||||
Bow Nangang,2013,9504217
|
||||
Songa Winds,2009,9416109
|
||||
Concerto,2017,9743837
|
||||
Stanley Park,2008,9363845
|
||||
Stolt Confidence,1996,9102071
|
||||
Bow Firda,2003,9250751
|
||||
Bow Flora,1998,9143207
|
||||
Reinhold Schulte,2012,9576789
|
||||
Stolt Zulu,2006,9351531
|
||||
Sinar Malahayati,2006,9349643
|
||||
Chemroad Polaris,2014,9536923
|
||||
Stolt Ilex,2010,9505936
|
||||
Stolt Sisto,2010,9359375
|
||||
Bristol Trader,2016,9737101
|
||||
Bochem Mumbai,2010,9565637
|
||||
Great Epsilon,2020,9873644
|
||||
Chemstar Iris,2018,9827463
|
||||
Fairchem Integrity,2019,9860192
|
||||
Chemroad Rose,2005,9317846
|
||||
Stolt Lerk,2017,9719252
|
||||
Bow Hector,2009,9363493
|
||||
Sichem Amethyst,2006,9354571
|
||||
Sichem Ruby,2006,9344174
|
||||
Koryu,2013,9668283
|
||||
Nordic Aqua,2018,9800116
|
||||
As Orelia,2008,9363821
|
||||
Sunrise Ray,2018,9829679
|
||||
Chem Saiph,2017,9731781
|
||||
Dh Diligency,2019,9813060
|
||||
Golden Denise,2006,9366196
|
||||
Navig8 Gauntlet,2019,9853228
|
||||
Tiger Tenacity,2017,9760550
|
||||
Genuine Hercules,2013,9597147
|
||||
Saehan Jasper,2009,9416111
|
||||
Korea Chemi,2004,9274276
|
||||
Astra,2003,9273387
|
||||
Sichem Manila,2007,9322097
|
||||
Dionne,2018,9814909
|
||||
Jbu Sapphire,2009,9412725
|
||||
Ginga Jaguar,2005,9321873
|
||||
Goldengate Park,2013,9493145
|
||||
Chemroad Aqua,2018,9790610
|
||||
Xing Tong Kai Ming,2024,9988059
|
||||
Mtm Newport,2018,9774575
|
||||
Harsanadi,1999,9220196
|
||||
Fairchem Sabre,2013,9657478
|
||||
Wawasan Jade,2010,9565613
|
||||
Damiania,2006,9308235
|
||||
Uacc Manama,2010,9458822
|
||||
Stolt Concept,1999,9178197
|
||||
Eastern Liberty,2002,9276236
|
||||
Stolt Perseverance,2001,9124471
|
||||
Caribbean 1,2009,9416094
|
||||
Ginga Cougar,2005,9321861
|
||||
Serene Monaco,2005,9309629
|
||||
Tablones,2003,9043093
|
||||
Capella,2003,9278650
|
||||
Ginga Lion,2004,9278727
|
||||
Sc Petrel,2016,9746176
|
||||
Scarlet Ray,2020,9799654
|
||||
Bow Architect,2005,9319480
|
||||
Eastern Neptune,2006,9370630
|
||||
Bay Yasu,2008,9363869
|
||||
Stolt Aguila,2009,9391983
|
||||
Royal Aqua,2008,9381366
|
||||
Xanthia,2003,9246152
|
||||
Sfl Aruba,2022,9919761
|
||||
Mtm Shanghai,2006,9345908
|
||||
Lincoln Park,2012,9640097
|
||||
Chem Rotterdam,2014,9640140
|
||||
Stolt Magnesium,2017,9739317
|
||||
Ginga Cheetah,2007,9414216
|
||||
Mac London,2003,9296872
|
||||
Eva Hongkong,2017,9800001
|
||||
Southern Shark,2016,9821299
|
||||
Navig8 Tanzanite,2016,9753703
|
||||
Octaden,2007,9340477
|
||||
Swan Pacific,2016,9749805
|
||||
Mtm Southport,2008,9416032
|
||||
Stolt Island,2009,9414058
|
||||
Jbu Opal,2009,9400409
|
||||
Songa Challenge,2009,9409510
|
||||
Ammolit,1995,9016870
|
||||
Ariane Makara,2009,9442548
|
||||
Intermezzo,2019,9804825
|
||||
Bow Capricorn,2016,9752010
|
||||
Jkt Shanghai,2000,9175535
|
||||
Sun Ploeg,2015,9724439
|
||||
Stolt Norland,2009,9414060
|
||||
Bay Spirit,2019,9852286
|
||||
Southern Giraffe,2008,9415014
|
||||
Chemocean Leo,2018,9777424
|
||||
Sun Triton,2017,9781097
|
||||
Indigo Ray,2016,9716016
|
||||
Argent Gerbera,2010,9424596
|
||||
Sc Virgo,2017,9801093
|
||||
Ivory Ray,2011,9505986
|
||||
Sichem Lily,2009,9393395
|
||||
Songa Dream,2010,9505948
|
||||
Stolt Distributor,2002,9276145
|
||||
Celsius Eagle,2010,9423750
|
||||
Chem Argon,2016,9716004
|
||||
Sc Taurus,2017,9801081
|
||||
Fairchem Victory,2016,9773179
|
||||
Stolt Glory,2005,9311012
|
||||
Stolt Tenacity,2017,9680102
|
||||
Clarice,2014,9674775
|
||||
Stolt Cobalt,2016,9739305
|
||||
Chem Antares,2004,9286554
|
||||
Om Shanghai,2007,9358632
|
||||
Stolt Spruce,1993,8919037
|
||||
Ginga Ocelot,2013,9581423
|
||||
Ulsan Chemi,2003,9279927
|
||||
Stolt Cedar,1994,8919049
|
||||
Sichem Challenge,1998,9196448
|
||||
Navig8 Amethyst,2015,9714501
|
||||
Patalya,2005,9305180
|
||||
Mtm Rotterdam,2011,9477567
|
||||
Stolt Surf,2000,9168623
|
||||
Chembulk Kobe,2002,9263136
|
||||
Nordic Masa,2009,9451410
|
||||
Hakone Galaxy,2018,9791171
|
||||
Stolt Skua,1999,9199311
|
||||
Songa Breeze,2009,9423645
|
||||
Southern Xantis,2020,9883493
|
||||
Stolt Sakura,2010,9432969
|
||||
Nordic Marita,2012,9558402
|
||||
Stolt Sea,1999,9149495
|
||||
Ginga Saker,2003,9258155
|
||||
Chemstar Stellar,2012,9624770
|
||||
Fg Rotterdam,2012,9485863
|
||||
Fairchem Blade,2014,9692416
|
||||
Chemstar Tierra,2018,9827451
|
||||
Purple Ray,2020,9804899
|
||||
Chem Bulldog,2010,9587790
|
||||
Mtm Singapore,2011,9477529
|
||||
Moyra,2005,9271999
|
||||
Chem Stellar,2016,9731755
|
||||
Clayton,2025,1019979
|
||||
Stolt Sanderling,2011,9518804
|
||||
Ginga Panther,2007,9379985
|
||||
Sichem Hong Kong,2007,9397054
|
||||
Hyde Park,2017,9725861
|
||||
Fairchem Endurance,2020,9800441
|
||||
Bow Faith,1997,5408609
|
||||
Alpaca,2010,9403293
|
||||
Golden Vega,2016,9773167
|
||||
Melderskin,2016,9737577
|
||||
Stolt Sneland,2008,9352212
|
||||
Doris Ruby,2003,9279939
|
||||
Moquegua,2002,9262869
|
||||
Sc Mercury,2016,9746188
|
||||
Mid Eagle,2007,9330795
|
||||
Crimson Ray,2007,9347152
|
||||
Stolt Efficiency,1998,9102112
|
||||
Stolt Basuto,2006,9351543
|
||||
Stolt Alm,2016,9719238
|
||||
Jazz,2019,9804849
|
||||
Sun Jupiter,2019,9837573
|
||||
"Magenta Ray ",2019,9829722
|
||||
Gt Star,2012,9485849
|
||||
Fairchem Tiger,2019,9829760
|
||||
Stolt Voyager,2003,9297292
|
||||
Stolt Quetzal,2009,9376660
|
||||
G Silver,2004,9324215
|
||||
Chem Silicon,2018,9829681
|
||||
Concon Trader,2018,9800037
|
||||
Huemul,2008,9371775
|
||||
As Omaria,2007,9363819
|
||||
Rayong Chemi,2002,9257125
|
||||
Navig8 Aventurine,2015,9711547
|
||||
Stolt Mercury,2017,9739329
|
||||
Fairchem Falcon,2018,9799642
|
||||
Fairchem Copper,2019,9829758
|
||||
Sea Ploeg,2016,9724453
|
||||
Stolt Breland,2010,9414084
|
||||
Mtm Tokyo,2003,9279111
|
||||
Chemroad Journey,2009,9414254
|
||||
Intrepid Seahawk,2011,9576777
|
||||
Mtm North Sound,2006,9360946
|
||||
Stolt Lotus,2014,9617648
|
||||
Carole M,2016,9732797
|
||||
Stolt Integrity,2017,9680097
|
||||
Dh Fealty,2018,9829772
|
||||
Stolt Maple,2017,9764491
|
||||
Southern Falcon,2008,9414993
|
||||
Bow Jaguar,2024,9989209
|
||||
Melito Carrier,1993,8920581
|
||||
Saehan Intrasia,2005,9330460
|
||||
Golden Cygnus,2010,9498080
|
||||
Chemraod Lily,2006,9325855
|
||||
Hafnia Turquoise,2016,9753674
|
||||
Golden Mind,2020,9881067
|
||||
Cutlass Galaxy,2015,9746164
|
||||
Navig8 Tourmaline,2016,9753698
|
||||
Mac Singapore,2001,9244386
|
||||
Radiant Ray,2018,9749697
|
||||
Stolt Momiji,2010,9470545
|
||||
Sun Dahlia,2018,9570577
|
||||
Zoey,2011,9624548
|
||||
Mtm Amazon,2007,9374533
|
||||
Southern Quokka,2017,9792010
|
||||
Chembulk Barcelona,2004,9278662
|
||||
Navig8 Aragonite,2015,9727558
|
||||
Shamrock Mercury,2010,9477531
|
||||
Ginga Caracal,2009,9426300
|
||||
Mid Nature,2011,9542154
|
||||
Ginga Hawk,2000,9222651
|
||||
Stolt Lind,2017,9719264
|
||||
Stolt Pelican,1996,9016882
|
||||
Central Park,2015,9725823
|
||||
Amelia,2011,9624768
|
||||
Bow Harmony,2008,9379909
|
||||
Bow Fortune,1999,9168635
|
||||
Golden Leader,2017,9805130
|
||||
Chemroad Quest,2010,9451288
|
||||
Mtm Vancouver,2019,9867607
|
||||
Celsius Manhattan,2006,9323766
|
||||
Chem Houston,2014,9705720
|
||||
Golden Aspirant,2016,9758313
|
||||
Lumphini Park,2013,9640114
|
||||
Navig8 Adamite,2015,9727546
|
||||
Mandal,2016,9732773
|
||||
Nordic Callao,2019,9340439
|
||||
Stolt Sypress,1998,9150315
|
||||
Tsukuba Galaxy,2020,9796834
|
||||
Chemroad Orchid,2019,9790608
|
||||
Valentine,2008,9504023
|
||||
Trf Bergen,2015,9692246
|
||||
Nordic Americas,2004,9304306
|
||||
Stolt Focus,2001,9214305
|
||||
Golden Ray,2012,9640102
|
||||
Stolt Kingfisher,1998,9154323
|
||||
Fairchem Bronco,2007,9360960
|
||||
Sc Falcon,2016,9746190
|
||||
Nordic Ace,2018,9800104
|
||||
Tivoli Park,2018,9780536
|
||||
Bow Tungsten,2018,9777400
|
||||
Fanfare,2018,9760562
|
||||
Chemroad Sirius,2018,9757979
|
||||
Yelena,2011,9613616
|
||||
Susana S,2009,9406714
|
||||
Chemroad Hope,2011,9565754
|
||||
Spring Ploeg,2017,9774707
|
||||
Edge Galaxy,2017,9788954
|
||||
Forest Park,2013,9640126
|
||||
Southern Hawk,2009,9534901
|
||||
Zao Galaxy,2012,9566150
|
||||
Pvt Jupiter,2008,9408803
|
||||
Stolt Redshank,2011,9566746
|
||||
Bochem Ghent,2011,9565649
|
||||
Chemstar River,2017,9758026
|
||||
Golden Creation,NULL,NULL
|
||||
Golden Vega,0,NULL
|
||||
Chem Selenium,0,NULL
|
||||
Bunga Laurel,NULL,NULL
|
||||
Bow Condor,0,NULL
|
||||
Ncc Mekka,1995,9047752
|
||||
Mac Singapore,2001,9244386
|
||||
Southern Lion,NULL,NULL
|
||||
Navi8 Spark,NULL,NULL
|
||||
Sea Ploeg,NULL,NULL
|
||||
Golden Creation,NULL,NULL
|
||||
Mtm Rotterdam,NULL,NULL
|
||||
Bow Andes,0,NULL
|
||||
Southern Lion,2011,9567752
|
||||
"Genuine Galaxy ",0,NULL
|
||||
Fairchem Triumph,2017,9758038
|
||||
Polaris Stardom,20089,9470246
|
||||
Conti Chivalry,NULL,NULL
|
||||
Bunga Lucerne,2012,9508938
|
||||
Bw Helium,NULL,NULL
|
||||
Chembulk Sydney,NULL,NULL
|
||||
Dh Glory,2020,9815628
|
||||
Nordic Maya,2005,9339351
|
||||
Ginga Liger,2021,9893448
|
||||
Esteem Sango,2021,9900461
|
||||
Sun Iris,2020,9873254
|
||||
Goldstar Shine,2004,9279707
|
||||
Cnc Dream,2004,9305544
|
||||
Golden Procyon,2015,9750476
|
||||
Bochem Chennai,2012,9565766
|
||||
Bochem Chennai,2012,9565766
|
||||
Bochem Chennai,2012,9565766
|
||||
Sc Brilliant,2007,9340702
|
||||
Fairchem Angel,2020,9809394
|
||||
Itsa Ship Accounting,NULL,None
|
||||
Forshun,2001,9236054
|
||||
Pacific Star,2008,9363481
|
||||
Chermoute Pegasus,2012,9566162
|
||||
Barbouni,2007,9416020
|
||||
Lisbon,2000,9223916
|
||||
Benten Galaxy,2022,NULL
|
||||
Pvt Azura,2009,9423683
|
||||
Southern Narwhal,2015,9715995
|
||||
Mid Fortune,2009,9423683
|
||||
Bum Shin,2003,9263095
|
||||
G Bright,2004,9294276
|
||||
Golden Unity,2011,9572575
|
||||
T Procyon,2021,9569499
|
||||
Chem Leona,2010,9505948
|
||||
Chemroute Sun,2008,9414228
|
||||
Fairchem Fynbos,2021,9914292
|
||||
Chem Sceptrum,2017,9731793
|
||||
Begonia,2005,9330381
|
||||
Takao Galaxy,2022,9920069
|
||||
Daeho Sunny,2009,9511105
|
||||
Bow Cecil,1998,9143219
|
||||
Sc Scorpio,2017,9801079
|
||||
Stolt Argon,2016,9739288
|
||||
Thai Chemi,2006,9330393
|
||||
Easterly As Omaria,2007,9340489
|
||||
Taruca,2005,9331402
|
||||
Lila Confidence,2006,9340427
|
||||
Eva Fuji,2021,9914242
|
||||
Bochem Marengo,2017,9749025
|
||||
Whitney,2009,9551337
|
||||
Hakuba Galaxy,2021,9804916
|
||||
Kamui Galaxy,2022,9942653
|
||||
Chem Sea 1,2016,9731729
|
||||
Golden Axis,2022,9930832
|
||||
Chem Star 1,2016,9731731
|
||||
Stolt Bismuth,2016,9739290
|
||||
Tsurugi Galaxy,2020,9875501
|
||||
Tsukuba Galaxy,NULL,9796834
|
||||
Chem Sceptrum,2017,9731793
|
||||
Bangkok Chemi,2006,9330410
|
||||
Bochem London,2016,9743849
|
||||
Stream Arctic,2019,9817509
|
||||
Woojin Elvis,2009,9442665
|
||||
Stolt Fulmar,2000,9148972
|
||||
Fairchem Conquest,2017,9798648
|
||||
Ginga Tiger,2003,9278715
|
||||
Mumbai,2003,9242338
|
||||
Stolt Facto,2010,9359363
|
||||
Chem Taurus,2010,9477505
|
||||
Silver Ray,2013,9493133
|
||||
Navig8 Aronaldo,2015,9711561
|
||||
Blue Arrow,2012,9630444
|
||||
Slogen,2016,9733375
|
||||
Hodaka Galaxy,2018,9791157
|
||||
Bow Prosper,2020,9866770
|
||||
Mtm Kobe,2018,9776456
|
||||
Marex Sara,2016,9773997
|
||||
Rlo Explorer,1998,9148958
|
||||
Chemroute Oasis,2011,9512173
|
||||
Chemroad Dita,2009,9414242
|
||||
Karruca,2001,9216470
|
||||
Stolt Span,1999,9149524
|
||||
Lavender Ray,2017,9740794
|
||||
Bow Cedar,1996,9087013
|
||||
Malbec Legend,2016,9732814
|
||||
Stolt Ocelot,2008,9459539
|
||||
Stolt Orca,2012,9565699
|
||||
Ct Ace,2006,9352597
|
||||
Mar Camino,2010,9573892
|
||||
Hari Akash,1998,9156553
|
||||
Bow Performer,2018,9790658
|
||||
Bochem Oslo,2010,9420710
|
||||
Fairchem Valor,2019,9791195
|
||||
"Jal Garuda ",2000,9196709
|
||||
Fairchem Kiso,2011,9527075
|
||||
Argent Sunrise,2008,9392377
|
||||
Golden Creation,2015,9738662
|
||||
Stolt Kikyo,1998,9156565
|
||||
Ami,2006,9360934
|
||||
Panagia Thalassini,2017,9730335
|
||||
Stolt Renge,2017,9781126
|
||||
Sun Venus,2016,9774410
|
||||
Eastern Oasis,2007,9383986
|
||||
Ogino Park,2017,9725873
|
||||
Golden Deneb,2019,9859258
|
||||
Patrona I,2004,9305178
|
||||
Navig8 Achroite,2016,9727584
|
||||
Golden Chie,2010,9566203
|
||||
Bay Dignity,2017,9806706
|
||||
Marmotas,2005,9304344
|
||||
Global Pioneer,2010,9542142
|
||||
Ds Cougar,2009,9515292
|
||||
Mtm Houston,2010,9505924
|
||||
Stolt Commitment,2000,9168647
|
||||
Songa Peace,2009,9409522
|
||||
Nave Polaris,2011,9457749
|
||||
Southern Unicorn,2018,9749702
|
||||
Tiger Glory,2017,9749025
|
||||
Rabigh Sun,2008,9392365
|
||||
Stolt Pride,2016,9680073
|
||||
Ct Confidence,2006,9340427
|
||||
Bow Neon,2017,9777369
|
||||
"Griya Bugis ",1998,9191280
|
||||
Nocturne,2020,9804863
|
||||
Raon Teresa,2002,9244984
|
||||
Nordic Ann,2010,9422665
|
||||
Yc Daisy,2005,9304344
|
||||
Stolt Seagull,1997,9125645
|
||||
Fairchem Pinnacle,2025,NULL
|
||||
Azalea Galaxy,2006,9343778
|
||||
Stolt Rindo,2005,9314765
|
||||
Navig8 Sky,2016,9731731
|
||||
Rt Star,2011,9523835
|
||||
Ctg Argon,2016,9739288
|
||||
Navig8 Ametrine,2015,9714513
|
||||
Stolt Palm,2018,9764506
|
||||
Hicri Kaan,1998,9171474
|
||||
Chemtrans Mobile,2016,9732802
|
||||
Stolt Sycamore,2000,9198563
|
||||
Bow Condor,2000,9214032
|
||||
Bunga Lilac,2011,9542166
|
||||
Southern Robin,2018,9749714
|
||||
Bow Star,2004,9197296
|
||||
Stolt Excellence,2018,9720081
|
||||
Sagami,2008,9379911
|
||||
Stolt Tsubaki,2011,9477543
|
||||
Bow Chain,2002,9214317
|
||||
Nq Laelia,2008,9393383
|
||||
Rainbow Island 88,2004,9286542
|
||||
Chembulk Vancouver,2003,9282364
|
||||
Mtm Big Apple,2018,9774563
|
||||
Bunga Laurel,2010,9529645
|
||||
Stolt Apal,2016,9719240
|
||||
Wawasan Topaz,2010,9565601
|
||||
Sichem New York,2007,9337834
|
||||
Fairchem Sword,2013,9673678
|
||||
Shenghui Glory,2015,9724037
|
||||
Navig8 Amazonite,2015,9719769
|
||||
Bow Sun,2003,9197284
|
||||
Mid Osprey,2006,9330783
|
||||
Stolt Vision,2005,9274329
|
||||
Celsius Messina,2007,9349655
|
||||
Nordic Aki,2011,9505974
|
||||
Stolt Selje,1993,8919051
|
||||
Navig8 Goal,2019,9853216
|
||||
Ginga Fortitude,2020,9805910
|
||||
Yc,NULL,NULL
|
||||
Stolt Spray,2000,9168611
|
||||
Nq Morina,2002,9243382
|
||||
Nordic Mari,2010,9422677
|
||||
Golden Mercury,2003,9272802
|
||||
Asian Pioneer,2015,9730971
|
||||
Linken,2016,9733351
|
||||
Lila Evia,2006,9330771
|
||||
Mtm Fairfield,2002,9264465
|
||||
Stolt Ebony,2017,9744908
|
||||
Stolt Teal,1999,9199323
|
||||
Mtm New Orleans,2016,9749398
|
||||
Bow Engineer,2006,9317860
|
||||
Lavraki,2007,9323077
|
||||
Stolt Sandpiper,2011,9566758
|
||||
Golden Sirius,2009,9478274
|
||||
Fuji Lava,2010,9468528
|
||||
Flumar Maceio,2006,9345893
|
||||
Ginga Merlin,2002,9254252
|
||||
Bow Glory,2017,9758038
|
||||
Mtm Savannah,2015,9726750
|
||||
Malbec Legacy,2016,9732826
|
||||
Wolverine,2006,9043081
|
||||
Forte Galaxy,2018,NULL
|
||||
Uacc Marah,2013,9489091
|
||||
Chem Neon,2018,9815264
|
||||
"Blue Arrow ",2012,9630444
|
||||
Xt Sea Lion,2025,1042378
|
||||
Navig8 Universe,2013,9489106
|
||||
Sunny Orion,2010,9511143
|
||||
Stolt Betula,2003,9266267
|
||||
Stolt Suisen,1998,9156577
|
||||
Bow Spring,2004,9215256
|
||||
Bow Cardinal,1997,9114244
|
||||
Bow Platinum,2018,9777383
|
||||
Mtm Hudson,2004,9278064
|
||||
Chem Cobalt,2016,9740770
|
||||
Eva Bergen,2018,9800013
|
||||
Latana,2000,9186352
|
||||
Eastern Prosperity,2008,9383998
|
||||
"Bellis Theresa ",2000,9192375
|
||||
Chem Singapore,2015,9705768
|
||||
Floyen,2016,9749790
|
||||
Kaimon Galaxy,2012,9552472
|
||||
Sunny Orion,2010,9511143
|
||||
Chemstar River,2017,9758026
|
||||
Fairchem Loyalty,2019,9791183
|
||||
Nave Cosmos,2010,9457024
|
||||
Bomar Lynx,2003,9043093
|
||||
Bow Fagus,1995,9047764
|
||||
Stolt Osprey,1998,9147461
|
||||
Bunga Lavender,2010,9542130
|
||||
Lauren,2002,9266229
|
||||
Tiger Harmony,2016,9725859
|
||||
Fairchem Hawk,2019,9804813
|
||||
Sandro,2017,9390525
|
||||
Bow Odyssey,2020,9818539
|
||||
Horin Trader,2015,9724051
|
||||
Bow Caroline,2009,9367554
|
||||
Chem Gallium,2017,9740782
|
||||
Ginga Kite,2001,9228291
|
||||
Summer Ploeg,2017,9790464
|
||||
Wawasan Ruby,2010,9477517
|
||||
Menuett,2019,9790634
|
||||
Acadia Park,2018,9780548
|
||||
Sun Neptune,2007,9363807
|
||||
Marigold,2000,9221669
|
||||
Chemroad Haya,2004,9303649
|
||||
Ginga Bobcat,2010,9472737
|
||||
Key Wind,1999,9148960
|
||||
Uacc Riyadh,2011,9458834
|
||||
Bochem Bayard,2017,9749025
|
||||
Bow Titanium,2018,9777395
|
||||
Ebony Ray,2008,9363857
|
||||
Stolt Viking,2001,9196711
|
||||
Mtm Westport,2005,9317858
|
||||
Mtm Key West,2020,9790672
|
||||
Navig8 Aquamarine,2015,9711573
|
||||
Chem Amsterdam,2013,9640138
|
||||
Fairchem Honor,2019,9860207
|
||||
Stolt Sincerity,2016,9680085
|
||||
Woojin Chemi,2003,9272814
|
||||
Sichem Mumbai,2006,9322085
|
||||
Pacific Sapphire,2008,9508158
|
||||
Jbu Onyx,2008,9392999
|
||||
Southern Wolf,2019,9867267
|
||||
Stolt Calluna,2017,9744893
|
||||
Bow Sky,2005,9215268
|
||||
Chem Polaris,2008,9416044
|
||||
Southern Condor,2007,9441659
|
||||
Golden Sagittarius,2019,9867073
|
||||
Chemroad Sakura,2018,9757967
|
||||
Navig8 Alabaster,2015,9727572
|
||||
Prabhu Parvati,2008,NULL
|
||||
Sun Edelweiss,2018,9570589
|
||||
Fairchem Aldebaran,2017,9552666
|
||||
Stolt Capability,1998,9102124
|
||||
Straum,2010,9406726
|
||||
Cypress Galaxy,2007,9379973
|
||||
Stolt Vanguard,2004,9274305
|
||||
Mtm Antwerp,2004,9291456
|
||||
Diva,2017,9760548
|
||||
Bow Fuling,2012,9504190
|
||||
Navig8 Amessi,2015,9719745
|
||||
Stolt Sequoia,2003,9235062
|
||||
Golden Dream,2002,9255971
|
||||
Bochem Luxembourg,2015,9737565
|
||||
Stolt Sagaland,2008,9352200
|
||||
Harmonics,2019,9799680
|
||||
Alessandro Dp,2007,9384162
|
||||
Battersea Park,2002,9255983
|
||||
Gallop,2019,9799678
|
||||
Kiso,2008,9379894
|
||||
Chemroad Wing,2005,9309502
|
||||
Sc Citrine,2025,9989027
|
||||
Vari Trader,2018,9800025
|
||||
Bow Aquarius,2016,9753791
|
||||
Fairchem Charger,2009,9367401
|
||||
Elm Galaxy,2006,9331256
|
||||
Stolt Auk,2001,9164108
|
||||
Beech Galaxy,2007,9340441
|
||||
Lilac Ray,2020,9883481
|
||||
Fairchem Mako,2018,9826574
|
||||
Argent Iris,2011,9459280
|
||||
Celsius Birdie,2009,9423724
|
||||
Golden Altair,2016,9792163
|
||||
Celsius Mexico,2008,9408798
|
||||
Woojin Frank,2005,9317262
|
||||
Birdie Trader,2016,9724099
|
||||
Liberty,2009,9423542
|
||||
|
@@ -0,0 +1,43 @@
|
||||
name,type,lat,lon
|
||||
Adelaide,customer,0,0
|
||||
BAHODOPI-POSO,customer,0,0
|
||||
Bandirma,customer,40.369697136983,27.9607200622559
|
||||
Bangkok,customer,13.75398,100.50144
|
||||
Barquito,customer,-26.35,-70.65
|
||||
Beaumont,customer,30.077338673234,-94.0840816497804
|
||||
Budge Budge,customer,22.47514,88.17767
|
||||
Fangcheng,customer,21.6140258994842,108.322792053223
|
||||
Gladstone,customer,-23.81603,151.26279
|
||||
Go Dau,customer,15.9742225,105.806431
|
||||
Gresik,customer,-7.14994933032079,112.60986328125
|
||||
Hai Phong,customer,20.86774,106.69179
|
||||
Hibi,customer,34.39572,133.78713
|
||||
Huelva,customer,37.26638,-6.94004
|
||||
Isabel,customer,6.70748678037316,121.965022087097
|
||||
Japan,customer,37.3002752813444,138.515625
|
||||
Jorf Lasfar,customer,-18.7771925,46.854328
|
||||
Kakinada,customer,16.9957254352666,82.4434661865236
|
||||
Kandla,customer,22.80078,69.70705
|
||||
Kemaman,customer,4.23333,103.45
|
||||
LHOKSEUMAWE,customer,0,0
|
||||
Limas,customer,40.7389332411361,29.6246337890626
|
||||
Map Ta Phut,customer,12.648713210033,101.143569946289
|
||||
Mejillones,customer,-22.8215665806022,-70.5869030289666
|
||||
Naoshima,customer,34.4612772884371,134.009170532227
|
||||
New Mangalore,customer,0,0
|
||||
New Orleans,customer,29.95465,-90.07507
|
||||
Onahama,customer,36.9268411923102,140.907897949219
|
||||
Onsan,customer,35.4601106672107,129.37671661377
|
||||
Paradip,customer,20.2644516477765,86.7077064514162
|
||||
Pori,customer,61.6450645681561,21.3828277587891
|
||||
Port Hedland,customer,0,0
|
||||
Ras Al Khair,customer,0,0
|
||||
Rio Grande,customer,-32.082574559546,-52.0916748046876
|
||||
Saganoseki,customer,33.2411282959313,131.885418891907
|
||||
Samut Prakan,customer,13.5051524908849,100.599746704102
|
||||
San Juan,customer,18.46633,-66.10572
|
||||
Stockton,customer,37.9527255601159,-121.316356658936
|
||||
Tampa,customer,27.897652653541,-82.4285316467287
|
||||
Tema,customer,-0.828097,11.598909
|
||||
Tuticorin,customer,8.75436,78.20429
|
||||
Visakhapatnam,customer,17.4764321971955,83.5180664062501
|
||||
|
17
Reference Data/python_project/loaders/backup/Employees.csv
Normal file
17
Reference Data/python_project/loaders/backup/Employees.csv
Normal file
@@ -0,0 +1,17 @@
|
||||
name,code
|
||||
Laura Girardot,EMP1
|
||||
Jean-Francois Muse,EMP2
|
||||
ShiHan Tay,EMP3
|
||||
Max Baiwir,EMP4
|
||||
Rodrigue Thomas,EMP5
|
||||
Angela Arbenz,EMP6
|
||||
Javier Miraba,EMP7
|
||||
Stephane Monnard,EMP8
|
||||
Oliver Gysler,EMP9
|
||||
Mario Isely,EMP10
|
||||
Gregory Gondeau,EMP11
|
||||
Sylviane Dubuis,EMP12
|
||||
Steve Zaccarini,EMP13
|
||||
Smult Kouane,EMP14
|
||||
David Susanto,EMP15
|
||||
Jeremie Collot,EMP16
|
||||
|
372
Reference Data/python_project/loaders/backup/Parties.csv
Normal file
372
Reference Data/python_project/loaders/backup/Parties.csv
Normal file
@@ -0,0 +1,372 @@
|
||||
name,tax_identifier,vat_code,address_name,street,city,zip,country_code,subdivision_code
|
||||
"HEUNG-A SHIPPING CO., LTD.",,,,"3F, HEUNG-A BLDG, 21, SAEMAL-RO 5-GIL, SONGPA-GU",Seoul,0,KR,
|
||||
SUMITOMO CORPORATION TOKYO,,,,"INORGANIC CHEMICALS TEAM, INORGANIC AND PERFORMANCE CHEMICALS DEPT. 3-2, OTEMACHI 2-CHROME, CHIYODA-KU",Tokyo,100-8601,JP,
|
||||
TRANSAMMONIA AG,,,,"PRIME TOWER, HARDSTRASSE 201, 8005 ZURICH, SWITZERLAND",Zurich,8005,CH,
|
||||
AMEROPA,,,,"ONE TEMASEK AVENUE, MILLENIA TOWER HEX 31-02",Singapore,039192,SG,
|
||||
NISSHIN SHIPPING CO. LTD,,,,,Monaco,0,MC,
|
||||
KORINDO PELAYARAN PT,,,,"12th Floor, Wisma Korindo, Jalan Letjen MT Haryono 62, Kel Pancoran",Jakarta,NULL,ID,
|
||||
PARANAPANEMA S/A (C),,,,"VIA DO COBRE N° 3700, AREA INDUSTRIAL OESTE (AIO) - COPEC, CEP 42850-00 DIAS D'AVILA, BAHIA, BRAZIL",Bahia,CEP 42850-00,BR,
|
||||
AZTEC MARINE LTD,,,,,Monaco,NULL,MC,
|
||||
MALAYSIAN PHOSPHATE ADDITIVES SB.,,,,"SUITE 609, LEVEL 6, BLOCK F, PHILEO DAMANSARA 1, N° 9, JALAN 16/11, 46350 PETALING JAYA, SELANGOR DE",Petaling Jaya,46350,MY,
|
||||
BSI INSPEC. SPAIN (BUREAU VERITAS),,,,,Monaco,NULL,MC,
|
||||
MH PROGRESS LINE S.A.,,,,,Monaco,0,MC,
|
||||
NAVIERA PETRAL SA,,,,"Oficina 102, Calle Alcanflores 393, Miraflores",Lima,15074,PE,
|
||||
NAVIG8 LIMITED,,,,,Monaco,0,MC,
|
||||
INTEROCEANIC (C),,,,7 RENAISSANCE SQUARE 7TH FLOOR,New York,10601,US,
|
||||
LYNX SHIPPING PTE. LTD,,,,"16 Raffles Quay #41-02, Hong Leong Building, Singapore ",,048581,SG,
|
||||
ABO SINGAPORE PTE LTD,,,,,Monaco,0,MC,
|
||||
FERTIPAR FERTILIZANTES,,,,"RUA COMENDADOR CORREA JUNIOR, 1178, BAIRRO 29 DE JULHO, CEP 83.203-762 PARANAGUA, PR BRASIL",Paranagua,CEP 83.203-762,BR,
|
||||
NATIONAL CHEMICAL CARRIERS LTD,,,,,Monaco,0,MC,
|
||||
BALLANCE AGRI-NUTRIENTS,,,,"HEWLETT'S ROAD, MOUNT MAUNGANUI, NEW ZEALAND",Mount Maunganui,NULL,NZ,
|
||||
JX OCEAN CO. LTD,,,,"THE LANDMARK TOWER YOKOHAMA 48TH FLOOR 2-2-1, MINATOMIRAI, NISHI-KU",Yokohama,220-8148,JP,
|
||||
SUKSES OSEAN KHATULISTIWA LINE,,,,"Blok J, Jalan Mangga Dua Dalam 5&8, Jakarta Pusat",Jakarta,10730,ID,
|
||||
PARADEEP PHOSPHATES LIMITED,,,,"BAYAN BHAWAN, PANDIT J. N. MARG, BHUBANESWAR 751001, ORISSA, INDIA",Orissa,751001,IN,
|
||||
NEW EAGLE SHIPPING S.A.,,,,,Monaco,0,MC,
|
||||
KGHM POLSKA MIEDZ S.A.,,,,"UL. M. SKLODOWSKIEJ-CURIE 48, 59-301 LUBIN, POLAND",Lubin,59-301,PL,
|
||||
SUMITOMO CORPORATION - TOHO,,,,"3-2 OTEMACHI 2-CHOME, CHIYODA-KU",Tokyo,100-8601,JP,
|
||||
INTERTEK CALEB BRETT INDIA,,,,,Monaco,0,MC,
|
||||
PTT ASAHI CHEMICAL CO. LTD.,,,,"8 PHANGMUANG CHAPOH 3-1 ROAD, HUAYPONG SUB-DISTRICT, MUANG DISTRICT, RAYONG 21150, THAILAND",Rayong,21150,TH,
|
||||
NANJING MEDIATOR PETROCHEMICAL TECHNOLOGY CO. LTD.,,,,"NANJING MEDIATOR PETROCHEMICAL TECHNOLOGY CO., LTD, Shuanglong road in Nanjing city of Jiangning Development Zone No 1700, 7th floor, A block, P.R. China China. ",,NULL,CN,
|
||||
BOLIDEN HARJAVALTA OY,,,,TEOLLISUUSKATU 1,Harjavalta,29200,FI,
|
||||
TEAM TANKERS INTERNATIONAL LTD.,,,,,Monaco,0,MC,
|
||||
SUMITOMO CORPORATION INDONESIA (SSRI),,,,Jl. Jend. Sudirman Kav. 61-62,Jakarta,12190,ID,
|
||||
P&F MARINE CO LTD,,,,,Monaco,0,MC,
|
||||
LA POSTE,,,,"WANKDORFALLEE 4, 3030 BERNE, SWITZERLAND",Bern,3030,CH,
|
||||
AURUBIS AG,,,,HOVESTRASSE 50,Hamburg,20539,DE,
|
||||
ERSHIP SAU,,,,CALLE MARINA 26,Huelva,21001,ES,
|
||||
"KAWASAKI KISEN KAISHA, LTD.",,,,,Monaco,0,MC,
|
||||
SUMITOMO CORPORATION - MMC,,,,"3-2 OTEMACHI 2-CHOME, CHIYODA-KU",Tokyo,100-8601,JP,
|
||||
UBE CHEMICALS ASIA PCL (Ex TCL),,,,"18TH FLOOR, SATHORN SQUARE OFFICE TOWER, N° 98 NORTH SATHORN ROAD, SILON SUB-DISTRICT, BANGKOK 10500, THAILAND",Bangkok,10500,TH,
|
||||
KOREA ZINC COMPANY LTD,,,,"ONSAN COMPLEX, 505 DAEJONG-RI, ONSAN-EUP, ULJU-GUN ULSAN, KOREA",Ulsan,NULL,KR,
|
||||
MTM TRADING LLC OF MARSHALL ISLANDS,,,,"2960 POST ROAD, CU",Southport,06890,US,
|
||||
"HOPECHEM CO., LTD (S)",,,,"ROOM 302, NO.233 KENGBEI VILLAGE, WUTONG-SHAN ART TOWN, LUOHU DISTRICT,",Shenzhen,518114,CN,
|
||||
CS MARINE CO LTD,,,,,Monaco,0,MC,
|
||||
"HAFNIA POOLS PTE LTD ",,,,"18-01, MapleTree Business City, 10, Pasir Panjang Road, Singapore 117438",,117438,SG,
|
||||
SAEHAN MARINE CO LTD. (ULSAN),,,,"502, KYONG DONG E&S #304 JANGSENGPO-DONG, NAM-KU, ULSAN 44780, KOREA",Ulsan,44780,KR,
|
||||
CIBRAFERTIL COMP. BRASILIERA DE FERTILIZANTES,,,,"#19-01 SUNTEC TOWER THREE,",Camacari,42810-290,BR,
|
||||
FAIRFIELD CHEMICAL CARRIERS BV,,,,"8 TEMASEK BOULEVARD, #19-01 SUNTEC TOWER THREE,",Singapore,038988,SG,
|
||||
"ASHAPURA PERFOCLAY LTS. ",,,,"JEEVAN ADYOG BUILDING, 3RD FLOOR, 278 D. N. ROAD, FORT, MUMBAI 400 001",Mumbai,400 001,IN,
|
||||
TATA CHEMICALS LIMITED,,,,"DURGACHAK, HALDIA, WEST BENGAL 721 602, INDIA",West Bengal,721 602,IN,
|
||||
DOE RUN PERU S.R.L.,,,,"#14-04 SOUTHPOINT,",Lima,27,PE,
|
||||
INTEROCEANIC (S),,,,7 RENAISSANCE SQUARE 7TH FLOOR,New York,10601,US,
|
||||
"SAEHAN MARINE CO., LTD",,,,,Monaco,NULL,MC,
|
||||
MAROC-PHOSPHORE S.A.,,,,"2, RUE AL ABTAL, HAY ERRAHA, 20200 CASABLANCA, MOROCCO",Casablanca,20200,MA,
|
||||
ACE SULFERT (LIANYUNGANG) CO. LTD,,,,,Monaco,NULL,MC,
|
||||
INCHCAPE SHIPPING SERVICES,,,,,Monaco,0,MC,
|
||||
AONO MARINE CO LTD,,,,"1-1-17, SHINDEN-CHO",Niihama,792-0003,JP,
|
||||
BANDIRMA GÜBRE FABRIKALARI A.S. (S),,,,SUSAM SOK. NO:22,Cihangir-Istanbul,34433,TR,
|
||||
HEXAGON GROUP AG,,,,Bleicherweg 33,Zurich,8002,CH,
|
||||
INNOPHOS,,,,"DOMICILIO CONOCIDO SN, S.T., PAJARITOS, CP 96384, COATZACOALCOS, VER. MEXICO",Coatzacoalcos,CP 96384,MX,
|
||||
TODA KISEN KK,,,,,,NULL,MC,
|
||||
FARMHANNONG,,,,"#523 Maeam-dong Nam-ku, Ulsan 680-050 South Korea",Ulsan,680-050,KR,
|
||||
"AGROINDUSTRIAS DEL BALSAS ",,,,"ISLA DE EN MEDIO SIN NUMERO, INTERIOR RECINTO PORTUARIO, LAZARO CARDENAS, MICH. MEXICO",Mexico City,NULL,MX,
|
||||
"A.R. SULPHONATES PVT. LIMITED ",,,,"21, PRINCEP STREET, KOLKATA 700072, INDIA",Kolkata,700072,IN,
|
||||
WINA TECHNOLOGIES,,,,"9 KOFI PORTUPHY STREET, WESTLANDS, WEST LEGON, ACCRA, GHANA",Accra,0,GH,
|
||||
ARTHUR J. GALLAGHER (UK) LTD,,,,,Monaco,NULL,MC,
|
||||
STOLT TANKERS B.V.,,,,"460 ALEXANDRA ROAD, # 10-01 PSA BUILDING",Singapore,119963,SG,
|
||||
SAS International LLC,,,,"26865 Interstate 45 South - Suite 200 The Woodlands,",,TX 77380,US,
|
||||
TUFTON OCEANIC LTD.,,,,,Monaco,0,MC,
|
||||
"D3 CHEMIE ",,,,,Monaco,NULL,MC,
|
||||
Interacid Trading SA (accounting),,,,En Budron H14,Le Mont-sur-Lausanne,CH-1052,CH,
|
||||
"SHANGHAI DINGHENG SHIPPING CO.,LTD.",,,,,Monaco,0,MC,
|
||||
MCC RAMU NICO LTD,,,,"PO BOX 1229, MADANG SECTION 95, LOT 18-19, MODILON RD PAPUA, NEW GUINEA",Modilon RD Papua,0,PG,
|
||||
FERTIMAC,,,,"AV. PLINIO BRASIL MILANO, 289 CONJ. 301, PORTO ALEGRE - RS - BRASIL, CNPJ : 92194026/0001-36",Porto Alegre,0,BR,
|
||||
THE PHOSPHATE CO. LTD. .,,,,"14, NETAJI SUBHAS ROAD, KOLKATA 700001, INDIA",Kolkata,700001,IN,
|
||||
CORP. NACIONAL DEL COBRE,,,,HUÉRFANOS 1270,Santiago,8320000,CL,
|
||||
"KIMYA MARKET SANAYI.TIC.LTD.STI. ",,,,600 Evler Mah.Balıkesir Asfaltı Sağ Taraf No:72 BANDIRMA BALIKESIR Bandırma Tax Office,Bandirma,NULL,TR,
|
||||
Marsh / Navigators Insurance Company,,,,1 Penn PLZ FL 55,New York,10119-5500,US,
|
||||
DE POLI TANKERS BV,,,,TRONDHEIM 20,Barendrecht,2993LE,NL,
|
||||
"SUNWOO SHIPPING CO., LTD.",,,,"KEB HANA BANK, MAPO-YEOK BRANCH 041-68 52, MAPO-DAERO, MAPO-GU, SEOUL, SOUTH KOREA ACCOUNT NO. 176-910020-20638 SWIFT : KOEXKRSE IN FAVOUR OF SUNWOO SHIPPING CO., LTD. ",Seoul,NULL,KR,
|
||||
NYRSTAR PORT PIRIE PTY LTD.,,,,HOOFDSTRAAT 1,DORPLEIN,6024 AA BUDEL,NL,
|
||||
INSPECTORATE (SUISSE) SA,,,,,Monaco,0,MC,
|
||||
GANSU YONGQI INDUSTRY AND TRADE. CO. (C),,,,"NORTH OF YANAN ROAD, WEST OF HEYA ROAD, JINCHUAN DISTRICT, JINCHANG CITY, GANSU PROVINCE, CHINA",Gansu Province,0,CN,
|
||||
"ASOCIACION DE COOPERATIVAS ARGENTINAS COOP. ",,,,"AV. EDUARDO MADERO 942, 4° - 5° - 6° - 7° PISO/FLOOR, (1106) BUENOS AIRES - ARGENTINA, C.U.I.T. 30-50012088-2",Buenos Aires,1106,AR,
|
||||
DM SHIPPING CO LTD,,,,,Monaco,0,MC,
|
||||
SUMITOMO CORPORATION - PPC,,,,"3-2 OTEMACHI 2-CHOME, CHIYODA-KU",Tokyo,100-8601,JP,
|
||||
FILHET - ALLARD MARITIME,,,,,Monaco,0,MC,
|
||||
IPSL / PEQUIVEN,,,,,Monaco,0,MC,
|
||||
VENUS SHIPPING CO LTD,,,,"1830, Lotte Gold Rose2Cha 890-59 Daechi 4-sa-dong Gangnam-gu",Seoul,NULL,KR,
|
||||
"TOHO ZINC CO., LTD.",,,,"8-2,MARUNOUCHI 1-CHOME,CHIYODA-KU,",Tokyo,100-8207,JP,
|
||||
"PEIFENG TECHNOLOGY & FERTILIZERS CO., LTD.",,,,"8F., NO 88, SEC. 2, NANJING E. RD, ZHONGSHAN DIST., ",Taipei,10457,TW,
|
||||
INDEVCO TRADING,,,,"35 AMBER ROAD #09-14, THE SEA VIEW, SINGAPORE 439945",Singapore,439945,SG,
|
||||
"GREATHORSE INTERNATIONAL SHIP MANAGEMENT CO., LTD.",,,,,Monaco,0,MC,
|
||||
JINCHUAN,,,,"JINCHUAN INDUSTRIAL PARK, QISHA INDUSTRIAL ZONE, GANGKOU DISTRICT, FANGCHENGGANG CITY GUANGXI PROVINCE",Fangchenggang City,538002,CN,
|
||||
MC FOR LLC,,,," Room 4, Premise 8N, prospekt Nevskiy 20A, St Petersburg, 191186, Russia.",Saint Petersburg,191186,RU,
|
||||
GRUPO MEXICO,,,,"EDIFICIO PARQUE REFORMA CAMPOS ELISEOS NO. 400, LOMAS DE CHAPULTEPEC",Mexico City,11000,MX,
|
||||
UBS SWITZERLAND AG,,,,Rue des Noirettes 35 PO Box 2600,Geneva,1211,CH,
|
||||
WLR/TRF SHIPPING LTD.,,,,,Monaco,0,MC,
|
||||
TRANSMARINE NAVIGATION CORPORATION,,,,2321 W WASHINGTON ST # K,Stockton,95203,US,
|
||||
NATIONAL CHEMICAL CARRIERS LTD. CO.,,,,"9/F, ONE JLT, JUMEIRAH LAKE TOWERS",Dubai,,AE,
|
||||
SORIN CORPORATION,,,,"YOUNG-POONG BLDG, 542, GANGNAM-DAERO, GANGNAM-GU",Seoul,06110,KR,
|
||||
"JX METALS SMELTING CO.,LTD.",,,,"1-2 OHTEMACHI 1-CHOME, CHIYODA-KU,",Tokyo,100-0004,JP,
|
||||
CHENGTAI GROUP PTE LTD,,,,TBD,,NULL,CN,
|
||||
FERFERMEX S.A.,,,,"PREDIO ENCINO GORDO S/N, C.P. 96340",Veracruz,96340,MX,
|
||||
"SHOKUYU NAVIGATION CO.,SA",,,,,,NULL,MA,
|
||||
BERLIAN LAJU TANKER TBK PT,,,,"Jalan Abdul Mu'is 40, Kec Gambir",Jakarta,10160,ID,
|
||||
"INTERACID NORTH AMERICA, INC (Stockton)",,,,"10210 HIGHLAND MANOR DRIVE - SUITE 140, TAMPA - FL",Tampa,33610,US,
|
||||
META NICKEL,,,,"CEYHUN ATIF KANSU CADDESI 114 BAYRAKTAR CENTER, D BLOK 1. KAT DAIRE NO: 1-",Ankara,2,TR,
|
||||
OTAMAY SHIPPING INC.,,,,,Monaco,0,MC,
|
||||
NFC PUBLIC COMPANY LTD,,,,"88 SC GROUP BUILDING, 3RD FLOOR, THE PARKLAND ROAD, BANGNA NUEA SUB-DISTRICT, BANGNA, BANGKOK 10260, THAILAND",Bangkok,10260,TH,
|
||||
NEW WAY VYAPAAR PVT LTD,,,,"249/B, G.T. ROAD (NORTH), LILUAH, FIRST FLOOR, ROOM N° 17, HOWRAH 711 204, WB INDIA",Howrah,711 204,IN,
|
||||
SGS NORTH AMERICA INC.,,,,,Tampa,NULL,US,
|
||||
KOYO KAIUN ASIA PTE. LTD,,,,,Monaco,0,MC,
|
||||
KUTCH CHEMICAL INDUSTRIES LTD,,,,"20-21, SARA NIWAS, HARI NAGAR, GOTRI ROAD, GUJARAT",Vadodara,390 007,IN,
|
||||
FAIRFIELD CHEMICAL CARRIERS INC,,,,"21 RIVER ROAD,2ND FLOOR, CU",Wilton,06897,US,
|
||||
EGRAIN AG,,,,HESSENSTRASSE 18,Hofheim,65719,DE,
|
||||
CHEMBULK OCEAN TRANSPORT LLC,,,,"THE DELAMAR 175 RENNELL DRIVE , CU",Southport,06890,US,
|
||||
FEFERMEX SA DE CV,,,,"PREDIO ENCINO GORDO S/N, COSOLEACAQUE, VERACRUZ, CP 96340, MEXICO",Veracruz,CP 96340,MX,
|
||||
SOUTHERN PERU COPPER CORP,,,,"AVENIDA, CAMINOS DEL LNCA 171, CHACARILLA DEL ESTANQUE, SURCO",Lima,33,PE,
|
||||
K.N.D. LINE S.A.,,,,,Monaco,0,MC,
|
||||
JUPITER DYECHEM PVT LTD.,,,,"MITTAL COURT, A WING, OFFICE N° 92 & 93, 9TH FLOOR, NARIMAN POINT, MUMBAI 400 021, BRANCH OFFICE 138/2 VANMALA COMPOUND, VALEGAON, KHANDAGALE ESTATE, PURNA, BHIWANDI, DISTRICT THANE",Mumbai,400-021,IN,
|
||||
TIMAC AGRO INDUSTRIA BRASIL,,,,"AV. ALMIRANTE MAXIMIANO FONSECA 1550 KM 02, DIST. INDUSTRIAL 96204-040, RIO GRANDE, RS, BRASIL",Rio Grande,96204-040,BR,
|
||||
SATCO SULPHURIC ACID,,,,"10210 HIGHLAND MANOR DRIVE - SUITE 140, TAMPA - FL",Tampa,33610,US,
|
||||
YAOKI SHIPPING S.A.,,,,,,NULL,MC,
|
||||
"HOPECHEM CO., LTD. (AGENT)",,,,"ROOM 302, N° 233 KENGBEI VILLAGE, WUTONG-SHAN ART TOWN, LUOHU DISTRICT, SHENZHEN, P. R. CHINA 518114",Shenzhen,518114,CN,
|
||||
CONS. DOMINICAN REPUBLIC,,,,,Monaco,NULL,MC,
|
||||
BUNGE ARGENTINA S.A.,,,,"25 DE MAYO 501, 4TH FLOOR, C1002ABK BUENOS AIRES, ARGENTINA",Buenos Aires,C1002ABK,AR,
|
||||
PISCES LINE SHIPPING S.A.,,,,,Monaco,0,MC,
|
||||
TONGLING NONFERROUS METALS GROUP CO. LTD,,,,"Room 308, 3rd Floor, Nonferrous Courtyard Business Building, Changjiang West Road, Tongling, Anhui Province",Tongling,24400,CN,
|
||||
NAVQUIM SHIP MANAGEMENT,,,," 10th Floor, Tower C, Weena 242,",Rotterdam,3012 NJ,NL,
|
||||
ETI BAKIR A.S.,,,,"ALTUNIZADE, KISIKLI CD. NO:37",Üsküdar-Istanbul,34662,TR,
|
||||
NEW SEAGULL SHIPPING S.A.,,,,,Monaco,0,MC,
|
||||
NFC PUBLIC COMPANY LTD - SUNTAI,,,,"88 SC GROUP BUILDING, 3RD FLOOR, THE PARKLAND ROAD, BANGNA NUEA SUB-DISTRICT, BANGNA, BANGKOK 10260, THAILAND",Bangkok,10260,TH,
|
||||
"Asahi Tanker Co.,Ltd",,,,"Hibiya-Daibiru 3F 2-2, Uchisaiwaicho 1-Chome, Chiyoda-Ku",Tokyo,100-0011,JP,
|
||||
SGS ITALIA S.R.L.,,,,,Monaco,0,MC,
|
||||
"TATSUMI MARINE CO., LTD",,,,"TATSUMI BLDG. 3RD FLOOR 3-8-7, ITABASHI, CHIYODA-KU, TOYKO 102-0072",Tokyo,102-0072,JP,
|
||||
S5 ASIA LIMITED (THAILAND),,,,"21ST FLOOR, TIMES SQUARE BUILDING, 246 SUKHUMVIT ROAD, KLONGTOEY",Bangkok,10110,TH,
|
||||
OCEAN STAR,,,,,Monaco,0,MC,
|
||||
HICRI ERCILI DENIZCILIK AS,,,,"Balikesir Asfalti Sag Taraf Caddesi, 72, 600 Evler Mah",Bandirma,10200,TR,
|
||||
SUMITOMO CORPORATION ASIA,,,,"KUALA LUMPUR BRANCH, UBN TOWER, 35TH FLOOR, 10 JALAN P. RAMLEE, P.O. BOX 10297",Kuala Lumpur,50710,MY,
|
||||
YARA SWITZERLAND,,,,"ROUTE DU FLORISSANT 13, 1206 GENEVA, SWITZERLAND",Geneva,1206,CH,
|
||||
NEW GLORY SHIPPING S.A.,,,,,Monaco,0,MC,
|
||||
UNI-TANKERS A/S,,,,Turbinevej 10,Middelfart,5500,DK,
|
||||
SACONIX LLC,,,,"560 WEST CROSSVILLE ROAD, SUITE N° 204, ROSWELL, GA 30075, USA",Rosswell,GA 30075,US,
|
||||
"PAN PACIFIC COPPER CO., LTD.",,,,"1-2 OHTEMACHI 1-CHOME, CHIYODA-KU,",Tokyo,100-8147,JP,
|
||||
ZODIAC MARITIME LIMITED OF LONDON,,,,"PORTMAN HOUSE, 2 PORTMAN STREET",London,W1H 6DU,GB,
|
||||
UTKILEN A.S.,,,,,Monaco,0,MC,
|
||||
ULTRATANK,,,,,Monaco,0,MC,
|
||||
"TIANJIN JIAHENGYUAN INTL TRADE CO.,LTD",,,,Monaco,,NULL,CN,
|
||||
DUKSAN P&V CO LTD,,,,"2nd Floor, Dongailbo Building, 87, Jungang-daero, Jung-gu",Busan,48929,KR,
|
||||
GOLDEN MERCURY MARITIME S.A.,,,,,Monaco,0,MC,
|
||||
SUMITOMO CORPORATION THAILAND LTD.,,,,"20TH, 21ST & 22ND FLOOR UNIT 1, M. THAI TOWER, ALL SEASONS PLACE, 87 WIRELESS ROAD, LUMPINI, PHATUMWAN",Bangkok,10330,TH,
|
||||
SUMITOMO CORPORATION INDIA PVT LTD,,,,"OFFICE N° 1, UNIT B, 5TH FLOOR S-14, SOLITAIRE CORPORATE PARK 167, GURU HARGOBINDJI ROAD, CHAKALA, ANDHERI (EAST)",Mumbai,400 093,IN,
|
||||
YOUNG POONG CORPORATION,,,,"555, SUKPO-RI, SUKPO-MYUN, BONGHWA-GUN",Kyoungbuk,0,KR,
|
||||
BTS TANKERS PTE LTD,,,,,Monaco,0,MC,
|
||||
STELLA TANKER PTE LTD,,,,10 ANSON ROAD # 23-03,Singapore,079903,SG,
|
||||
GOLDEN DREAM MARITIME SA,,,,,Monaco,0,MC,
|
||||
JAL KUMUD SHIPPING IFSC PVT,,,,"Unit GA-20, Ground Floor, Pragya Accelerator Savvy Ats Group, Gift City, Gandhinagar,",Gujarat,382355,IN,
|
||||
MANGALORE CHEMICALS,,,,"UB TOWER, LEVEL 11, UB CITY, 24, VITTAL MALLYA ROAD, BENGALURU 560 001, INDIA",Bengaluru,560 001,IN,
|
||||
TATSUMI SHOKAI CO. LTD (ONAHAMA),,,,"312, ONAHAMA AZATAKAYAMA",Iwaki City,0,JP,
|
||||
CHEMSEA SHIPPING COMPANY LIMITED,,,,Rm 1510 Wing Tuck CommCtr 177-183 Wing Lok St Hong Kong,,NULL,HK,
|
||||
JEIL INTL CO LTD,,,,"RM 301, 3F SAMJUNG ENTERPRISE B/D, 5, JUNGANG-DAERO 775 BEON-GIL, BUNSANJIN-GU",Busan,0,KR,
|
||||
GLENCORE INTERNATIONAL AG,,,,"BAARERMATTSTRASSE 3, P.O. BOX 1363, 6341 BAAR, SWITZERLAND",Baar,6341,CH,
|
||||
MOL NORDIC TANKERS A/S,,,,,Monaco,0,MC,
|
||||
ESSEX SHIPPING,,,,"JUBILEE HOUSE, 3 THE DRIVE, GREAT WARLEY, BRENTWOOD ESSEX",Brentwood,CM13 3FR,GB,
|
||||
BANDIRMA GÜBRE FABRIKALARI A.S (C ),,,,SUSAM SOK. NO:22,Cihangir-Istanbul,34433,TR,
|
||||
SGS SWITZERLAND SA,,,,,Monaco,0,MC,
|
||||
"TOROS AGRI INDUSTRY AND TRADE CO., INC. (S)",,,,"TEKFEN TOWER,4. LEVENT",Istanbul,34394,TR,
|
||||
"WOOJIN SHIPPING CO.,LTD",,,,"KUNSHIN BLDG.NEW BLDGO #501, 16, SAMGAE-RO (250-4,DOHWA-DONG) MAPO-GU",Seoul,0,KR,
|
||||
TRF SHIP MANAGEMENT AS,,,,,Monaco,0,MC,
|
||||
AMERICAS EXPORT CORP.,,,,"P.O. BOX 3067, WEST PALM BEACH, FL 33402, FLORIDA, UNITED STATES",West Palm Beach,FL 33402,US,
|
||||
QUANG BINH IMPORT AND EXPORT JOINT STOCK,,,,"N° 23, BLOCK 01, AREA 97 BACH DANG, HA LY, HONG BANG DISTRICT, HAI PHONG CITY, VIETNAM",Hai Phong,0,VN,
|
||||
GROUPE CHIMIQUE TUNISIEN,,,,"7, RUE DU ROYAUME D'ARABIE-SAOUDITE, 1002 TUNIS BELVEDERE, TUNISIA",Tunis Belvedere,1002,TN,
|
||||
PARANAPANEMA S/A. (S),,,,"VIA DO COBRE N° 3700, AREA INDUSTRIAL OESTE (AIO) - COPEC, CEP 42850-00 DIAS D'AVILA",Bahia,CEP 42850-00,BR,
|
||||
NYRSTAR SALES & MARKETING AG,,,,TESSINERPLATZ 7,Zurich,8002,CH,
|
||||
IPRUDENTIAL SHIPPING AGENCY SERVICES INC,,,,"4TH FLOOR, KALAYAAN BLDG, SALCEDO CORNER DELA ROSA ST., LEGASPI VILLAGE, MAKATI VILLAGE, PHILIPPINES 1229",Makati Village,1229,PH,
|
||||
DAEHO SHIPPING CO LTD,,,,"Room 203(woolim bldg) 19, Daepyeong-ro 28beon-gil, Yeongdo-gu, Busan, Korea",,NULL,KR,
|
||||
SIETEMAR SA,,,,,Monaco,0,MC,
|
||||
WALLEM,,,,,Monaco,0,MC,
|
||||
BACONCO J-V COMPANY,,,,,Monaco,NULL,MC,
|
||||
"CORAL BAY NICKEL CORP. ",,,,"RIO TUBA EXPORT PROCESSING, ZONE RIO TUBA, BATARAZA, PALAWAN, PHILIPPINES 5306, TIN 005-961-540-00000",Palawan,5306,PH,
|
||||
ROSSING URANIUM,,,,"PRIVATE BAG 5005, SWAKOPMUNG, NAMIBIA",Swakopmung,0,NA,
|
||||
INCITEC LTD,,,,"LEVEL 8, 28 SOUTHBANK BOULEVARD, SOUTHBANK, VICTORIA 3006, AUSTRALIA",Victoria,3006,AU,
|
||||
TBD Supplier,,,,TBD,Monaco,TBD,MC,
|
||||
SATCO - Tampa Terminal,,,,,Tampa,NULL,US,
|
||||
NORFALCO LLC,,,,"100 KING STREET WEST, SUITE 6900, PO BOX 403, TORONTO, ON",Toronto,M5X 1E3,CA,
|
||||
INDIAN FARMERS FERTILIZER COOPERATIVE LTD,,,,"PARADEEP UNIT, VILLAGE MUSADIA, P. O. PARADEEP, JAGATSINGHPUR, ORISSA - 754142, INDIA, GST N° 21AAAAI0050M2Z6",Orissa,754142,IN,
|
||||
SUMITOMO CORPORATION AFRICA (GHANA),,,,"SILVER STAR TOWER, 8TH FLOOR, AIRPORT CITY, 8TH FLOOR",Accra,0,GH,
|
||||
SUMITOMO CORPORATION THAILAND,,,,"20TH, 21ST & 22ND FLOOR UNIT 1, M. THAI TOWER, ALL SEASONS PLACE, 87 WIRELESS ROAD, LUMPINI, PHATUMWAN",Bangkok,10330,TH,
|
||||
ORION REEDEREI GMBH & CO KG,,,,,Monaco,0,MC,
|
||||
SOCIEDAD CONTRACTUAL EL ABRA,,,,"CAMINO CONCHI VIEJO S/N, KM 75, CALAMA, CHILE",Calama,0,CL,
|
||||
POLYSERVE,,,,"22 SYRIA ST., MOHANDESIEEN,",Giza,0,EG,
|
||||
MT MARITIME MANAGEMENT USA LLC,,,,,Monaco,0,MC,
|
||||
SUN METALS CORPORATION PTY LTD,,,,1 ZINC AVENUE STUART QLD,STUART,4811,AU,
|
||||
"SINOTRANS DONGWAN WAREHOUSE LOGISTICS CO.,LTD.",,,,"ZHONGHUA RD, GANGKOU DISTRICT, FANGCHENGGANG",Guangxi,0,CN,
|
||||
INTERNATIONAL COBALT CO,,,,"P.O. BOX N° 7539, E. BAY STREET, NASSAU, BAHAMAS",Nassau,0,BS,
|
||||
GULF AGENCY COMPANY LIMITED,,,,,Monaco,0,MC,
|
||||
NIPPON MARINE CO. LTD,,,,,Monaco,0,MC,
|
||||
"Interacid Trading Chile ",,,,"Av. Isidora Goyenechea 3600, Of. 301",Santiago,NULL,CL,
|
||||
DORVAL SC TANKERS INC.,,,,"3RD FL KDX MONZENNAKACHO BLDG, 1-14-1, BOTAN, KOTO-KU",Tokyo,NULL,JP,
|
||||
Interacid Trading SA,,,,Av. des Baumettes 5,Renens,1020,CH,
|
||||
BHP BILLITON,,,,"CERRO EL PLOMO 6000, 18TH FLOOR, LAS CONDES, SANTIAGO, CHILE",Santiago,NULL,CL,
|
||||
ENEOS OCEAN CORPORATION,,,,"THE LANDMARK TOWER YOKOHAMA 48TH FLOOR 2-2-1, MINATOMIRAI, NISHI-KU",Yokohama,220-8148,JP,
|
||||
SEATRANS MARINE PRIVATE LIMITED,,,,"ABIR KUNJ, 158 RAJDANGA NABAPALLY",Kolkata,700107,IN,
|
||||
MAC SHIPPING MANAGEMENT PTE,,,,hk,Hong Kong,NULL,HK,
|
||||
NEXA RESOURCES CAJAMARQUILLA S.A.,,,,"CAR. CENTRAL N° 9.5 CAJAMARQUILLA (CARR. CENTRAL KM 9.5 DESVIO A HUACHIPA), LIMA, LURIGANCHO",Lurigancho,0,PE,
|
||||
SUMITOMO CORPORATION - ACIDS,,,,"3-2 OTEMACHI 2-CHOME, CHIYODA-KU",Tokyo,100-8601,JP,
|
||||
OCEAN FAVOR SHIPPING (SHANGHAI) LTD,,,,"ROOM 517 A BUILDING 3 NO 2588, SOUTH LIANHUA ROAD MINHANG DISTRICT",Shanghai,0,CN,
|
||||
FUJI LAVA MARITIME LTD,,,,,Monaco,0,MC,
|
||||
"CENTRAL ROMANA CORP. LTD. ",,,,"C/O AMERICAS EXPORT COPR, P.O. BOX 3067, WEST PALM BEACH, FLORIDA 33401-3067",West Palm Beach,33401-3067,US,
|
||||
THE BTTCO OVERSEAS,,,,"B-92, OM TOWERS, GHODA CAMP ROAD, SHAHIBAUG",Ahmedabad,380 004,IN,
|
||||
BEYKIM DENIZCILIK GEMI,,,,"IC Kapi 142, Blok 2, Maslak A Blok, AOS 55 Sokak, Maslak Mah, 42, SariyeR",Istanbul,NULL,TR,
|
||||
YARA BRAZIL,,,,"AV. ALMIRANTE MAXIMIANO FONSECA 2001, CEP 96204-040, RIO GRANDE, RS, BRASIL",Rio Grande,CEP 96204-040,BR,
|
||||
SAEHAN MARINE CO LTD (BUSAN),,,,"12TH FLOOR, ANNJAY TOWER 208, TEHERAN-RO, GANGNAM-GU, SEOUL, 06220, KOREA",Seoul,6220,KR,
|
||||
GOLDEN STENA BAYCREST TANKERS PTE LTD,,,,"108 PASIR PANJANG ROAD, #04-15 GOLDEN AGRI PLAZA",Singapore,118535,SG,
|
||||
DUCGIANG CHEMICAL AND DETERGENT,,,,"TANG LOONG INDUSTRIAL ZONE, TANG LOONG, BAO THANG, LAO CAI, VIETNAM",Lao Cai,NULL,VN,
|
||||
VISIONCHEM,,,,"#1515 SAMKOO BUILDING, 40 CHEONGPA-RO, YONGSAN-GU",Seoul,4373,KR,
|
||||
"Shandong Xiangying Chemical Import and Export CO.,LTD.",,,,"715, Tower A, world trade center, Yantai, Handong province,",,NULL,CN,
|
||||
SUMITOMO MYANMAR,,,,"#208~213, 2ND FLOOR, PRIME HILL BUSINESS SQUARE, NO.60 SHWE DAGON PAGODA ROAD, DAGON TOWNSHIP",Yangon,0,MM,
|
||||
WILSON INTERNATIONAL TRADING PRIVATE LTD,,,,"8 Temasek Boulevard 17-02/03 Suntec Tower 3 ",Singapore,038988,SG,
|
||||
PANOLI INTERMEDIATES (INDIA) PVT. LTD.,,,,"'Sara Niwas', 20-21, Harinagar Co.Op. Society, Gotri Road",Vadodara,39007,IN,
|
||||
DHL EXPESS,,,,,Monaco,NULL,MC,
|
||||
"HIBI KYODO SMELTING CO.,LTD.",,,,"1-11-1 OSAKI , SHINAGAWA-KU",Tokyo,141-8584,JP,
|
||||
STERLITE INDUSTRIES,,,,"SIPCOT INDUSTRIAL COMPLEX, MADURAI BYPASS ROAD, THOOTHUKUDI (TAMIL NADU)",Tuticorin,628002,IN,
|
||||
TAIHEI KAIUN KK,,,,,Monaco,0,MC,
|
||||
PT QMB NEW ENERGY MATERIALS,,,,"Sopo Del Office Tower Lantai 22, Unit A, Jalan Mega Kuningan Barat III Lot 10.1-6 Kawasan Mega Kuningan, Kota Adm. Jakarta Selatan, Provinsi DKI Jakarta, Kode Pos: 12950, Indonesia",Jakarta,12950,ID,
|
||||
GLENCORE HOLDING AG,,,,"BAARERMATTSTRASSE 3, P.O. BOX 1363",Baar,6340,CH,
|
||||
"SHANGHAI SUMITOMO CORPORATION CO.,LTD.",,,,"10F, SHANGHAI WORLD FINANCIAL CENTER, 100 CENTURY AVENUE, PUDONG NEW AREA",Shanghai,200120,CN,
|
||||
"INEOS SULPHUR CHEMICALS SPAIN, S.L.U.",,,,"DIQUE DE ZIERBENA, MUELLE AZ-1",Zierbena-Bizkaia,48508,ES,
|
||||
CIECH S.A,,,,UL. WSPOLNA,Warsaw,00-684,PL,
|
||||
SUMITOMO AUSTRALIA,,,,"LEVEL 21, 101 COLLINS STREET, MELBOURNE VIC 3000, AUSTRALIA",Melbourne,VIC 3000,AU,
|
||||
GOLDEN JUPITER NAVIGATION S.A.,,,,,Monaco,0,MC,
|
||||
"Agrifields DMCC, Dubai",,,,"3201, JBC4, Cluster N, Jumeirah Lake Towers",Dubai,NULL,AE,
|
||||
"CVCI ",,,,"AVENUE D'OUCHY 47, 1006 LAUSANNE, SWITZERLAND",Lausanne,CH-1006,CH,
|
||||
"PAN OCEAN CO., LTD",,,,"Tower 8, 7, Jong-ro 5-Gil, Jongro-Gu, Seoul, Korea (Rep of) Korea, Republic Of ",Seoul,NULL,KR,
|
||||
SGS-CSTC STANDARDS TECNICAL SERVICES CO. LTD,,,,,Monaco,0,MC,
|
||||
SINOCHEM INTERNATIONAL CORP.,,,,,Monaco,0,MC,
|
||||
"INFICESS SHIPPING CO., LTD.",,,,,Monaco,0,MC,
|
||||
SUMITOMO CORPORATION VIETNAM LLC (HA),,,,"9TH FLOOR, CORNERSTONE BLDG, 16 PHAN CHU TRINH STREET, HOAN, KIEM DISTRICT",Hanoi,0,VN,
|
||||
SUMITOMO CORPORATION - NO OR LIMITED COMMISSION,,,,"3-2 OTEMACHI 2-CHOME, CHIYODA-KU",Tokyo,100-8601,JP,
|
||||
SEASTAR MARITIME CO. LTD,,,,"Room 2112, Techno-Mart 21 Bldg, 85 Gwangnaru-Ro 56 Gil, Gwangjin-Gu",Seoul,NULL,KR,
|
||||
NFC PUBLIC COMPANY LTD - MAHACHAI,,,,"88 SC GROUP BUILDING, 3RD FLOOR, THE PARKLAND ROAD, BANGNA NUEA SUB-DISTRICT, BANGNA, BANGKOK 10260, THAILAND",Bangkok,10260,TH,
|
||||
INTERACID TRADING (CHILE) S.A.,,,,ISIDORA GOYENECHEA NR. 3600 - OFFICE 301 LAS CONDES,Santiago,,CL,
|
||||
J.M. BAXI & CO,,,,"GODREJ COLISEUM, 8TH FLOOR, 801-C WING, EVERARD NAGAR, SION (EAST),",Mumbai,400022,IN,
|
||||
KONA MARITIME S.A.,,,,,Monaco,0,MC,
|
||||
NewChem Consulting AG,,,,Waldeggweg 6A 6318 Walchwil Switzerland,Walchwil,6318,CH,
|
||||
TAGANITO HPAL NICKEL CORPORATION,,,,"TAGANITO SPECIAL ECONOMIC ZONE (TSEZ) BARANGAY TAGANITO, CLAVER, SURIGAO DEL NORTE, PHILIPPINES 8410",Surigao Del Norte,8410,PH,
|
||||
KHANG TRUNG HIEU CO. LTD,,,,"N° 12/17, VO THI SAU STREET, QUYET THANG WARD, BIEN HOA CITY, DONG NAI PROVINCE, VIETNAM",Dong Nai Province,0,VN,
|
||||
INFICESS SHIPPING CO. LTD.,,,,,Monaco,0,MC,
|
||||
SRF LIMITED,,,,"D II/I, GIDC PCPIR, GIDC PHASE II, TAL VAGRA, VILLAGE DAHEJ, BHARUCH, GUJARAT 392130, INDIA",Gujarat,392130,IN,
|
||||
FLYTE YANGON SA,,,,"23rd Floor, MMG Tower, Paseo del Mar y Boulevard Pacific, Costa del Este",Panama City,NULL,PA,
|
||||
ASIA CHEMICAL TANKER ALLIANCE PTE LTD,,,,"6 Temasek Boulevard #44-01 Suntec Tower Four ",Singapore,038986,SG,
|
||||
TATSUMI SHOKAI (NAOSHIMA/HIBI),,,,"TATSUMI SHOKAI BLDG 3FL 4-1-1, CHIKKO, MINATO-KU",Osaka,552-0021,JP,
|
||||
COROMANDEL INTERNATIONAL LTD,,,,"COROMANDEL HOUSE, 1-2-10, SARDAR PATEL ROAD, SECUNDERABAD, 500003, INDIA",Secunderabad,50003,IN,
|
||||
GLENCORE CHILE SPA,,,,"AV. COSTANERA SUR 2730 OF. 1701, PISO 17, LAS CONDES, SANTIAGO, CHILE",Santiago,0,CL,
|
||||
NIPPON MARINE CO LTD,,,,966-15 OITA CITY,Saganoseki,879-2201,JP,
|
||||
SGS ESPANOLA DE CONTROL,,,,,Monaco,0,MC,
|
||||
LISBON SHIPPING LTD,,,,"Room 550, 5th Floor, Zonghe Lou, 385, Chaoyangshan Lu, Huangdao Qu, Qingdao",Qingdao,266400,CN,
|
||||
FEDERAL EXPRESS (FEDEX/TNT),,,,,Monaco,NULL,MC,
|
||||
SAI SULPHONATES PCT LTD,,,,"21, C. R. AVENUE, WHITE HOUSE, 2ND FLOOR, 700 072 KOLKATA, INDIA",Kolkata,700 072,IN,
|
||||
MOL CHEMICAL TANKERS PTE. LTD. OF SINGAPORE,,,,"5 SHENTON WAY, #15-01",Singapore,068808,SG,
|
||||
SONGA SHIPMANAGMENT LTD,,,,,Monaco,0,MC,
|
||||
NORTON LILLY INTERNATIONAL,,,,433 CALIFORNIA ST,San Francisco,94104,US,
|
||||
Xingtong Shipping (Singapore) Pte Ltd,,,,TEMASEK BOULEVARD #24-05 SUNTEC TOWER FOUR SINGAPORE Tel: +65 9298398 Email: chartering@xtshipping.com,Singapore,038986,SG,
|
||||
GUJARAT FLUOROCHEMICALS LIMITED,,,,"ABS TOWERS, 2ND FLOOR, OLD PADRA ROAD, VADODARA 390007, GUJARAT, INDIA",Vadodara,390007,IN,
|
||||
CELSIUS SHIPPING APS,,,,,Monaco,0,MC,
|
||||
JEIL INTERNATIONAL CO. LTD.,,,,,Monaco,0,MC,
|
||||
TIMAC AGRO IRELAND LIMITED,,,,"4 & 5, PRIORITY COURT THE QUAY, NEW ROSS, CO WEXFORD, IRELAND",Wexford,Y34 HV25,IE,
|
||||
SUMITOMO CORPORATION ASIA & OCEANIA,,,,"35TH FLOOR UBN TOWER, 10 JALAN P RAMLEE",Kuala Lumpur,50250,MY,
|
||||
"INTERACID NORTH AMERICA, INC (Beaumont)",,,,"560 WEST CROSSVILLE ROAD, SUITE N° 204, ROSWELL, GA 30075, USA",Roswell,30075,US,
|
||||
NFC PUBLIC COMPANY LTD - UCHA,,,,"88 SC GROUP BUILDING, 3RD FLOOR, THE PARKLAND ROAD, BANGNA NUEA SUB-DISTRICT, BANGNA, BANGKOK 10260, THAILAND",Bangkok,10260,TH,
|
||||
NUOVA SOLMINE S.P.A.,,,,VIA NUOVA VIGNOLE 38,Serravalle Scrivia AL,15069,IT,
|
||||
LUMUT VENTURE SDN BHD,,,,,Monaco,0,MC,
|
||||
CONE SUL AGENCIA DE NAVEGACAO LTDA,,,,,Monaco,NULL,MC,
|
||||
SUMITOMO CORPORATION (SHANGHAI) LTD,,,,"10F,Shanghai World Financial Center,100 Century Avenue, Pudong New Area, Shanghai ",Shanghai,NULL,CN,
|
||||
MITSUBISHI MATERIALS CORPORATION METALS COMPANY,,,,"11F KEIDANRENKAIKAN, 1-3-2,OHTEMACHI,CHIYODA-KU",Tokyo,100-8117,JP,
|
||||
CREDIT SUISSE (SWITZERLAND) SA,,,,1201 Geneva,Geneva,NULL,CH,
|
||||
"INTERACID NORTH AMERICA, INC (Tampa)",,,,"10210 HIGHLAND MANOR DRIVE - SUITE 140, TAMPA - FL",Tampa,33610,US,
|
||||
PT PUPUK ISKANDAR MUDA (PIM),,,,JL. MEDAN - BANDA ACEH PO BOX 21,,NULL,ID,
|
||||
"FURUKAWA METALS & RESOURCES CO., LTD.",,,,"2-3 MARUNOUCHI, 2 - CHOME, CHIYODA-KU",Tokyo,0,JP,
|
||||
"SUNWOO TANKER CO. LTD, KOREA",,,,"MI-WON BLDG ROOM 1702, 70 GUKJEGEUMYUNG-RO, YEONGDEUNGPO-GU",Seoul,NULL,KR,
|
||||
SATCO SULPHURIC ACID WC,,,,"10210 HIGHLAND MANOR DRIVE - SUITE 140, TAMPA - FL ",Stockton,33610,US,
|
||||
SUMITOMO CORPORATION VIETNAM LLC (HM),,,,"LANDMARK BUILDING, 6TH FLOOR, 5B TON DUC THANG, BEN NGHE WARD, DISTRICT 1,",Ho Chi Minh,0,VN,
|
||||
ODFJELL TANKERS AS,,,,CONRAD MOHRS VEG 29,Bergen,5072,NO,
|
||||
"SUMITOMO METAL MINING CO., LTD.",,,,"3-5-3, NISHIBARA-CHO, NIIHAMA",EHIME,792-8555,JP,
|
||||
GANSU YONGQI INDUSTRY AND TRADE. CO. (S),,,,"NORTH OF YANAN ROAD, WEST OF HEYA ROAD, JINCHUAN DISTRICT, GANSU PROVINCE,",Jinchang City,0,CN,
|
||||
UPS UNITED PARCEL SERVICE (SCHWEIZ) AG,,,,,Monaco,0,MC,
|
||||
HANSA TANKERS AS OF BERGEN,,,,KALFARVEIEN 57A,Bergen,5022,NO,
|
||||
SC SHIPPING SINGAPORE PTE LTD,,,,,Monaco,0,MC,
|
||||
FOSPHORY SP Z.O.O,,,,IL. KUJAWSKA 2,Gdansk,80-550,PL,
|
||||
VELOCITE SARL,,,,"RUE DR CESAR-ROUX 29, 1003 LAUSANNE, SWITZERLAND",Lausanne,1003,CH,
|
||||
TACHIBANAYA CO. LTD.,,,,,Monaco,0,MC,
|
||||
SUMITOMO CORPORATION TAIWAN LTD,,,,"SUMITOMO-FLYSUN BLDG. 8TH FL., NO. 260, TUN HWA N. ROAD",Taipei,NULL,TW,
|
||||
TAIKO MARKETING SDN,,,,"B2-6-01, BLOK B2, MERITUS @ OASIS CORPORATE N° 2, JALAN PJU 1A/2, ARA DAMANSARA, 47301 PETALING JAYA, SELANGOR, MALAYSIA",Petaling Jaya,47301,MY,
|
||||
"SAEHAN MARINE CO, LTD (MARINE)",,,,10 ANSON ROAD #26-02 INTERNATIONAL PLAZA,Singapore,79903,SG,
|
||||
DILMAS CO LTD,,,,"ul Pogranichnaya 72, Nakhodka, Primorskiy kray, 692910, Russia.",Primorsk,692910,RU,
|
||||
SAYBOLT POLAND SP. Z O.O.,,,,1 PODLASKA STR.,Gdynia,81-325,PL,
|
||||
AKR SEA TRANSPORT PT,,,,"Lot 7, Wisma AKR, Jalan Panjang 5, Kec Kebon Jeruk",Jakarta,11530,ID,
|
||||
NAVIG8 CHEMICAL POOLS INC.,,,,"3 TEMASEK AVENUE, #25-01 CENTINNIAL TOWER",Singapore,039190,SG,
|
||||
RAETSMARINE INSURANCE B.V.,,,,,Monaco,0,MC,
|
||||
AR SAVAGE AND SON,,,,"202 SOUTH ROME AVE, SUITE 200",Tampa,33606,US,
|
||||
MARCOBRE S.A.C.,,,,"Giovanni Batista Lorenzo Bernini 149, Piso 3, Oficina 301, San Borja, Lima, Peru",Lima,NULL,PE,
|
||||
LS-NIKKO COPPER INC.,,,,"15FI., (LS YONGSAN TOWER), 92, HANGANG-DAERO, YONSAN-GU",Seoul,4386,KR,
|
||||
JLS MONKEY CO LTD,,,,"Workshop B, 3rd Floor, Manning Industrial Building, 116-118, How Ming Street, Kwun Tong, Kowloon",Hong Kong,NULL,CN,
|
||||
SUMITOMO CORPORATION - FURUKAWA,,,,"3-2 OTEMACHI 2-CHOME, CHIYODA-KU",Tokyo,100-8601,JP,
|
||||
CHEMMASTER TANKERS COMPANY LIMITED,,,,"Room 1510,wing Tuck Commerical Centre, 177-183 Wing Lok Street ",Hong Kong,NULL,HK,
|
||||
BTM MUFG Bank Ltd,,,,"Ropemaker Place, 25 Ropemaker Street London EC2Y 9AN, United Kingdom",London,EC2Y 9AN,GB,
|
||||
PT PETROKIMIA (PKG),,,,"JI. JEND. AHMAD YANI, GRESIK 61119",Gresik,61119,ID,
|
||||
"Sulfuric Acid Trading Company , Inc (SATCO)",,,,3710 Corporex Park Drive - Suite 205,Tampa,NULL,US,
|
||||
TWO LIONS ZHANGJIAGANG,,,,DONGHUA ROAD YANGTZE RIVER INTERNATIONAL CHEMICAL INDUSTRIAL PARK,Zhangjiagang,0,CN,
|
||||
ACE TANKERS MANAGEMENT B.V.,,,,STRAWINSKYLAAN 1057,Amsterdam,STRAWINSKYLAAN 1057,NL,
|
||||
PHILIPPINE ASSOCIATED SMELTING AND REFINING CORP.,,,,"LEYTE INDUSTRIAL DEVELOPMENT ESTATE, ISABEL",Leyte,0,PH,
|
||||
SC SHIPPING SINGAPORE PTE LTD,,,,"6 Temasek Blvd #22/04-05 Suntec Tower 4 ",Singapore,038986,SG,
|
||||
TBD Shipowner,,,,TBD,Monaco,TBD,MC,
|
||||
HICRI ERCILI,,,,600 EVLER MAHALLESI BALIKESIR ASFALTI SAĞ TARAF NO: 72,Bandirma,10200,TR,
|
||||
BARWIL AGENCIES LTD,,,,,Monaco,NULL,MC,
|
||||
FERTILORE SAS,,,,"40 TER AVENUE DE SUFFREN, 75015 PARIS, FRANCE",Paris,75015,FR,
|
||||
FLYTE MANILA SA,,,,"23rd Floor, MMG Tower, Paseo del Mar y Boulevard Pacific, Costa del Este",Panama City,NULL,PA,
|
||||
BOLIDEN MINERAL AB,,,,,Skelleftehamn,932 81,SE,
|
||||
SHOKUYU TANKER CO. LTD.,,,,,Monaco,0,MC,
|
||||
"ATLANTIC COPPER ",,,,"P° DE LA CASTELLANA, 95 - 21ST FLOOR",Madrid,28046,ES,
|
||||
MOL CHEMICAL TANKERS PTE. LTD. OF SINGAPORE,,,,"5 SHENTON WAY, #15-01",Singapore,068808,SG,
|
||||
TATSUMI MARINE (S) PTE LTD,,,,,Singapore,NULL,SG,
|
||||
SUMITOMO CORPORATION DIS TICARET A.S.,,,,"Ferko Plaza, Esentepe district, Büyükdere Street No.175, A Blok 20th Floor, Şişli",Istanbul,NULL,TR,
|
||||
Scrap Co. Ltd,,,,Blvd des Moulins,Monaco,NULL,MC,
|
||||
"FOCUS SHIPPING CO.,LTD",,,,12/F 3 LOCKHART RD. WANCHAI,Hong Kong,0,HK,
|
||||
NAMHAE CHEMICAL CORP,,,,"1506, 17, Seobinggo-ro, Yongsan-gu",Seoul,04387,KR,
|
||||
SHANGHAI ORIENT INTERTEK TESTING SERVICES CO.,,,,,Monaco,0,MC,
|
||||
MARNAVI SHIPPING MANAGEMENT PVT LTD,,,,,Monaco,0,MC,
|
||||
SGS TESTING KOREA CO. LTD.,,,,,Monaco,0,MC,
|
||||
FERALCO (UK) LTD,,,,"DITTON ROAD, WIDNES, CHESHIRE WA 8 OPH, ENGLAND",Cheshire,WA 8 OPH,GB,
|
||||
ACE QUANTUM CHEMICAL TANKERS CV,,,,,Monaco,0,MC,
|
||||
HONGKONG ZHOUSHAN YIHAI SHIPPING CO. LTD.,,,,,Monaco,0,MC,
|
||||
AET SHIP MANAGEMENT (M) SDN. BHD.,,,,,Monaco,0,MC,
|
||||
NFC PUBLIC COMPANY LTD - PROMMITR,,,,"88 SC GROUP BUILDING, 3RD FLOOR, THE PARKLAND ROAD, BANGNA NUEA SUB-DISTRICT, BANGNA, BANGKOK 10260, THAILAND",Bangkok,10260,TH,
|
||||
SUMIMA SDN BHD,,,,,Monaco,0,MC,
|
||||
"TOROS AGRI INDUSTRY AND TRADE CO, INC. (C )",,,,"TEKFEN TOWER,4. LEVENT",Istanbul,34394,TR,
|
||||
"TRICON ENERGY, LTD",,,,"1500 Post Oak Blvd., 18th Floor, Houston, Texas 77056 USA",Houston,77056,US,
|
||||
HUTAMA TRANS KONTINENTAL PT,,,,"Lantai 26, Mangkuluhur City Tower One, Jalan Jendral Gatot Subroto Kav 1-3, Kel Karet Semanggi",Jakarta,12930,ID,
|
||||
GRACEFUL STAR SHIPPING CO LTD,,,,"Room 2109, 21st Floor, C C Wu Building, 302-308, Hennessy Road, Wan Chai, Hong Kong, China.",Hong Kong,NULL,CN,
|
||||
MAADEN PHOSPHATE COMPANY,,,,"P.O. Box 11110, Ras Al Khair, Industrial City",Jubail,31961,SA,
|
||||
SUMITOMO CORP. ASIA,,,,"KUALA LUMPUR BRANCH, UBN TOWER, 35TH FLOOR, 10 JALAN P. RAMLEE, P.O. BOX 10297, 50710 KUALA LUMPUR, MALAYSIA",Kuala Lumpur,50710,MY,
|
||||
"COOGEE CHEMICALS PTY LTD ",,,,"CNR PATTERSON & KIWANA BEACH ROADS, KWINANA, P.O. BOX 5051 ROCKINGHAM BEACH, WA 6969, AUSTRALIA",Brand,6969,AU,
|
||||
MAKINO KAIUN CO LTD & MAKI OCEAN SHIPPING SA,,,,,Monaco,0,MC,
|
||||
PROCYON SHIPPING S.A.,,,,,Monaco,0,MC,
|
||||
VENATOR ASIA SDN BHD (EX HUNTSMAN),,,,"KAWASAN INDUSTRI TELUK KALUNG, P.O. BOX 29, KEMAMA 24007, TERENGGANU, MALAYSIA",Kemaman,24000,MY,
|
||||
WOMAR LOGISTICS PTE LTD,,,,"8 TEAMASEK BOULEVARD, #22-06, SUNTEC TOWER 3",Singapore,038988,SG,
|
||||
EASTERN TANKERS CO. LTD.,,,,,Monaco,0,MC,
|
||||
AS INVENTOR SHIPPING,,,,,Monaco,0,MC,
|
||||
HICRI ERCILI DENIZCILIK AS,,,,,Monaco,0,MC,
|
||||
BAO PHUNG COMPANY,,,,,Monaco,NULL,MC,
|
||||
CIE MARITIME BELGE SA,,,,,Monaco,0,MC,
|
||||
Accounting Matching,,,,internal,,NULL,CH,
|
||||
IND-AUST MARITIME PVT LTD,,,,"715, JK Chambers, Sector 17, Vashi, Navi Mumbai, Maharashtra",,400703,IN,
|
||||
SAG Surveyors Alliance Group,,,,"Calle Bolívar 472 Of 705, Miraflores – Lima 18, Perú",Lima,0,PE,
|
||||
STAINLESS TANKERS INC C/O WOMAR LOGISTICS PTE LTD,,,,,Monaco,0,MC,
|
||||
HINDALCO INDUSTRIES LTD,,,,"P.O. DAHEJ, DIST : BHARUCH, PIN CODE",Gujarat,392 130,IN,
|
||||
INCHCAPE SHIPPING SERVICES (JAPAN) LTD.,,,,"HAMAMATSUCHO BLDG. 6F, 1-1-1, SHIBAURA, MINATO-KU",Tokyo,105-0023,JP,
|
||||
GOLDEN FORTUNE SHIPHOLDING S.A.,,,,,Monaco,0,MC,
|
||||
SU NAVIGATION PTE. LTD,,,,"200 CANTONMENT ROAD, #14-04 SOUTHPOINT,",Singapore,089763,SG,
|
||||
PEREZ Y CIA MADRID,,,,FORTUNY 9,Madrid,28010,ES,
|
||||
INTER QUIMICA S.A.,,,,"AV. SAN MARTIN 209, EDIF. JARABA IMPORT, SANTO DOMINGO, DOMINICAN REPUBLIC",Santo Domingo,0,DO,
|
||||
"B&M AGENCIA MARITIMA S.A. ",,,,AVENIDA ANDALICAN 881,Mejillones,NULL,CL,
|
||||
PROQUIGEL QUIMICA S.A.,,,,"FAZENDA CAROBA S/N, 43.813-300 CANDEIAS, BA, BRASIL",Candeias,43.813-300,BR,
|
||||
SUMITOMO CORPORATION MANILA,,,,,Manila,0,PH,
|
||||
TIOXIDE (MALAYSIA) SDN BHD,,,,"KAWASAN INDUSTRI TELUK KALUNG, P.O. BOX 29, KEMAMA 24007, TERENGGANU, MALAYSIA",Terengganu,24007,MY,
|
||||
DAITOH TRADING CO. LTD.,,,,,Monaco,0,MC,
|
||||
TBD Customer,,,,TBD,Monaco,TBD,MC,
|
||||
BETTY MILD MARITIME S.A,,,,,Monaco,0,MC,
|
||||
IINO KAIUN KAISHA LTD OF TOKYO.,,,,"IINO BUILDING, 2-1-1 UCHISAIWAICHO, CHIYODA-KU",Tokyo,100-0011,JP,
|
||||
IINO SINGAPORE PTE. LTD,,,,168 ROBINSON ROAD #13-02 CAPITAL TOWER,Singapore,068912,SG,
|
||||
"JINLONG COPPER CO., LTD",,,,"1, Jinshan West Road Tongling Anhui",,244021,CN,
|
||||
MOSAIC FERTILIZANTES BRAZIL,,,,"AV. ROQUE PETRONI JUNIOR 999, 14° ANDAR BROOKLYN, CEP 04707-910 SAO PAULO, BRAZIL",Sao Paulo,CEP 04707-910,BR,
|
||||
|
@@ -0,0 +1,2 @@
|
||||
reference,party_code,currency_code,purchase_date,warehouse_code,payment_term,invoice_method,description,comment,state,line_type,line_product_code,line_quantity,line_unit_price,line_description,line_delivery_date
|
||||
25026ABC,2869,USD,11/18/2025,,NET 30,order,Office Supplies Order,Urgent delivery required,draft,line,H2SO4,17000,100,Premium Notebooks,1/29/2026
|
||||
|
@@ -0,0 +1,78 @@
|
||||
number,reference,party_name,currency_code,purchase_date,payment_term,warehouse_code,wb,tol_min,tol_max,from_location_name,to_location_name,incoterm_name,invoice_method,description,comment,line_type,line_product_code,line_quantity,line_unit_code,line_unit_price,line_description,line_from_del,line_to_del,pricing_trigger,pricing_estimated_date
|
||||
1849,24976,ATLANTIC COPPER ,USD,11/1/2024,NET 30,,1,5,5,Huelva,Tampa,FOB,manual,,1849 / H2SO4 FY 2025 / Default,line,H2SO4,19000,Mt,95,19000 Mt of sulphuric acid - Tel-quel,4/15/2025,4/21/2025,bldate,4/15/2025
|
||||
2050,25023,ATLANTIC COPPER ,USD,10/3/2025,NET 30,,1,5,5,Huelva,Huelva,CFR,manual,,2050 / H2SO4 FY 2025 / Default,line,H2SO4,19000,Mt,84.5,19000 Mt of sulphuric acid - Tel-quel,1/13/2026,1/19/2026,bldate,1/13/2026
|
||||
1925,24988,BOLIDEN HARJAVALTA OY,USD,12/20/2024,NET 30,,1,5.26,5.26,Pori,,FOB,manual,,1925 / H2SO4 FY 2025 / Default,line,H2SO4,19000,Mt,84.57,19000 Mt of sulphuric acid - 100%,6/25/2025,7/4/2025,bldate,6/25/2025
|
||||
1926,24997AB,BOLIDEN HARJAVALTA OY,USD,12/20/2024,NET 30,,1,5.26,5.26,Ronnskar,Beaumont,FOB,manual,,1926 / H2SO4 FY 2025 / Default,line,H2SO4,19000,Mt,88.91,19000 Mt of sulphuric acid - Tel-quel,8/22/2025,9/4/2025,bldate,9/3/2025
|
||||
1928,25011ABCDE,BOLIDEN HARJAVALTA OY,USD,12/20/2024,NET 30,,1,5.26,5.26,Pori,Mejillones,FOB,manual,,1928 / H2SO4 FY 2025 / Chile FY25,line,H2SO4,19000,Mt,63.49,19000 Mt of sulphuric acid - Tel-quel,11/16/2025,11/25/2025,bldate,11/16/2025
|
||||
1929,25005,BOLIDEN HARJAVALTA OY,USD,12/20/2024,NET 30,,1,3.9,3.9,Ronnskar,Beaumont,FOB,manual,,1929 / H2SO4 FY 2025 / Default,line,H2SO4,19250,Mt,74.56,19250 Mt of sulphuric acid - Tel-quel,10/11/2025,10/20/2025,bldate,10/11/2025
|
||||
2039,25015,BOLIDEN HARJAVALTA OY,USD,9/26/2025,NET 30,,1,5.26,5.26,Ronnskar,Beaumont,FOB,manual,,2039 / H2SO4 FY 2025 / Default,line,H2SO4,19000,Mt,65.83,19000 Mt of sulphuric acid - 100%,11/25/2025,12/4/2025,bldate,11/25/2025
|
||||
1957,24981,HEXAGON GROUP AG,USD,3/21/2025,NET 30,,1,22.33,22.33,Rugao,Mejillones,CFR,manual,,1957 / H2SO4 FY 2025 / Chile FY25,line,H2SO4,12875,Mt,156,12875 Mt of sulphuric acid - Tel-quel,4/28/2025,5/7/2025,bldate,5/1/2025
|
||||
2047,25014,HEXAGON GROUP AG,USD,9/26/2025,NET 30,,1,5,5,Huelva,Tampa,CFR,manual,,2047 / H2SO4 FY 2025 / Default,line,H2SO4,19000,Mt,105.9,19000 Mt of sulphuric acid - Tel-quel,11/15/2025,11/20/2025,bldate,11/15/2025
|
||||
2057,25012,HEXAGON GROUP AG,USD,10/27/2025,NET 30,,1,0,0,Antwerpen,Mejillones,CFR,manual,,2057 / H2SO4 FY 2025 / Chile FY25,line,H2SO4,6000,Mt,159.59,6000 Mt of sulphuric acid - Tel-quel,11/10/2025,11/20/2025,bldate,11/10/2025
|
||||
2038,STOLT COA MTM,INTERACID TRADING (CHILE) S.A.,USD,9/26/2025,NET 30,,1,0,0,,,,manual,,2038 / H2SO4 FY 2025 / Chile FY25,line,H2SO4,0.01,Mt,0,0.01 Mt of sulphuric acid - Tel-quel,3/1/2026,3/31/2026,bldate,3/1/2026
|
||||
1876,24971AB,JINCHUAN,USD,12/5/2024,NET 30,,1,10,10,Fangcheng,Mejillones,FOB,manual,,1876 / H2SO4 FY 2025 / Chile FY25,line,H2SO4,30000,Mt,40,30000 Mt of sulphuric acid - Tel-quel,4/13/2025,5/2/2025,bldate,4/13/2025
|
||||
1877,24986ABC,JINCHUAN,USD,12/5/2024,NET 30,,1,10,10,Fangcheng,Mejillones,FOB,manual,,1877 / H2SO4 FY 2025 / Chile FY25,line,H2SO4,30000,Mt,40,30000 Mt of sulphuric acid - Tel-quel,7/5/2025,7/16/2025,bldate,7/5/2025
|
||||
1878,25020ABCD,JINCHUAN,USD,12/5/2024,NET 30,,1,0,0,Fangcheng,Mejillones,FOB,manual,,1878 / H2SO4 FY 2025 / Chile FY25,line,H2SO4,27000,Mt,40,27000 Mt of sulphuric acid - Tel-quel,12/21/2025,1/4/2026,bldate,1/5/2026
|
||||
1904,24990AB,JINCHUAN,USD,12/16/2024,NET 30,,1,10,10,Fangcheng,Mejillones,FOB,manual,,1904 / H2SO4 FY 2025 / Chile FY25,line,H2SO4,30000,Mt,35,30000 Mt of sulphuric acid - Tel-quel,9/10/2025,9/30/2025,bldate,9/10/2025
|
||||
1956,25002,JINCHUAN,USD,3/19/2025,NET 30,,1,10,10,Fangcheng,Tuticorin,FOB,manual,,1956 / H2SO4 FY 2025 / Default,line,H2SO4,30000,Mt,35,30000 Mt of sulphuric acid - Tel-quel,10/10/2025,10/30/2025,bldate,10/26/2025
|
||||
2151,25020E,JINCHUAN,USD,1/5/2026,NET 30,,1,33.36,33.36,Fangcheng,Mejillones,FOB,manual,,2151 / H2SO4 FY 2025 / Chile FY25,line,H2SO4,4496.46,Mt,160,4496.46 Mt of sulphuric acid - Tel-quel,12/21/2025,1/4/2026,bldate,1/5/2026
|
||||
2157,25020D,JINCHUAN,USD,1/7/2026,NET 30,,1,0,0,Fangcheng,Mejillones,FOB,manual,,2157 / H2SO4 FY 2025 / Chile FY25,line,H2SO4,3000,Mt,160,3000 Mt of sulphuric acid - Tel-quel,12/21/2025,1/4/2026,bldate,1/5/2026
|
||||
1912,24975,"JINLONG COPPER CO., LTD",USD,12/17/2024,NET 30,,1,10,10,Rugao,Isabel,FOB,manual,,1912 / H2SO4 FY 2025 / Default,line,H2SO4,10000,Mt,65.63,10000 Mt of sulphuric acid - Tel-quel,4/1/2025,4/15/2025,bldate,4/7/2025
|
||||
1913,24987,"JINLONG COPPER CO., LTD",USD,12/17/2024,NET 30,,1,0,10,YIZHENG,Gladstone,FOB,manual,,1913 / H2SO4 FY 2025 / Default,line,H2SO4,10000,Mt,73.5,10000 Mt of sulphuric acid - Tel-quel,6/8/2025,6/18/2025,bldate,6/8/2025
|
||||
1914,24992,"JINLONG COPPER CO., LTD",USD,12/17/2024,NET 30,,1,10,10,Rugao,Tuticorin,FOB,manual,,1914 / H2SO4 FY 2025 / Default,line,H2SO4,10000,Mt,86.5,10000 Mt of sulphuric acid - Tel-quel,7/18/2025,7/30/2025,bldate,7/18/2025
|
||||
1915,24992,"JINLONG COPPER CO., LTD",USD,12/17/2024,NET 30,,1,10,10,Rugao,Tuticorin,FOB,manual,,1915 / H2SO4 FY 2025 / Default,line,H2SO4,10000,Mt,91.19,10000 Mt of sulphuric acid - Tel-quel,7/18/2025,7/30/2025,bldate,7/18/2025
|
||||
1916,25006,"JINLONG COPPER CO., LTD",USD,12/17/2024,NET 30,,1,5,5,Rugao,Isabel,FOB,manual,,1916 / H2SO4 FY 2025 / Default,line,H2SO4,10000,Mt,91.81,10000 Mt of sulphuric acid - Tel-quel,10/1/2025,10/15/2025,bldate,10/1/2025
|
||||
1917,25022A,"JINLONG COPPER CO., LTD",USD,12/17/2024,NET 30,,1,22.58,22.58,YIZHENG,Visakhapatnam,FOB,manual,,1917 / H2SO4 FY 2025 / Default,line,H2SO4,7750,Mt,77.31,7750 Mt of sulphuric acid - Tel-quel,12/29/2025,1/5/2026,bldate,12/29/2025
|
||||
2034,25022B,"JINLONG COPPER CO., LTD",USD,9/5/2025,NET 30,,1,10,10,YIZHENG,Kakinada,FOB,manual,,2034 / H2SO4 FY 2025 / Default,line,H2SO4,9500,Mt,114.29,9500 Mt of sulphuric acid - Tel-quel,12/29/2025,1/5/2026,bldate,12/29/2025
|
||||
2153,25022C,"JINLONG COPPER CO., LTD",USD,1/5/2026,NET 30,,1,4.76,4.76,YIZHENG,Kakinada,FOB,manual,,2153 / H2SO4 FY 2025 / Default,line,H2SO4,3674.89,Mt,77.31,3674.89 Mt of sulphuric acid - Tel-quel,12/29/2025,1/5/2026,bldate,12/29/2025
|
||||
1866,24977,NEXA RESOURCES CAJAMARQUILLA S.A.,USD,12/2/2024,NET 30,,1,5.08,5.08,Callao,Mejillones,CIF,manual,,1866 / H2SO4 FY 2025 / Chile FY25,line,H2SO4,14750,Mt,148.17,14750 Mt of sulphuric acid - Tel-quel,4/18/2025,4/28/2025,bldate,4/18/2025
|
||||
1867,24991,NEXA RESOURCES CAJAMARQUILLA S.A.,USD,12/2/2024,NET 30,,1,5.08,5.08,Callao,Mejillones,CIF,manual,,1867 / H2SO4 FY 2025 / Chile FY25,line,H2SO4,14750,Mt,155,14750 Mt of sulphuric acid - Tel-quel,7/1/2025,7/10/2025,bldate,7/1/2025
|
||||
1868,25000,NEXA RESOURCES CAJAMARQUILLA S.A.,USD,12/2/2024,NET 30,,1,5.08,5.08,Callao,Mejillones,CIF,manual,,1868 / H2SO4 FY 2025 / Chile FY25,line,H2SO4,14750,Mt,155,14750 Mt of sulphuric acid - Tel-quel,8/20/2025,8/30/2025,bldate,8/20/2025
|
||||
1869,25016,NEXA RESOURCES CAJAMARQUILLA S.A.,USD,12/2/2024,NET 30,,1,5.08,5.08,Callao,Mejillones,CIP,manual,,1869 / H2SO4 FY 2025 / Chile FY25,line,H2SO4,14750,Mt,155,14750 Mt of sulphuric acid - Tel-quel,12/3/2025,12/13/2025,bldate,12/3/2025
|
||||
1962,24973B,NEXA RESOURCES CAJAMARQUILLA S.A.,USD,4/2/2025,NET 30,,1,0,0,Callao,Mejillones,CIF,manual,,1962 / H2SO4 FY 2025 / Chile FY25,line,H2SO4,20,Mt,155,20 Mt of sulphuric acid - Tel-quel,3/20/2025,4/4/2025,bldate,4/3/2025
|
||||
2085,25026ABC,NEXA RESOURCES CAJAMARQUILLA S.A.,USD,11/18/2025,NET 30,,1,5,5,Callao,Mejillones,CIF,manual,,2085 / H2SO4 FY 2025 / Chile FY25,line,H2SO4,17000,Mt,171,17000 Mt of sulphuric acid - Tel-quel,1/29/2026,2/9/2026,bldate,1/29/2026
|
||||
2084,25019A,"PEIFENG TECHNOLOGY & FERTILIZERS CO., LTD.",USD,11/17/2025,NET 30,,1,10,10,Taichung,Gladstone,FOB,manual,,2084 / H2SO4 FY 2025 / Default,line,H2SO4,7500,Mt,101,7500 Mt of sulphuric acid - Tel-quel,12/24/2025,1/2/2026,bldate,12/24/2025
|
||||
2155,25019B,"PEIFENG TECHNOLOGY & FERTILIZERS CO., LTD.",USD,1/6/2026,NET 30,,1,10,10,Taichung,Gladstone,FOB,manual,,2155 / H2SO4 FY 2025 / Default,line,H2SO4,2500,Mt,101,2500 Mt of sulphuric acid - Tel-quel,12/24/2025,1/2/2026,bldate,12/24/2025
|
||||
2028,25004,SAS International LLC,USD,8/29/2025,NET 30,,1,5,5,Rugao,Mejillones,CFR,manual,,2028 / H2SO4 FY 2025 / Chile FY25,line,H2SO4,7000,Mt,150,7000 Mt of sulphuric acid - Tel-quel,10/5/2025,10/14/2025,bldate,10/5/2025
|
||||
2060,25017,SAS International LLC,USD,10/31/2025,NET 30,,1,5.26,5.26,Pori,Mejillones,CFR,manual,,2060 / H2SO4 FY 2025 / Chile FY25,line,H2SO4,9500,Mt,165,9500 Mt of sulphuric acid - Tel-quel,12/15/2025,12/31/2025,bldate,12/15/2025
|
||||
2130,25027,SAS International LLC,USD,12/22/2025,NET 30,,1,5,5,Ilo,Mejillones,CFR,manual,,2130 / H2SO4 FY 2025 / Chile FY25,line,H2SO4,13500,Mt,184,13500 Mt of sulphuric acid - Tel-quel,2/15/2026,2/26/2026,bldate,2/15/2026
|
||||
2093,Humon 2026 contract 1/4,"Shandong Xiangying Chemical Import and Export CO.,LTD.",USD,12/1/2025,NET 30,,1,10,10,Laizhou,,FOB,manual,,2093 / H2SO4 FY 2025 / Chile FY25,line,H2SO4,32837.82,Mt,0,32837.8 Mt of sulphuric acid - Tel-quel,2/1/2026,2/28/2026,bldate,2/1/2026
|
||||
1761,24972A,SUMITOMO CORPORATION - MMC,USD,5/15/2024,NET 30,,1,0,0,Onahama,Map Ta Phut,FOB,manual,,1761 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4,5000,Mt,33.36,5000 Mt of sulphuric acid - Tel-quel,3/20/2025,4/2/2025,bldate,4/2/2025
|
||||
1949,24972B,SUMITOMO CORPORATION - MMC,USD,2/26/2025,NET 30,,1,0,0,Onahama,Samut Prakan,FOB,manual,,1949 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4,2700,Mt,33.36,2700 Mt of sulphuric acid - Tel-quel,3/20/2025,4/2/2025,bldate,4/2/2025
|
||||
1970,24982/B,SUMITOMO CORPORATION - MMC,USD,4/17/2025,NET 30,,1,0,0,Onahama,Samut Prakan,FOB,manual,,1970 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4,2350,Mt,45.25,2350 Mt of sulphuric acid - Tel-quel,5/9/2025,5/16/2025,bldate,5/9/2025
|
||||
1972,24982/A,SUMITOMO CORPORATION - MMC,USD,4/17/2025,NET 30,,1,0,0,Onahama,Map Ta Phut,FOB,manual,,1972 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4,5000,Mt,45.25,5000 Mt of sulphuric acid - Tel-quel,5/9/2025,5/16/2025,bldate,5/9/2025
|
||||
1989,24998,SUMITOMO CORPORATION - MMC,USD,7/2/2025,NET 30,,1,0,10,Onahama,LHOKSEUMAWE,FOB,manual,,1989 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4,7000,Mt,64,7000 Mt of sulphuric acid - Tel-quel,8/21/2025,8/30/2025,bldate,8/21/2025
|
||||
1991,25007A,SUMITOMO CORPORATION - MMC,USD,7/2/2025,NET 30,,1,0,0,Naoshima,Map Ta Phut,FOB,manual,,1991 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4,5000,Mt,51,5000 Mt of sulphuric acid - Tel-quel,10/1/2025,10/7/2025,bldate,10/1/2025
|
||||
1993,25013A,SUMITOMO CORPORATION - MMC,USD,7/2/2025,NET 30,,1,0,0,Naoshima,Map Ta Phut,FOB,manual,,1993 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4,5000,Mt,55.2,5000 Mt of sulphuric acid - Tel-quel,11/21/2025,12/3/2025,bldate,11/21/2025
|
||||
1995,25024A,SUMITOMO CORPORATION - MMC,USD,7/2/2025,NET 30,,1,0,0,Naoshima,Map Ta Phut,FOB,manual,,1995 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4,5000,Mt,92,5000 Mt of sulphuric acid - Tel-quel,1/15/2026,1/24/2026,bldate,1/15/2026
|
||||
2043,25007B,SUMITOMO CORPORATION - MMC,USD,9/26/2025,NET 30,,1,20,20,Naoshima,Samut Prakan,FOB,manual,,2043 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4,2000,Mt,51,2000 Mt of sulphuric acid - Tel-quel,10/1/2025,10/7/2025,bldate,10/1/2025
|
||||
2044,25007C,SUMITOMO CORPORATION - MMC,USD,9/26/2025,NET 30,,1,0,0,Naoshima,Samut Prakan,FOB,manual,,2044 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4,1000,Mt,51,1000 Mt of sulphuric acid - Tel-quel,10/1/2025,10/7/2025,bldate,10/1/2025
|
||||
2053,25018,SUMITOMO CORPORATION - MMC,USD,10/16/2025,NET 30,,1,10,10,Naoshima,Stockton,FOB,manual,,2053 / H2SO4 FY 2025 / Default,line,H2SO4,19000,Mt,63,19000 Mt of sulphuric acid - Tel-quel,12/15/2025,12/24/2025,bldate,12/20/2025
|
||||
2058,25013B,SUMITOMO CORPORATION - MMC,USD,10/29/2025,NET 30,,1,0,0,Naoshima,Samut Prakan,FOB,manual,,2058 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4,3600,Mt,55.2,3600 Mt of sulphuric acid - Tel-quel,11/21/2025,12/3/2025,bldate,11/21/2025
|
||||
2097,25024B,SUMITOMO CORPORATION - MMC,USD,12/3/2025,NET 30,,1,0,0,Naoshima,Samut Prakan,FOB,manual,,2097 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4,1899.67,Mt,55.2,1899.67 Mt of sulphuric acid - Tel-quel,1/15/2026,1/24/2026,bldate,1/15/2026
|
||||
2105,25024C,SUMITOMO CORPORATION - MMC,USD,12/4/2025,NET 30,,1,0,0,Naoshima,Samut Prakan,FOB,manual,,2105 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4,3600,Mt,45,3600 Mt of sulphuric acid - Tel-quel,1/15/2026,1/24/2026,bldate,1/15/2026
|
||||
2107,MMC FEB LXML ,SUMITOMO CORPORATION - MMC,USD,12/5/2025,NET 30,,1,0,0,,Samut Prakan,FOB,manual,,2107 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4,2000,Mt,0,2000 Mt of sulphuric acid - Tel-quel,2/1/2026,2/28/2026,bldate,2/1/2026
|
||||
2109,MMC MAR LXML ,SUMITOMO CORPORATION - MMC,USD,12/5/2025,NET 30,,1,0,0,,Samut Prakan,FOB,manual,,2109 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4,1700,Mt,0,1700 Mt of sulphuric acid - Tel-quel,3/1/2026,3/31/2026,bldate,3/1/2026
|
||||
2131,MMC - Feb shipment ,SUMITOMO CORPORATION - MMC,USD,12/29/2025,NET 30,,1,0,0,,Map Ta Phut,FOB,manual,,2131 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4,3500,Mt,0,3500 Mt of sulphuric acid - Tel-quel,2/10/2026,2/28/2026,bldate,2/10/2026
|
||||
1881,24970,SUMITOMO CORPORATION - PPC,USD,12/5/2024,NET 30,,1,10,10,Saganoseki,Stockton,FOB,manual,,1881 / H2SO4 FY 2025 / Default,line,H2SO4,20000,Mt,37,20000 Mt of sulphuric acid - Tel-quel,4/1/2025,4/15/2025,bldate,4/1/2025
|
||||
1883,24979,SUMITOMO CORPORATION - PPC,USD,12/5/2024,NET 30,,1,10,10,Saganoseki,Stockton,FOB,manual,,1883 / H2SO4 FY 2025 / Default,line,H2SO4,20000,Mt,37,20000 Mt of sulphuric acid - Tel-quel,5/18/2025,5/31/2025,bldate,5/18/2025
|
||||
1885,24985,SUMITOMO CORPORATION - PPC,USD,12/5/2024,NET 30,,1,10,10,Saganoseki,Stockton,FOB,manual,,1885 / H2SO4 FY 2025 / Default,line,H2SO4,20000,Mt,37,20000 Mt of sulphuric acid - Tel-quel,6/15/2025,6/30/2025,bldate,6/17/2025
|
||||
1887,24996,SUMITOMO CORPORATION - PPC,USD,12/5/2024,NET 30,,1,10,10,Hibi,Stockton,FOB,manual,,1887 / H2SO4 FY 2025 / Default,line,H2SO4,20000,Mt,34,20000 Mt of sulphuric acid - Tel-quel,8/15/2025,8/29/2025,bldate,8/15/2025
|
||||
1889,24989,SUMITOMO CORPORATION - PPC,USD,12/5/2024,NET 30,,1,10,10,Hibi,Stockton,FOB,manual,,1889 / H2SO4 FY 2025 / Default,line,H2SO4,20000,Mt,34,20000 Mt of sulphuric acid - Tel-quel,7/12/2025,7/25/2025,bldate,7/12/2025
|
||||
1891,24999,SUMITOMO CORPORATION - PPC,USD,12/5/2024,NET 30,,1,10,10,Hibi,Stockton,FOB,manual,,1891 / H2SO4 FY 2025 / Default,line,H2SO4,20000,Mt,34,20000 Mt of sulphuric acid - Tel-quel,9/7/2025,9/22/2025,bldate,9/7/2025
|
||||
1893,25003,SUMITOMO CORPORATION - PPC,USD,12/5/2024,NET 30,,1,10,10,Saganoseki,Stockton,FOB,manual,,1893 / H2SO4 FY 2025 / Default,line,H2SO4,20000,Mt,72,20000 Mt of sulphuric acid - Tel-quel,10/1/2025,10/15/2025,bldate,10/1/2025
|
||||
1895,25008,SUMITOMO CORPORATION - PPC,USD,12/5/2024,NET 30,,1,10,10,Hibi,Stockton,FOB,manual,,1895 / H2SO4 FY 2025 / Default,line,H2SO4,20000,Mt,64,20000 Mt of sulphuric acid - Tel-quel,11/1/2025,11/30/2025,bldate,11/1/2025
|
||||
1933,24984,SUMITOMO CORPORATION - PPC,USD,1/6/2025,NET 30,,1,10,10,Saganoseki,Tuticorin,FOB,manual,,1933 / H2SO4 FY 2025 / Default,line,H2SO4,20000,Mt,42,20000 Mt of sulphuric acid - Tel-quel,6/9/2025,6/19/2025,bldate,6/12/2025
|
||||
2122,25021,SUMITOMO CORPORATION - PPC,USD,12/10/2025,NET 30,,1,10,10,Saganoseki,Stockton,FOB,manual,,2122 / H2SO4 FY 2025 / Default,line,H2SO4,20000,Mt,71,20000 Mt of sulphuric acid - Tel-quel,1/7/2026,1/21/2026,bldate,1/7/2026
|
||||
2124,25025,SUMITOMO CORPORATION - PPC,USD,12/18/2025,NET 30,,1,10,10,Saganoseki,Stockton,FOB,manual,,2124 / H2SO4 FY 2025 / Default,line,H2SO4,20000,Mt,71,20000 Mt of sulphuric acid - Tel-quel,1/30/2026,2/13/2026,bldate,1/30/2026
|
||||
2126,PPC 2026 Stockton contract 03/12,SUMITOMO CORPORATION - PPC,USD,12/18/2025,NET 30,,1,10,10,Saganoseki,Stockton,FOB,manual,,2126 / H2SO4 FY 2025 / Default,line,H2SO4,20000,Mt,0,20000 Mt of sulphuric acid - Tel-quel,3/1/2026,3/31/2026,bldate,3/1/2026
|
||||
1919,24980ABC,TONGLING NONFERROUS METALS GROUP CO. LTD,USD,12/17/2024,NET 30,,1,10,10,YIZHENG,Mejillones,FOB,manual,,1919 / H2SO4 FY 2025 / Chile FY25,line,H2SO4,30000,Mt,73.63,30000 Mt of sulphuric acid - Tel-quel,5/30/2025,6/15/2025,bldate,6/2/2025
|
||||
1920,24994,TONGLING NONFERROUS METALS GROUP CO. LTD,USD,12/17/2024,NET 30,,1,10,10,YIZHENG,Tuticorin,FOB,manual,,1920 / H2SO4 FY 2025 / Default,line,H2SO4,20000,Mt,92.94,20000 Mt of sulphuric acid - Tel-quel,8/16/2025,8/28/2025,bldate,8/16/2025
|
||||
1921,25001ABCD,TONGLING NONFERROUS METALS GROUP CO. LTD,USD,12/17/2024,NET 30,,1,10,10,Rugao,Mejillones,FOB,manual,,1921 / H2SO4 FY 2025 / Chile FY25,line,H2SO4,30000,Mt,73.125,30000 Mt of sulphuric acid - Tel-quel,10/1/2025,10/20/2025,bldate,10/1/2025
|
||||
1937,24983,TONGLING NONFERROUS METALS GROUP CO. LTD,USD,1/10/2025,NET 30,,1,3.3,3.3,YIZHENG,,FOB,manual,,1937 / H2SO4 FY 2025 / Default,line,H2SO4,11375,Mt,72.19,11375 Mt of sulphuric acid - Tel-quel,5/15/2025,5/25/2025,bldate,5/15/2025
|
||||
1987,25009,TONGLING NONFERROUS METALS GROUP CO. LTD,USD,6/24/2025,NET 30,,1,10,10,Rugao,LHOKSEUMAWE,FOB,manual,,1987 / H2SO4 FY 2025 / Default,line,H2SO4,8000,Mt,77.31,8000 Mt of sulphuric acid - Tel-quel,11/5/2025,11/11/2025,bldate,11/5/2025
|
||||
1944,24974AB,"TRICON ENERGY, LTD",USD,1/31/2025,NET 30,,1,10.71,10.71,Ilo,Mejillones,CFR,manual,,1944 / H2SO4 FY 2025 / Chile FY25,line,H2SO4,14000,Mt,145.75,14000 Mt of sulphuric acid - Tel-quel,5/5/2025,5/11/2025,bldate,5/5/2025
|
||||
1959,24978,"TRICON ENERGY, LTD",USD,3/28/2025,NET 30,,1,10,10,Bandirma,Beaumont,CFR,manual,,1959 / H2SO4 FY 2025 / Default,line,H2SO4,19000,Mt,138.9,19000 Mt of sulphuric acid - Tel-quel,4/28/2025,5/8/2025,bldate,5/1/2025
|
||||
1986,24993,"TRICON ENERGY, LTD",USD,6/19/2025,NET 30,,1,10,10,Onsan,Mejillones,CFR,manual,,1986 / H2SO4 FY 2025 / Chile FY25,line,H2SO4,5500,Mt,169,5500 Mt of sulphuric acid - Tel-quel,8/7/2025,8/15/2025,bldate,8/7/2025
|
||||
2001,24995,"TRICON ENERGY, LTD",USD,7/11/2025,NET 30,,1,22.22,22.22,Zhangjiagang,Mejillones,CFR,manual,,2001 / H2SO4 FY 2025 / Chile FY25,line,H2SO4,4500,Mt,171,4500 Mt of sulphuric acid - Tel-quel,8/19/2025,8/26/2025,bldate,8/19/2025
|
||||
2040,25010,"TRICON ENERGY, LTD",USD,9/26/2025,NET 30,,1,5,5,Bandirma,Tampa,CFR,manual,,2040 / H2SO4 FY 2025 / Default,line,H2SO4,19000,Mt,102,19000 Mt of sulphuric acid - Tel-quel,10/15/2025,10/25/2025,bldate,10/15/2025
|
||||
|
@@ -0,0 +1,3 @@
|
||||
source_id,source_line_id,number,reference,party_name,currency_code,purchase_date,payment_term,warehouse_code,weight_basis,tol_min,tol_max,from_location_name,to_location_name,incoterm_name,invoice_method,description,comment,line_type,line_product_code,line_quantity,line_unit_code,line_price,line_description,line_from_del,line_to_del,pricing_trigger,pricing_estimated_date
|
||||
C00E4A1E-4743-4C73-8F92-FEA123D2DCCE,C00E4A1E-4743-4C73-8F92-FEA123D2DCCE,1761,24972A,SUMITOMO CORPORATION - MMC,USD,5/15/2024,NET 30,,NCSW,0,0,Onahama,Map Ta Phut,FOB,manual,,1761 / H2SO4 FY 2025 / Thailand FY25,line,H2SO4,5000,Mt,33.36,5000 Mt of sulphuric acid - Tel-quel,3/20/2025,4/2/2025,bldate,4/2/2025
|
||||
A65BA960-E7EC-44FD-A945-8554C8D9B89B,A65BA960-E7EC-44FD-A945-8554C8D9B89B,1849,24976,ATLANTIC COPPER ,USD,11/1/2024,NET 30,,NCSW,5,5,Huelva,Tampa,FOB,manual,,1849 / H2SO4 FY 2025 / Default,line,H2SO4,19000,Mt,95,19000 Mt of sulphuric acid - Tel-quel,4/15/2025,4/21/2025,bldate,4/15/2025
|
||||
|
@@ -0,0 +1,6 @@
|
||||
contract_number,contract_ref,line_sequence,product,supplier,currency,p_r,mode,price,unit
|
||||
1881,24970,1,Maritime Freight,MOL CHEMICAL TANKERS PTE. LTD. OF SINGAPORE,USD,PAY,Per qt,74.5,MT
|
||||
1881,24970,1,Profit sharing,SUMITOMO CORPORATION - PPC,USD,PAY,Per qt,0.5,MT
|
||||
1881,24970,1,BAF,MOL CHEMICAL TANKERS PTE. LTD. OF SINGAPORE,USD,REC,Per qt,2.43,MT
|
||||
1881,24970,1,Finance,TBD Supplier,USD,PAY,Per qt,0.25,MT
|
||||
1881,24970,1,P&I charterer's liability,FILHET - ALLARD MARITIME,USD,PAY,Per qt,0.06,MT
|
||||
|
@@ -0,0 +1,3 @@
|
||||
number,reference,party_name,currency_code,sale_date,payment_term,warehouse_code,wb,tol_min,tol_max,from_location_name,to_location_name,incoterm_name,invoice_method,description,comment,line_type,line_product_code,line_quantity,line_unit_code,line_unit_price,line_description,line_from_del,line_to_del,pricing_trigger,pricing_estimated_date
|
||||
1882,24970,"INTERACID NORTH AMERICA, INC (Stockton)",USD,12/5/2024,NET 30,,NCSW,10,10,Saganoseki,Stockton,CFR,manual,,1882 / H2SO4 FY 2025 / Default,line,H2SO4,20000,Mt,112.07,20000 Mt of sulphuric acid - Tel-quel,4/1/2025,4/15/2025,bldate,4/1/2025
|
||||
1955,24975,"Agrifields DMCC, Dubai",USD,3/19/2025,NET 30,,NCSW,10,10,Rugao,Isabel,CIF,manual,,1955 / H2SO4 FY 2025 / Default,line,H2SO4,10000,Mt,98.5,10000 Mt of sulphuric acid - Tel-quel,4/1/2025,4/15/2025,bldate,4/7/2025
|
||||
|
@@ -0,0 +1,3 @@
|
||||
source_id,source_line_id,number,reference,party_name,currency_code,sale_date,payment_term,warehouse_code,weight_basis,tol_min,tol_max,from_location_name,to_location_name,incoterm_name,invoice_method,description,comment,line_type,line_product_code,line_quantity,line_unit_code,line_price,line_description,line_from_del,line_to_del,pricing_trigger,pricing_estimated_date
|
||||
8A8829E9-FDFC-4313-A2ED-2247AE21DF9E,8A8829E9-FDFC-4313-A2ED-2247AE21DF9E,1882,24970,"INTERACID NORTH AMERICA, INC (Stockton)",USD,12/5/2024,NET 30,,NCSW,10,10,Saganoseki,Stockton,CFR,manual,,1882 / H2SO4 FY 2025 / Default,line,H2SO4,20000,Mt,112.07,20000 Mt of sulphuric acid - Tel-quel,4/1/2025,4/15/2025,bldate,4/1/2025
|
||||
31E490C4-9FDC-49FA-8FC6-BE2492F40E8F,31E490C4-9FDC-49FA-8FC6-BE2492F40E8F,1955,24975,"Agrifields DMCC, Dubai",USD,3/19/2025,NET 30,,NCSW,10,10,Rugao,Isabel,CIF,manual,,1955 / H2SO4 FY 2025 / Default,line,H2SO4,10000,Mt,98.5,10000 Mt of sulphuric acid - Tel-quel,4/1/2025,4/15/2025,bldate,4/7/2025
|
||||
|
41
Reference Data/python_project/loaders/backup/Services.csv
Normal file
41
Reference Data/python_project/loaders/backup/Services.csv
Normal file
@@ -0,0 +1,41 @@
|
||||
code,name,category,uom,sale_price,cost_price,description
|
||||
SWAP,SWAP,SERVICES,Mt,0,0,
|
||||
Time Charter Hire,Time Charter Hire,SERVICES,Mt,0,0,
|
||||
Paper allocation,Paper allocation,SERVICES,Mt,0,0,
|
||||
FX Difference,FX Difference,SERVICES,Mt,0,0,
|
||||
Freight Commission,Freight Commission,SERVICES,Mt,0,0,
|
||||
CFD,CFD,SERVICES,Mt,0,0,
|
||||
Deal Expense,Invoice Validation Deal Expense,SERVICES,Mt,0,0,
|
||||
Spot Owner Demurrage,Spot Owner Demurrage,SERVICES,Mt,0,0,
|
||||
Product Commission,Commission,SERVICES,Mt,0,0,
|
||||
Claim Amount,Claim Amount,SERVICES,Mt,0,0,
|
||||
Import Tax,Import Tax,SERVICES,Mt,0,0,
|
||||
Freight,Freight,SERVICES,Mt,0,0,
|
||||
Other,Other,SERVICES,Mt,0,0,
|
||||
Port Costs,Port Costs,SERVICES,Mt,0,0,
|
||||
Demurrage,Demurrage,SERVICES,Mt,0,0,
|
||||
Finance,Finance,SERVICES,Mt,0,0,
|
||||
Hedging,Hedging,SERVICES,Mt,0,0,
|
||||
Agency Fees,Agency Fees,SERVICES,Mt,0,0,
|
||||
Bunker Costs,Bunker Costs,SERVICES,Mt,0,0,
|
||||
EU ETS,EU ETS,SERVICES,Mt,0,0,
|
||||
Inspection,Inspection,SERVICES,Mt,0,0,
|
||||
Banking Charges,Banking Charges,SERVICES,Mt,0,0,
|
||||
Insurance,Insurance,SERVICES,Mt,0,0,
|
||||
Broker Fees,Broker Fees,SERVICES,Mt,0,0,
|
||||
Commision,Commision,SERVICES,Mt,0,0,
|
||||
LC Fees,LC Fees,SERVICES,Mt,0,0,
|
||||
Address Commission,Address Commission,SERVICES,Mt,0,0,
|
||||
Additional berth,Additional berth,SERVICES,Mt,0,0,
|
||||
P&I charterer's liability,P&I charterer's liability,SERVICES,Mt,0,0,
|
||||
BAF,Bunker adjustment factor,SERVICES,Mt,0,0,
|
||||
Negative ACCT,Negative ACCT,SERVICES,Mt,0,0,
|
||||
Positive ACCT,Positive ACCT,SERVICES,Mt,0,0,
|
||||
METI freight,METI,SERVICES,Mt,0,0,
|
||||
COAnalysis,COAnalysis,SERVICES,Mt,0,0,
|
||||
Expected Revenue,Expected Revenue,SERVICES,Mt,0,0,
|
||||
Premium + 5% Tax,Premium + 5% Tax,SERVICES,Mt,0,0,
|
||||
Future allocation,Future allocation,SERVICES,Mt,0,0,
|
||||
Profit sharing,Profit sharing,SERVICES,Mt,0,0,
|
||||
Deal Revenue,Invoice Validation Deal Revenue,SERVICES,Mt,0,0,
|
||||
Contract discount / premium,Contract discount / premium,SERVICES,Mt,0,0,
|
||||
|
@@ -0,0 +1,34 @@
|
||||
name,type,lat,lon
|
||||
Antwerpen,supplier,51.2309677570105,4.37805175781251
|
||||
Bandirma,supplier,40.369697136983,27.9607200622559
|
||||
Callao,supplier,-12.0144723358403,-77.1260833740236
|
||||
Donghae,supplier,0,0
|
||||
Fangcheng,supplier,21.6140258994842,108.322792053223
|
||||
Hamburg,supplier,53.57532,10.01534
|
||||
Hazira,supplier,21.125681,82.794998
|
||||
Hibi,supplier,34.39572,133.78713
|
||||
Huelva,supplier,37.26638,-6.94004
|
||||
Ilo,supplier,-17.6361697242517,-71.4935302734376
|
||||
Japan,supplier,37.3002752813444,138.515625
|
||||
Laizhou,supplier,37.2882577829837,119.795265197754
|
||||
Lazaro Cardenas,supplier,17.9073656185063,-102.127704620362
|
||||
Longkou,supplier,34.668138,104.165802
|
||||
Naoshima,supplier,34.4612772884371,134.009170532227
|
||||
Niihama,supplier,34.0407116420786,133.319778442383
|
||||
Ningde,supplier,26.6965451115852,119.752349853516
|
||||
Odda,supplier,52.133057,5.29525
|
||||
Onahama,supplier,36.9268411923102,140.907897949219
|
||||
Onsan,supplier,35.4601106672107,129.37671661377
|
||||
Pori,supplier,61.6450645681561,21.3828277587891
|
||||
Putian,supplier,0,0
|
||||
Qingdao,supplier,36.06605,120.36939
|
||||
Rizhao,supplier,0,0
|
||||
Ronnskar,supplier,64.6574030409482,21.2832641601563
|
||||
Rugao,supplier,32.0645374947014,120.495300292969
|
||||
Saganoseki,supplier,33.2411282959313,131.885418891907
|
||||
Taichung,supplier,0,0
|
||||
Tampa,supplier,27.897652653541,-82.4285316467287
|
||||
Ulsan,supplier,35.5366963783951,129.458084106446
|
||||
Yeosu,supplier,34.7416124988318,127.705078125
|
||||
YIZHENG,supplier,32.2244855374297,119.227289967737
|
||||
Zhangjiagang,supplier,31.96815,120.40019
|
||||
|
796
Reference Data/python_project/loaders/backup/Vessels.csv
Normal file
796
Reference Data/python_project/loaders/backup/Vessels.csv
Normal file
@@ -0,0 +1,796 @@
|
||||
vessel_name,vessel_year,vessel_imo
|
||||
Fairchem Blue Shark,2019,9804837
|
||||
Woojin Evelyn,2002,9269594
|
||||
Sigaia Theresa,2015,NULL
|
||||
Sc Hongkong,2001,9187904
|
||||
Chem Sol,2017,9739276
|
||||
Southern Vulture,2018,9852274
|
||||
Chembulk Yokohama,2003,9276248
|
||||
Fuji Galaxy,2010,9490301
|
||||
Ctg Bismuth,2016,9739290
|
||||
Navig8 Violette,2015,9690626
|
||||
Stolt Sun,2000,9149512
|
||||
Houyoshi Park,2016,9725847
|
||||
Beatrice,2013,9674763
|
||||
Yc Azalea,2004,9272682
|
||||
Argent Hibiscus,2010,9414266
|
||||
Fairchem Success,2017,9798648
|
||||
Pvt Sunrise,2011,9565742
|
||||
Stolt Inspiration,1997,9102083
|
||||
Tiger Reliance,2018,9800776
|
||||
Bow Hercules,2017,9752046
|
||||
Stolt Strength,2005,9311024
|
||||
Lila Vancouver,2008,9407067
|
||||
Southern Puma,2016,9792008
|
||||
Niseko Galaxy,2020,9804930
|
||||
Sun Freesia,2019,9570591
|
||||
Navig8 Ammolite,2015,9727534
|
||||
Qikiqtaaluk W.,2011,9421221
|
||||
Ginga Lynx,2009,9442550
|
||||
Lime Galaxy,2008,9380972
|
||||
Skarven,2009,9400394
|
||||
Davide B,2016,9721750
|
||||
Ginga Leopard,2008,9425992
|
||||
Eastern Quest,2009,9472749
|
||||
Halcon Trader,2016,9742053
|
||||
Bow Orion,2019,9818515
|
||||
Navig8 Azotic,2016,9719757
|
||||
Chemstar Sapphire,2019,9804904
|
||||
Chem Altamira,2015,9705744
|
||||
Chemroad Queen,2015,9737151
|
||||
Chem Sea 1,2016,9731729
|
||||
Dongyang Chemi,2001,9255969
|
||||
Shamrock Jupiter,2009,9416082
|
||||
Bow Faith,1997,9114232
|
||||
Brillante,2017,9743825
|
||||
Bow Summer,2005,9215270
|
||||
Team Sapphire,2004,9312406
|
||||
Mtm Santos,2015,9712606
|
||||
Asl Orchid,2011,9594157
|
||||
Sichem Mississipi,2008,9376658
|
||||
Chemroute Brilliant,2009,9442562
|
||||
Chemstar Jewel,2012,9624782
|
||||
Pacific Endeavor,2011,9490325
|
||||
Sky Ploeg,2015,9724441
|
||||
Chemroute Pegasus,2012,9566162
|
||||
Stolt Ajisai,2011,9477555
|
||||
Stolt Greenshank,2011,9518799
|
||||
Golden Hachi,2020,9874583
|
||||
Mtm Tortola,2016,9742065
|
||||
Sg Pegasus,2011,9494876
|
||||
Bow Sirius,2006,9215294
|
||||
Chemroad Echo,2004,9284685
|
||||
No.2 Asian Pioneer,2016,9730983
|
||||
Condor Trader,2016,9742077
|
||||
Sichem Palace,2004,9304318
|
||||
Mtm Colorado,2004,9278052
|
||||
Alessandro Dp,2007,9384162
|
||||
Kitikmeot W.,2010,9421219
|
||||
Nordic Ami,2019,9800051
|
||||
Golden Resolution,2014,9710074
|
||||
Stolt Kashi,2003,9266243
|
||||
Chem Wolverine,2006,9340439
|
||||
Golden Prelude,2021,9881079
|
||||
Cs Onsan,2013,9659684
|
||||
Mtm Amsterdam,2018,9776444
|
||||
Stolt Endurance,2004,9284697
|
||||
Celsius Mumbai,2005,9304332
|
||||
Stolt Invention,1997,9102100
|
||||
Chem Barcelona,2016,9725835
|
||||
Komodo Park,2024,9981465
|
||||
Eva Tokyo,2019,9865520
|
||||
Heung-a Pioneer,2008,9415478
|
||||
Golden Sky,2017,9792151
|
||||
Nordic Copenhagen,2019,9796810
|
||||
Stolt Pondo,2007,9374521
|
||||
Emanuele S,2007,9298363
|
||||
Giancarlo D,2016,9721748
|
||||
Chemroad Hawk,2018,9790622
|
||||
Sichem Marseill,2007,9378199
|
||||
Navig8 Victoria,2015,9690614
|
||||
Stolt Kiri,2003,9266231
|
||||
Royal Crystal 7,2007,9381330
|
||||
Stolt Groenland,2009,9414072
|
||||
Ensemble,2017,9749453
|
||||
Golden Yosa,2008,9407081
|
||||
Golden Wave,2019,9819911
|
||||
Sichem Iris,2008,9392183
|
||||
Fairchem Katana,2016,9749685
|
||||
Chembulk Virgin Gorda,2004,9294288
|
||||
Ginga Puma,2006,9343780
|
||||
Arpeggio,2017,9749441
|
||||
Southern Bull,2007,9378785
|
||||
Mtm New York,2016,9749386
|
||||
Golden Ace,2015,9736626
|
||||
Chemroute Sky,2010,9508160
|
||||
Zy Galaxy,1997,9143221
|
||||
Daesan Chemi,2004,9303273
|
||||
Marquette,2016,9732785
|
||||
Fairchem Thresher,2019,9829746
|
||||
Sun Diana,2009,9409508
|
||||
Xena,2007,9360958
|
||||
Chem Stream,2010,9479979
|
||||
Navig8 Axinite,2016,9719771
|
||||
Bay Pride,2017,9806665
|
||||
Mtm Penang,2015,9712591
|
||||
Chem Tiger,2003,9287297
|
||||
Argent Daisy,2009,9382061
|
||||
Navig8 Andesine,2015,9711559
|
||||
Chem Mia,2008,9407093
|
||||
Stolt Hagi,2016,9750206
|
||||
Gwen,2008,9407067
|
||||
Bow Saga,2007,9215309
|
||||
Amagi Galaxy,2010,9490313
|
||||
Chem Mercury,2018,9815276
|
||||
Bow Precision,2018,9790646
|
||||
Chem Spica,2017,9739264
|
||||
Jal Kisan,2002,9223851
|
||||
Stolt Loyalty,2017,9680114
|
||||
Sc Golden Fortune Lx,2000,9233870
|
||||
Bow Sea,2006,9215282
|
||||
Chembulk Columbus,2011,9515319
|
||||
Stolt Creativity,1997,9102095
|
||||
Chem Lithium,2017,9815252
|
||||
Tiger Integrity,2018,9760574
|
||||
Stolt Innovation,1996,9102069
|
||||
Chem New York,2014,9705732
|
||||
Navig8 Amber,2015,9714056
|
||||
Golden Taka,2004,9305544
|
||||
Chembulk Singapore,2007,9330587
|
||||
Stolt Bobcat,2009,9511167
|
||||
Easterly As Omaria,2007,9363819
|
||||
Southern Mermaid,2014,9724166
|
||||
Golden Australis,2020,9882748
|
||||
Giovanni Dp,2003,9261516
|
||||
Navig8 Almandine,2015,9714068
|
||||
Genuine Venus,2013,9613965
|
||||
Trf Miami,2008,9416056
|
||||
Stolt Vestland,1992,8911669
|
||||
Hafnia Azurite,2016,9727560
|
||||
Chemroad Sea,2011,9565730
|
||||
Stolt Acer,2004,9272668
|
||||
"Chem Spark ",2016,9731743
|
||||
Naeba Galaxy,2018,9791169
|
||||
Southern Turkey,2018,9749740
|
||||
Sg Friendship,2003,9288576
|
||||
Bow Santos,2004,9303651
|
||||
Dreggen,2008,9416070
|
||||
Golden Pioneer,2010,9421594
|
||||
Fsl London,2006,9340465
|
||||
Stolt Yuri,2016,9750218
|
||||
Roseanne,2003,9300544
|
||||
Golden Grace,2005,9317030
|
||||
Mtm Gibraltar,2003,9282924
|
||||
Southern Owl,2016,9773143
|
||||
Sichem Melbourne,2007,9376921
|
||||
Argent Aster,2007,9379959
|
||||
Golden Orion,2014,9712333
|
||||
Stolt Jaeger,1997,9114775
|
||||
Bow Olympus,2019,9818527
|
||||
Sc Chongqing,2010,9425045
|
||||
Ocean Hope,2006,9340453
|
||||
Celsius Middelfart,2015,9733349
|
||||
Easterly As Olivia,2007,9340489
|
||||
Woojin Kelly,2006,9330408
|
||||
Stolt Effort,1999,9178202
|
||||
Stolt Virtue,2004,9274317
|
||||
Eva Usuki,2020,9865532
|
||||
Golden Jupiter,2019,9837573
|
||||
Dh Diligency,2019,9813060
|
||||
Singapore Pioneer,2009,9478262
|
||||
Bow Flower,1994,9047491
|
||||
Alden,2016,9733363
|
||||
Bunga Lily,2011,9542178
|
||||
Stolt Megami,2008,9425980
|
||||
Bochem Singapura,2011,9565625
|
||||
Tiger Perseverance,2019,9800788
|
||||
Dm Emerald,2010,9412763
|
||||
Celsius Miami,2005,9304320
|
||||
Bochem Bucephalas,2023,9760550
|
||||
Akra 103,2000,9187538
|
||||
Stream Baltic,2019,9838668
|
||||
Bow Clipper,1995,9047518
|
||||
Saehan Nuria,2010,NULL
|
||||
Golden Betelgeuse,2019,9458315
|
||||
Chem New Orleans,2015,9705756
|
||||
Yangon,2003,9250165
|
||||
Chembulk Tortola,2007,9342786
|
||||
Chem Sirius,2011,9558397
|
||||
Bow Compass,2009,9412737
|
||||
Stellar Lilac,2008,9499943
|
||||
Stream Pacific,2019,9838670
|
||||
Fairchem Fortitude,2020,9805910
|
||||
Orchid Sylt,2009,9367413
|
||||
Manila I,2003,9242326
|
||||
Chem Star,1997,9156541
|
||||
Stolt Courage,2004,9296731
|
||||
Sichem Beijing,2007,9397042
|
||||
Greenwich Park,2011,9505998
|
||||
Stolt Stream,2000,9169940
|
||||
Jutlandia Swan,2015,9736638
|
||||
Hafnia Topaz,2016,9753686
|
||||
No2. Heung-a Pioneer,2008,9415480
|
||||
Bow Dalian,2012,9504205
|
||||
Chemocean Orion,2018,9777412
|
||||
Albatross Trader,2015,9724063
|
||||
Yc Pansy,2005,9311256
|
||||
Bow Palladium,2017,9777371
|
||||
Sc Chengdu,2010,9572185
|
||||
Ulriken,2006,9325843
|
||||
Mtm Hamburg,2008,9379844
|
||||
Jipro Isis,2008,9370719
|
||||
"Graceful Star ",1995,9102928
|
||||
Chem Venus,2004,9324215
|
||||
Awasan Pioneer,2009,9438925
|
||||
Stolt Larix,2015,9617650
|
||||
Stolt Achievement,1999,9124469
|
||||
Rudolf Schulte,2011,9576765
|
||||
Jipro Neftis,2011,9459292
|
||||
Bow Gemini,2017,9752034
|
||||
Guanaco,2003,9256834
|
||||
Stolt Satsuki,2017,9781114
|
||||
Mtm Dublin,2008,9335824
|
||||
Stolt Flamenco,2010,9391995
|
||||
Bow Nangang,2013,9504217
|
||||
Songa Winds,2009,9416109
|
||||
Concerto,2017,9743837
|
||||
Stanley Park,2008,9363845
|
||||
Stolt Confidence,1996,9102071
|
||||
Bow Firda,2003,9250751
|
||||
Bow Flora,1998,9143207
|
||||
Reinhold Schulte,2012,9576789
|
||||
Stolt Zulu,2006,9351531
|
||||
Sinar Malahayati,2006,9349643
|
||||
Chemroad Polaris,2014,9536923
|
||||
Stolt Ilex,2010,9505936
|
||||
Stolt Sisto,2010,9359375
|
||||
Bristol Trader,2016,9737101
|
||||
Bochem Mumbai,2010,9565637
|
||||
Great Epsilon,2020,9873644
|
||||
Chemstar Iris,2018,9827463
|
||||
Fairchem Integrity,2019,9860192
|
||||
Chemroad Rose,2005,9317846
|
||||
Stolt Lerk,2017,9719252
|
||||
Bow Hector,2009,9363493
|
||||
Sichem Amethyst,2006,9354571
|
||||
Sichem Ruby,2006,9344174
|
||||
Koryu,2013,9668283
|
||||
Nordic Aqua,2018,9800116
|
||||
As Orelia,2008,9363821
|
||||
Sunrise Ray,2018,9829679
|
||||
Chem Saiph,2017,9731781
|
||||
Dh Diligency,2019,9813060
|
||||
Golden Denise,2006,9366196
|
||||
Navig8 Gauntlet,2019,9853228
|
||||
Tiger Tenacity,2017,9760550
|
||||
Genuine Hercules,2013,9597147
|
||||
Saehan Jasper,2009,9416111
|
||||
Korea Chemi,2004,9274276
|
||||
Astra,2003,9273387
|
||||
Sichem Manila,2007,9322097
|
||||
Dionne,2018,9814909
|
||||
Jbu Sapphire,2009,9412725
|
||||
Ginga Jaguar,2005,9321873
|
||||
Goldengate Park,2013,9493145
|
||||
Chemroad Aqua,2018,9790610
|
||||
Xing Tong Kai Ming,2024,9988059
|
||||
Mtm Newport,2018,9774575
|
||||
Harsanadi,1999,9220196
|
||||
Fairchem Sabre,2013,9657478
|
||||
Wawasan Jade,2010,9565613
|
||||
Damiania,2006,9308235
|
||||
Uacc Manama,2010,9458822
|
||||
Stolt Concept,1999,9178197
|
||||
Eastern Liberty,2002,9276236
|
||||
Stolt Perseverance,2001,9124471
|
||||
Caribbean 1,2009,9416094
|
||||
Ginga Cougar,2005,9321861
|
||||
Serene Monaco,2005,9309629
|
||||
Tablones,2003,9043093
|
||||
Capella,2003,9278650
|
||||
Ginga Lion,2004,9278727
|
||||
Sc Petrel,2016,9746176
|
||||
Scarlet Ray,2020,9799654
|
||||
Bow Architect,2005,9319480
|
||||
Eastern Neptune,2006,9370630
|
||||
Bay Yasu,2008,9363869
|
||||
Stolt Aguila,2009,9391983
|
||||
Royal Aqua,2008,9381366
|
||||
Xanthia,2003,9246152
|
||||
Sfl Aruba,2022,9919761
|
||||
Mtm Shanghai,2006,9345908
|
||||
Lincoln Park,2012,9640097
|
||||
Chem Rotterdam,2014,9640140
|
||||
Stolt Magnesium,2017,9739317
|
||||
Ginga Cheetah,2007,9414216
|
||||
Mac London,2003,9296872
|
||||
Eva Hongkong,2017,9800001
|
||||
Southern Shark,2016,9821299
|
||||
Navig8 Tanzanite,2016,9753703
|
||||
Octaden,2007,9340477
|
||||
Swan Pacific,2016,9749805
|
||||
Mtm Southport,2008,9416032
|
||||
Stolt Island,2009,9414058
|
||||
Jbu Opal,2009,9400409
|
||||
Songa Challenge,2009,9409510
|
||||
Ammolit,1995,9016870
|
||||
Ariane Makara,2009,9442548
|
||||
Intermezzo,2019,9804825
|
||||
Bow Capricorn,2016,9752010
|
||||
Jkt Shanghai,2000,9175535
|
||||
Sun Ploeg,2015,9724439
|
||||
Stolt Norland,2009,9414060
|
||||
Bay Spirit,2019,9852286
|
||||
Southern Giraffe,2008,9415014
|
||||
Chemocean Leo,2018,9777424
|
||||
Sun Triton,2017,9781097
|
||||
Indigo Ray,2016,9716016
|
||||
Argent Gerbera,2010,9424596
|
||||
Sc Virgo,2017,9801093
|
||||
Ivory Ray,2011,9505986
|
||||
Sichem Lily,2009,9393395
|
||||
Songa Dream,2010,9505948
|
||||
Stolt Distributor,2002,9276145
|
||||
Celsius Eagle,2010,9423750
|
||||
Chem Argon,2016,9716004
|
||||
Sc Taurus,2017,9801081
|
||||
Fairchem Victory,2016,9773179
|
||||
Stolt Glory,2005,9311012
|
||||
Stolt Tenacity,2017,9680102
|
||||
Clarice,2014,9674775
|
||||
Stolt Cobalt,2016,9739305
|
||||
Chem Antares,2004,9286554
|
||||
Om Shanghai,2007,9358632
|
||||
Stolt Spruce,1993,8919037
|
||||
Ginga Ocelot,2013,9581423
|
||||
Ulsan Chemi,2003,9279927
|
||||
Stolt Cedar,1994,8919049
|
||||
Sichem Challenge,1998,9196448
|
||||
Navig8 Amethyst,2015,9714501
|
||||
Patalya,2005,9305180
|
||||
Mtm Rotterdam,2011,9477567
|
||||
Stolt Surf,2000,9168623
|
||||
Chembulk Kobe,2002,9263136
|
||||
Nordic Masa,2009,9451410
|
||||
Hakone Galaxy,2018,9791171
|
||||
Stolt Skua,1999,9199311
|
||||
Songa Breeze,2009,9423645
|
||||
Southern Xantis,2020,9883493
|
||||
Stolt Sakura,2010,9432969
|
||||
Nordic Marita,2012,9558402
|
||||
Stolt Sea,1999,9149495
|
||||
Ginga Saker,2003,9258155
|
||||
Chemstar Stellar,2012,9624770
|
||||
Fg Rotterdam,2012,9485863
|
||||
Fairchem Blade,2014,9692416
|
||||
Chemstar Tierra,2018,9827451
|
||||
Purple Ray,2020,9804899
|
||||
Chem Bulldog,2010,9587790
|
||||
Mtm Singapore,2011,9477529
|
||||
Moyra,2005,9271999
|
||||
Chem Stellar,2016,9731755
|
||||
Clayton,2025,1019979
|
||||
Stolt Sanderling,2011,9518804
|
||||
Ginga Panther,2007,9379985
|
||||
Sichem Hong Kong,2007,9397054
|
||||
Hyde Park,2017,9725861
|
||||
Fairchem Endurance,2020,9800441
|
||||
Bow Faith,1997,5408609
|
||||
Alpaca,2010,9403293
|
||||
Golden Vega,2016,9773167
|
||||
Melderskin,2016,9737577
|
||||
Stolt Sneland,2008,9352212
|
||||
Doris Ruby,2003,9279939
|
||||
Moquegua,2002,9262869
|
||||
Sc Mercury,2016,9746188
|
||||
Mid Eagle,2007,9330795
|
||||
Crimson Ray,2007,9347152
|
||||
Stolt Efficiency,1998,9102112
|
||||
Stolt Basuto,2006,9351543
|
||||
Stolt Alm,2016,9719238
|
||||
Jazz,2019,9804849
|
||||
Sun Jupiter,2019,9837573
|
||||
"Magenta Ray ",2019,9829722
|
||||
Gt Star,2012,9485849
|
||||
Fairchem Tiger,2019,9829760
|
||||
Stolt Voyager,2003,9297292
|
||||
Stolt Quetzal,2009,9376660
|
||||
G Silver,2004,9324215
|
||||
Chem Silicon,2018,9829681
|
||||
Concon Trader,2018,9800037
|
||||
Huemul,2008,9371775
|
||||
As Omaria,2007,9363819
|
||||
Rayong Chemi,2002,9257125
|
||||
Navig8 Aventurine,2015,9711547
|
||||
Stolt Mercury,2017,9739329
|
||||
Fairchem Falcon,2018,9799642
|
||||
Fairchem Copper,2019,9829758
|
||||
Sea Ploeg,2016,9724453
|
||||
Stolt Breland,2010,9414084
|
||||
Mtm Tokyo,2003,9279111
|
||||
Chemroad Journey,2009,9414254
|
||||
Intrepid Seahawk,2011,9576777
|
||||
Mtm North Sound,2006,9360946
|
||||
Stolt Lotus,2014,9617648
|
||||
Carole M,2016,9732797
|
||||
Stolt Integrity,2017,9680097
|
||||
Dh Fealty,2018,9829772
|
||||
Stolt Maple,2017,9764491
|
||||
Southern Falcon,2008,9414993
|
||||
Bow Jaguar,2024,9989209
|
||||
Melito Carrier,1993,8920581
|
||||
Saehan Intrasia,2005,9330460
|
||||
Golden Cygnus,2010,9498080
|
||||
Chemroad Lily,2006,9325855
|
||||
Hafnia Turquoise,2016,9753674
|
||||
Golden Mind,2020,9881067
|
||||
Cutlass Galaxy,2015,9746164
|
||||
Navig8 Tourmaline,2016,9753698
|
||||
Mac Singapore,2001,9244386
|
||||
Radiant Ray,2018,9749697
|
||||
Stolt Momiji,2010,9470545
|
||||
Sun Dahlia,2018,9570577
|
||||
Zoey,2011,9624548
|
||||
Mtm Amazon,2007,9374533
|
||||
Southern Quokka,2017,9792010
|
||||
Chembulk Barcelona,2004,9278662
|
||||
Navig8 Aragonite,2015,9727558
|
||||
Shamrock Mercury,2010,9477531
|
||||
Ginga Caracal,2009,9426300
|
||||
Mid Nature,2011,9542154
|
||||
Ginga Hawk,2000,9222651
|
||||
Stolt Lind,2017,9719264
|
||||
Stolt Pelican,1996,9016882
|
||||
Central Park,2015,9725823
|
||||
Amelia,2011,9624768
|
||||
Bow Harmony,2008,9379909
|
||||
Bow Fortune,1999,9168635
|
||||
Golden Leader,2017,9805130
|
||||
Chemroad Quest,2010,9451288
|
||||
Mtm Vancouver,2019,9867607
|
||||
Celsius Manhattan,2006,9323766
|
||||
Chem Houston,2014,9705720
|
||||
Golden Aspirant,2016,9758313
|
||||
Lumphini Park,2013,9640114
|
||||
Navig8 Adamite,2015,9727546
|
||||
Mandal,2016,9732773
|
||||
Nordic Callao,2019,9340439
|
||||
Stolt Sypress,1998,9150315
|
||||
Tsukuba Galaxy,2020,9796834
|
||||
Chemroad Orchid,2019,9790608
|
||||
Valentine,2008,9504023
|
||||
Trf Bergen,2015,9692246
|
||||
Nordic Americas,2004,9304306
|
||||
Stolt Focus,2001,9214305
|
||||
Golden Ray,2012,9640102
|
||||
Stolt Kingfisher,1998,9154323
|
||||
Fairchem Bronco,2007,9360960
|
||||
Sc Falcon,2016,9746190
|
||||
Nordic Ace,2018,9800104
|
||||
Tivoli Park,2018,9780536
|
||||
Bow Tungsten,2018,9777400
|
||||
Fanfare,2018,9760562
|
||||
Chemroad Sirius,2018,9757979
|
||||
Yelena,2011,9613616
|
||||
Susana S,2009,9406714
|
||||
Chemroad Hope,2011,9565754
|
||||
Spring Ploeg,2017,9774707
|
||||
Edge Galaxy,2017,9788954
|
||||
Forest Park,2013,9640126
|
||||
Southern Hawk,2009,9534901
|
||||
Zao Galaxy,2012,9566150
|
||||
Pvt Jupiter,2008,9408803
|
||||
Stolt Redshank,2011,9566746
|
||||
Bochem Ghent,2011,9565649
|
||||
Chemstar River,2017,9758026
|
||||
Golden Creation,NULL,NULL
|
||||
Golden Vega,0,NULL
|
||||
Chem Selenium,0,NULL
|
||||
Bunga Laurel,NULL,NULL
|
||||
Bow Condor,0,NULL
|
||||
Ncc Mekka,1995,9047752
|
||||
Mac Singapore,2001,9244386
|
||||
Southern Lion,NULL,NULL
|
||||
Navig8 Spark,NULL,NULL
|
||||
Sea Ploeg,NULL,NULL
|
||||
Golden Creation,NULL,NULL
|
||||
Mtm Rotterdam,NULL,NULL
|
||||
Bow Andes,0,NULL
|
||||
Southern Lion,2011,9567752
|
||||
"Genuine Galaxy ",0,NULL
|
||||
Fairchem Triumph,2017,9758038
|
||||
Polaris Stardom,2009,9470246
|
||||
Conti Chivalry,NULL,NULL
|
||||
Bunga Lucerne,2012,9508938
|
||||
Bw Helium,NULL,NULL
|
||||
Chembulk Sydney,NULL,NULL
|
||||
Dh Glory,2020,9815628
|
||||
Nordic Maya,2005,9339351
|
||||
Ginga Liger,2021,9893448
|
||||
Esteem Sango,2021,9900461
|
||||
Sun Iris,2020,9873254
|
||||
Goldstar Shine,2004,9279707
|
||||
Cnc Dream,2004,9305544
|
||||
Golden Procyon,2015,9750476
|
||||
Bochem Chennai,2012,9565766
|
||||
Bochem Chennai,2012,9565766
|
||||
Bochem Chennai,2012,9565766
|
||||
Sc Brilliant,2007,9340702
|
||||
Fairchem Angel,2020,9809394
|
||||
Itsa Ship Accounting,NULL,NULL
|
||||
Forshun,2001,9236054
|
||||
Pacific Star,2008,9363481
|
||||
Chemroute Pegasus,2012,9566162
|
||||
Barbouni,2007,9416020
|
||||
Lisbon,2000,9223916
|
||||
Benten Galaxy,2022,NULL
|
||||
Pvt Azura,2009,9423683
|
||||
Southern Narwhal,2015,9715995
|
||||
Mid Fortune,2009,9423683
|
||||
Bum Shin,2003,9263095
|
||||
G Bright,2004,9294276
|
||||
Golden Unity,2011,9572575
|
||||
T Procyon,2021,9569499
|
||||
Chem Leona,2010,9505948
|
||||
Chemroute Sun,2008,9414228
|
||||
Fairchem Fynbos,2021,9914292
|
||||
Chem Sceptrum,2017,9731793
|
||||
Begonia,2005,9330381
|
||||
Takao Galaxy,2022,9920069
|
||||
Daeho Sunny,2009,9511105
|
||||
Bow Cecil,1998,9143219
|
||||
Sc Scorpio,2017,9801079
|
||||
Stolt Argon,2016,9739288
|
||||
Thai Chemi,2006,9330393
|
||||
Easterly As Omaria,2007,9340489
|
||||
Taruca,2005,9331402
|
||||
Lila Confidence,2006,9340427
|
||||
Eva Fuji,2021,9914242
|
||||
Bochem Marengo,2017,9749025
|
||||
Whitney,2009,9551337
|
||||
Hakuba Galaxy,2021,9804916
|
||||
Kamui Galaxy,2022,9942653
|
||||
Chem Sea 1,2016,9731729
|
||||
Golden Axis,2022,9930832
|
||||
Chem Star 1,2016,9731731
|
||||
Stolt Bismuth,2016,9739290
|
||||
Tsurugi Galaxy,2020,9875501
|
||||
Tsukuba Galaxy,NULL,9796834
|
||||
Chem Sceptrum,2017,9731793
|
||||
Bangkok Chemi,2006,9330410
|
||||
Bochem London,2016,9743849
|
||||
Stream Arctic,2019,9817509
|
||||
Woojin Elvis,2009,9442665
|
||||
Stolt Fulmar,2000,9148972
|
||||
Fairchem Conquest,2017,9798648
|
||||
Ginga Tiger,2003,9278715
|
||||
Mumbai,2003,9242338
|
||||
Stolt Facto,2010,9359363
|
||||
Chem Taurus,2010,9477505
|
||||
Silver Ray,2013,9493133
|
||||
Navig8 Aronaldo,2015,9711561
|
||||
Blue Arrow,2012,9630444
|
||||
Slogen,2016,9733375
|
||||
Hodaka Galaxy,2018,9791157
|
||||
Bow Prosper,2020,9866770
|
||||
Mtm Kobe,2018,9776456
|
||||
Marex Sara,2016,9773997
|
||||
Rlo Explorer,1998,9148958
|
||||
Chemroute Oasis,2011,9512173
|
||||
Chemroad Dita,2009,9414242
|
||||
Karruca,2001,9216470
|
||||
Stolt Span,1999,9149524
|
||||
Lavender Ray,2017,9740794
|
||||
Bow Cedar,1996,9087013
|
||||
Malbec Legend,2016,9732814
|
||||
Stolt Ocelot,2008,9459539
|
||||
Stolt Orca,2012,9565699
|
||||
Ct Ace,2006,9352597
|
||||
Mar Camino,2010,9573892
|
||||
Hari Akash,1998,9156553
|
||||
Bow Performer,2018,9790658
|
||||
Bochem Oslo,2010,9420710
|
||||
Fairchem Valor,2019,9791195
|
||||
"Jal Garuda ",2000,9196709
|
||||
Fairchem Kiso,2011,9527075
|
||||
Argent Sunrise,2008,9392377
|
||||
Golden Creation,2015,9738662
|
||||
Stolt Kikyo,1998,9156565
|
||||
Ami,2006,9360934
|
||||
Panagia Thalassini,2017,9730335
|
||||
Stolt Renge,2017,9781126
|
||||
Sun Venus,2016,9774410
|
||||
Eastern Oasis,2007,9383986
|
||||
Ogino Park,2017,9725873
|
||||
Golden Deneb,2019,9859258
|
||||
Patrona I,2004,9305178
|
||||
Navig8 Achroite,2016,9727584
|
||||
Golden Chie,2010,9566203
|
||||
Bay Dignity,2017,9806706
|
||||
Marmotas,2005,9304344
|
||||
Global Pioneer,2010,9542142
|
||||
Ds Cougar,2009,9515292
|
||||
Mtm Houston,2010,9505924
|
||||
Stolt Commitment,2000,9168647
|
||||
Songa Peace,2009,9409522
|
||||
Nave Polaris,2011,9457749
|
||||
Southern Unicorn,2018,9749702
|
||||
Tiger Glory,2017,9749025
|
||||
Rabigh Sun,2008,9392365
|
||||
Stolt Pride,2016,9680073
|
||||
Ct Confidence,2006,9340427
|
||||
Bow Neon,2017,9777369
|
||||
"Griya Bugis ",1998,9191280
|
||||
Nocturne,2020,9804863
|
||||
Raon Teresa,2002,9244984
|
||||
Nordic Ann,2010,9422665
|
||||
Yc Daisy,2005,9304344
|
||||
Stolt Seagull,1997,9125645
|
||||
Fairchem Pinnacle,2025,NULL
|
||||
Azalea Galaxy,2006,9343778
|
||||
Stolt Rindo,2005,9314765
|
||||
Navig8 Sky,2016,9731731
|
||||
Rt Star,2011,9523835
|
||||
Ctg Argon,2016,9739288
|
||||
Navig8 Ametrine,2015,9714513
|
||||
Stolt Palm,2018,9764506
|
||||
Hicri Kaan,1998,9171474
|
||||
Chemtrans Mobile,2016,9732802
|
||||
Stolt Sycamore,2000,9198563
|
||||
Bow Condor,2000,9214032
|
||||
Bunga Lilac,2011,9542166
|
||||
Southern Robin,2018,9749714
|
||||
Bow Star,2004,9197296
|
||||
Stolt Excellence,2018,9720081
|
||||
Sagami,2008,9379911
|
||||
Stolt Tsubaki,2011,9477543
|
||||
Bow Chain,2002,9214317
|
||||
Nq Laelia,2008,9393383
|
||||
Rainbow Island 88,2004,9286542
|
||||
Chembulk Vancouver,2003,9282364
|
||||
Mtm Big Apple,2018,9774563
|
||||
Bunga Laurel,2010,9529645
|
||||
Stolt Apal,2016,9719240
|
||||
Wawasan Topaz,2010,9565601
|
||||
Sichem New York,2007,9337834
|
||||
Fairchem Sword,2013,9673678
|
||||
Shenghui Glory,2015,9724037
|
||||
Navig8 Amazonite,2015,9719769
|
||||
Bow Sun,2003,9197284
|
||||
Mid Osprey,2006,9330783
|
||||
Stolt Vision,2005,9274329
|
||||
Celsius Messina,2007,9349655
|
||||
Nordic Aki,2011,9505974
|
||||
Stolt Selje,1993,8919051
|
||||
Navig8 Goal,2019,9853216
|
||||
Ginga Fortitude,2020,9805910
|
||||
Yc,NULL,NULL
|
||||
Stolt Spray,2000,9168611
|
||||
Nq Morina,2002,9243382
|
||||
Nordic Mari,2010,9422677
|
||||
Golden Mercury,2003,9272802
|
||||
Asian Pioneer,2015,9730971
|
||||
Linken,2016,9733351
|
||||
Lila Evia,2006,9330771
|
||||
Mtm Fairfield,2002,9264465
|
||||
Stolt Ebony,2017,9744908
|
||||
Stolt Teal,1999,9199323
|
||||
Mtm New Orleans,2016,9749398
|
||||
Bow Engineer,2006,9317860
|
||||
Lavraki,2007,9323077
|
||||
Stolt Sandpiper,2011,9566758
|
||||
Golden Sirius,2009,9478274
|
||||
Fuji Lava,2010,9468528
|
||||
Flumar Maceio,2006,9345893
|
||||
Ginga Merlin,2002,9254252
|
||||
Bow Glory,2017,9758038
|
||||
Mtm Savannah,2015,9726750
|
||||
Malbec Legacy,2016,9732826
|
||||
Wolverine,2006,9043081
|
||||
Forte Galaxy,2018,NULL
|
||||
Uacc Marah,2013,9489091
|
||||
Chem Neon,2018,9815264
|
||||
"Blue Arrow ",2012,9630444
|
||||
Xt Sea Lion,2025,1042378
|
||||
Navig8 Universe,2013,9489106
|
||||
Sunny Orion,2010,9511143
|
||||
Stolt Betula,2003,9266267
|
||||
Stolt Suisen,1998,9156577
|
||||
Bow Spring,2004,9215256
|
||||
Bow Cardinal,1997,9114244
|
||||
Bow Platinum,2018,9777383
|
||||
Mtm Hudson,2004,9278064
|
||||
Chem Cobalt,2016,9740770
|
||||
Eva Bergen,2018,9800013
|
||||
Latana,2000,9186352
|
||||
Eastern Prosperity,2008,9383998
|
||||
"Bellis Theresa ",2000,9192375
|
||||
Chem Singapore,2015,9705768
|
||||
Floyen,2016,9749790
|
||||
Kaimon Galaxy,2012,9552472
|
||||
Sunny Orion,2010,9511143
|
||||
Chemstar River,2017,9758026
|
||||
Fairchem Loyalty,2019,9791183
|
||||
Nave Cosmos,2010,9457024
|
||||
Bomar Lynx,2003,9043093
|
||||
Bow Fagus,1995,9047764
|
||||
Stolt Osprey,1998,9147461
|
||||
Bunga Lavender,2010,9542130
|
||||
Lauren,2002,9266229
|
||||
Tiger Harmony,2016,9725859
|
||||
Fairchem Hawk,2019,9804813
|
||||
Sandro,2017,9390525
|
||||
Bow Odyssey,2020,9818539
|
||||
Horin Trader,2015,9724051
|
||||
Bow Caroline,2009,9367554
|
||||
Chem Gallium,2017,9740782
|
||||
Ginga Kite,2001,9228291
|
||||
Summer Ploeg,2017,9790464
|
||||
Wawasan Ruby,2010,9477517
|
||||
Menuett,2019,9790634
|
||||
Acadia Park,2018,9780548
|
||||
Sun Neptune,2007,9363807
|
||||
Marigold,2000,9221669
|
||||
Chemroad Haya,2004,9303649
|
||||
Ginga Bobcat,2010,9472737
|
||||
Key Wind,1999,9148960
|
||||
Uacc Riyadh,2011,9458834
|
||||
Bochem Bayard,2017,9749025
|
||||
Bow Titanium,2018,9777395
|
||||
Ebony Ray,2008,9363857
|
||||
Stolt Viking,2001,9196711
|
||||
Mtm Westport,2005,9317858
|
||||
Mtm Key West,2020,9790672
|
||||
Navig8 Aquamarine,2015,9711573
|
||||
Chem Amsterdam,2013,9640138
|
||||
Fairchem Honor,2019,9860207
|
||||
Stolt Sincerity,2016,9680085
|
||||
Woojin Chemi,2003,9272814
|
||||
Sichem Mumbai,2006,9322085
|
||||
Pacific Sapphire,2008,9508158
|
||||
Jbu Onyx,2008,9392999
|
||||
Southern Wolf,2019,9867267
|
||||
Stolt Calluna,2017,9744893
|
||||
Bow Sky,2005,9215268
|
||||
Chem Polaris,2008,9416044
|
||||
Southern Condor,2007,9441659
|
||||
Golden Sagittarius,2019,9867073
|
||||
Chemroad Sakura,2018,9757967
|
||||
Navig8 Alabaster,2015,9727572
|
||||
Prabhu Parvati,2008,NULL
|
||||
Sun Edelweiss,2018,9570589
|
||||
Fairchem Aldebaran,2017,9552666
|
||||
Stolt Capability,1998,9102124
|
||||
Straum,2010,9406726
|
||||
Cypress Galaxy,2007,9379973
|
||||
Stolt Vanguard,2004,9274305
|
||||
Mtm Antwerp,2004,9291456
|
||||
Diva,2017,9760548
|
||||
Bow Fuling,2012,9504190
|
||||
Navig8 Amessi,2015,9719745
|
||||
Stolt Sequoia,2003,9235062
|
||||
Golden Dream,2002,9255971
|
||||
Bochem Luxembourg,2015,9737565
|
||||
Stolt Sagaland,2008,9352200
|
||||
Harmonics,2019,9799680
|
||||
Alessandro Dp,2007,9384162
|
||||
Battersea Park,2002,9255983
|
||||
Gallop,2019,9799678
|
||||
Kiso,2008,9379894
|
||||
Chemroad Wing,2005,9309502
|
||||
Sc Citrine,2025,9989027
|
||||
Vari Trader,2018,9800025
|
||||
Bow Aquarius,2016,9753791
|
||||
Fairchem Charger,2009,9367401
|
||||
Elm Galaxy,2006,9331256
|
||||
Stolt Auk,2001,9164108
|
||||
Beech Galaxy,2007,9340441
|
||||
Lilac Ray,2020,9883481
|
||||
Fairchem Mako,2018,9826574
|
||||
Argent Iris,2011,9459280
|
||||
Celsius Birdie,2009,9423724
|
||||
Golden Altair,2016,9792163
|
||||
Celsius Mexico,2008,9408798
|
||||
Woojin Frank,2005,9317262
|
||||
Birdie Trader,2016,9724099
|
||||
Liberty,2009,9423542
|
||||
|
@@ -0,0 +1,239 @@
|
||||
from proteus import config, Model
import psycopg2

# XML-RPC Configuration (default connection method)
# NOTE(review): credentials are hard-coded here (and below); move them to
# environment variables or a secrets store before sharing this script.
HTTPS = 'https://'
SERVER_URL = 'itsa.open-squared.tech'
DATABASE_NAME = 'tradon'
USERNAME = 'admin'
PASSWORD = 'dsproject'

# PostgreSQL Configuration (for direct database inspection)
DB_HOST = '72.61.163.139'
DB_PORT = 5433
DB_USER = 'postgres'
DB_PASSWORD = 'dsproject'

print("="*80)
print("CUSTOM FIELDS IDENTIFICATION FOR purchase.purchase")
print("="*80)

# Connect to Tryton via XML-RPC
print(f"\nConnecting via XML-RPC to {SERVER_URL}...")
config.set_xmlrpc(f'{HTTPS}{USERNAME}:{PASSWORD}@{SERVER_URL}/{DATABASE_NAME}/')
print("✓ Connected successfully\n")

Purchase = Model.get('purchase.purchase')

# Get all fields that Proteus sees: public attributes minus the CRUD API.
proteus_fields = sorted([key for key in dir(Purchase)
                         if not key.startswith('_')
                         and key not in ['create', 'delete', 'save', 'find',
                                         'copy', 'read', 'write', 'search']])

print(f"1. FIELDS VISIBLE TO PROTEUS: {len(proteus_fields)} fields")
print("-"*80)

# Standard Tryton purchase.purchase fields (from base module)
standard_purchase_fields = {
    'id', 'create_date', 'create_uid', 'write_date', 'write_uid',
    'company', 'party', 'invoice_party', 'invoice_address',
    'payment_term', 'warehouse', 'currency', 'description',
    'comment', 'state', 'purchase_date', 'invoice_method',
    'lines', 'invoices', 'invoices_ignored', 'invoices_recreated',
    'invoice_lines', 'invoice_lines_ignored', 'moves',
    'shipment_state', 'invoice_state', 'number', 'reference',
    'shipments', 'shipment_returns', 'rec_name', 'origin',
    'untaxed_amount', 'tax_amount', 'total_amount',
    'untaxed_amount_cache', 'tax_amount_cache', 'total_amount_cache',
    'delivery_date', 'party_lang', 'contact', 'xml_id'
}

# Identify potential custom fields (visible to Proteus, not in the base set)
potential_custom_fields = [f for f in proteus_fields if f not in standard_purchase_fields]

print(f"\n2. POTENTIAL CUSTOM FIELDS: {len(potential_custom_fields)} fields")
print("-"*80)
for field in potential_custom_fields:
    print(f"  - {field}")

# Connect to PostgreSQL to get actual table columns
print("\n3. COLUMNS IN POSTGRESQL TABLE 'purchase_purchase'")
print("-"*80)

conn = None
cursor = None
try:
    conn = psycopg2.connect(
        dbname=DATABASE_NAME,
        host=DB_HOST,
        port=DB_PORT,
        user=DB_USER,
        password=DB_PASSWORD
    )
    cursor = conn.cursor()

    # Get all columns from purchase_purchase table
    cursor.execute("""
        SELECT
            column_name,
            data_type,
            character_maximum_length,
            is_nullable,
            column_default
        FROM information_schema.columns
        WHERE table_schema = 'public'
        AND table_name = 'purchase_purchase'
        ORDER BY ordinal_position;
    """)

    db_columns = cursor.fetchall()

    print(f"Total columns in database: {len(db_columns)}\n")

    # Standard columns that typically exist in purchase_purchase
    standard_db_columns = {
        'id', 'create_date', 'create_uid', 'write_date', 'write_uid',
        'company', 'party', 'invoice_party', 'invoice_address',
        'payment_term', 'warehouse', 'currency', 'description',
        'comment', 'state', 'purchase_date', 'invoice_method',
        'number', 'reference', 'delivery_date', 'contact',
        'shipment_state', 'invoice_state', 'origin',
        'untaxed_amount_cache', 'tax_amount_cache', 'total_amount_cache'
    }

    db_column_names = [col[0] for col in db_columns]
    custom_db_columns = [col for col in db_columns if col[0] not in standard_db_columns]

    print("Custom columns in database:")
    for col in custom_db_columns:
        col_name, data_type, max_length, nullable, default = col
        length_info = f"({max_length})" if max_length else ""
        # BUGFIX: pad the combined "type(length)" string, not just length_info,
        # so the "NULL:" column lines up on every row.
        type_display = f"{data_type}{length_info}"
        print(f"  - {col_name:<30} {type_display:<15} NULL: {nullable}")

    # Compare: Fields in Proteus vs Columns in DB
    print("\n4. COMPARISON: PROTEUS vs DATABASE")
    print("-"*80)

    # Fields in Proteus but NOT as direct columns in DB
    # (might be Many2One, One2Many, Function fields, etc.)
    proteus_only = set(potential_custom_fields) - set(db_column_names)
    if proteus_only:
        print(f"\nFields in Proteus but NOT as columns in DB ({len(proteus_only)}):")
        print("(These might be Many2One, One2Many, Function fields, etc.)")
        for field in sorted(proteus_only):
            print(f"  - {field}")

    # Columns in DB but NOT visible in Proteus (these are the problem!)
    db_only = set([col[0] for col in custom_db_columns]) - set(proteus_fields)
    if db_only:
        print(f"\n⚠️  COLUMNS IN DATABASE BUT NOT VISIBLE IN PROTEUS ({len(db_only)}):")
        print("(These fields MUST be added to the Python model!)")
        for field in sorted(db_only):
            print(f"  - {field}")

    # Fields that exist in BOTH Proteus and DB
    both = set(potential_custom_fields) & set([col[0] for col in custom_db_columns])
    if both:
        print(f"\n✓ Custom fields properly defined in BOTH Proteus and DB ({len(both)}):")
        for field in sorted(both):
            print(f"  - {field}")

except Exception as e:
    print(f"Error connecting to PostgreSQL: {e}")
finally:
    # BUGFIX: close cursor/connection even when the inspection above raises;
    # previously an exception mid-way leaked both handles.
    if cursor is not None:
        cursor.close()
    if conn is not None:
        conn.close()

# Test persistence of custom fields
print("\n5. TESTING FIELD PERSISTENCE")
print("-"*80)

try:
    # Find a draft purchase to test against (drafts are safe to mutate)
    drafts = Purchase.find([('state', '=', 'draft')], limit=1)

    if drafts:
        test_purchase = drafts[0]
        test_id = test_purchase.id

        print(f"Testing with purchase ID: {test_id}")
        print("\nTesting custom fields (attempting to set and save):\n")

        # Test a sample of custom fields; only probe fields that exist.
        test_fields = {}
        if 'reference' in potential_custom_fields:
            test_fields['reference'] = 'TEST_REF'
        if 'crop' in potential_custom_fields:
            test_fields['crop'] = 'TEST_CROP'
        if 'forex' in potential_custom_fields:
            test_fields['forex'] = 'TEST_FOREX'
        if 'broker' in potential_custom_fields:
            test_fields['broker'] = 'TEST_BROKER'
        if 'certif' in potential_custom_fields:
            test_fields['certif'] = 'TEST_CERT'
        if 'wb' in potential_custom_fields:
            test_fields['wb'] = 'TEST_WB'

        for field_name, test_value in test_fields.items():
            try:
                original_value = getattr(test_purchase, field_name, None)
                setattr(test_purchase, field_name, test_value)
                test_purchase.save()

                # Reload a fresh copy to check the value survived the round-trip
                reloaded = Purchase(test_id)
                new_value = getattr(reloaded, field_name, None)

                if new_value == test_value:
                    print(f"  ✓ {field_name}: PERSISTS correctly")
                    # Restore original value so the test leaves no trace
                    setattr(reloaded, field_name, original_value)
                    reloaded.save()
                else:
                    print(f"  ✗ {field_name}: Does NOT persist (expected: '{test_value}', got: '{new_value}')")

            except Exception as e:
                print(f"  ✗ {field_name}: Error - {str(e)[:60]}")
    else:
        print("No draft purchases found for testing")

except Exception as e:
    print(f"Error during persistence testing: {e}")

print("\n" + "="*80)
print("SUMMARY & RECOMMENDATIONS")
print("="*80)
print("""
Next steps for your colleague:

1. Review the "⚠️  COLUMNS IN DATABASE BUT NOT VISIBLE IN PROTEUS" section
   → These fields exist in PostgreSQL but are missing from the Python model

2. Review fields that "Does NOT persist" in the testing section
   → These fields are visible but not working correctly

3. Add missing fields to your custom Tryton module:

   File: modules/your_custom_module/purchase.py

   from trytond.pool import PoolMeta
   from trytond.model import fields

   class Purchase(metaclass=PoolMeta):
       __name__ = 'purchase.purchase'

       # Add each missing field with appropriate type:
       custom_field = fields.Char('Custom Field')
       custom_number = fields.Integer('Custom Number')
       custom_date = fields.Date('Custom Date')
       custom_many2one = fields.Many2One('other.model', 'Reference')
       # etc...

4. Increment module version in tryton.cfg

5. Update module: trytond-admin -d tradon -u your_custom_module

6. Restart Tryton server

7. Re-run this script to verify all fields work correctly
""")
||||
@@ -0,0 +1,46 @@
|
||||
from proteus import config, Model

# XML-RPC Configuration
HTTPS = 'https://'
SERVER_URL = 'itsa.open-squared.tech'
DATABASE_NAME = 'tradon'
USERNAME = 'admin'
PASSWORD = 'dsproject'

# Connect via XML-RPC and dump every field name visible on purchase.purchase.
try:
    connection_url = f'{HTTPS}{USERNAME}:{PASSWORD}@{SERVER_URL}/{DATABASE_NAME}/'
    config.set_xmlrpc(connection_url)

    print(f"Connected to Tryton database '{DATABASE_NAME}' successfully!")

    Purchase = Model.get('purchase.purchase')

    try:
        # Use an existing record when one is available; otherwise an unsaved
        # instance is enough for attribute introspection.
        existing = Purchase.find([], limit=1)
        sample = existing[0] if existing else Purchase()

        # Collect public attribute names, excluding the CRUD API methods.
        crud_api = {'create', 'delete', 'save', 'find'}
        field_names = sorted(
            attr for attr in dir(sample)
            if not attr.startswith('_') and attr not in crud_api
        )

        print(f"\nTotal fields in purchase.purchase: {len(field_names)}")
        print("\nField list:")
        for field in field_names:
            print(f"{field}")

    except Exception as e:
        print(f"Could not inspect fields via instance: {e}")

except Exception as e:
    print(f"Connection or operation failed: {e}")
    print("\nPlease verify:")
    print(f"  - Tryton server is running on {SERVER_URL}")
    print(f"  - Database '{DATABASE_NAME}' exists")
    print(f"  - Username and password are correct")
||||
@@ -0,0 +1,35 @@
|
||||
from proteus import config, Model

# Connect (note: the original used an f-string with no placeholders here)
config.set_xmlrpc('https://admin:dsproject@itsa.open-squared.tech/tradon/')

Purchase = Model.get('purchase.purchase')

# The draft purchase ID and probe value were previously duplicated in four
# places; hoisted to constants so changing the target is a one-line edit.
TEST_PURCHASE_ID = 682  # The ID from your previous test
TEST_NUMBER = "MANUAL_TEST_001"

# Test: Set number on draft purchase
print("=== Testing Number Field Persistence ===")
draft = Purchase(TEST_PURCHASE_ID)

print(f"Before: number = {draft.number}, state = {draft.state}")

# Set number
draft.number = TEST_NUMBER
draft.save()
print(f"After save: number = {draft.number}")

# Reload by fetching again from database
draft_reloaded = Purchase(TEST_PURCHASE_ID)
print(f"After reload: number = {draft_reloaded.number}")

if draft_reloaded.number == TEST_NUMBER:
    print("✓ SUCCESS: Number WAS persisted via Proteus!")
else:
    print(f"✗ FAILED: Number NOT persisted. Got: {draft_reloaded.number}")
    print("\nThis means the 'number' field is likely:")
    print("  1. Read-only (controlled by Tryton workflow)")
    print("  2. Auto-generated by a sequence")
    print("  3. Overwritten by server-side logic")

# Now verify in PostgreSQL
print("\n=== Verify in PostgreSQL ===")
print("Run this SQL query to confirm:")
print(f"SELECT id, number, state FROM purchase_purchase WHERE id = {TEST_PURCHASE_ID};")
||||
@@ -0,0 +1,44 @@
|
||||
from proteus import config, Model
from decimal import getcontext, Decimal, ROUND_HALF_UP

# XML-RPC Configuration
HTTPS = 'https://'
SERVER_URL = 'itsa.open-squared.tech'
DATABASE_NAME = 'tradon'
USERNAME = 'admin'
PASSWORD = 'dsproject'

# BUGFIX: the original did `config = config.set_xmlrpc(...)`, rebinding the
# imported `config` module to the call's return value and shadowing it for
# the rest of the script.  Call it without rebinding.
config.set_xmlrpc(f'{HTTPS}{USERNAME}:{PASSWORD}@{SERVER_URL}/{DATABASE_NAME}/')

Company = Model.get('company.company')
Party = Model.get('party.party')
Currency = Model.get('currency.currency')
Purchase = Model.get('purchase.purchase')
Product = Model.get('product.product')
Wb = Model.get('purchase.weight.basis')

# Fetch the records to attach (IDs are environment-specific fixtures)
company = Company(6)
party = Party(2776)

# Create the purchase order
purchase = Purchase()
purchase.company = company
purchase.party = party
purchase.currency = company.currency
purchase.tol_min = Decimal(1)
purchase.wb = Wb(1)

# Purchase line
product = Product(12)  # product id
line = purchase.lines.new()
line.product = product
line.quantity = 10
line.unit_price = product.cost_price

# Save
purchase.save()

print(f"Purchase créée : {purchase.id}")
||||
@@ -0,0 +1,45 @@
|
||||
from proteus import config, Model
from decimal import getcontext, Decimal, ROUND_HALF_UP

# XML-RPC Configuration
HTTPS = 'https://'
SERVER_URL = 'itsa.open-squared.tech'
DATABASE_NAME = 'tradon'
USERNAME = 'admin'
PASSWORD = 'dsproject'

# BUGFIX: the original did `config = config.set_xmlrpc(...)`, shadowing the
# imported `config` module with the call's return value.
config.set_xmlrpc(f'{HTTPS}{USERNAME}:{PASSWORD}@{SERVER_URL}/{DATABASE_NAME}/')

Company = Model.get('company.company')
Party = Model.get('party.party')
Currency = Model.get('currency.currency')
# BUGFIX: the model class was bound to lowercase `sale` and then clobbered by
# the instance (`sale = sale()`), making the class unreachable afterwards.
# Follow the sibling scripts' convention: capitalized name for the class.
Sale = Model.get('sale.sale')
Product = Model.get('product.product')
Wb = Model.get('purchase.weight.basis')
Location = Model.get('stock.location')

# Fetch the records to attach (IDs are environment-specific fixtures)
company = Company(6)
party = Party(2789)
fromLocation = Location(1247)

# Create the sale order
sale = Sale()
sale.company = company
sale.party = party
sale.currency = company.currency
sale.tol_min = Decimal(1)
sale.wb = Wb(1)
sale.from_location = fromLocation

# Save (no sale line is created yet; add lines here when needed)
sale.save()

print(f"sale créée : {sale.id}")
||||
@@ -0,0 +1,11 @@
|
||||
import sys
from pathlib import Path

# Make the package root importable so `helpers` resolves when this script
# is run directly from its subdirectory.
package_root = Path(__file__).parent.parent
sys.path.insert(0, str(package_root))

# Debug helper: show what the helpers.config module actually exposes.
import helpers.config as cfg

print("Available in config:", dir(cfg))
print("PURCHASE_FEES_CSV value:", getattr(cfg, 'PURCHASE_FEES_CSV', 'NOT FOUND'))
||||
@@ -0,0 +1,398 @@
|
||||
import csv
from proteus import config, Model

# XML-RPC Configuration
# NOTE(review): credentials are hard-coded; move to env vars before sharing.
HTTPS = 'https://'
SERVER_URL = 'itsa.open-squared.tech'
DATABASE_NAME = 'tradon'
USERNAME = 'admin'
PASSWORD = 'dsproject'

# CSV Configuration
# Absolute Windows path to the source file; update per machine.
CSV_FILE_PATH = r'C:\Users\SylvainDUVERNAY\Open Squared\Production - Documents\TRADON Implementation\ITSA\Reference Data\Loaders\Parties.csv'

# Default values
DEFAULT_COUNTRY = 'US'  # Default country code if not specified
|
||||
|
||||
def connect_to_tryton():
    """Open the XML-RPC connection to Tryton.

    Prints connection details, returns True on success; on failure prints
    troubleshooting hints and returns False.
    """
    print(f"Connecting to Tryton server: {SERVER_URL}")
    print(f"Database: {DATABASE_NAME}")
    print(f"Username: {USERNAME}")

    url = f'{HTTPS}{USERNAME}:{PASSWORD}@{SERVER_URL}/{DATABASE_NAME}/'
    try:
        config.set_xmlrpc(url)
    except Exception as exc:
        print(f"✗ Connection failed: {exc}")
        print("\nTroubleshooting:")
        print("  - Verify the server URL is correct and accessible")
        print("  - Check that the Tryton server is running")
        print("  - Verify username and password are correct")
        print("  - Make sure you can access the server in a browser")
        return False

    print("✓ Connected successfully!\n")
    return True
|
||||
|
||||
def get_country(country_code):
    """Resolve a country record from an ISO code.

    Falls back to DEFAULT_COUNTRY when the code is empty or unknown, and as a
    last resort returns the first country in the database.  Raises ValueError
    only when the country table is completely empty.
    """
    Country = Model.get('country.country')

    code = country_code or DEFAULT_COUNTRY

    exact = Country.find([('code', '=', code.upper())])
    if exact:
        return exact[0]

    print(f"  ⚠ Warning: Country '{code}' not found, using '{DEFAULT_COUNTRY}'")
    fallback = Country.find([('code', '=', DEFAULT_COUNTRY)])
    if fallback:
        return fallback[0]

    # Last resort: any country at all, so the import can keep going.
    everything = Country.find([])
    if everything:
        print(f"  ⚠ Using first available country: {everything[0].name}")
        return everything[0]

    raise ValueError("No countries found in database!")
|
||||
|
||||
def get_subdivision(country, subdivision_code):
    """Resolve a subdivision (state/province) record for ``country``.

    Tries the fully qualified ISO form "<country>-<code>" first, then a
    suffix match within the same country.  Returns None (with a warning)
    when nothing matches or no code was given.
    """
    if not subdivision_code:
        return None

    Subdivision = Model.get('country.subdivision')

    # Two search strategies, in order of strictness.
    domains = [
        [('code', '=', f"{country.code}-{subdivision_code}"),
         ('country', '=', country.id)],
        [('code', 'ilike', f"%{subdivision_code}"),
         ('country', '=', country.id)],
    ]
    for domain in domains:
        hits = Subdivision.find(domain)
        if hits:
            return hits[0]

    print(f"  ⚠ Warning: Subdivision '{subdivision_code}' not found for country {country.code}")
    return None
|
||||
|
||||
def check_party_exists_by_name(name):
    """Return the existing party whose name matches exactly, or None."""
    Party = Model.get('party.party')
    matches = Party.find([('name', '=', name)])
    if matches:
        return matches[0]
    return None
|
||||
|
||||
|
||||
|
||||
def create_party_with_addresses(row):
    """Create a party (and optionally one address) from a cleaned CSV row.

    The party is saved first, then the address is attached separately
    (Tryton may auto-create an empty address on party creation, so a
    cleanup pass deletes any address with no street/city/postal_code).
    Returns the freshly reloaded party record.
    """
    Party = Model.get('party.party')
    Address = Model.get('party.address')

    # Create the party; the code is auto-generated by Tryton's sequence.
    party = Party()
    party.name = row['name']
    if row.get('tax_identifier'):
        party.tax_identifier = row['tax_identifier']
    if row.get('vat_code'):
        party.vat_code = row['vat_code']

    # Persist the party before any address references it.
    party.save()

    # An address is only worth creating when there is real location data.
    street = row.get('street')
    city = row.get('city')
    postal_code = row.get('postal_code')
    country_code = row.get('country_code')

    if street or city:
        address = Address()
        address.party = party  # link to the party saved above

        if row.get('address_name'):
            address.name = row['address_name']
        if street:
            address.street = street
        if city:
            address.city = city
        # Tryton's field is postal_code (not zip)
        if postal_code:
            address.postal_code = postal_code

        # Country is mandatory on the address; fall back to the default.
        address.country = get_country(country_code if country_code else DEFAULT_COUNTRY)

        # Optional state/province
        if row.get('subdivision_code'):
            subdivision = get_subdivision(address.country, row['subdivision_code'])
            if subdivision:
                address.subdivision = subdivision

        address.save()

    # Reload to see any addresses Tryton auto-created alongside ours.
    party = Party(party.id)

    # An address counts as empty when street, city and postal_code are all
    # blank or whitespace-only.
    def _is_blank(value):
        return not value or not value.strip()

    empties = [
        addr for addr in party.addresses
        if _is_blank(addr.street) and _is_blank(addr.city) and _is_blank(addr.postal_code)
    ]

    if empties:
        Address.delete(empties)
        print(f"  ℹ Cleaned up {len(empties)} empty address(es)")

    # Return a clean, fully reloaded record.
    return Party(party.id)
|
||||
|
||||
|
||||
def import_parties(csv_file):
    """Import parties from a CSV file.

    Reads the file with utf-8-sig (to swallow a BOM), skips rows whose name
    is empty, already processed in this run, or already present in the
    database, and prints a per-row log plus a final summary.

    BUGFIX: csv.DictReader fills missing trailing fields with None (its
    default ``restval``), so the previous ``row.get(key, '').strip()`` calls
    raised AttributeError on short rows — the ``.get`` default never applies
    when the key exists with value None.  All reads now use
    ``(row.get(key) or '').strip()``.
    """

    imported_count = 0
    skipped_count = 0
    error_count = 0
    errors = []

    # Track names we've already processed in this run (CSV-level dedup)
    processed_names = set()

    print(f"{'='*70}")
    print(f"Importing parties from: {csv_file}")
    print(f"{'='*70}\n")

    try:
        # Open with utf-8-sig to handle BOM
        with open(csv_file, 'r', encoding='utf-8-sig') as file:
            reader = csv.DictReader(file)

            # Debug: Show detected columns
            print(f"Detected columns: {reader.fieldnames}\n")

            for row_num, row in enumerate(reader, start=2):
                # Pre-initialize so the error handler below can never hit a
                # NameError if the very first statement of the try raises.
                name = ''
                try:
                    # Clean up values (None-safe: see BUGFIX note above)
                    name = (row.get('name') or '').strip()
                    tax_identifier = (row.get('tax_identifier') or '').strip()
                    vat_code = (row.get('vat_code') or '').strip()

                    # Address fields
                    address_name = (row.get('address_name') or '').strip()
                    street = (row.get('street') or '').strip()
                    city = (row.get('city') or '').strip()

                    # Handle both 'zip' and 'postal_code' column names
                    postal_code = ((row.get('postal_code') or '').strip()
                                   or (row.get('zip') or '').strip())

                    country_code = (row.get('country_code') or '').strip()
                    subdivision_code = (row.get('subdivision_code') or '').strip()

                    # Skip empty rows
                    if not name:
                        continue

                    # Treat sentinel values exported by the source system as empty
                    if postal_code and postal_code.upper() in ['NULL', '0']:
                        postal_code = ''

                    print(f"Processing Row {row_num}: {name}")

                    # Check if we've already processed this name in this import run
                    if name in processed_names:
                        print(f"  ⚠ Duplicate name in CSV: '{name}'")
                        print(f"  Skipping duplicate entry...\n")
                        skipped_count += 1
                        continue

                    # Check if party already exists in database
                    existing_party = check_party_exists_by_name(name)
                    if existing_party:
                        print(f"  ⚠ Party '{name}' already exists with code: {existing_party.code}")
                        print(f"  Skipping...\n")
                        skipped_count += 1
                        processed_names.add(name)
                        continue

                    # Create the party with address
                    row_data = {
                        'name': name,
                        'tax_identifier': tax_identifier,
                        'vat_code': vat_code,
                        'address_name': address_name,
                        'street': street,
                        'city': city,
                        'postal_code': postal_code,
                        'country_code': country_code,
                        'subdivision_code': subdivision_code
                    }

                    party = create_party_with_addresses(row_data)

                    # Mark this name as processed
                    processed_names.add(name)

                    print(f"  ✓ Created party")
                    print(f"    Party ID: {party.id}")
                    print(f"    Auto-generated Code: {party.code}")
                    print(f"    Name: {name}")
                    if tax_identifier:
                        print(f"    Tax Identifier: {tax_identifier}")
                    if vat_code:
                        print(f"    VAT Code: {vat_code}")
                    if party.addresses:
                        print(f"    Addresses: {len(party.addresses)}")
                        for addr in party.addresses:
                            addr_street = (addr.street[:50] + '...') if addr.street and len(addr.street) > 50 else (addr.street or 'N/A')
                            addr_city = addr.city if addr.city else 'N/A'
                            addr_postal = addr.postal_code if addr.postal_code else 'N/A'
                            print(f"      - {addr_street}")
                            print(f"        {addr_city}, {addr_postal}")
                    else:
                        print(f"    Addresses: 0 (no address data provided)")
                    print()

                    imported_count += 1

                except Exception as e:
                    error_msg = f"Row {row_num} - {name}: {str(e)}"
                    errors.append(error_msg)
                    error_count += 1
                    print(f"✗ Error on row {row_num}: {e}\n")
                    import traceback
                    traceback.print_exc()

        # Summary
        print(f"{'='*70}")
        print("IMPORT SUMMARY")
        print(f"{'='*70}")
        print(f"Successfully imported: {imported_count} parties")
        print(f"Skipped (already exist or duplicates): {skipped_count} parties")
        print(f"Errors: {error_count}")

        if errors:
            print(f"\nError details:")
            for error in errors:
                print(f"  - {error}")

        print(f"\n{'='*70}")

    except FileNotFoundError:
        print(f"✗ Error: CSV file not found at {csv_file}")
        print(f"Please update CSV_FILE_PATH in the script with the correct path.")
    except Exception as e:
        print(f"✗ Fatal error: {e}")
        import traceback
        traceback.print_exc()
|
||||
|
||||
def verify_import():
    """Print a table of the 20 most recently created parties for eyeballing."""
    Party = Model.get('party.party')

    print(f"\n{'='*70}")
    print("VERIFICATION - Parties")
    print(f"{'='*70}\n")

    # Newest first, so the slice below shows what this run just created.
    parties = Party.find([], order=[('id', 'DESC')])

    if not parties:
        print("No parties found")
        print()
        return

    print(f"Found {len(parties)} parties (showing last 20):\n")
    print(f"{'Code':<15} {'Name':<40} {'Addresses':<10}")
    print("-" * 70)

    for party in parties[:20]:
        code = party.code or 'N/A'
        display_name = party.name[:39] if party.name else 'N/A'
        addr_count = len(party.addresses) if party.addresses else 0
        print(f"{code:<15} {display_name:<40} {addr_count:<10}")

    print()
|
||||
|
||||
def list_available_countries():
    """Print the first 20 countries present in the database."""
    print(f"\n{'='*70}")
    print("AVAILABLE COUNTRIES (first 20)")
    print(f"{'='*70}\n")

    Country = Model.get('country.country')
    countries = Country.find([])

    if not countries:
        print("No countries found")
        print()
        return

    print(f"Found {len(countries)} countries:\n")
    for country in countries[:20]:
        print(f"  - {country.code}: {country.name}")
    remaining = len(countries) - 20
    if remaining > 0:
        print(f"  ... and {remaining} more")

    print()
|
||||
|
||||
def main():
    """Entry point: connect, import parties from CSV, then verify.

    Returns 0 on success, 1 when the Tryton connection cannot be opened.
    """
    banner = [
        "="*70,
        "TRYTON PARTY IMPORT SCRIPT",
        "Using Proteus with XML-RPC Connection",
        "Party codes will be auto-generated by Tryton",
        "="*70,
        "",
    ]
    for line in banner:
        print(line)

    # Abort early if the XML-RPC connection cannot be established.
    if not connect_to_tryton():
        return 1

    # Optional: list_available_countries() — uncomment to see what country
    # records exist in your database before importing.

    import_parties(CSV_FILE_PATH)
    verify_import()

    return 0

if __name__ == '__main__':
    exit(main())
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,364 @@
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
# Add parent directory to Python path so we can import helpers
|
||||
parent_dir = Path(__file__).parent.parent
|
||||
sys.path.insert(0, str(parent_dir))
|
||||
|
||||
import csv
|
||||
from decimal import Decimal
|
||||
from proteus import config, Model
|
||||
|
||||
from helpers.config import (
|
||||
PURCHASE_FEES_CSV,
|
||||
connect_to_tryton)
|
||||
|
||||
from helpers.tryton_helpers import (
|
||||
find_party_by_name,
|
||||
find_product_by_code,
|
||||
find_purchase_contract_by_ref,
|
||||
find_contract_line_by_sequence,
|
||||
find_currency_by_code,
|
||||
parse_decimal,
|
||||
find_supplier_category,
|
||||
ensure_party_is_supplier,
|
||||
find_fee_mode_by_name,
|
||||
find_payable_receivable_by_name,
|
||||
get_existing_fees_for_line,
|
||||
fee_already_exists)
|
||||
|
||||
|
||||
# CSV Configuration
|
||||
CSV_FILE_PATH = PURCHASE_FEES_CSV
|
||||
|
||||
|
||||
# Import options
|
||||
AUTO_ENABLE_SUPPLIER = True # Set to False to skip auto-enabling supplier flag
|
||||
SKIP_NON_SUPPLIERS = False # Set to True to skip parties that aren't suppliers
|
||||
|
||||
|
||||
def import_purchase_contract_fees(csv_file):
    """Import purchase-contract line fees from a CSV file.

    Expected columns: contract_ref, line_sequence, product, supplier,
    currency, p_r, mode, price, unit.  Consecutive rows for the same
    contract/line reuse the previously resolved records.  Prints a per-row
    log and a final summary; returns None.
    """
    print(f"{'='*70}")
    print("IMPORTING PURCHASE CONTRACT LINE FEES")
    print(f"{'='*70}\n")

    # The fee model name is customization-specific; fail early if missing.
    try:
        PurchaseLineFee = Model.get('fee.fee')
    except Exception as e:
        print(f"✗ Error: Could not load fee.fee model - {e}")
        print("Please ensure the model name is correct for your Tryton customization")
        return

    imported_count = 0
    skipped_count = 0
    error_count = 0
    errors = []

    try:
        with open(csv_file, 'r', encoding='utf-8-sig') as file:
            reader = csv.DictReader(file)

            # Cache of the most recently resolved contract and line, so that
            # consecutive rows for the same contract avoid repeated lookups.
            current_contract_ref = None
            current_contract = None
            current_line_sequence = None
            current_line = None

            for row_num, row in enumerate(reader, start=2):  # Start at 2 (header is row 1)
                # Pre-initialize so the except handler can always build its
                # message, even when the row fails before assignment.
                contract_ref = ''
                try:
                    # Extract data from CSV.  DictReader yields None for short
                    # rows, so guard with `or ''` before stripping.
                    contract_ref = (row.get('contract_ref') or '').strip()
                    line_sequence = (row.get('line_sequence') or '').strip()
                    product_code = (row.get('product') or '').strip()
                    supplier_name = (row.get('supplier') or '').strip()
                    currency_code = (row.get('currency') or '').strip()
                    p_r_value = (row.get('p_r') or '').strip()
                    mode_name = (row.get('mode') or '').strip()
                    price_value = (row.get('price') or '').strip()
                    unit_value = (row.get('unit') or '').strip()

                    print(f"Processing row {row_num}: {contract_ref} - Line {line_sequence} - {product_code}")

                    # Validate required fields
                    if not contract_ref:
                        print(f" ✗ Skipping: Missing contract_ref\n")
                        skipped_count += 1
                        continue

                    if not line_sequence:
                        print(f" ✗ Skipping: Missing line_sequence\n")
                        skipped_count += 1
                        continue

                    if not product_code:
                        print(f" ✗ Skipping: Missing product\n")
                        skipped_count += 1
                        continue

                    # Resolve (and cache) the contract when it changes.
                    if contract_ref != current_contract_ref:
                        current_contract = find_purchase_contract_by_ref(contract_ref)
                        current_contract_ref = contract_ref
                        current_line_sequence = None
                        current_line = None

                    if not current_contract:
                        print(f" ✗ Skipping: Contract not found\n")
                        skipped_count += 1
                        continue

                    # Resolve (and cache) the line when it changes.
                    if line_sequence != current_line_sequence:
                        current_line = find_contract_line_by_sequence(current_contract, line_sequence)
                        current_line_sequence = line_sequence

                    if not current_line:
                        print(f" ✗ Skipping: Contract line not found\n")
                        skipped_count += 1
                        continue

                    # Find related records
                    product = find_product_by_code(product_code)
                    if not product:
                        print(f" ✗ Skipping: Product not found\n")
                        skipped_count += 1
                        continue

                    supplier = find_party_by_name(supplier_name)
                    if not supplier:
                        print(f" ✗ Skipping: Supplier not found\n")
                        skipped_count += 1
                        continue

                    # Ensure party has SUPPLIER category
                    supplier, is_supplier = ensure_party_is_supplier(supplier, auto_enable=AUTO_ENABLE_SUPPLIER)

                    if not is_supplier:
                        if SKIP_NON_SUPPLIERS:
                            print(f" ⚠ Skipping purchase - party does not have SUPPLIER category\n")
                            skipped_count += 1
                            continue
                        else:
                            error_msg = f"Row {row_num}: Party '{supplier.rec_name}' does not have SUPPLIER category"
                            errors.append(error_msg)
                            error_count += 1
                            continue

                    currency = find_currency_by_code(currency_code)
                    if not currency:
                        print(f" ✗ Skipping: Currency not found\n")
                        skipped_count += 1
                        continue

                    # Parse price
                    price = parse_decimal(price_value, 'price')
                    if price is None:
                        print(f" ✗ Skipping: Invalid price\n")
                        skipped_count += 1
                        continue

                    # Determine payable/receivable
                    payable_receivable = find_payable_receivable_by_name(p_r_value)

                    # Find fee mode
                    mode = find_fee_mode_by_name(mode_name)

                    # Skip duplicates already attached to this contract line.
                    existing_fees = get_existing_fees_for_line(current_line)
                    if fee_already_exists(existing_fees, product, supplier, price):
                        print(f" ○ Fee already exists for this line\n")
                        skipped_count += 1
                        continue

                    # Create the fee
                    fee = PurchaseLineFee()
                    fee.line = current_line
                    fee.product = product
                    fee.supplier = supplier
                    fee.currency = currency
                    fee.price = price

                    # NOTE(review): the blocks below are gated on `mode` being
                    # found even though type/weight_type/p_r look unrelated to
                    # it — confirm this coupling is intentional.
                    if mode and hasattr(fee, 'type'):
                        fee.type = 'ordered'  # Assuming all imported fees are 'ordered'

                    if mode and hasattr(fee, 'weight_type'):
                        fee.weight_type = 'brut'

                    if mode and hasattr(fee, 'p_r'):
                        fee.p_r = payable_receivable

                    if mode and hasattr(fee, 'mode'):
                        fee.mode = mode

                    # Set unit if field exists: match by symbol first, then name.
                    if unit_value and hasattr(fee, 'unit'):
                        Unit = Model.get('product.uom')
                        units = Unit.find([('symbol', '=', unit_value)])
                        if not units:
                            units = Unit.find([('name', '=', unit_value)])
                        if units:
                            fee.unit = units[0]

                    # Save the fee
                    fee.save()

                    print(f" ✓ Fee created successfully")
                    print(f" Product: {product.rec_name}")
                    print(f" Supplier: {supplier.rec_name}")
                    print(f" Price: {price} {currency.code}")
                    print(f" Type: {payable_receivable}")
                    print()

                    imported_count += 1

                except Exception as e:
                    error_msg = f"Row {row_num} - {contract_ref}: {str(e)}"
                    errors.append(error_msg)
                    error_count += 1
                    print(f"✗ Error on row {row_num}: {e}\n")
                    import traceback
                    traceback.print_exc()

        # Summary
        print(f"{'='*70}")
        print("IMPORT SUMMARY")
        print(f"{'='*70}")
        print(f"Successfully imported: {imported_count} fees")
        print(f"Skipped (missing data or already exist): {skipped_count} fees")
        print(f"Errors: {error_count}")

        if errors:
            print(f"\nError details:")
            for error in errors:
                print(f" - {error}")

        print(f"\n{'='*70}")

    except FileNotFoundError:
        print(f"✗ Error: CSV file not found at {csv_file}")
        print(f"Please update CSV_FILE_PATH in the script with the correct path.")
    except Exception as e:
        print(f"✗ Fatal error: {e}")
        import traceback
        traceback.print_exc()
|
||||
|
||||
|
||||
def verify_import():
    """Print a table of the most recently created purchase-contract fees."""
    sep = '=' * 70
    print(f"\n{sep}")
    print("VERIFICATION - Purchase Contract Line Fees")
    print(f"{sep}\n")

    try:
        FeeModel = Model.get('fee.fee')

        # Newest first, so the slice below shows the latest records.
        fees = FeeModel.find([], order=[('id', 'DESC')])

        if not fees:
            print("No fees found")
        else:
            print(f"Found {len(fees)} fees (showing last 50):\n")
            print(f"{'ID':<8} {'Contract':<15} {'Product':<25} {'Supplier':<25} {'Price':<12} {'Type':<12}")
            print("-" * 105)

            for fee in fees[:50]:
                # Walk fee -> line -> purchase to recover the contract ref.
                ref = 'N/A'
                line = getattr(fee, 'line', None)
                if line:
                    purchase = getattr(line, 'purchase', None)
                    if purchase and getattr(purchase, 'reference', None):
                        ref = str(purchase.reference)[:14]

                prod = getattr(fee, 'product', None)
                product_name = prod.rec_name[:24] if prod else 'N/A'
                supp = getattr(fee, 'supplier', None)
                supplier_name = supp.rec_name[:24] if supp else 'N/A'
                price_text = f"{fee.price:.2f}" if getattr(fee, 'price', None) else 'N/A'

                # Prefer 'type'; fall back to 'payable_receivable'.
                fee_type = 'N/A'
                if hasattr(fee, 'type'):
                    fee_type = fee.type
                elif hasattr(fee, 'payable_receivable'):
                    fee_type = fee.payable_receivable

                print(f"{fee.id:<8} {ref:<15} {product_name:<25} {supplier_name:<25} {price_text:<12} {fee_type:<12}")

        print()

    except Exception as e:
        print(f"✗ Error during verification: {e}")
        import traceback
        traceback.print_exc()
|
||||
|
||||
|
||||
def list_purchase_contracts():
    """Debug helper: print the 20 most recent purchase contracts."""
    Purchase = Model.get('purchase.purchase')

    sep = '=' * 70
    print(f"\n{sep}")
    print("AVAILABLE PURCHASE CONTRACTS (first 20)")
    print(sep)

    recent = Purchase.find([], order=[('id', 'DESC')], limit=20)

    if not recent:
        print("No purchase contracts found")
    else:
        print(f"{'ID':<8} {'Reference':<20} {'Party':<30} {'State':<12}")
        print("-" * 70)

        for contract in recent:
            ref = contract.reference[:19] if contract.reference else 'N/A'
            party_name = contract.party.rec_name[:29] if contract.party else 'N/A'
            state = contract.state if contract.state else 'N/A'
            print(f"{contract.id:<8} {ref:<20} {party_name:<30} {state:<12}")

            # Show the number of contract lines, when any exist.
            if getattr(contract, 'lines', None):
                print(f" Lines: {len(contract.lines)}")

    print(f"{sep}\n")
|
||||
|
||||
|
||||
def main():
    """Script entry point: connect, import purchase-contract fees, verify."""
    banner = "=" * 70
    print(banner)
    print("TRYTON PURCHASE CONTRACT FEE IMPORT SCRIPT")
    print("Using Proteus with XML-RPC Connection")
    print(banner)
    print()

    if not connect_to_tryton():
        return 1

    # Optional: List purchase contracts for debugging
    # Uncomment the following line to see available contracts
    # list_purchase_contracts()

    import_purchase_contract_fees(CSV_FILE_PATH)
    verify_import()
    return 0


if __name__ == '__main__':
    exit(main())
exit(main())
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,356 @@
|
||||
import csv
|
||||
from proteus import config, Model
|
||||
from decimal import Decimal
|
||||
|
||||
# XML-RPC Configuration
|
||||
HTTPS = 'https://'
|
||||
SERVER_URL = 'itsa.open-squared.tech'
|
||||
DATABASE_NAME = 'tradon'
|
||||
USERNAME = 'admin'
|
||||
PASSWORD = 'dsproject'
|
||||
|
||||
# CSV Configuration
|
||||
CSV_FILE_PATH = r'C:\Users\SylvainDUVERNAY\Open Squared\Production - Documents\TRADON Implementation\ITSA\Reference Data\Loaders\Services.csv' # UPDATE THIS PATH!
|
||||
|
||||
# Product configuration
|
||||
PRODUCT_TYPE = 'service' # Service type products
|
||||
DEFAULT_CATEGORY = 'SERVICES' # Default category name if not found
|
||||
DEFAULT_UOM = 'Mt' # Default UOM if not found
|
||||
|
||||
def connect_to_tryton():
    """Establish connection to Tryton via XML-RPC.

    Builds the connection URL from the module-level HTTPS, USERNAME,
    PASSWORD, SERVER_URL and DATABASE_NAME constants.  Returns True on
    success, False on failure (with troubleshooting hints printed).

    NOTE(review): credentials are embedded in module constants and the URL;
    consider loading them from environment variables instead.
    """
    print(f"Connecting to Tryton server: {SERVER_URL}")
    print(f"Database: {DATABASE_NAME}")
    print(f"Username: {USERNAME}")

    try:
        # Credentials are passed inline in the URL, as proteus expects.
        config.set_xmlrpc(f'{HTTPS}{USERNAME}:{PASSWORD}@{SERVER_URL}/{DATABASE_NAME}/')

        print("✓ Connected successfully!\n")
        return True
    except Exception as e:
        print(f"✗ Connection failed: {e}")
        print("\nTroubleshooting:")
        print(" - Verify the server URL is correct and accessible")
        print(" - Check that the Tryton server is running")
        print(" - Verify username and password are correct")
        print(" - Make sure you can access the server in a browser")
        return False
|
||||
|
||||
def get_or_create_category(category_name):
    """Return the product category named *category_name*, creating it if absent."""
    Category = Model.get('product.category')

    matches = Category.find([('name', '=', category_name)])
    if matches:
        print(f" Found existing category: {category_name}")
        return matches[0]

    # Not found: create and persist a fresh category record.
    category = Category()
    category.name = category_name
    category.save()
    print(f" ✓ Created new category: {category_name}")
    return category
|
||||
|
||||
def get_uom(uom_name):
    """Find a Unit of Measure by name.

    Lookup order: exact name match, case-insensitive match, the configured
    DEFAULT_UOM, then the first UOM in the database.  Raises ValueError when
    the database contains no UOMs at all.
    """
    Uom = Model.get('product.uom')

    # Try exact match first
    uoms = Uom.find([('name', '=', uom_name)])
    if uoms:
        return uoms[0]

    # Case-insensitive fallback: fetch all UOMs once and reuse the list below.
    all_uoms = Uom.find([])
    for uom in all_uoms:
        if uom.name.lower() == uom_name.lower():
            return uom

    # Fall back to the configured default UOM.
    print(f" ⚠ Warning: UOM '{uom_name}' not found, using '{DEFAULT_UOM}'")
    default_uoms = Uom.find([('name', '=', DEFAULT_UOM)])
    if default_uoms:
        return default_uoms[0]

    # Last resort: reuse the already-fetched list instead of querying again.
    if all_uoms:
        print(f" ⚠ Using first available UOM: {all_uoms[0].name}")
        return all_uoms[0]

    raise ValueError("No UOM found in database!")
|
||||
|
||||
def check_product_exists(code):
    """Return the existing product whose code equals *code*, or None."""
    Product = Model.get('product.product')
    matches = Product.find([('code', '=', code)])
    if not matches:
        return None
    return matches[0]
|
||||
|
||||
def create_service_product(row, categories, uom):
    """Create a service product (template + auto-created variant) from a CSV row.

    row        -- dict with 'name', 'code', 'sale_price', 'cost_price' and an
                  optional 'description'
    categories -- a single category record or a list of them (Many2Many)
    uom        -- default unit-of-measure record

    Returns the variant Tryton auto-created when the template was saved.
    Raises ValueError if no variant was auto-created.
    """
    Template = Model.get('product.template')

    # Create template
    template = Template()
    template.name = row['name']
    template.code = row['code']
    template.type = PRODUCT_TYPE
    template.list_price = Decimal(row['sale_price']) if row['sale_price'] else Decimal('0.00')
    template.cost_price_method = 'fixed'  # Services use fixed cost price
    template.default_uom = uom

    # Link to categories (Many2Many relationship): proteus requires
    # append()/extend() rather than direct assignment.
    if isinstance(categories, list):
        template.categories.extend(categories)
    else:
        template.categories.append(categories)

    template.salable = False  # Services are not salable products by default
    template.purchasable = True  # Services are purchasable

    if row.get('description'):
        template.description = row['description']

    # Save the template first; Tryton then auto-creates a default variant.
    template.save()

    if template.products:
        product = template.products[0]
        # Use suffix_code to set the product code on the variant.
        product.suffix_code = row['code']
        # Cost price is set on the variant, not the template.
        product.cost_price = Decimal(row['cost_price']) if row['cost_price'] else Decimal('0.00')
        product.save()
        return product
    else:
        raise ValueError("No product was created automatically with template")
|
||||
|
||||
def import_services(csv_file):
    """Import service products from a CSV file.

    Expected columns: code, name, category, uom, sale_price, cost_price,
    description.  Rows whose code already exists are skipped.  Prints a
    per-row log and a final summary; returns None.
    """
    imported_count = 0
    skipped_count = 0
    error_count = 0
    errors = []

    print(f"{'='*70}")
    print(f"Importing service products from: {csv_file}")
    print(f"{'='*70}\n")

    try:
        # Open with utf-8-sig to handle BOM
        with open(csv_file, 'r', encoding='utf-8-sig') as file:
            reader = csv.DictReader(file)

            # Debug: Show detected columns
            print(f"Detected columns: {reader.fieldnames}\n")

            for row_num, row in enumerate(reader, start=2):
                # Pre-initialize so the except handler can always format its
                # message, even when the row fails before these are assigned.
                code = ''
                name = ''
                try:
                    # Clean up values.  DictReader yields None for short rows,
                    # so guard with `or` before stripping.
                    code = (row.get('code') or '').strip()
                    name = (row.get('name') or '').strip()
                    category_name = (row.get('category') or DEFAULT_CATEGORY).strip() or DEFAULT_CATEGORY
                    uom_name = (row.get('uom') or DEFAULT_UOM).strip() or DEFAULT_UOM
                    sale_price = (row.get('sale_price') or '0.00').strip() or '0.00'
                    cost_price = (row.get('cost_price') or '0.00').strip() or '0.00'
                    description = (row.get('description') or '').strip()

                    # Skip empty rows
                    if not code and not name:
                        continue

                    # Validate required fields
                    if not code or not name:
                        errors.append(f"Row {row_num}: Missing code or name")
                        error_count += 1
                        print(f"✗ Row {row_num}: Missing required fields")
                        continue

                    print(f"Processing Row {row_num}: {code} - {name}")

                    # Check if product already exists
                    existing_product = check_product_exists(code)
                    if existing_product:
                        print(f" ⚠ Product code '{code}' already exists: {existing_product.template.name}")
                        print(f" Skipping...\n")
                        skipped_count += 1
                        continue

                    # Get or create category
                    category = get_or_create_category(category_name)

                    # Get UOM
                    uom = get_uom(uom_name)
                    print(f" Using UOM: {uom.name}")

                    # Create the product
                    row_data = {
                        'code': code,
                        'name': name,
                        'sale_price': sale_price,
                        'cost_price': cost_price,
                        'description': description
                    }
                    product = create_service_product(row_data, category, uom)

                    print(f" ✓ Created service product")
                    print(f" Product ID: {product.id}, Template ID: {product.template.id}")
                    print(f" Code: {code}")
                    print(f" Category: {category.name}")
                    print(f" Sale Price: {sale_price}")
                    print(f" Cost Price: {cost_price}")
                    if description:
                        print(f" Description: {description[:50]}...")
                    print()

                    imported_count += 1

                except Exception as e:
                    error_msg = f"Row {row_num} - {code} ({name}): {str(e)}"
                    errors.append(error_msg)
                    error_count += 1
                    print(f"✗ Error on row {row_num}: {e}\n")
                    import traceback
                    traceback.print_exc()

        # Summary
        print(f"{'='*70}")
        print("IMPORT SUMMARY")
        print(f"{'='*70}")
        print(f"Successfully imported: {imported_count} service products")
        print(f"Skipped (already exist): {skipped_count} products")
        print(f"Errors: {error_count}")

        if errors:
            print(f"\nError details:")
            for error in errors:
                print(f" - {error}")

        print(f"\n{'='*70}")

    except FileNotFoundError:
        print(f"✗ Error: CSV file not found at {csv_file}")
        print(f"Please update CSV_FILE_PATH in the script with the correct path.")
    except Exception as e:
        print(f"✗ Fatal error: {e}")
        import traceback
        traceback.print_exc()
|
||||
|
||||
def verify_import():
    """Print a table of all service-type products currently in the database."""
    Product = Model.get('product.product')

    sep = '=' * 70
    print(f"\n{sep}")
    print("VERIFICATION - Service Products")
    print(f"{sep}\n")

    # Find all service type products
    products = Product.find([('template.type', '=', 'service')])

    if not products:
        print("No service products found")
    else:
        print(f"Found {len(products)} service products:\n")
        print(f"{'Code':<12} {'Name':<30} {'Categories':<25} {'Sale Price':<12}")
        print("-" * 85)

        for product in products:
            tmpl = product.template
            code = product.code or 'N/A'
            name = tmpl.name[:29] if tmpl.name else 'N/A'

            # Join the Many2Many category names, truncated for the table.
            if tmpl.categories:
                cat_text = ', '.join(cat.name for cat in tmpl.categories)[:24]
            else:
                cat_text = 'N/A'

            price_text = f"{tmpl.list_price:.2f}" if tmpl.list_price else '0.00'
            print(f"{code:<12} {name:<30} {cat_text:<25} {price_text:<12}")

    print()
|
||||
|
||||
def list_available_uoms():
    """Print every unit of measure defined in the database."""
    sep = '=' * 70
    print(f"\n{sep}")
    print("AVAILABLE UNITS OF MEASURE")
    print(f"{sep}\n")

    uoms = Model.get('product.uom').find([])

    if not uoms:
        print("No UOMs found")
    else:
        print(f"Found {len(uoms)} UOMs:\n")
        for uom in uoms:
            symbol = f"({uom.symbol})" if getattr(uom, 'symbol', None) else ""
            print(f" - {uom.name} {symbol}")

    print()
|
||||
|
||||
def list_available_categories():
    """Print every product category defined in the database."""
    sep = '=' * 70
    print(f"\n{sep}")
    print("AVAILABLE PRODUCT CATEGORIES")
    print(f"{sep}\n")

    categories = Model.get('product.category').find([])

    if not categories:
        print("No categories found")
    else:
        print(f"Found {len(categories)} categories:\n")
        for cat in categories:
            print(f" - {cat.name}")

    print()
|
||||
|
||||
def main():
    """Entry point: connect, import service products, verify the result."""
    banner = "=" * 70
    print(banner)
    print("TRYTON SERVICE PRODUCT IMPORT SCRIPT")
    print("Using Proteus with XML-RPC Connection")
    print(banner)
    print()

    if not connect_to_tryton():
        return 1

    # Optional: List available UOMs and categories
    # Uncomment these if you want to see what's available in your database
    # list_available_uoms()
    # list_available_categories()

    import_services(CSV_FILE_PATH)
    verify_import()
    return 0


if __name__ == '__main__':
    exit(main())
|
||||
@@ -0,0 +1,310 @@
|
||||
import csv
|
||||
from proteus import config, Model
|
||||
from decimal import Decimal
|
||||
|
||||
# Configuration
|
||||
DATABASE_NAME = 'tradon'
|
||||
CSV_FILE_PATH = r'C:\Users\SylvainDUVERNAY\Open Squared\Production - Documents\TRADON Implementation\ITSA\Reference Data\Loaders\Services.csv' # UPDATE THIS PATH!
|
||||
|
||||
# Product configuration
|
||||
PRODUCT_TYPE = 'service' # Service type products
|
||||
DEFAULT_CATEGORY = 'Services' # Default category name if not found
|
||||
DEFAULT_UOM = 'Mt' # Default UOM if not found
|
||||
|
||||
def connect_to_tryton():
    """Establish connection to the Tryton database via XML-RPC.

    Returns True on success, False otherwise.

    NOTE(review): the URL embeds admin credentials in plain text; move them
    to environment variables before sharing this script.
    """
    print(f"Connecting to Tryton database: {DATABASE_NAME}")
    try:
        config.set_xmlrpc('https://admin:dsproject@itsa.open-squared.tech/tradon/')

        print("✓ Connected successfully!\n")
        return True
    except Exception as e:
        print(f"✗ Connection failed: {e}")
        return False
|
||||
|
||||
def get_or_create_category(category_name):
    """Look up the product category by name, creating it when missing."""
    Category = Model.get('product.category')

    existing = Category.find([('name', '=', category_name)])
    if existing:
        print(f" Found existing category: {category_name}")
        return existing[0]

    # No match: create and persist a new category.
    created = Category()
    created.name = category_name
    created.save()
    print(f" ✓ Created new category: {category_name}")
    return created
|
||||
|
||||
def get_uom(uom_name):
    """Find a Unit of Measure by name.

    Lookup order: exact match, case-insensitive match, the 'Unit' UOM, then
    the first UOM available.  Raises ValueError when none exist at all.
    """
    Uom = Model.get('product.uom')

    # Try exact match first
    uoms = Uom.find([('name', '=', uom_name)])
    if uoms:
        return uoms[0]

    # Case-insensitive search: fetch all UOMs once and reuse the list below.
    all_uoms = Uom.find([])
    for uom in all_uoms:
        if uom.name.lower() == uom_name.lower():
            return uom

    # If not found, return Unit (default)
    print(f" ⚠ Warning: UOM '{uom_name}' not found, using 'Unit'")
    default_uoms = Uom.find([('name', '=', 'Unit')])
    if default_uoms:
        return default_uoms[0]

    # Last resort: reuse the already-fetched list instead of querying again.
    if all_uoms:
        print(f" ⚠ Using first available UOM: {all_uoms[0].name}")
        return all_uoms[0]

    raise ValueError("No UOM found in database!")
|
||||
|
||||
def check_product_exists(code):
    """Return the product carrying *code*, or None when no product has it."""
    Product = Model.get('product.product')
    found = Product.find([('code', '=', code)])
    if found:
        return found[0]
    return None
|
||||
|
||||
def create_service_product(row, category, uom):
    """Create a service product (template plus explicit variant) from a CSV row."""
    Product = Model.get('product.product')
    Template = Model.get('product.template')

    # Build and save the template first.
    template = Template()
    template.name = row['name']
    template.type = PRODUCT_TYPE
    template.list_price = Decimal(row['sale_price']) if row['sale_price'] else Decimal('0.00')
    template.cost_price = Decimal(row['cost_price']) if row['cost_price'] else Decimal('0.00')
    template.default_uom = uom
    template.category = category
    template.salable = True
    template.purchasable = False  # Services typically not purchased

    if row.get('description'):
        template.description = row['description']

    template.save()

    # NOTE(review): Tryton may auto-create a variant on template.save();
    # creating another one here could leave a duplicate — confirm against
    # the server configuration.
    product = Product()
    product.template = template
    product.code = row['code']
    product.save()

    return product
|
||||
|
||||
def import_services(csv_file):
    """Import service products from a CSV file.

    Expected columns: code, name, category, uom, sale_price, cost_price,
    description.  Rows whose code already exists are skipped.  Prints a
    per-row log and a final summary; returns None.
    """
    Product = Model.get('product.product')

    imported_count = 0
    skipped_count = 0
    error_count = 0
    errors = []

    print(f"{'='*70}")
    print(f"Importing service products from: {csv_file}")
    print(f"{'='*70}\n")

    try:
        # Open with utf-8-sig to handle BOM
        with open(csv_file, 'r', encoding='utf-8-sig') as file:
            reader = csv.DictReader(file)

            # Debug: Show detected columns
            print(f"Detected columns: {reader.fieldnames}\n")

            for row_num, row in enumerate(reader, start=2):
                # Pre-initialize so the except handler can always format its
                # message, even when the row fails before these are assigned.
                code = ''
                name = ''
                try:
                    # Clean up values.  DictReader yields None for short rows,
                    # so guard with `or` before stripping.
                    code = (row.get('code') or '').strip()
                    name = (row.get('name') or '').strip()
                    category_name = (row.get('category') or DEFAULT_CATEGORY).strip() or DEFAULT_CATEGORY
                    uom_name = (row.get('uom') or DEFAULT_UOM).strip() or DEFAULT_UOM
                    sale_price = (row.get('sale_price') or '0.00').strip()
                    cost_price = (row.get('cost_price') or '0.00').strip()
                    description = (row.get('description') or '').strip()

                    # Skip empty rows
                    if not code and not name:
                        continue

                    # Validate required fields
                    if not code or not name:
                        errors.append(f"Row {row_num}: Missing code or name")
                        error_count += 1
                        print(f"✗ Row {row_num}: Missing required fields")
                        continue

                    print(f"Processing Row {row_num}: {code} - {name}")

                    # Check if product already exists
                    existing_product = check_product_exists(code)
                    if existing_product:
                        print(f" ⚠ Product code '{code}' already exists: {existing_product.template.name}")
                        print(f" Skipping...")
                        skipped_count += 1
                        continue

                    # Get or create category
                    category = get_or_create_category(category_name)

                    # Get UOM
                    uom = get_uom(uom_name)
                    print(f" Using UOM: {uom.name}")

                    # Create the product
                    row_data = {
                        'code': code,
                        'name': name,
                        'sale_price': sale_price,
                        'cost_price': cost_price,
                        'description': description
                    }
                    product = create_service_product(row_data, category, uom)

                    print(f" ✓ Created service product: {name}")
                    print(f" Code: {code}")
                    print(f" Category: {category.name}")
                    print(f" Sale Price: {sale_price}")
                    print(f" Cost Price: {cost_price}")
                    if description:
                        print(f" Description: {description[:50]}...")
                    print()

                    imported_count += 1

                except Exception as e:
                    error_msg = f"Row {row_num} - {code} ({name}): {str(e)}"
                    errors.append(error_msg)
                    error_count += 1
                    print(f"✗ Error on row {row_num}: {e}\n")

        # Summary
        print(f"{'='*70}")
        print("IMPORT SUMMARY")
        print(f"{'='*70}")
        print(f"Successfully imported: {imported_count} service products")
        print(f"Skipped (already exist): {skipped_count} products")
        print(f"Errors: {error_count}")

        if errors:
            print(f"\nError details:")
            for error in errors:
                print(f" - {error}")

        print(f"\n{'='*70}")

    except FileNotFoundError:
        print(f"✗ Error: CSV file not found at {csv_file}")
        print(f"Please update CSV_FILE_PATH in the script with the correct path.")
    except Exception as e:
        print(f"✗ Fatal error: {e}")
        import traceback
        traceback.print_exc()
|
||||
|
||||
def verify_import():
    """Print a table of all service-type products in the database."""
    Product = Model.get('product.product')

    sep = '=' * 70
    print(f"\n{sep}")
    print("VERIFICATION - Service Products")
    print(f"{sep}\n")

    # Find all service type products
    products = Product.find([('template.type', '=', 'service')])

    if not products:
        print("No service products found")
    else:
        print(f"Found {len(products)} service products:\n")
        print(f"{'Code':<12} {'Name':<35} {'Category':<20} {'Sale Price':<12}")
        print("-" * 80)

        for product in products:
            tmpl = product.template
            code = product.code or 'N/A'
            name = tmpl.name[:34] if tmpl.name else 'N/A'
            category = tmpl.category.name if tmpl.category else 'N/A'
            price_text = f"{tmpl.list_price:.2f}" if tmpl.list_price else '0.00'
            print(f"{code:<12} {name:<35} {category:<20} {price_text:<12}")

    print()
|
||||
|
||||
def list_available_uoms():
    """Print every unit of measure defined in the database."""
    sep = '=' * 70
    print(f"\n{sep}")
    print("AVAILABLE UNITS OF MEASURE")
    print(f"{sep}\n")

    uoms = Model.get('product.uom').find([])

    if not uoms:
        print("No UOMs found")
    else:
        print(f"Found {len(uoms)} UOMs:\n")
        for uom in uoms:
            symbol = uom.symbol if hasattr(uom, 'symbol') else 'N/A'
            print(f" - {uom.name} (Symbol: {symbol})")

    print()
|
||||
|
||||
def list_available_categories():
    """Print every product category defined in the database."""
    sep = '=' * 70
    print(f"\n{sep}")
    print("AVAILABLE PRODUCT CATEGORIES")
    print(f"{sep}\n")

    categories = Model.get('product.category').find([])

    if not categories:
        print("No categories found")
    else:
        print(f"Found {len(categories)} categories:\n")
        for cat in categories:
            print(f" - {cat.name}")

    print()
|
||||
|
||||
def main():
    """Entry point: connect to Tryton, import service products, then verify.

    Returns:
        int: 0 on success, 1 when the Tryton connection fails (used as the
        process exit status via ``exit(main())`` below).
    """
    print("="*70)
    print("TRYTON SERVICE PRODUCT IMPORT SCRIPT (using Proteus)")
    print("="*70)
    print()

    # Connect to Tryton; abort early with a non-zero exit code on failure.
    if not connect_to_tryton():
        return 1

    # Optional: List available UOMs and categories
    # Uncomment these if you want to see what's available in your database
    # list_available_uoms()
    # list_available_categories()

    # Import service products from the configured CSV file.
    import_services(CSV_FILE_PATH)

    # Verify import by listing the service products now in the database.
    verify_import()

    return 0


if __name__ == '__main__':
    exit(main())
|
||||
@@ -0,0 +1,397 @@
|
||||
import csv
import psycopg2
from proteus import config, Model


# CSV Configuration
# Windows path to the source CSV of customer stock locations.
CSV_FILE_PATH = r'C:\Users\SylvainDUVERNAY\Open Squared\Production - Documents\TRADON Implementation\ITSA\Reference Data\Loaders\Customer_Stock_Locations.csv'

# XML-RPC Configuration
# NOTE(review): credentials are hardcoded in source — consider moving them
# to environment variables or a config file before sharing this script.
HTTPS = 'https://'
SERVER_URL = 'itsa.open-squared.tech'
DATABASE_NAME = 'tradon'
USERNAME = 'admin'
PASSWORD = 'dsproject'

# PostgreSQL Configuration (for direct database access)
# Used to write lat/lon columns that are not exposed via the Tryton model.
DB_HOST = '72.61.163.139'
DB_PORT = 5433
DB_USER = 'postgres'
DB_PASSWORD = 'dsproject'

# Default values
DEFAULT_TYPE = 'storage'  # Default location type if not specified
|
||||
|
||||
|
||||
def connect_to_tryton():
    """Establish connection to Tryton via XML-RPC.

    Configures the global Proteus connection using the module-level
    credentials. All subsequent ``Model.get`` calls use this connection.

    Returns:
        bool: True when the connection was configured, False on any error
        (the error and troubleshooting hints are printed to stdout).
    """
    print(f"Connecting to Tryton server: {SERVER_URL}")
    print(f"Database: {DATABASE_NAME}")
    print(f"Username: {USERNAME}")

    try:
        # Credentials are embedded in the URL: https://user:pass@host/db/
        config.set_xmlrpc(f'{HTTPS}{USERNAME}:{PASSWORD}@{SERVER_URL}/{DATABASE_NAME}/')
        print("✓ Connected successfully!\n")
        return True
    except Exception as e:
        print(f"✗ Connection failed: {e}")
        print("\nTroubleshooting:")
        print(" - Verify the server URL is correct and accessible")
        print(" - Check that the Tryton server is running")
        print(" - Verify username and password are correct")
        print(" - Make sure you can access the server in a browser")
        return False
|
||||
|
||||
def get_db_connection():
    """Open and return a new PostgreSQL connection, or None on failure.

    The caller owns the returned connection and is responsible for
    closing it. Failures are reported on stdout rather than raised.
    """
    connection_params = dict(
        host=DB_HOST,
        port=DB_PORT,
        database=DATABASE_NAME,
        user=DB_USER,
        password=DB_PASSWORD,
    )
    try:
        return psycopg2.connect(**connection_params)
    except Exception as exc:
        print(f"✗ Database connection failed: {exc}")
        return None
|
||||
|
||||
def update_location_coordinates(location_id, latitude, longitude):
    """Update the lat/lon columns of a stock_location row directly in PostgreSQL.

    The lat/lon columns are written via raw SQL because they are not exposed
    through the Tryton model (see update_query below).

    Args:
        location_id: primary key of the stock_location row to update.
        latitude: latitude value (float or None).
        longitude: longitude value (float or None).

    Returns:
        bool: True when exactly one or more rows were updated, False when the
        connection could not be opened or the UPDATE failed.

    Fix vs. original: the cursor and connection are now always released via a
    context-managed cursor and a ``finally`` block — previously the cursor was
    never closed on the exception path, and the connection leaked if
    ``rollback()`` itself raised.
    """
    conn = get_db_connection()
    if not conn:
        print(f" ⚠ Could not update coordinates - database connection failed")
        return False

    try:
        # Parameterized UPDATE of the lat/lon columns; the cursor context
        # manager guarantees the cursor is closed even on error.
        with conn.cursor() as cursor:
            cursor.execute(
                """
                UPDATE stock_location
                SET lat = %s, lon = %s
                WHERE id = %s
                """,
                (latitude, longitude, location_id),
            )
            rows_affected = cursor.rowcount

        conn.commit()
        return rows_affected > 0

    except Exception as e:
        print(f" ⚠ Error updating coordinates: {e}")
        import traceback
        traceback.print_exc()
        conn.rollback()
        return False

    finally:
        # Always release the connection, whatever path we took above.
        conn.close()
|
||||
|
||||
def check_location_exists_by_name(name):
    """Return the first stock.location whose name matches exactly, else None."""
    location_model = Model.get('stock.location')
    matches = location_model.find([('name', '=', name)])
    if not matches:
        return None
    return matches[0]
|
||||
|
||||
def validate_location_type(loc_type, default_type=None):
    """Validate a CSV-supplied location type against Tryton's allowed values.

    Generalized vs. original: the fallback used on invalid input is now an
    optional parameter instead of only the hard-coded module constant; calling
    with one argument behaves exactly as before.

    Args:
        loc_type: raw type string from the CSV (may be None or empty).
        default_type: value returned when ``loc_type`` is invalid. Defaults to
            the module-level DEFAULT_TYPE when not given.

    Returns:
        str: the lowercased valid type, or ``default_type`` for invalid input
        (a warning is printed in that case).
    """
    if default_type is None:
        # Resolved lazily so the module constant is only needed when used.
        default_type = DEFAULT_TYPE

    # Location types accepted by Tryton's stock.location model.
    valid_types = [
        'supplier', 'customer', 'lost_found', 'warehouse',
        'storage', 'production', 'drop', 'rental', 'view'
    ]

    if not loc_type or loc_type.lower() not in valid_types:
        print(f" ⚠ Warning: Invalid type '{loc_type}', using default '{default_type}'")
        return default_type

    return loc_type.lower()
|
||||
|
||||
def parse_coordinate(value, coord_name):
    """Parse a raw CSV coordinate string into a float.

    Args:
        value: raw cell content (string, possibly empty or a NULL sentinel).
        coord_name: 'latitude' or 'longitude' — selects the valid range.

    Returns:
        float or None: the parsed coordinate, or None when the value is
        missing, a NULL-like sentinel, unparseable, or out of range
        (warnings are printed for the last two cases).
    """
    # Missing / empty cell.
    if not value:
        return None

    # Handle 'NULL' or similar string values
    if isinstance(value, str) and value.strip().upper() in {'NULL', 'NONE', 'N/A', ''}:
        return None

    try:
        parsed = float(value)
    except (ValueError, TypeError) as e:
        print(f" ⚠ Warning: Invalid {coord_name} value '{value}' - {e}")
        return None

    # Validate latitude range (-90 to 90)
    if coord_name == 'latitude' and not -90 <= parsed <= 90:
        print(f" ⚠ Warning: Latitude {parsed} out of range (-90 to 90)")
        return None

    # Validate longitude range (-180 to 180)
    if coord_name == 'longitude' and not -180 <= parsed <= 180:
        print(f" ⚠ Warning: Longitude {parsed} out of range (-180 to 180)")
        return None

    return parsed
|
||||
|
||||
def create_location(row):
    """Create a new stock.location via Proteus and write its coordinates.

    Args:
        row: dict with keys 'name', 'type', and optional 'latitude' /
            'longitude' (floats or None).

    Returns:
        tuple: (saved proteus location record, latitude, longitude) —
        the coordinates are echoed back so the caller can log them.
    """
    Location = Model.get('stock.location')

    # Create location
    location = Location()
    location.name = row['name']
    location.type = row['type']

    # Save the location first — we need its database id before the
    # direct-SQL coordinate update below can target the row.
    location.save()

    # Get coordinates and save them
    latitude = row.get('latitude')
    longitude = row.get('longitude')

    # Update coordinates directly in database if provided; lat/lon are not
    # exposed through the Tryton model, hence the raw-SQL helper.
    if latitude is not None or longitude is not None:
        success = update_location_coordinates(location.id, latitude, longitude)
        if not success:
            # Best-effort: the location itself is already created and kept.
            print(f" ⚠ Location created but coordinates not saved")

    return location, latitude, longitude
|
||||
|
||||
def import_locations(csv_file):
    """Import stock locations from a CSV file.

    Expected CSV columns: 'name', 'type', 'lat', 'lon'. Rows are processed
    one at a time; duplicates (within the CSV or already in the database)
    are skipped, and per-row errors are collected so one bad row does not
    abort the run. A summary is printed at the end.

    Args:
        csv_file: path to the CSV file (read with utf-8-sig to strip a BOM).
    """

    imported_count = 0
    skipped_count = 0
    error_count = 0
    errors = []

    # Track names we've already processed in this run
    processed_names = set()

    print(f"{'='*70}")
    print(f"Importing locations from: {csv_file}")
    print(f"{'='*70}\n")

    try:
        # Open with utf-8-sig to handle BOM
        with open(csv_file, 'r', encoding='utf-8-sig') as file:
            reader = csv.DictReader(file)

            # Debug: Show detected columns
            print(f"Detected columns: {reader.fieldnames}\n")

            # start=2: row 1 of the file is the header, so the first data
            # row is line 2 — keeps printed row numbers matching the file.
            for row_num, row in enumerate(reader, start=2):
                try:
                    # Clean up values - get directly from CSV columns
                    name = row.get('name', '').strip()
                    loc_type = row.get('type', '').strip() or DEFAULT_TYPE
                    lat_raw = row.get('lat', '').strip()
                    lon_raw = row.get('lon', '').strip()

                    # Skip empty rows
                    if not name:
                        continue

                    print(f"Processing Row {row_num}: {name}")
                    print(f" CSV Raw values - lat: '{lat_raw}', lon: '{lon_raw}'")

                    # Check if we've already processed this name in this import run
                    if name in processed_names:
                        print(f" ⚠ Duplicate name in CSV: '{name}'")
                        print(f" Skipping duplicate entry...\n")
                        skipped_count += 1
                        continue

                    # Check if location already exists in database
                    existing_location = check_location_exists_by_name(name)

                    if existing_location:
                        print(f" ⚠ Location '{name}' already exists (ID: {existing_location.id})")
                        print(f" Type: {existing_location.type}")
                        print(f" Skipping...\n")
                        skipped_count += 1
                        processed_names.add(name)
                        continue

                    # Validate location type (falls back to DEFAULT_TYPE).
                    loc_type = validate_location_type(loc_type)

                    # Parse coordinates (None when missing/invalid/out of range).
                    latitude = parse_coordinate(lat_raw, 'latitude')
                    longitude = parse_coordinate(lon_raw, 'longitude')

                    print(f" Parsed values - lat: {latitude}, lon: {longitude}")

                    # Create the location with parsed data
                    location_data = {
                        'name': name,
                        'type': loc_type,
                        'latitude': latitude,
                        'longitude': longitude
                    }

                    location, saved_lat, saved_lon = create_location(location_data)

                    # Mark this name as processed
                    processed_names.add(name)

                    print(f" ✓ Created location")
                    print(f" Location ID: {location.id}")
                    print(f" Name: {name}")
                    print(f" Type: {loc_type}")
                    if saved_lat is not None:
                        print(f" Latitude: {saved_lat}")
                    if saved_lon is not None:
                        print(f" Longitude: {saved_lon}")
                    print()

                    imported_count += 1

                except Exception as e:
                    # Per-row failure: record it and continue with the next row.
                    error_msg = f"Row {row_num} - {name if 'name' in locals() else 'Unknown'}: {str(e)}"
                    errors.append(error_msg)
                    error_count += 1
                    print(f"✗ Error on row {row_num}: {e}\n")
                    import traceback
                    traceback.print_exc()

            # Summary
            print(f"{'='*70}")
            print("IMPORT SUMMARY")
            print(f"{'='*70}")
            print(f"Successfully imported: {imported_count} locations")
            print(f"Skipped (already exist or duplicates): {skipped_count} locations")
            print(f"Errors: {error_count}")

            if errors:
                print(f"\nError details:")
                for error in errors:
                    print(f" - {error}")

            print(f"\n{'='*70}")

    except FileNotFoundError:
        print(f"✗ Error: CSV file not found at {csv_file}")
        print(f"Please update CSV_FILE_PATH in the script with the correct path.")
    except Exception as e:
        print(f"✗ Fatal error: {e}")
        import traceback
        traceback.print_exc()
|
||||
|
||||
def verify_import():
    """Verify imported locations, reading lat/lon back from PostgreSQL.

    Lists the 20 most recently created stock.location records via Proteus
    and, for each, fetches the lat/lon columns with a direct SQL query
    (those columns are not exposed through the Tryton model).

    Fix vs. original: the PostgreSQL connection was only closed inside the
    "locations found" branch, leaking it when the table was empty — it is
    now closed in a ``finally`` block on every path.
    """
    Location = Model.get('stock.location')

    print(f"\n{'='*70}")
    print("VERIFICATION - Stock Locations")
    print(f"{'='*70}\n")

    # Get database connection to read coordinates
    conn = get_db_connection()

    if not conn:
        print("Cannot verify - database connection failed")
        return

    try:
        # Newest first, so the slice below shows the most recent creations.
        locations = Location.find([], order=[('id', 'DESC')])

        if not locations:
            print("No locations found")
            return

        print(f"Found {len(locations)} locations (showing last 20):\n")
        print(f"{'ID':<8} {'Name':<35} {'Type':<12} {'Lat':<12} {'Lon':<12}")
        print("-" * 85)

        for location in locations[:20]:  # Show last 20 created
            loc_id = location.id
            name = location.name[:34] if location.name else 'N/A'
            loc_type = location.type if location.type else 'N/A'

            # Get coordinates from database (defaults if the query fails).
            lat = 'N/A'
            lon = 'N/A'

            try:
                cursor = conn.cursor()
                cursor.execute(
                    "SELECT lat, lon FROM stock_location WHERE id = %s",
                    (loc_id,)
                )
                result = cursor.fetchone()
                if result:
                    lat = f"{result[0]:.6f}" if result[0] is not None else 'N/A'
                    lon = f"{result[1]:.6f}" if result[1] is not None else 'N/A'
                cursor.close()
            except Exception as e:
                print(f"Error reading coordinates for location {loc_id}: {e}")

            print(f"{loc_id:<8} {name:<35} {loc_type:<12} {lat:<12} {lon:<12}")

    finally:
        # Always release the connection, including the no-locations path.
        conn.close()
        print()
|
||||
|
||||
def main():
    """Entry point: connect, sanity-check the DB schema, import, verify.

    Returns:
        int: 0 on success, 1 when either the Tryton XML-RPC connection or
        the direct PostgreSQL connection cannot be established.
    """
    print("="*70)
    print("TRYTON STOCK LOCATION IMPORT SCRIPT")
    print("Using Proteus with XML-RPC Connection")
    print("Using Direct PostgreSQL for lat/lon coordinates")
    print("="*70)
    print()

    # Connect to Tryton using XML-RPC
    if not connect_to_tryton():
        return 1

    # Test database connection — coordinates are written via raw SQL, so a
    # working PostgreSQL connection is required before importing anything.
    print("Testing PostgreSQL connection...")
    conn = get_db_connection()
    if conn:
        print("✓ PostgreSQL connection successful")

        # Test if lat/lon columns exist — they are custom columns, so warn
        # loudly if the schema does not have them.
        try:
            cursor = conn.cursor()
            cursor.execute("""
                SELECT column_name, data_type
                FROM information_schema.columns
                WHERE table_name = 'stock_location'
                AND column_name IN ('lat', 'lon')
            """)
            columns = cursor.fetchall()
            cursor.close()

            if columns:
                print("✓ Found lat/lon columns in stock_location table:")
                for col in columns:
                    print(f" - {col[0]}: {col[1]}")
            else:
                print("✗ WARNING: lat/lon columns NOT found in stock_location table!")
                print(" Coordinates will not be saved!")
        except Exception as e:
            print(f" Could not verify columns: {e}")

        conn.close()
        print()
    else:
        print("✗ PostgreSQL connection failed")
        print("Coordinates will not be saved!\n")
        return 1

    # Import locations
    import_locations(CSV_FILE_PATH)

    # Verify import
    verify_import()

    return 0


if __name__ == '__main__':
    exit(main())
|
||||
@@ -0,0 +1,165 @@
|
||||
import csv
import psycopg2
from datetime import datetime

# Database connection parameters
# NOTE(review): credentials are hardcoded in source — consider moving them
# to environment variables before distributing this script.
DB_CONFIG = {
    'host': '72.61.163.139',
    'port': 5433,
    'database': 'tradon',
    'user': 'postgres',
    'password': 'dsproject'
}

# CSV file path
# Expected columns: vessel_name, vessel_year, vessel_imo.
CSV_FILE = r'C:\Users\SylvainDUVERNAY\Open Squared\Production - Documents\TRADON Implementation\ITSA\Reference Data\Loaders\Vessels.csv'
|
||||
|
||||
def import_vessels():
    """Import vessel data from CSV into the trade_vessel table.

    Reads the CSV at CSV_FILE (columns: vessel_name, vessel_year,
    vessel_imo), skips rows whose (name, imo) pair already exists, inserts
    the rest in a single transaction, and prints a summary.

    Fixes vs. original:
    - The duplicate check used ``vessel_imo = %s``, which never matches when
      the IMO is NULL (``NULL = NULL`` is UNKNOWN in SQL), so vessels without
      an IMO were re-inserted on every run. Fixed with the null-safe
      ``IS NOT DISTINCT FROM`` comparison.
    - The 'NULL' sentinel test was applied to the *unstripped* cell, so a
      value like ``'NULL '`` slipped through as a real IMO. The stripped
      value is now tested.
    """

    print("=" * 60)
    print("VESSEL IMPORT PROCESS STARTED")
    print("=" * 60)

    # Initialize connection and cursor objects so the finally block can
    # safely test them even if connect() fails.
    conn = None
    cursor = None

    try:
        # Connect to PostgreSQL database
        print(f"\n[1/4] Connecting to database...")
        print(f" Host: {DB_CONFIG['host']}:{DB_CONFIG['port']}")
        print(f" Database: {DB_CONFIG['database']}")
        conn = psycopg2.connect(**DB_CONFIG)
        cursor = conn.cursor()
        print(" ✓ Database connection established")

        # Read CSV file with UTF-8-BOM encoding to handle Excel-generated CSVs
        print(f"\n[2/4] Reading CSV file...")
        print(f" File: {CSV_FILE}")
        with open(CSV_FILE, 'r', encoding='utf-8-sig') as file:
            csv_reader = csv.DictReader(file)

            # Initialize counters for tracking import results
            insert_count = 0
            skip_count = 0

            print(" ✓ CSV file opened successfully")
            print(f"\n[3/4] Processing vessel records...")
            print("-" * 60)

            # Process each row from CSV file
            for row_num, row in enumerate(csv_reader, start=1):
                # Extract and clean vessel data from CSV row
                vessel_name = row['vessel_name'].strip()
                # Convert empty strings to None for vessel_year
                vessel_year = row['vessel_year'].strip() if row['vessel_year'].strip() else None
                # Convert empty strings and 'NULL' text to None for vessel_imo;
                # compare the stripped value so 'NULL ' is also caught.
                imo_raw = row['vessel_imo'].strip()
                vessel_imo = imo_raw if imo_raw and imo_raw.upper() != 'NULL' else None

                print(f"\nRow {row_num}: Processing '{vessel_name}'")
                print(f" Year: {vessel_year if vessel_year else 'N/A'}")
                print(f" IMO: {vessel_imo if vessel_imo else 'N/A'}")

                # Check if vessel already exists to avoid duplicates.
                # IS NOT DISTINCT FROM treats NULL as equal to NULL, so
                # vessels without an IMO are also deduplicated correctly.
                cursor.execute("""
                    SELECT id FROM trade_vessel
                    WHERE vessel_name = %s
                    AND vessel_imo IS NOT DISTINCT FROM %s
                """, (vessel_name, vessel_imo))

                existing = cursor.fetchone()

                # Skip insertion if vessel already exists
                if existing:
                    print(f" ⚠ SKIPPED - Duplicate found (ID: {existing[0]})")
                    skip_count += 1
                    continue

                # Insert new vessel record into trade_vessel table
                cursor.execute("""
                    INSERT INTO trade_vessel
                    (vessel_name, vessel_year, vessel_imo, active, create_date, create_uid, write_date, write_uid)
                    VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
                    RETURNING id
                """, (
                    vessel_name,      # Vessel name from CSV
                    vessel_year,      # Year vessel was built
                    vessel_imo,       # IMO number (international maritime identifier)
                    True,             # Set active flag to True
                    datetime.now(),   # Record creation timestamp
                    1,                # User ID who created the record
                    datetime.now(),   # Record last modification timestamp
                    1                 # User ID who last modified the record
                ))

                # Get the ID of the newly inserted record
                new_id = cursor.fetchone()[0]

                # Increment insert counter and log success
                insert_count += 1
                print(f" ✓ INSERTED successfully (New ID: {new_id})")

            print("-" * 60)

        # Commit all inserts to database in one transaction.
        print(f"\n[4/4] Committing transaction to database...")
        conn.commit()
        print(" ✓ Transaction committed successfully")

        # Display import summary statistics
        print("\n" + "=" * 60)
        print("IMPORT SUMMARY")
        print("=" * 60)
        print(f"✓ Records inserted: {insert_count}")
        print(f"⚠ Records skipped: {skip_count}")
        print(f" Total processed: {insert_count + skip_count}")
        print("=" * 60)

    except psycopg2.Error as e:
        # Rollback transaction if database error occurs
        print("\n" + "!" * 60)
        print("DATABASE ERROR")
        print("!" * 60)
        if conn:
            conn.rollback()
            print("✓ Transaction rolled back")
        print(f"Error details: {e}")
        print("!" * 60)

    except FileNotFoundError:
        # Handle case where CSV file doesn't exist
        print("\n" + "!" * 60)
        print("FILE NOT FOUND ERROR")
        print("!" * 60)
        print(f"CSV file not found: {CSV_FILE}")
        print("Please check the file path and try again.")
        print("!" * 60)

    except Exception as e:
        # Catch any other unexpected errors and rollback
        print("\n" + "!" * 60)
        print("UNEXPECTED ERROR")
        print("!" * 60)
        if conn:
            conn.rollback()
            print("✓ Transaction rolled back")
        print(f"Error details: {e}")
        print("!" * 60)

    finally:
        # Clean up database resources
        print(f"\n[CLEANUP] Closing database connection...")
        if cursor:
            cursor.close()
            print(" ✓ Cursor closed")
        if conn:
            conn.close()
            print(" ✓ Connection closed")

    print("\n" + "=" * 60)
    print("VESSEL IMPORT PROCESS COMPLETED")
    print("=" * 60 + "\n")


# Execute import when script is run directly
if __name__ == "__main__":
    import_vessels()
|
||||
@@ -0,0 +1,239 @@
|
||||
# Diagnostic script: compare the fields Proteus exposes on purchase.purchase
# against the actual purchase_purchase columns in PostgreSQL, to locate
# custom columns missing from the Python model.
from proteus import config, Model
import psycopg2

# XML-RPC Configuration (default connection method)
# NOTE(review): hardcoded credentials — move to env vars before sharing.
HTTPS = 'https://'
SERVER_URL = 'itsa.open-squared.tech'
DATABASE_NAME = 'tradon'
USERNAME = 'admin'
PASSWORD = 'dsproject'

# PostgreSQL Configuration (for direct database inspection)
DB_HOST = '72.61.163.139'
DB_PORT = 5433
DB_USER = 'postgres'
DB_PASSWORD = 'dsproject'

print("="*80)
print("CUSTOM FIELDS IDENTIFICATION FOR purchase.purchase")
print("="*80)

# Connect to Tryton via XML-RPC
print(f"\nConnecting via XML-RPC to {SERVER_URL}...")
config.set_xmlrpc(f'{HTTPS}{USERNAME}:{PASSWORD}@{SERVER_URL}/{DATABASE_NAME}/')
print("✓ Connected successfully\n")

Purchase = Model.get('purchase.purchase')

# Get all fields that Proteus sees; drop dunders and the CRUD helper
# methods Proteus adds so only model fields remain.
proteus_fields = sorted([key for key in dir(Purchase)
                         if not key.startswith('_')
                         and key not in ['create', 'delete', 'save', 'find',
                                         'copy', 'read', 'write', 'search']])

print(f"1. FIELDS VISIBLE TO PROTEUS: {len(proteus_fields)} fields")
print("-"*80)

# Standard Tryton purchase.purchase fields (from base module) — anything
# outside this set is presumed to come from a custom module.
standard_purchase_fields = {
    'id', 'create_date', 'create_uid', 'write_date', 'write_uid',
    'company', 'party', 'invoice_party', 'invoice_address',
    'payment_term', 'warehouse', 'currency', 'description',
    'comment', 'state', 'purchase_date', 'invoice_method',
    'lines', 'invoices', 'invoices_ignored', 'invoices_recreated',
    'invoice_lines', 'invoice_lines_ignored', 'moves',
    'shipment_state', 'invoice_state', 'number', 'reference',
    'shipments', 'shipment_returns', 'rec_name', 'origin',
    'untaxed_amount', 'tax_amount', 'total_amount',
    'untaxed_amount_cache', 'tax_amount_cache', 'total_amount_cache',
    'delivery_date', 'party_lang', 'contact', 'xml_id'
}

# Identify potential custom fields
potential_custom_fields = [f for f in proteus_fields if f not in standard_purchase_fields]

print(f"\n2. POTENTIAL CUSTOM FIELDS: {len(potential_custom_fields)} fields")
print("-"*80)
for field in potential_custom_fields:
    print(f" - {field}")
|
||||
|
||||
# Connect to PostgreSQL to get actual table columns and cross-check them
# against what Proteus exposed above.
print(f"\n3. COLUMNS IN POSTGRESQL TABLE 'purchase_purchase'")
print("-"*80)

try:
    conn = psycopg2.connect(
        dbname=DATABASE_NAME,
        host=DB_HOST,
        port=DB_PORT,
        user=DB_USER,
        password=DB_PASSWORD
    )
    cursor = conn.cursor()

    # Get all columns from purchase_purchase table
    cursor.execute("""
        SELECT
            column_name,
            data_type,
            character_maximum_length,
            is_nullable,
            column_default
        FROM information_schema.columns
        WHERE table_schema = 'public'
        AND table_name = 'purchase_purchase'
        ORDER BY ordinal_position;
    """)

    db_columns = cursor.fetchall()

    print(f"Total columns in database: {len(db_columns)}\n")

    # Standard columns that typically exist in purchase_purchase —
    # used to filter the listing down to custom columns only.
    standard_db_columns = {
        'id', 'create_date', 'create_uid', 'write_date', 'write_uid',
        'company', 'party', 'invoice_party', 'invoice_address',
        'payment_term', 'warehouse', 'currency', 'description',
        'comment', 'state', 'purchase_date', 'invoice_method',
        'number', 'reference', 'delivery_date', 'contact',
        'shipment_state', 'invoice_state', 'origin',
        'untaxed_amount_cache', 'tax_amount_cache', 'total_amount_cache'
    }

    db_column_names = [col[0] for col in db_columns]
    custom_db_columns = [col for col in db_columns if col[0] not in standard_db_columns]

    print("Custom columns in database:")
    for col in custom_db_columns:
        col_name, data_type, max_length, nullable, default = col
        length_info = f"({max_length})" if max_length else ""
        print(f" - {col_name:<30} {data_type}{length_info:<15} NULL: {nullable}")

    # Compare: Fields in Proteus vs Columns in DB
    print(f"\n4. COMPARISON: PROTEUS vs DATABASE")
    print("-"*80)

    # Fields in Proteus but NOT as direct columns in DB (might be related
    # fields, Function fields, One2Many, etc. — those have no own column).
    proteus_only = set(potential_custom_fields) - set(db_column_names)
    if proteus_only:
        print(f"\nFields in Proteus but NOT as columns in DB ({len(proteus_only)}):")
        print("(These might be Many2One, One2Many, Function fields, etc.)")
        for field in sorted(proteus_only):
            print(f" - {field}")

    # Columns in DB but NOT visible in Proteus (these are the problem!) —
    # data exists but the Python model does not declare the field.
    db_only = set([col[0] for col in custom_db_columns]) - set(proteus_fields)
    if db_only:
        print(f"\n⚠️ COLUMNS IN DATABASE BUT NOT VISIBLE IN PROTEUS ({len(db_only)}):")
        print("(These fields MUST be added to the Python model!)")
        for field in sorted(db_only):
            print(f" - {field}")

    # Fields that exist in BOTH Proteus and DB
    both = set(potential_custom_fields) & set([col[0] for col in custom_db_columns])
    if both:
        print(f"\n✓ Custom fields properly defined in BOTH Proteus and DB ({len(both)}):")
        for field in sorted(both):
            print(f" - {field}")

    cursor.close()
    conn.close()

except Exception as e:
    print(f"Error connecting to PostgreSQL: {e}")
|
||||
|
||||
# Test persistence of custom fields: write a sentinel value through Proteus,
# reload the record, and check the value survived the round trip.
print(f"\n5. TESTING FIELD PERSISTENCE")
print("-"*80)

try:
    # Find a draft purchase to test — drafts are safe to modify.
    drafts = Purchase.find([('state', '=', 'draft')], limit=1)

    if drafts:
        test_purchase = drafts[0]
        test_id = test_purchase.id

        print(f"Testing with purchase ID: {test_id}")
        print("\nTesting custom fields (attempting to set and save):\n")

        # Test a sample of custom fields
        test_fields = {}

        # Add fields to test if they exist
        if 'reference' in potential_custom_fields:
            test_fields['reference'] = 'TEST_REF'
        if 'crop' in potential_custom_fields:
            test_fields['crop'] = 'TEST_CROP'
        if 'forex' in potential_custom_fields:
            test_fields['forex'] = 'TEST_FOREX'
        if 'broker' in potential_custom_fields:
            test_fields['broker'] = 'TEST_BROKER'
        if 'certif' in potential_custom_fields:
            test_fields['certif'] = 'TEST_CERT'
        if 'wb' in potential_custom_fields:
            test_fields['wb'] = 'TEST_WB'

        for field_name, test_value in test_fields.items():
            try:
                # Remember the original value so it can be restored below.
                original_value = getattr(test_purchase, field_name, None)
                setattr(test_purchase, field_name, test_value)
                test_purchase.save()

                # Reload — a fresh fetch reveals what the server actually stored.
                reloaded = Purchase(test_id)
                new_value = getattr(reloaded, field_name, None)

                if new_value == test_value:
                    print(f" ✓ {field_name}: PERSISTS correctly")
                    # Restore original value
                    setattr(reloaded, field_name, original_value)
                    reloaded.save()
                else:
                    print(f" ✗ {field_name}: Does NOT persist (expected: '{test_value}', got: '{new_value}')")

            except Exception as e:
                print(f" ✗ {field_name}: Error - {str(e)[:60]}")
    else:
        print("No draft purchases found for testing")

except Exception as e:
    print(f"Error during persistence testing: {e}")

print("\n" + "="*80)
print("SUMMARY & RECOMMENDATIONS")
print("="*80)
print("""
Next steps for your colleague:

1. Review the "⚠️ COLUMNS IN DATABASE BUT NOT VISIBLE IN PROTEUS" section
   → These fields exist in PostgreSQL but are missing from the Python model

2. Review fields that "Does NOT persist" in the testing section
   → These fields are visible but not working correctly

3. Add missing fields to your custom Tryton module:

   File: modules/your_custom_module/purchase.py

   from trytond.pool import PoolMeta
   from trytond.model import fields

   class Purchase(metaclass=PoolMeta):
       __name__ = 'purchase.purchase'

       # Add each missing field with appropriate type:
       custom_field = fields.Char('Custom Field')
       custom_number = fields.Integer('Custom Number')
       custom_date = fields.Date('Custom Date')
       custom_many2one = fields.Many2One('other.model', 'Reference')
       # etc...

4. Increment module version in tryton.cfg

5. Update module: trytond-admin -d tradon -u your_custom_module

6. Restart Tryton server

7. Re-run this script to verify all fields work correctly
""")
|
||||
@@ -0,0 +1,46 @@
|
||||
# Diagnostic script: list every field name Proteus exposes on an instance
# of purchase.purchase.
from proteus import config, Model

# XML-RPC Configuration
# NOTE(review): hardcoded credentials — move to env vars before sharing.
HTTPS = 'https://'
SERVER_URL = 'itsa.open-squared.tech'
DATABASE_NAME = 'tradon'
USERNAME = 'admin'
PASSWORD = 'dsproject'

# Connect via XML-RPC
try:
    config.set_xmlrpc(f'{HTTPS}{USERNAME}:{PASSWORD}@{SERVER_URL}/{DATABASE_NAME}/')

    print(f"Connected to Tryton database '{DATABASE_NAME}' successfully!")

    # Get the model using Model.get()
    Purchase = Model.get('purchase.purchase')

    try:
        # Try to get any existing record or create new (without saving) —
        # an instance exposes the model's fields as attributes.
        purchases = Purchase.find([], limit=1)
        if purchases:
            sample = purchases[0]
        else:
            sample = Purchase()

        # Get field names from the instance, dropping dunders and the
        # Proteus CRUD helper methods.
        field_names = sorted([key for key in dir(sample)
                              if not key.startswith('_')
                              and key not in ['create', 'delete', 'save', 'find']])

        print(f"\nTotal fields in purchase.purchase: {len(field_names)}")
        print("\nField list:")
        for field in field_names:
            print(f"{field}")

    except Exception as e:
        print(f"Could not inspect fields via instance: {e}")


except Exception as e:
    print(f"Connection or operation failed: {e}")
    print("\nPlease verify:")
    print(f" - Tryton server is running on {SERVER_URL}")
    print(f" - Database '{DATABASE_NAME}' exists")
    print(f" - Username and password are correct")
||||
@@ -0,0 +1,35 @@
|
||||
# Diagnostic script: check whether the 'number' field of a specific draft
# purchase can be set and persisted through Proteus, or whether server-side
# logic (sequence / workflow) overrides it.
from proteus import config, Model

# Connect
config.set_xmlrpc(f'https://admin:dsproject@itsa.open-squared.tech/tradon/')

Purchase = Model.get('purchase.purchase')

# Test: Set number on draft purchase
print("=== Testing Number Field Persistence ===")
draft = Purchase(682)  # The ID from your previous test

print(f"Before: number = {draft.number}, state = {draft.state}")

# Set number
draft.number = "MANUAL_TEST_001"
draft.save()
print(f"After save: number = {draft.number}")

# Reload by fetching again from database — shows what the server stored.
draft_reloaded = Purchase(682)
print(f"After reload: number = {draft_reloaded.number}")

if draft_reloaded.number == "MANUAL_TEST_001":
    print("✓ SUCCESS: Number WAS persisted via Proteus!")
else:
    print(f"✗ FAILED: Number NOT persisted. Got: {draft_reloaded.number}")
    print("\nThis means the 'number' field is likely:")
    print(" 1. Read-only (controlled by Tryton workflow)")
    print(" 2. Auto-generated by a sequence")
    print(" 3. Overwritten by server-side logic")

# Now verify in PostgreSQL
print("\n=== Verify in PostgreSQL ===")
print("Run this SQL query to confirm:")
print("SELECT id, number, state FROM purchase_purchase WHERE id = 682;")
|
||||
@@ -0,0 +1,44 @@
|
||||
# Script: create one purchase order with a single line via Proteus.
#
# Fix vs. original: ``config = config.set_xmlrpc(...)`` rebound the imported
# ``config`` module to the call's return value, shadowing the module for the
# rest of the script; the return value is now simply discarded.
from proteus import config, Model
from decimal import getcontext, Decimal, ROUND_HALF_UP


# XML-RPC Configuration
# NOTE(review): hardcoded credentials — move to env vars before sharing.
HTTPS = 'https://'
SERVER_URL = 'itsa.open-squared.tech'
DATABASE_NAME = 'tradon'
USERNAME = 'admin'
PASSWORD = 'dsproject'


# Configure the global Proteus connection (no need to keep the result).
config.set_xmlrpc(f'{HTTPS}{USERNAME}:{PASSWORD}@{SERVER_URL}/{DATABASE_NAME}/')


Company = Model.get('company.company')
Party = Model.get('party.party')
Currency = Model.get('currency.currency')
Purchase = Model.get('purchase.purchase')
Product = Model.get('product.product')
Wb = Model.get('purchase.weight.basis')

# Fetch the records referenced by the new purchase (ids are environment-specific).
company = Company(6)
party = Party(2776)

# Create the purchase order
purchase = Purchase()
purchase.company = company
purchase.party = party
purchase.currency = company.currency
purchase.tol_min = Decimal(1)
purchase.wb = Wb(1)

# Purchase line
product = Product(12)  # product id
line = purchase.lines.new()
line.product = product
line.quantity = 10
line.unit_price = product.cost_price

# Save (creates the purchase and its line in one call)
purchase.save()

print(f"Purchase créée : {purchase.id}")
|
||||
@@ -0,0 +1,45 @@
|
||||
from proteus import config, Model
from decimal import getcontext, Decimal, ROUND_HALF_UP

# XML-RPC Configuration
# NOTE(review): credentials are committed in plain text — move to
# environment variables or a secrets store before sharing this repo.
HTTPS = 'https://'
SERVER_URL = 'itsa.open-squared.tech'
DATABASE_NAME = 'tradon'
USERNAME = 'admin'
PASSWORD = 'dsproject'


# Open the Proteus XML-RPC session (credentials embedded in the URL).
config = config.set_xmlrpc(f'{HTTPS}{USERNAME}:{PASSWORD}@{SERVER_URL}/{DATABASE_NAME}/')

# Model proxies for the Tryton models used below.
# Fix: the sale model proxy is now named `Sale` so it is no longer
# shadowed by the `sale` record instance created further down.
Company = Model.get('company.company')
Party = Model.get('party.party')
Currency = Model.get('currency.currency')
Sale = Model.get('sale.sale')
Product = Model.get('product.product')
Wb = Model.get('purchase.weight.basis')
Location = Model.get('stock.location')

# Fetch the referenced records by hard-coded ids
# (presumably valid in the 'tradon' database — verify before reuse).
company = Company(6)
party = Party(2789)
from_location = Location(1247)

# Build the sale order; company must be set before reading
# company.currency for the currency assignment below.
sale = Sale()
sale.company = company
sale.party = party
sale.currency = company.currency
sale.tol_min = Decimal(1)
sale.wb = Wb(1)
sale.from_location = from_location

# Persist the sale on the server (no lines yet — the line-creation
# block was dead commented-out code and has been removed).
sale.save()

print(f"sale créée : {sale.id}")
|
||||
@@ -0,0 +1,11 @@
|
||||
import sys
from pathlib import Path

# Make the scripts' parent directory importable so `helpers` resolves.
repo_root = Path(__file__).parent.parent
sys.path.insert(0, str(repo_root))

# Diagnostic: dump every attribute exposed by helpers.config and check
# whether PURCHASE_FEES_CSV is actually defined there.
import helpers.config as configuration

print("Available in config:", dir(configuration))
print("PURCHASE_FEES_CSV value:", getattr(configuration, 'PURCHASE_FEES_CSV', 'NOT FOUND'))
|
||||
@@ -0,0 +1,398 @@
|
||||
import csv
from proteus import config, Model

# XML-RPC Configuration
# NOTE(review): credentials are committed in plain text — move to
# environment variables or a secrets store before sharing this repo.
HTTPS = 'https://'
SERVER_URL = 'itsa.open-squared.tech'
DATABASE_NAME = 'tradon'
USERNAME = 'admin'
PASSWORD = 'dsproject'

# CSV Configuration
# Absolute Windows path to the input file; adjust per machine.
CSV_FILE_PATH = r'C:\Users\SylvainDUVERNAY\Open Squared\Production - Documents\TRADON Implementation\ITSA\Reference Data\Loaders\Parties.csv'

# Default values
DEFAULT_COUNTRY = 'US'  # Default country code if not specified
|
||||
|
||||
def connect_to_tryton():
    """Open the global Proteus XML-RPC session.

    Returns True when the session was established, False otherwise
    (troubleshooting hints are printed on failure).
    """
    print(f"Connecting to Tryton server: {SERVER_URL}")
    print(f"Database: {DATABASE_NAME}")
    print(f"Username: {USERNAME}")

    url = f'{HTTPS}{USERNAME}:{PASSWORD}@{SERVER_URL}/{DATABASE_NAME}/'
    try:
        config.set_xmlrpc(url)
    except Exception as exc:
        print(f"✗ Connection failed: {exc}")
        print("\nTroubleshooting:")
        for hint in (
            "  - Verify the server URL is correct and accessible",
            "  - Check that the Tryton server is running",
            "  - Verify username and password are correct",
            "  - Make sure you can access the server in a browser",
        ):
            print(hint)
        return False

    print("✓ Connected successfully!\n")
    return True
|
||||
|
||||
def get_country(country_code):
    """Resolve a country record by ISO code.

    Falls back to DEFAULT_COUNTRY when the code is missing or unknown,
    then to the first country on record; raises ValueError only when the
    database contains no countries at all.
    """
    Country = Model.get('country.country')
    code = country_code or DEFAULT_COUNTRY

    # Exact match on the (upper-cased) requested code.
    match = Country.find([('code', '=', code.upper())])
    if match:
        return match[0]

    # Unknown code: fall back to the configured default country.
    print(f"  ⚠ Warning: Country '{code}' not found, using '{DEFAULT_COUNTRY}'")
    fallback = Country.find([('code', '=', DEFAULT_COUNTRY)])
    if fallback:
        return fallback[0]

    # Last resort: any country at all.
    any_country = Country.find([])
    if any_country:
        print(f"  ⚠ Using first available country: {any_country[0].name}")
        return any_country[0]

    raise ValueError("No countries found in database!")
|
||||
|
||||
def get_subdivision(country, subdivision_code):
    """Resolve a subdivision (state/province) of `country` by code.

    Tries the canonical "<COUNTRY>-<CODE>" form first, then a suffix
    (ilike) match; returns None when nothing matches or no code given.
    """
    if not subdivision_code:
        return None

    Subdivision = Model.get('country.subdivision')

    # Two search strategies, in decreasing order of strictness.
    domains = (
        [('code', '=', f"{country.code}-{subdivision_code}"),
         ('country', '=', country.id)],
        [('code', 'ilike', f"%{subdivision_code}"),
         ('country', '=', country.id)],
    )
    for domain in domains:
        found = Subdivision.find(domain)
        if found:
            return found[0]

    print(f"  ⚠ Warning: Subdivision '{subdivision_code}' not found for country {country.code}")
    return None
|
||||
|
||||
def check_party_exists_by_name(name):
    """Return the first party whose name matches exactly, or None."""
    Party = Model.get('party.party')
    return next(iter(Party.find([('name', '=', name)])), None)
|
||||
|
||||
|
||||
|
||||
def create_party_with_addresses(row):
    """Create a new party (and optionally one address) via Proteus.

    :param row: dict with keys 'name', 'tax_identifier', 'vat_code',
        'address_name', 'street', 'city', 'postal_code', 'country_code',
        'subdivision_code' (all strings; empty means absent).
    :return: the freshly reloaded party record.

    Order matters here: the party is saved first, then the address is
    created as a separate record pointing at it, then auto-created empty
    addresses are cleaned up.
    """
    Party = Model.get('party.party')
    Address = Model.get('party.address')

    # Create party - let Tryton auto-generate the code
    party = Party()
    party.name = row['name']

    if row.get('tax_identifier'):
        party.tax_identifier = row['tax_identifier']

    if row.get('vat_code'):
        party.vat_code = row['vat_code']

    # Save the party FIRST (without addresses)
    party.save()

    # Check if we have meaningful address data
    # Require at least street OR city to be present (not empty)
    has_street = bool(row.get('street'))
    has_city = bool(row.get('city'))
    has_postal_code = bool(row.get('postal_code'))
    has_country = bool(row.get('country_code'))

    # Create address only if we have at least street OR city
    if has_street or has_city:
        address = Address()

        # Link to the party we just created
        address.party = party

        if row.get('address_name'):
            address.name = row['address_name']

        if has_street:
            address.street = row['street']

        if has_city:
            address.city = row['city']

        # Use postal_code instead of zip (Tryton field name)
        if has_postal_code:
            address.postal_code = row['postal_code']

        # Country is always set; get_country falls back to DEFAULT_COUNTRY.
        if has_country:
            country_code = row['country_code']
            country = get_country(country_code)
        else:
            country = get_country(DEFAULT_COUNTRY)

        address.country = country

        # Get subdivision (state/province) if provided; silently skipped
        # when the lookup fails (get_subdivision returns None).
        if row.get('subdivision_code'):
            subdivision = get_subdivision(country, row['subdivision_code'])
            if subdivision:
                address.subdivision = subdivision

        # Save the address separately
        address.save()

    # Clean up any empty addresses that might have been auto-created
    # (presumably by server-side defaults when the party was saved —
    # TODO confirm against the Tryton party module).
    # Reload party to get fresh data
    party = Party(party.id)

    # Find and delete empty addresses
    addresses_to_delete = []
    for addr in party.addresses:
        # Consider an address empty if it has no street, city, or postal_code
        is_empty = (
            (not addr.street or not addr.street.strip()) and
            (not addr.city or not addr.city.strip()) and
            (not addr.postal_code or not addr.postal_code.strip())
        )
        if is_empty:
            addresses_to_delete.append(addr)

    # Delete empty addresses
    if addresses_to_delete:
        Address.delete(addresses_to_delete)
        print(f"  ℹ Cleaned up {len(addresses_to_delete)} empty address(es)")

    # Reload party one more time to return clean data
    party = Party(party.id)

    return party
|
||||
|
||||
|
||||
def import_parties(csv_file):
    """Import parties from a CSV file, skipping duplicates and printing a summary.

    :param csv_file: path to a CSV with columns name, tax_identifier,
        vat_code, address_name, street, city, postal_code (or zip),
        country_code, subdivision_code.

    Per-row errors are caught and reported so one bad row does not abort
    the whole run; file-level errors abort with a message.
    """

    imported_count = 0
    skipped_count = 0
    error_count = 0
    errors = []

    # Track names we've already processed in this run
    processed_names = set()

    print(f"{'='*70}")
    print(f"Importing parties from: {csv_file}")
    print(f"{'='*70}\n")

    try:
        # Open with utf-8-sig to handle BOM
        with open(csv_file, 'r', encoding='utf-8-sig') as file:
            reader = csv.DictReader(file)

            # Debug: Show detected columns
            print(f"Detected columns: {reader.fieldnames}\n")

            # start=2: row 1 of the file is the header.
            for row_num, row in enumerate(reader, start=2):
                try:
                    # Clean up values
                    name = row.get('name', '').strip()
                    tax_identifier = row.get('tax_identifier', '').strip()
                    vat_code = row.get('vat_code', '').strip()

                    # Address fields
                    address_name = row.get('address_name', '').strip()
                    street = row.get('street', '').strip()
                    city = row.get('city', '').strip()

                    # Handle both 'zip' and 'postal_code' column names
                    postal_code = row.get('postal_code', '').strip() or row.get('zip', '').strip()

                    country_code = row.get('country_code', '').strip()
                    subdivision_code = row.get('subdivision_code', '').strip()

                    # Skip empty rows
                    if not name:
                        continue

                    # Treat source placeholders 'NULL' / '0' as no postal code
                    if postal_code and postal_code.upper() in ['NULL', '0']:
                        postal_code = ''

                    print(f"Processing Row {row_num}: {name}")

                    # Check if we've already processed this name in this import run
                    if name in processed_names:
                        print(f"  ⚠ Duplicate name in CSV: '{name}'")
                        print(f"  Skipping duplicate entry...\n")
                        skipped_count += 1
                        continue

                    # Check if party already exists in database
                    existing_party = check_party_exists_by_name(name)

                    if existing_party:
                        print(f"  ⚠ Party '{name}' already exists with code: {existing_party.code}")
                        print(f"  Skipping...\n")
                        skipped_count += 1
                        processed_names.add(name)
                        continue

                    # Create the party with address
                    row_data = {
                        'name': name,
                        'tax_identifier': tax_identifier,
                        'vat_code': vat_code,
                        'address_name': address_name,
                        'street': street,
                        'city': city,
                        'postal_code': postal_code,
                        'country_code': country_code,
                        'subdivision_code': subdivision_code
                    }

                    party = create_party_with_addresses(row_data)

                    # Mark this name as processed
                    processed_names.add(name)

                    # Report what was created.
                    print(f"  ✓ Created party")
                    print(f"    Party ID: {party.id}")
                    print(f"    Auto-generated Code: {party.code}")
                    print(f"    Name: {name}")
                    if tax_identifier:
                        print(f"    Tax Identifier: {tax_identifier}")
                    if vat_code:
                        print(f"    VAT Code: {vat_code}")
                    if party.addresses:
                        print(f"    Addresses: {len(party.addresses)}")
                        for addr in party.addresses:
                            # Truncate long streets for display only.
                            addr_street = (addr.street[:50] + '...') if addr.street and len(addr.street) > 50 else (addr.street or 'N/A')
                            addr_city = addr.city if addr.city else 'N/A'
                            addr_postal = addr.postal_code if addr.postal_code else 'N/A'
                            print(f"      - {addr_street}")
                            print(f"        {addr_city}, {addr_postal}")
                    else:
                        print(f"    Addresses: 0 (no address data provided)")
                    print()

                    imported_count += 1

                except Exception as e:
                    # Per-row failure: record and continue with the next row.
                    error_msg = f"Row {row_num} - {name}: {str(e)}"
                    errors.append(error_msg)
                    error_count += 1
                    print(f"✗ Error on row {row_num}: {e}\n")
                    import traceback
                    traceback.print_exc()

        # Summary
        print(f"{'='*70}")
        print("IMPORT SUMMARY")
        print(f"{'='*70}")
        print(f"Successfully imported: {imported_count} parties")
        print(f"Skipped (already exist or duplicates): {skipped_count} parties")
        print(f"Errors: {error_count}")

        if errors:
            print(f"\nError details:")
            for error in errors:
                print(f"  - {error}")

        print(f"\n{'='*70}")

    except FileNotFoundError:
        print(f"✗ Error: CSV file not found at {csv_file}")
        print(f"Please update CSV_FILE_PATH in the script with the correct path.")
    except Exception as e:
        print(f"✗ Fatal error: {e}")
        import traceback
        traceback.print_exc()
|
||||
|
||||
def verify_import():
    """Print a short table of the most recently created parties."""
    Party = Model.get('party.party')

    print(f"\n{'='*70}")
    print("VERIFICATION - Parties")
    print(f"{'='*70}\n")

    # Newest first so freshly imported records appear at the top.
    parties = Party.find([], order=[('id', 'DESC')])

    if not parties:
        print("No parties found")
        print()
        return

    print(f"Found {len(parties)} parties (showing last 20):\n")
    print(f"{'Code':<15} {'Name':<40} {'Addresses':<10}")
    print("-" * 70)

    for party in parties[:20]:
        code = party.code or 'N/A'
        label = party.name[:39] if party.name else 'N/A'
        address_total = len(party.addresses) if party.addresses else 0
        print(f"{code:<15} {label:<40} {address_total:<10}")

    print()
|
||||
|
||||
def list_available_countries():
    """Print up to 20 of the countries configured in the database."""
    print(f"\n{'='*70}")
    print("AVAILABLE COUNTRIES (first 20)")
    print(f"{'='*70}\n")

    Country = Model.get('country.country')
    countries = Country.find([])

    if not countries:
        print("No countries found")
        print()
        return

    print(f"Found {len(countries)} countries:\n")
    for entry in countries[:20]:
        print(f"  - {entry.code}: {entry.name}")
    remaining = len(countries) - 20
    if remaining > 0:
        print(f"  ... and {remaining} more")

    print()
|
||||
|
||||
def main():
    """Entry point: connect to Tryton, import the CSV, verify the result.

    Returns 0 on success, 1 when the connection cannot be established.
    """
    banner = "=" * 70
    print(banner)
    print("TRYTON PARTY IMPORT SCRIPT")
    print("Using Proteus with XML-RPC Connection")
    print("Party codes will be auto-generated by Tryton")
    print(banner)
    print()

    # Abort early when the XML-RPC session cannot be established.
    if not connect_to_tryton():
        return 1

    # Optional diagnostic — uncomment to inspect the country table first:
    # list_available_countries()

    import_parties(CSV_FILE_PATH)
    verify_import()

    return 0
|
||||
|
||||
if __name__ == '__main__':
    # Propagate main()'s status code (0 = success, 1 = connection failure).
    exit(main())
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,807 @@
|
||||
# Add parent directory to Python path so we can import helpers
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
parent_dir = Path(__file__).parent.parent
|
||||
sys.path.insert(0, str(parent_dir))
|
||||
|
||||
import psycopg2
|
||||
import csv
|
||||
from decimal import Decimal
|
||||
from proteus import Model
|
||||
|
||||
from helpers.config import (
|
||||
PURCHASE_CONTRACTS_CSV,
|
||||
connect_to_tryton,
|
||||
get_db_connection,
|
||||
DB_CONFIG # Add this to your config
|
||||
)
|
||||
|
||||
from helpers.tryton_helpers import (
|
||||
parse_decimal,
|
||||
parse_date,
|
||||
ensure_party_is_supplier,
|
||||
find_party_by_name,
|
||||
find_uom_by_code,
|
||||
find_currency_by_code,
|
||||
find_warehouse,
|
||||
find_location,
|
||||
find_payment_term_by_name,
|
||||
find_product_by_code,
|
||||
find_incoterm_by_code,
|
||||
find_weight_basis_by_name,
|
||||
get_party_invoice_address,
|
||||
find_purchase_contract_by_number
|
||||
)
|
||||
|
||||
# Import migration mapping helper
|
||||
from helpers.migration_mapping import MigrationMapper
|
||||
|
||||
|
||||
# CSV Configuration
# Source file path comes from helpers.config so all loaders share it.
CSV_FILE_PATH = PURCHASE_CONTRACTS_CSV


# Default values applied to every imported purchase.
DEFAULT_STATE = 'draft'
DEFAULT_INVOICE_METHOD = 'manual'
DEFAULT_INVOICE_STATE = 'none'
DEFAULT_SHIPMENT_STATE = 'none'

# Import options
AUTO_ENABLE_SUPPLIERS = True  # Set to False to skip auto-enabling supplier flag
SKIP_NON_SUPPLIERS = False  # Set to True to skip parties that aren't suppliers
|
||||
|
||||
|
||||
def update_purchase_custom_fields(purchase_id, custom_data):
    """Update custom fields on a purchase row via direct SQL.

    Some fields (here only 'number') cannot be written through the ORM —
    presumably sequence-controlled on the server side — so this updates
    the purchase_purchase table directly, but only for columns that are
    confirmed to exist via information_schema.

    :param purchase_id: id of the row in purchase_purchase.
    :param custom_data: dict of custom values; only 'number' is handled.
    :return: True on success (or nothing to do), False on failure.
    """
    if not custom_data:
        return True

    conn = get_db_connection()
    if not conn:
        print(f"  ⚠ Could not update custom fields - database connection failed")
        return False

    try:
        cursor = conn.cursor()
        try:
            # Discover which of the custom columns actually exist so the
            # UPDATE never references a missing column.
            cursor.execute("""
                SELECT column_name, data_type
                FROM information_schema.columns
                WHERE table_name = 'purchase_purchase'
                AND column_name IN ('number')
                ORDER BY column_name
            """)
            existing_columns = {row[0]: row[1] for row in cursor.fetchall()}

            print(f"  Available custom columns in purchase_purchase:")
            for col_name, col_type in existing_columns.items():
                print(f"    - {col_name} ({col_type})")

            # Build the UPDATE from whichever custom fields are present;
            # values are always bound as parameters, never interpolated.
            set_clauses = []
            values = []

            if 'number' in existing_columns and custom_data.get('number'):
                set_clauses.append("number = %s")
                values.append(custom_data['number'])
                print(f"  Adding number = {custom_data['number']}")

            if set_clauses:
                values.append(purchase_id)
                update_query = f"""
                    UPDATE purchase_purchase
                    SET {', '.join(set_clauses)}
                    WHERE id = %s
                """

                print(f"  Executing UPDATE with fields: {', '.join([c.split('=')[0].strip() for c in set_clauses])}")

                cursor.execute(update_query, values)
                rows_affected = cursor.rowcount
                conn.commit()

                if rows_affected > 0:
                    print(f"  ✓ {rows_affected} row(s) updated successfully")
                else:
                    print(f"  ⚠ No rows updated (purchase_id={purchase_id} not found?)")
            else:
                print(f"  No custom fields to update (either no data provided or columns not found)")

            return True
        finally:
            # Always release the cursor, even when a statement raises.
            cursor.close()

    except Exception as e:
        print(f"  ⚠ Error updating custom fields: {e}")
        import traceback
        traceback.print_exc()
        conn.rollback()
        return False
    finally:
        # Guarantee the connection is closed on every code path
        # (the original leaked it when an exception escaped mid-query).
        conn.close()
|
||||
|
||||
|
||||
def update_line_custom_fields(line_id, custom_data):
    """Update custom columns (from_del / to_del) on a purchase line via direct SQL.

    :param line_id: id of the row in purchase_line.
    :param custom_data: dict possibly containing 'from_del' and 'to_del'.
    :return: True on success (or nothing to do), False on failure.
    """
    if not custom_data:
        return True

    conn = get_db_connection()
    if not conn:
        print(f"  ⚠ Could not update line custom fields - database connection failed")
        return False

    try:
        cursor = conn.cursor()
        try:
            # Build the UPDATE from whichever custom fields are present;
            # values are bound as parameters, never interpolated.
            set_clauses = []
            values = []

            if custom_data.get('from_del'):
                set_clauses.append("from_del = %s")
                values.append(custom_data['from_del'])

            if custom_data.get('to_del'):
                set_clauses.append("to_del = %s")
                values.append(custom_data['to_del'])

            if set_clauses:
                values.append(line_id)
                update_query = f"""
                    UPDATE purchase_line
                    SET {', '.join(set_clauses)}
                    WHERE id = %s
                """
                cursor.execute(update_query, values)
                conn.commit()

            return True
        finally:
            # Always release the cursor, even when the UPDATE raises.
            cursor.close()

    except Exception as e:
        print(f"  ⚠ Error updating line custom fields: {e}")
        import traceback
        traceback.print_exc()
        conn.rollback()
        return False
    finally:
        # Guarantee the connection is closed on every code path
        # (the original leaked it when an exception escaped mid-query).
        conn.close()
|
||||
|
||||
|
||||
def create_pricing_estimated(line_id, pricing_data):
    """Insert a pricing_estimated row for a purchase line via direct SQL.

    :param line_id: id of the purchase line the estimate belongs to.
    :param pricing_data: dict with 'trigger' (required to do anything)
        and optional 'estimated_date'.
    :return: True on success, when there is nothing to do, when the table
        is absent, or on an expected FK violation; False on other errors.
    """
    if not pricing_data or not pricing_data.get('trigger'):
        return True

    conn = get_db_connection()
    if not conn:
        print(f"  ⚠ Could not create pricing estimate - database connection failed")
        return False

    try:
        cursor = conn.cursor()
        try:
            # Skip gracefully when the optional pricing_estimated table
            # does not exist in this database.
            cursor.execute("""
                SELECT column_name
                FROM information_schema.columns
                WHERE table_name = 'pricing_estimated'
                ORDER BY ordinal_position
            """)
            columns = [row[0] for row in cursor.fetchall()]

            if not columns:
                print(f"  Info: pricing_estimated table does not exist, skipping pricing estimate")
                return True

            # "trigger" is quoted because TRIGGER is a SQL keyword; for an
            # unquoted lowercase column in PostgreSQL this is equivalent,
            # and it keeps the statement valid regardless of how the
            # column was declared.
            insert_query = """
                INSERT INTO pricing_estimated (line, "trigger", estimated_date, create_date, write_date, create_uid, write_uid)
                VALUES (%s, %s, %s, NOW(), NOW(), 1, 1)
            """

            cursor.execute(insert_query, (
                line_id,
                pricing_data['trigger'],
                pricing_data.get('estimated_date')
            ))

            conn.commit()
            print(f"  ✓ Pricing estimate created successfully")
            return True
        finally:
            # Always release the cursor, even when a statement raises.
            cursor.close()

    except psycopg2.errors.ForeignKeyViolation:
        # Expected when pricing_estimated references purchase_line only.
        print(f"  Info: Pricing estimate skipped (table references purchase_line only, not sale_line)")
        conn.rollback()
        return True  # Return True to continue processing

    except Exception as e:
        print(f"  ⚠ Error creating pricing estimate: {e}")
        import traceback
        traceback.print_exc()
        conn.rollback()
        return False
    finally:
        # Guarantee the connection is closed on every code path
        # (the original leaked it when an exception escaped mid-query).
        conn.close()
|
||||
|
||||
|
||||
def import_purchases(csv_file):
    """Import purchases from a CSV file, with migration-mapping tracking.

    :param csv_file: path to a CSV containing one purchase (and one line)
        per row: number, reference, source_id, purchase_date, party_name,
        currency, warehouse, payment_term, weight_basis, tol_min/tol_max,
        locations, incoterm, description/comment, plus line_* columns.

    Mappings from source-system ids to Tryton ids are collected in memory
    and batch-saved at the end via MigrationMapper. Per-row errors are
    caught and reported so one bad row does not abort the run.
    """
    Purchase = Model.get('purchase.purchase')
    PurchaseLine = Model.get('purchase.line')

    print(f"{'='*70}")
    print(f"IMPORTING PURCHASES FROM CSV")
    print(f"{'='*70}\n")
    print(f"Reading from: {csv_file}\n")

    imported_count = 0
    skipped_count = 0
    error_count = 0
    errors = []

    # Get company (assuming single company or default)
    Company = Model.get('company.company')
    companies = Company.find([])
    if not companies:
        print("✗ Error: No company found in the system")
        return
    company = companies[0]
    print(f"Using company: {company.rec_name}\n")

    # Collect all mappings for batch insert at the end
    purchase_mappings = []
    line_mappings = []

    try:
        # Initialize migration mapper (context manager owns its DB session)
        with MigrationMapper(DB_CONFIG) as mapper:

            # utf-8-sig handles a possible BOM in the CSV header.
            with open(csv_file, 'r', encoding='utf-8-sig') as file:
                reader = csv.DictReader(file)
                row_num = 0

                for row in reader:
                    row_num += 1

                    try:
                        # Extract fields from CSV
                        number = row.get('number', '').strip()
                        reference = row.get('reference', '').strip()
                        source_purchase_id = row.get('source_id', '').strip()  # Source system ID

                        if not number:
                            print(f"Row {row_num}: Skipping - no number\n")
                            continue

                        print(f"{'='*70}")
                        print(f"Row {row_num}: Processing purchase {number}")
                        print(f"{'='*70}")

                        # CHECK IF ALREADY IMPORTED using migration mapper
                        if source_purchase_id:
                            existing_tryton_id = mapper.get_tryton_id('purchase_contract', source_purchase_id)
                            if existing_tryton_id:
                                print(f"  ⏭ Purchase already imported (Source ID: {source_purchase_id} -> Tryton ID: {existing_tryton_id})")
                                skipped_count += 1
                                print()
                                continue

                        # Alternative: Check by number using existing helper
                        existing_purchase = find_purchase_contract_by_number(number)
                        if existing_purchase:
                            print(f"  ⏭ Purchase {number} already exists (ID: {existing_purchase.id})")
                            # Save mapping even if it already exists (for reconciliation)
                            if source_purchase_id:
                                purchase_mappings.append({
                                    'object_type': 'purchase_contract',
                                    'source_id': source_purchase_id,
                                    'tryton_model': 'purchase.purchase',
                                    'tryton_id': existing_purchase.id,
                                    'recon_key': number
                                })
                            skipped_count += 1
                            print()
                            continue

                        # Parse other fields
                        purchase_date = parse_date(row.get('purchase_date'))
                        party_name = row.get('party_name', '').strip()

                        # Find related records
                        party = find_party_by_name(party_name)
                        if not party:
                            raise ValueError(f"Party not found: {party_name}")

                        # Check party is supplier (may auto-enable the flag
                        # depending on AUTO_ENABLE_SUPPLIERS)
                        if not ensure_party_is_supplier(party, auto_enable=AUTO_ENABLE_SUPPLIERS):
                            if SKIP_NON_SUPPLIERS:
                                print(f"  ⏭ Skipping - party {party_name} is not a supplier\n")
                                skipped_count += 1
                                continue
                            else:
                                raise ValueError(f"Party {party_name} is not a supplier")

                        # Reload party after category addition to get fresh data
                        Party = Model.get('party.party')
                        party = Party(party.id)

                        # Find invoice address
                        invoice_address = get_party_invoice_address(party)
                        if not invoice_address:
                            raise ValueError(f"No invoice address found for party {party_name}")

                        # Parse additional fields
                        currency = find_currency_by_code(row.get('currency', 'USD'))
                        warehouse = find_warehouse(row.get('warehouse'))
                        payment_term = find_payment_term_by_name(row.get('payment_term'))
                        weight_basis_abbr = find_weight_basis_by_name(row.get('weight_basis'))
                        tol_min = parse_decimal(row.get('tol_min', ''), 'tol_min')
                        tol_max = parse_decimal(row.get('tol_max', ''), 'tol_max')

                        from_location_name = row.get('from_location_name', '').strip()
                        from_location = find_location(from_location_name)

                        to_location_name = row.get('to_location_name', '').strip()
                        to_location = find_location(to_location_name)

                        # 2025: incoterm edition year — TODO confirm this
                        # should not come from the contract date instead.
                        incoterm_code = row.get('incoterm_name', '').strip()
                        incoterm = find_incoterm_by_code(incoterm_code, 2025)

                        description = row.get('description', '').strip()
                        comment = row.get('comment', '').strip()

                        # CREATE PURCHASE
                        print(f"  Creating purchase...")
                        purchase= Purchase()
                        purchase.company = company
                        purchase.reference = reference
                        purchase.party = party
                        purchase.invoice_address = invoice_address

                        purchase.purchase_date = purchase_date
                        purchase.currency = currency
                        if warehouse:
                            purchase.warehouse = warehouse
                        purchase.payment_term = payment_term
                        purchase.wb = weight_basis_abbr
                        purchase.tol_min = tol_min
                        purchase.tol_max = tol_max
                        purchase.incoterm = incoterm
                        purchase.from_location = from_location
                        purchase.to_location = to_location
                        purchase.description = description
                        purchase.comment = comment
                        purchase.state = DEFAULT_STATE
                        purchase.invoice_method = DEFAULT_INVOICE_METHOD

                        # Save the purchase
                        purchase.save()
                        print(f"  ✓ Purchase created (ID: {purchase.id})")

                        # Update custom fields (like 'number') via direct SQL,
                        # since 'number' is not writable through the ORM.
                        custom_fields = {'number': number}
                        update_purchase_custom_fields(purchase.id, custom_fields)

                        # SAVE MIGRATION MAPPING for purchase (queued; batch
                        # insert happens after the loop)
                        if source_purchase_id:
                            purchase_mappings.append({
                                'object_type': 'purchase_contract',
                                'source_id': source_purchase_id,
                                'tryton_model': 'purchase.purchase',
                                'tryton_id': purchase.id,
                                'recon_key': number
                            })
                            print(f"  📝 Mapping queued: Source {source_purchase_id} -> Tryton {purchase.id}")

                        # Process purchase lines
                        line_type = row.get('line_type', 'line').strip()
                        source_line_id = row.get('source_line_id', '').strip()

                        if line_type == 'line':
                            # Regular product line
                            product_code = row.get('line_product_code', '').strip()
                            quantity = parse_decimal(row.get('line_quantity', ''), 'quantity')
                            unit_price = parse_decimal(row.get('line_price', ''), 'unit_price')

                            product = find_product_by_code(product_code)
                            if not product:
                                raise ValueError(f"Product not found: {product_code}")

                            unit = find_uom_by_code(row.get('line_unit_code', ''))

                            # Parse shipping dates
                            from_del = parse_date(row.get('line_from_del', ''))
                            to_del = parse_date(row.get('line_to_del', ''))

                            # Create line
                            line = PurchaseLine()
                            line.purchase = purchase
                            line.type = 'line'
                            sequence = 1  # Default sequence, can be enhanced to handle multiple lines
                            line.sequence = sequence
                            line.product = product
                            line.quantity = quantity
                            line.unit = unit if unit else product.purchase_uom
                            line.unit_price = unit_price
                            line.from_del = from_del
                            line.to_del = to_del

                            # Optional fields
                            description = row.get('description', '').strip()
                            if description:
                                line.description = description

                            line.save()

                            # # Update line custom fields
                            # line_custom = {}
                            # from_del = row.get('from_del', '').strip()
                            # to_del = row.get('to_del', '').strip()
                            # if from_del:
                            #     line_custom['from_del'] = from_del
                            # if to_del:
                            #     line_custom['to_del'] = to_del

                            # if line_custom:
                            #     update_line_custom_fields(line.id, line_custom)

                            # Create pricing estimate if applicable
                            pricing_trigger = row.get('pricing_trigger', '').strip()
                            pricing_estimated_date = parse_date(row.get('pricing_estimated_date', ''))
                            if pricing_trigger:
                                pricing_data = {
                                    'trigger': pricing_trigger,
                                    'estimated_date': pricing_estimated_date
                                }
                                create_pricing_estimated(line.id, pricing_data)

                            # SAVE MIGRATION MAPPING for line
                            if source_line_id:
                                line_mappings.append({
                                    'object_type': 'purchase_line',
                                    'source_id': source_line_id,
                                    'tryton_model': 'purchase.line',
                                    'tryton_id': line.id,
                                    'recon_key': f"{number}-Line {sequence}-{product_code}"
                                })

                            print(f"  ✓ Added line (ID: {line.id})")
                            print(f"    Product: {product.rec_name}")
                            print(f"    Quantity: {quantity}")

                        else:
                            # Comment, subtitle, or other line types
                            line = PurchaseLine()
                            line.purchase = purchase
                            line.type = line_type
                            line.description = row.get('description', '').strip()
                            line.save()

                            print(f"  ✓ Added {line_type} line (ID: {line.id})")

                        imported_count += 1
                        print(f"✓ Successfully imported purchase {number}\n")

                    except Exception as e:
                        # Per-row failure: record and continue with the next row.
                        error_msg = f"Row {row_num} - {number if 'number' in locals() else 'Unknown'}: {str(e)}"
                        errors.append(error_msg)
                        error_count += 1
                        print(f"✗ Error on row {row_num}: {e}\n")
                        import traceback
                        traceback.print_exc()

            # BATCH SAVE ALL MAPPINGS at the end
            print(f"\n{'='*70}")
            print("SAVING MIGRATION MAPPINGS")
            print(f"{'='*70}\n")

            if purchase_mappings:
                print(f"Saving {len(purchase_mappings)} purchase mappings...")
                mapper.save_mappings_batch(purchase_mappings)
                print(f"✓ Purchase mappings saved\n")

            if line_mappings:
                print(f"Saving {len(line_mappings)} line mappings...")
                mapper.save_mappings_batch(line_mappings)
                print(f"✓ Line mappings saved\n")

            # Summary
            print(f"{'='*70}")
            print("IMPORT SUMMARY")
            print(f"{'='*70}")
            print(f"Successfully imported: {imported_count} purchases")
            print(f"Skipped (already exist): {skipped_count} purchases")
            print(f"Errors: {error_count}")
            print(f"Migration mappings saved: {len(purchase_mappings)} purchases, {len(line_mappings)} lines")

            if errors:
                print(f"\nError details:")
                for error in errors:
                    print(f"  - {error}")

            print(f"\n{'='*70}")

    except FileNotFoundError:
        print(f"✗ Error: CSV file not found at {csv_file}")
        print(f"Please update CSV_FILE_PATH in the script with the correct path.")
    except Exception as e:
        print(f"✗ Fatal error: {e}")
        import traceback
        traceback.print_exc()
|
||||
|
||||
|
||||
def verify_import():
    """Verify imported purchases and their migration mappings.

    Prints the 10 most recently created purchases with their custom
    ``number`` (read via direct SQL, since it is a custom column), their
    reference/party/state, and the source id recovered from the
    ``os_migration_mapping`` table.  Purely informational; returns None.
    """
    Purchase = Model.get('purchase.purchase')

    print(f"\n{'='*70}")
    print("VERIFICATION - Purchases and Migration Mappings")
    print(f"{'='*70}\n")

    # Find all purchases (or limit to recently created ones)
    purchases = Purchase.find([], order=[('id', 'DESC')])

    if purchases:
        print(f"Found {len(purchases)} purchases (showing last 10):\n")
        print(f"{'ID':<8} {'Number':<15} {'Reference':<15} {'Party':<25} {'State':<12} {'Source ID':<15}")
        print("-" * 100)

        # Initialize mapper to look up source IDs
        with MigrationMapper(DB_CONFIG) as mapper:
            for purchase in purchases[:10]:  # Show last 10 created
                purchase_id = purchase.id

                # Get number from database since it's a custom field.
                # NOTE(review): opens a new DB connection per purchase;
                # acceptable for 10 rows but worth hoisting if the limit grows.
                conn = get_db_connection()
                number = 'N/A'
                if conn:
                    try:
                        cursor = conn.cursor()
                        cursor.execute("SELECT number FROM purchase_purchase WHERE id = %s", (purchase_id,))
                        result = cursor.fetchone()
                        if result and result[0]:
                            number = str(result[0])[:14]
                        cursor.close()
                        conn.close()
                    except:
                        # Bare except: any failure just leaves number='N/A'.
                        # NOTE(review): this also swallows KeyboardInterrupt.
                        if conn:
                            conn.close()

                # Look up source ID from migration mapping
                source_id = 'N/A'
                try:
                    cursor = mapper.connection.cursor()
                    cursor.execute("""
                        SELECT source_id[1]
                        FROM public.os_migration_mapping
                        WHERE tryton_id = %s
                        AND 'purchase_contract' = ANY(object_type)
                        ORDER BY write_date DESC
                        LIMIT 1
                    """, (purchase_id,))
                    result = cursor.fetchone()
                    if result and result[0]:
                        source_id = str(result[0])[:14]
                    cursor.close()
                except:
                    # Best-effort lookup: missing mapping leaves 'N/A'.
                    pass

                # Truncate display fields to keep the table aligned.
                reference = purchase.reference[:14] if purchase.reference else 'N/A'
                party = purchase.party.rec_name[:24] if purchase.party else 'N/A'
                state = purchase.state if purchase.state else 'N/A'

                print(f"{purchase_id:<8} {number:<15} {reference:<15} {party:<25} {state:<12} {source_id:<15}")

                # Show lines
                if purchase.lines:
                    print(f" Lines: {len(purchase.lines)}")
                    for line in purchase.lines[:3]:  # Show first 3 lines
                        if line.type == 'line' and line.product:
                            print(f" - {line.product.rec_name[:40]} | Qty: {line.quantity} | Price: {line.unit_price}")
                        else:
                            print(f" - [{line.type}] {(line.description or '')[:40]}")
    else:
        print("No purchases found")

    print()
||||
def check_mapping_stats():
    """Display statistics about migration mappings.

    Queries ``os_migration_mapping`` grouped by the first element of the
    ``object_type`` array and prints count plus first/last import dates
    per type, followed by the total row count.  Returns None.
    """
    print(f"\n{'='*70}")
    print("MIGRATION MAPPING STATISTICS")
    print(f"{'='*70}\n")

    try:
        with MigrationMapper(DB_CONFIG) as mapper:
            cursor = mapper.connection.cursor()

            # Count mappings by object type (object_type is a Postgres
            # array; [1] is its first element).
            cursor.execute("""
                SELECT
                    object_type[1] as obj_type,
                    COUNT(*) as count,
                    MIN(write_date) as first_import,
                    MAX(write_date) as last_import
                FROM public.os_migration_mapping
                GROUP BY object_type[1]
                ORDER BY count DESC
            """)

            results = cursor.fetchall()

            if results:
                print(f"{'Object Type':<25} {'Count':<10} {'First Import':<15} {'Last Import':<15}")
                print("-" * 70)
                for row in results:
                    obj_type = row[0] or 'N/A'
                    count = row[1]
                    # write_date may be NULL for rows created outside the importer.
                    first = row[2].strftime('%Y-%m-%d') if row[2] else 'N/A'
                    last = row[3].strftime('%Y-%m-%d') if row[3] else 'N/A'
                    print(f"{obj_type:<25} {count:<10} {first:<15} {last:<15}")

                # Total count
                cursor.execute("SELECT COUNT(*) FROM public.os_migration_mapping")
                total = cursor.fetchone()[0]
                print(f"\nTotal mappings: {total}")
            else:
                print("No migration mappings found")

            cursor.close()

    except Exception as e:
        # Reporting is best-effort; log and continue.
        print(f"Error retrieving mapping statistics: {e}")
        import traceback
        traceback.print_exc()

    print()
||||
def prepare_parties_as_supplier(csv_file):
    """Pre-process: add the SUPPLIER category to all parties named in the CSV.

    Reads the ``party_name`` column of *csv_file*, resolves each unique
    name to a party (by name, then by code), and appends the SUPPLIER
    category to parties that do not already carry it.

    :param csv_file: path to the CSV file listing parties.
    :return: True on completion, False when the SUPPLIER category or the
        CSV file is missing, or on an unexpected error.
    """
    Party = Model.get('party.party')
    Category = Model.get('party.category')

    print(f"{'='*70}")
    print(f"PREPARING PARTIES AS SUPPLIERS (via Categories)")
    print(f"{'='*70}\n")

    # Find SUPPLIER category (exact match first).
    supplier_categories = Category.find([('name', '=', 'SUPPLIER')])
    if not supplier_categories:
        # Try case-insensitive: scan all categories client-side.
        all_categories = Category.find([])
        for cat in all_categories:
            if cat.name.upper() == 'SUPPLIER':
                supplier_categories = [cat]
                break

    if not supplier_categories:
        print(f"✗ SUPPLIER category not found in the system!")
        print(f"Please create a party category named 'SUPPLIER' first.\n")
        return False

    supplier_category = supplier_categories[0]
    print(f"Found SUPPLIER category (ID: {supplier_category.id})\n")

    # Unique party names seen in the CSV.
    party_names = set()

    try:
        with open(csv_file, 'r', encoding='utf-8-sig') as file:
            reader = csv.DictReader(file)
            for row in reader:
                party_name = row.get('party_name', '').strip()
                if party_name:
                    party_names.add(party_name)

        print(f"Found {len(party_names)} unique parties in CSV\n")

        updated_count = 0
        already_supplier_count = 0
        not_found_count = 0

        for party_name in party_names:
            print(f"Processing party: {party_name}")

            # Find party by name, falling back to code.
            parties = Party.find([('name', '=', party_name)])
            if not parties:
                parties = Party.find([('code', '=', party_name)])

            if not parties:
                print(f" ✗ Not found\n")
                not_found_count += 1
                continue

            party = parties[0]

            # Check if already has SUPPLIER category (case-insensitive).
            has_supplier = False
            if party.categories:
                for cat in party.categories:
                    if cat.name.upper() == 'SUPPLIER':
                        has_supplier = True
                        break

            if has_supplier:
                print(f" ✓ Already has SUPPLIER category\n")
                already_supplier_count += 1
                continue

            # Add SUPPLIER category using Proteus
            try:
                # Reload party and category in same context so the
                # append/save happens on fresh records.
                party_to_update = Party(party.id)
                supplier_cat = Category(supplier_category.id)

                party_to_update.categories.append(supplier_cat)
                party_to_update.save()
                print(f" ✓ SUPPLIER category added\n")
                updated_count += 1
            except Exception as e:
                # A single failed party does not abort the run.
                print(f" ✗ Failed: {e}\n")

        print(f"{'='*70}")
        print(f"PREPARATION SUMMARY")
        print(f"{'='*70}")
        print(f"Already have SUPPLIER category: {already_supplier_count}")
        print(f"SUPPLIER category added: {updated_count}")
        print(f"Not found: {not_found_count}")
        print(f"{'='*70}\n")

        return True

    except FileNotFoundError:
        print(f"✗ Error: CSV file not found at {csv_file}\n")
        return False
    except Exception as e:
        print(f"✗ Error: {e}\n")
        import traceback
        traceback.print_exc()
        return False
||||
|
||||
def main():
    """Entry point of the purchase import script.

    Connects to Tryton, marks CSV parties as suppliers, imports the
    purchases (recording migration mappings), then verifies the import
    and prints mapping statistics.

    :return: process exit code — 0 on success, 1 if the Tryton
        connection could not be established.
    """
    print("="*70)
    print("TRYTON PURCHASE IMPORT SCRIPT WITH MIGRATION MAPPING")
    print("Using Proteus with XML-RPC Connection")
    print("="*70)
    print()

    # Connect to Tryton using XML-RPC
    if not connect_to_tryton():
        return 1

    # Prepare all parties in the CSV as suppliers before importing.
    # (Comment out this call to skip the preparation step.)
    prepare_parties_as_supplier(CSV_FILE_PATH)

    # Import purchases with migration mapping
    import_purchases(CSV_FILE_PATH)

    # Verify import
    verify_import()

    # Show mapping statistics
    check_mapping_stats()

    return 0
|
||||
# Script entry point: propagate main()'s return value as the exit code.
if __name__ == '__main__':
    exit(main())
||||
@@ -0,0 +1,364 @@
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
# Add parent directory to Python path so we can import helpers
|
||||
parent_dir = Path(__file__).parent.parent
|
||||
sys.path.insert(0, str(parent_dir))
|
||||
|
||||
import csv
|
||||
from decimal import Decimal
|
||||
from proteus import config, Model
|
||||
|
||||
from helpers.config import (
|
||||
PURCHASE_FEES_CSV,
|
||||
connect_to_tryton)
|
||||
|
||||
from helpers.tryton_helpers import (
|
||||
find_party_by_name,
|
||||
find_product_by_code,
|
||||
find_purchase_contract_by_ref,
|
||||
find_contract_line_by_sequence,
|
||||
find_currency_by_code,
|
||||
parse_decimal,
|
||||
find_supplier_category,
|
||||
ensure_party_is_supplier,
|
||||
find_fee_mode_by_name,
|
||||
find_payable_receivable_by_name,
|
||||
get_existing_fees_for_line,
|
||||
fee_already_exists)
|
||||
|
||||
|
||||
# CSV Configuration
|
||||
CSV_FILE_PATH = PURCHASE_FEES_CSV
|
||||
|
||||
|
||||
# Import options
|
||||
AUTO_ENABLE_SUPPLIER = True # Set to False to skip auto-enabling supplier flag
|
||||
SKIP_NON_SUPPLIERS = False # Set to True to skip parties that aren't suppliers
|
||||
|
||||
|
||||
def import_purchase_contract_fees(csv_file):
    """Import purchase contract line fees from a CSV file.

    Expected CSV columns: ``contract_ref``, ``line_sequence``, ``product``,
    ``supplier``, ``currency``, ``p_r``, ``mode``, ``price``, ``unit``.
    Contract and contract-line lookups are cached across consecutive rows
    sharing the same reference/sequence.  Rows with missing or unresolvable
    data are skipped; existing identical fees are not duplicated, so the
    import is safe to re-run.  Prints per-row progress and a summary.

    :param csv_file: path to the fees CSV file.
    :return: None.
    """

    print(f"{'='*70}")
    print("IMPORTING PURCHASE CONTRACT LINE FEES")
    print(f"{'='*70}\n")

    # Get models — the fee model name is customization-specific.
    try:
        PurchaseLineFee = Model.get('fee.fee')
    except Exception as e:
        print(f"✗ Error: Could not load fee.fee model - {e}")
        print("Please ensure the model name is correct for your Tryton customization")
        return

    imported_count = 0
    skipped_count = 0
    error_count = 0
    errors = []

    try:
        with open(csv_file, 'r', encoding='utf-8-sig') as file:
            reader = csv.DictReader(file)

            # Lookup cache for the most recently resolved contract/line.
            current_contract_ref = None
            current_contract = None
            current_line_sequence = None
            current_line = None

            for row_num, row in enumerate(reader, start=2):  # Start at 2 (header is row 1)
                try:
                    # Extract data from CSV
                    contract_ref = row.get('contract_ref', '').strip()
                    line_sequence = row.get('line_sequence', '').strip()
                    product_code = row.get('product', '').strip()
                    supplier_name = row.get('supplier', '').strip()
                    currency_code = row.get('currency', '').strip()
                    p_r_value = row.get('p_r', '').strip()
                    mode_name = row.get('mode', '').strip()
                    price_value = row.get('price', '').strip()
                    unit_value = row.get('unit', '').strip()

                    print(f"Processing row {row_num}: {contract_ref} - Line {line_sequence} - {product_code}")

                    # Validate required fields
                    if not contract_ref:
                        print(f" ✗ Skipping: Missing contract_ref\n")
                        skipped_count += 1
                        continue

                    if not line_sequence:
                        print(f" ✗ Skipping: Missing line_sequence\n")
                        skipped_count += 1
                        continue

                    if not product_code:
                        print(f" ✗ Skipping: Missing product\n")
                        skipped_count += 1
                        continue

                    # Re-resolve the contract only when the reference changes.
                    if contract_ref != current_contract_ref:
                        current_contract = find_purchase_contract_by_ref(contract_ref)
                        current_contract_ref = contract_ref
                        current_line_sequence = None
                        current_line = None

                    if not current_contract:
                        print(f" ✗ Skipping: Contract not found\n")
                        skipped_count += 1
                        continue

                    # Re-resolve the line only when the sequence changes.
                    if line_sequence != current_line_sequence:
                        current_line = find_contract_line_by_sequence(current_contract, line_sequence)
                        current_line_sequence = line_sequence

                    if not current_line:
                        print(f" ✗ Skipping: Contract line not found\n")
                        skipped_count += 1
                        continue

                    # Find related records
                    product = find_product_by_code(product_code)
                    if not product:
                        print(f" ✗ Skipping: Product not found\n")
                        skipped_count += 1
                        continue

                    supplier = find_party_by_name(supplier_name)
                    if not supplier:
                        print(f" ✗ Skipping: Supplier not found\n")
                        skipped_count += 1
                        continue

                    # Ensure party has SUPPLIER category
                    supplier, is_supplier = ensure_party_is_supplier(supplier, auto_enable=AUTO_ENABLE_SUPPLIER)

                    # BUGFIX: removed dead `current_purchase = None` assignments
                    # left over from the purchase-import script (no such
                    # variable is read anywhere in this function).
                    if not is_supplier:
                        if SKIP_NON_SUPPLIERS:
                            print(f" ⚠ Skipping purchase - party does not have SUPPLIER category\n")
                            skipped_count += 1
                            continue
                        else:
                            error_msg = f"Row {row_num}: Party '{supplier.rec_name}' does not have SUPPLIER category"
                            errors.append(error_msg)
                            error_count += 1
                            continue

                    currency = find_currency_by_code(currency_code)
                    if not currency:
                        print(f" ✗ Skipping: Currency not found\n")
                        skipped_count += 1
                        continue

                    # Parse price
                    price = parse_decimal(price_value, 'price')
                    if price is None:
                        print(f" ✗ Skipping: Invalid price\n")
                        skipped_count += 1
                        continue

                    # Determine payable/receivable
                    payable_receivable = find_payable_receivable_by_name(p_r_value)

                    # Find fee mode
                    mode = find_fee_mode_by_name(mode_name)

                    # Skip duplicates so the import can be re-run safely.
                    existing_fees = get_existing_fees_for_line(current_line)
                    if fee_already_exists(existing_fees, product, supplier, price):
                        print(f" ○ Fee already exists for this line\n")
                        skipped_count += 1
                        continue

                    # Create the fee
                    fee = PurchaseLineFee()
                    fee.line = current_line
                    fee.product = product
                    fee.supplier = supplier
                    fee.currency = currency
                    fee.price = price

                    # Set type if a mode was found and the field exists.
                    # NOTE(review): gating `type`/`weight_type` on `mode`
                    # mirrors the original; confirm whether that coupling
                    # is intentional.
                    if mode and hasattr(fee, 'type'):
                        fee.type = 'ordered'  # Assuming all imported fees are 'ordered'

                    # Set weight_type if found and field exists
                    if mode and hasattr(fee, 'weight_type'):
                        fee.weight_type = 'brut'

                    # BUGFIX: p_r was previously assigned only when a *mode*
                    # was found, although the comment said "if found" meaning
                    # payable_receivable — gate it on its own lookup result.
                    if payable_receivable and hasattr(fee, 'p_r'):
                        fee.p_r = payable_receivable

                    # Set mode if found and field exists
                    if mode and hasattr(fee, 'mode'):
                        fee.mode = mode

                    # Set unit if field exists
                    if unit_value and hasattr(fee, 'unit'):
                        # Try to find the unit by symbol first, then by name.
                        Unit = Model.get('product.uom')
                        units = Unit.find([('symbol', '=', unit_value)])
                        if not units:
                            units = Unit.find([('name', '=', unit_value)])
                        if units:
                            fee.unit = units[0]

                    # Save the fee
                    fee.save()

                    print(f" ✓ Fee created successfully")
                    print(f" Product: {product.rec_name}")
                    print(f" Supplier: {supplier.rec_name}")
                    print(f" Price: {price} {currency.code}")
                    print(f" Type: {payable_receivable}")
                    print()

                    imported_count += 1

                except Exception as e:
                    # BUGFIX: guard against contract_ref being unbound when the
                    # failure occurs before it is assigned on the first row
                    # (same pattern as the sibling purchase-import script).
                    error_msg = f"Row {row_num} - {contract_ref if 'contract_ref' in locals() else 'Unknown'}: {str(e)}"
                    errors.append(error_msg)
                    error_count += 1
                    print(f"✗ Error on row {row_num}: {e}\n")
                    import traceback
                    traceback.print_exc()

        # Summary
        print(f"{'='*70}")
        print("IMPORT SUMMARY")
        print(f"{'='*70}")
        print(f"Successfully imported: {imported_count} fees")
        print(f"Skipped (missing data or already exist): {skipped_count} fees")
        print(f"Errors: {error_count}")

        if errors:
            print(f"\nError details:")
            for error in errors:
                print(f" - {error}")

        print(f"\n{'='*70}")

    except FileNotFoundError:
        print(f"✗ Error: CSV file not found at {csv_file}")
        print(f"Please update CSV_FILE_PATH in the script with the correct path.")
    except Exception as e:
        print(f"✗ Fatal error: {e}")
        import traceback
        traceback.print_exc()
|
||||
def verify_import():
    """Verify imported purchase contract fees.

    Prints the 50 most recently created ``fee.fee`` records with their
    contract reference, product, supplier, price and payable/receivable
    type.  Uses hasattr() guards because the fee model's fields are
    customization-specific.  Purely informational; returns None.
    """

    print(f"\n{'='*70}")
    print("VERIFICATION - Purchase Contract Line Fees")
    print(f"{'='*70}\n")

    try:
        PurchaseLineFee = Model.get('fee.fee')

        # Find all fees (or limit to recently created ones)
        fees = PurchaseLineFee.find([], order=[('id', 'DESC')])

        if fees:
            print(f"Found {len(fees)} fees (showing last 50):\n")
            print(f"{'ID':<8} {'Contract':<15} {'Product':<25} {'Supplier':<25} {'Price':<12} {'Type':<12}")
            print("-" * 105)

            for fee in fees[:50]:  # Show last 50 created
                fee_id = fee.id

                # Get contract reference by walking fee -> line -> purchase.
                contract_ref = 'N/A'
                if hasattr(fee, 'line') and fee.line:
                    line = fee.line
                    if hasattr(line, 'purchase') and line.purchase:
                        contract = line.purchase
                        if hasattr(contract, 'reference') and contract.reference:
                            contract_ref = str(contract.reference)[:14]

                # Truncate display fields to keep the table aligned.
                product = fee.product.rec_name[:24] if hasattr(fee, 'product') and fee.product else 'N/A'
                supplier = fee.supplier.rec_name[:24] if hasattr(fee, 'supplier') and fee.supplier else 'N/A'
                price = f"{fee.price:.2f}" if hasattr(fee, 'price') and fee.price else 'N/A'

                # Get type (payable/receivable) — field name varies by
                # customization, so try both candidates.
                fee_type = 'N/A'
                if hasattr(fee, 'type'):
                    fee_type = fee.type
                elif hasattr(fee, 'payable_receivable'):
                    fee_type = fee.payable_receivable

                print(f"{fee_id:<8} {contract_ref:<15} {product:<25} {supplier:<25} {price:<12} {fee_type:<12}")
        else:
            print("No fees found")

        print()

    except Exception as e:
        # Verification is best-effort; log and continue.
        print(f"✗ Error during verification: {e}")
        import traceback
        traceback.print_exc()
|
||||
def list_purchase_contracts():
    """Print the 20 most recent purchase contracts (debugging aid).

    Shows id, reference, party and state for each contract, plus the
    number of lines when any exist.  Output only; nothing is modified
    or returned.
    """
    Purchase = Model.get('purchase.purchase')

    print(f"\n{'='*70}")
    print("AVAILABLE PURCHASE CONTRACTS (first 20)")
    print(f"{'='*70}")

    recent = Purchase.find([], order=[('id', 'DESC')], limit=20)

    if not recent:
        print("No purchase contracts found")
    else:
        print(f"{'ID':<8} {'Reference':<20} {'Party':<30} {'State':<12}")
        print("-" * 70)

        for pc in recent:
            # Truncate fields so the fixed-width columns stay aligned.
            ref_text = pc.reference[:19] if pc.reference else 'N/A'
            party_text = pc.party.rec_name[:29] if pc.party else 'N/A'
            state_text = pc.state if pc.state else 'N/A'

            print(f"{pc.id:<8} {ref_text:<20} {party_text:<30} {state_text:<12}")

            # Show number of lines
            if hasattr(pc, 'lines') and pc.lines:
                print(f" Lines: {len(pc.lines)}")

    print(f"{'='*70}\n")
|
||||
def main():
    """Entry point: connect to Tryton, import contract fees, verify them.

    :return: process exit code — 0 on success, 1 when the Tryton
        connection cannot be established.
    """
    banner = "=" * 70
    for header_line in (
        banner,
        "TRYTON PURCHASE CONTRACT FEE IMPORT SCRIPT",
        "Using Proteus with XML-RPC Connection",
        banner,
        "",
    ):
        print(header_line)

    # Abort early when the XML-RPC connection cannot be established.
    if not connect_to_tryton():
        return 1

    # Optional debugging aid: uncomment to list available contracts first.
    # list_purchase_contracts()

    # Run the import, then print a verification listing of the results.
    import_purchase_contract_fees(CSV_FILE_PATH)
    verify_import()

    return 0
||||
|
||||
# Script entry point: propagate main()'s return value as the exit code.
if __name__ == '__main__':
    exit(main())
||||
@@ -0,0 +1,913 @@
|
||||
# Add parent directory to Python path so we can import helpers
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
parent_dir = Path(__file__).parent.parent
|
||||
sys.path.insert(0, str(parent_dir))
|
||||
|
||||
import psycopg2
|
||||
import csv
|
||||
from decimal import Decimal
|
||||
from proteus import Model
|
||||
|
||||
from helpers.config import (
|
||||
SALE_CONTRACTS_CSV,
|
||||
connect_to_tryton,
|
||||
get_db_connection
|
||||
)
|
||||
|
||||
from helpers.tryton_helpers import (
|
||||
parse_decimal,
|
||||
parse_date,
|
||||
ensure_party_is_client,
|
||||
find_party_by_name,
|
||||
find_uom_by_code,
|
||||
find_currency_by_code,
|
||||
find_warehouse,
|
||||
find_location,
|
||||
find_payment_term_by_name,
|
||||
find_product_by_code,
|
||||
find_incoterm_by_code,
|
||||
find_weight_basis_by_name,
|
||||
get_party_invoice_address,
|
||||
find_sale_contract_by_number
|
||||
)
|
||||
|
||||
|
||||
# CSV Configuration
|
||||
CSV_FILE_PATH = SALE_CONTRACTS_CSV
|
||||
|
||||
|
||||
# Default values
|
||||
DEFAULT_STATE = 'draft'
|
||||
DEFAULT_INVOICE_METHOD = 'manual'
|
||||
DEFAULT_INVOICE_STATE = 'none'
|
||||
DEFAULT_SHIPMENT_STATE = 'none'
|
||||
|
||||
# Import options
|
||||
AUTO_ENABLE_CLIENT = True # Set to False to skip auto-enabling client flag
|
||||
SKIP_NON_CLIENTS = False # Set to True to skip parties that aren't clients
|
||||
|
||||
|
||||
def update_sale_custom_fields(sale_id, custom_data):
    """Update custom columns on a sale row using direct SQL.

    Bypasses the ORM because the target columns (currently only
    ``number``) are custom additions to ``sale_sale``.  Column existence
    is checked first so the UPDATE never references a missing column.

    :param sale_id: id of the sale_sale row to update.
    :param custom_data: dict of custom values; only keys matching known
        columns are applied.
    :return: True on success (including "nothing to do"), False on
        connection or SQL failure.
    """
    if not custom_data:
        return True

    conn = get_db_connection()
    if not conn:
        print(f" ⚠ Could not update custom fields - database connection failed")
        return False

    try:
        cursor = conn.cursor()

        # First, check what columns exist in sale_sale table
        cursor.execute("""
            SELECT column_name, data_type
            FROM information_schema.columns
            WHERE table_name = 'sale_sale'
            AND column_name IN ('number')
            ORDER BY column_name
        """)
        existing_columns = {row[0]: row[1] for row in cursor.fetchall()}

        print(f" Available custom columns in sale_sale:")
        for col_name, col_type in existing_columns.items():
            print(f" - {col_name} ({col_type})")

        # Build UPDATE query for custom fields. Column names come from
        # the hard-coded whitelist above, so the f-string is safe; values
        # are always passed as bind parameters.
        set_clauses = []
        values = []

        if 'number' in existing_columns and custom_data.get('number'):
            set_clauses.append("number = %s")
            values.append(custom_data['number'])
            print(f" Adding number = {custom_data['number']}")

        if set_clauses:
            values.append(sale_id)
            update_query = f"""
                UPDATE sale_sale
                SET {', '.join(set_clauses)}
                WHERE id = %s
            """

            print(f" Executing UPDATE with fields: {', '.join([c.split('=')[0].strip() for c in set_clauses])}")

            cursor.execute(update_query, values)
            rows_affected = cursor.rowcount
            conn.commit()

            if rows_affected > 0:
                print(f" ✓ {rows_affected} row(s) updated successfully")
            else:
                print(f" ⚠ No rows updated (sale_id={sale_id} not found?)")
        else:
            print(f" No custom fields to update (either no data provided or columns not found)")

        cursor.close()
        conn.close()
        return True

    except Exception as e:
        print(f" ⚠ Error updating custom fields: {e}")
        import traceback
        traceback.print_exc()
        # Roll back the open transaction and release the connection.
        if conn:
            conn.rollback()
            conn.close()
        return False
||||
|
||||
def update_line_custom_fields(line_id, custom_data):
    """Update custom columns on a sale line using direct SQL.

    Applies ``from_del`` / ``to_del`` values to the ``sale_line`` row.
    Unlike update_sale_custom_fields(), column existence is NOT checked
    here — the UPDATE will raise (and return False) if the custom
    columns are missing.

    :param line_id: id of the sale_line row to update.
    :param custom_data: dict with optional 'from_del' / 'to_del' keys.
    :return: True on success (including "nothing to do"), False on failure.
    """
    if not custom_data:
        return True

    conn = get_db_connection()
    if not conn:
        print(f" ⚠ Could not update line custom fields - database connection failed")
        return False

    try:
        cursor = conn.cursor()

        # Build UPDATE query for custom fields (values always bound).
        set_clauses = []
        values = []

        if custom_data.get('from_del'):
            set_clauses.append("from_del = %s")
            values.append(custom_data['from_del'])

        if custom_data.get('to_del'):
            set_clauses.append("to_del = %s")
            values.append(custom_data['to_del'])

        if set_clauses:
            values.append(line_id)
            update_query = f"""
                UPDATE sale_line
                SET {', '.join(set_clauses)}
                WHERE id = %s
            """

            cursor.execute(update_query, values)
            conn.commit()

        cursor.close()
        conn.close()
        return True

    except Exception as e:
        print(f" ⚠ Error updating line custom fields: {e}")
        import traceback
        traceback.print_exc()
        # Roll back the open transaction and release the connection.
        if conn:
            conn.rollback()
            conn.close()
        return False
||||
|
||||
def create_pricing_estimated(line_id, pricing_data):
    """Create a pricing_estimated record for a line using direct SQL.

    Silently succeeds (returns True) when the pricing_estimated table
    does not exist, or when its foreign key rejects sale lines — both
    are treated as "not applicable" rather than errors.

    :param line_id: id of the line to attach the estimate to.
    :param pricing_data: dict with 'trigger' (required) and optional
        'estimated_date'.
    :return: True on success or not-applicable, False on real failure.
    """
    if not pricing_data or not pricing_data.get('trigger'):
        return True

    conn = get_db_connection()
    if not conn:
        print(f" ⚠ Could not create pricing estimate - database connection failed")
        return False

    try:
        cursor = conn.cursor()

        # Check if pricing_estimated table exists and what its structure is
        cursor.execute("""
            SELECT column_name
            FROM information_schema.columns
            WHERE table_name = 'pricing_estimated'
            ORDER BY ordinal_position
        """)

        columns = [row[0] for row in cursor.fetchall()]

        # No columns => table absent; skip quietly.
        if not columns:
            print(f" Info: pricing_estimated table does not exist, skipping pricing estimate")
            cursor.close()
            conn.close()
            return True

        # Insert into pricing_estimated table
        # Note: This assumes the foreign key constraint allows sale_line references
        # If it doesn't, this will fail gracefully
        # NOTE(review): 'trigger' is used unquoted as a column name here;
        # confirm this parses on the target PostgreSQL version (Tryton
        # itself always quotes identifiers).
        # create_uid/write_uid are hard-coded to user id 1 (admin).
        insert_query = """
            INSERT INTO pricing_estimated (line, trigger, estimated_date, create_date, write_date, create_uid, write_uid)
            VALUES (%s, %s, %s, NOW(), NOW(), 1, 1)
        """

        cursor.execute(insert_query, (
            line_id,
            pricing_data['trigger'],
            pricing_data.get('estimated_date')
        ))

        conn.commit()
        cursor.close()
        conn.close()
        print(f" ✓ Pricing estimate created successfully")
        return True

    except psycopg2.errors.ForeignKeyViolation as e:
        # This is expected if pricing_estimated references purchase_line only
        print(f" Info: Pricing estimate skipped (table references purchase_line only, not sale_line)")
        if conn:
            conn.rollback()
            conn.close()
        return True  # Return True to continue processing

    except Exception as e:
        print(f" ⚠ Error creating pricing estimate: {e}")
        import traceback
        traceback.print_exc()
        if conn:
            conn.rollback()
            conn.close()
        return False
||||
|
||||
def create_sale(row):
    """Create a new sale (header only) using proteus.

    :param row: dict of already-resolved values — 'company', 'party' and
        'currency' are required proteus records; the remaining keys are
        optional ('reference', 'description', 'sale_date', 'warehouse',
        'payment_term', 'invoice_method', 'invoice_address', 'comment',
        'wb', 'tol_min', 'tol_max', 'from_location', 'to_location',
        'incoterm', 'state', 'number').
    :return: the saved proteus sale record.
    """
    Sale = Model.get('sale.sale')

    # Create sale header
    sale = Sale()

    # Set company first (required for domain evaluation)
    sale.company = row['company']

    # Required fields
    sale.party = row['party']
    sale.currency = row['currency']

    # Optional header fields
    if row.get('reference'):
        sale.reference = row['reference']

    # 'number' is intentionally NOT set through the ORM; it is written
    # via direct SQL below (custom column on sale_sale).
    # if row.get('number'):
    #     sale.number = row['number']

    if row.get('description'):
        sale.description = row['description']

    if row.get('sale_date'):
        sale.sale_date = row['sale_date']

    if row.get('warehouse'):
        sale.warehouse = row['warehouse']

    if row.get('payment_term'):
        sale.payment_term = row['payment_term']

    if row.get('invoice_method'):
        sale.invoice_method = row['invoice_method']
    else:
        sale.invoice_method = DEFAULT_INVOICE_METHOD

    if row.get('invoice_address'):
        sale.invoice_address = row['invoice_address']
    elif row['party']:
        # Get default invoice address from party
        invoice_address = get_party_invoice_address(row['party'])
        if invoice_address:
            sale.invoice_address = invoice_address

    if row.get('comment'):
        sale.comment = row['comment']

    # wb / tol_min / tol_max are customization fields
    # (weight basis and quantity tolerances).
    if row.get('wb'):
        sale.wb = row['wb']

    if row.get('tol_min'):
        sale.tol_min = row['tol_min']

    if row.get('tol_max'):
        sale.tol_max = row['tol_max']

    if row.get('from_location'):
        sale.from_location = row['from_location']

    if row.get('to_location'):
        sale.to_location = row['to_location']

    if row.get('incoterm'):
        sale.incoterm = row['incoterm']

    # State and status fields — set directly rather than via workflow
    # transitions, since this is a migration import.
    sale.state = row.get('state', DEFAULT_STATE)
    sale.invoice_state = DEFAULT_INVOICE_STATE
    sale.shipment_state = DEFAULT_SHIPMENT_STATE

    # Save sale header first
    sale.save()

    # Update custom fields via SQL if provided (including the number field)
    custom_data = {}
    if row.get('number') is not None:
        custom_data['number'] = row['number']

    if custom_data:
        update_sale_custom_fields(sale.id, custom_data)

    return sale
|
||||
|
||||
def create_sale_line(sale, line_data):
    """Create and save a single sale line attached to ``sale``.

    Args:
        sale: the sale header record the line belongs to.
        line_data: dict describing the line.  For type 'line' it must
            contain 'product', 'quantity' and 'unit_price'; 'unit',
            'description', 'taxes', 'shipping_date', 'from_del', 'to_del'
            and the pricing_* keys are optional.  For 'comment'/'title'/
            'subtotal' types only 'description' is used.

    Returns:
        The saved sale.line record.
    """
    Line = Model.get('sale.line')

    line = Line()
    line.sale = sale
    line.type = line_data.get('type', 'line')
    line.sequence = 1  # Default sequence, can be adjusted later if needed

    if line.type == 'line':
        # Product line
        line.product = line_data['product']
        line.quantity = line_data['quantity']

        # Unit - use provided or fall back to the product's sale UOM
        if line_data.get('unit'):
            line.unit = line_data['unit']
        else:
            line.unit = line_data['product'].sale_uom

        line.unit_price = line_data['unit_price']

        if line_data.get('description'):
            line.description = line_data['description']

        # Set taxes if provided
        if line_data.get('taxes'):
            line.taxes = line_data['taxes']

        # Shipping date: mark the edit flag so the stored date is honoured
        if line_data.get('shipping_date'):
            line.shipping_date_edit = True
            line.shipping_date_store = line_data['shipping_date']

        # Delivery window (custom fields)
        if line_data.get('from_del'):
            line.from_del = line_data['from_del']
        if line_data.get('to_del'):
            line.to_del = line_data['to_del']

    elif line.type in ['comment', 'title', 'subtotal']:
        # Non-product lines only carry a description
        if line_data.get('description'):
            line.description = line_data['description']

    line.save()

    # Create pricing estimate if provided (stored via direct SQL helper);
    # the dict is only built when a trigger is actually present.
    if line_data.get('pricing_trigger'):
        pricing_data = {
            'trigger': line_data['pricing_trigger'],
            'estimated_date': line_data.get('pricing_estimated_date'),
        }
        create_pricing_estimated(line.id, pricing_data)

    return line
|
||||
|
||||
|
||||
def import_sales(csv_file):
    """Import sale contracts (headers + lines) from a CSV file.

    Rows sharing the same 'number' belong to one sale: the first such row
    creates the sale header, and every row may also contribute one sale
    line.  Per-row failures are collected and reported in a final summary
    instead of aborting the whole import.

    Args:
        csv_file: path to the CSV file (UTF-8; a BOM is tolerated).
    """

    imported_count = 0
    skipped_count = 0
    error_count = 0
    errors = []

    # Get company (assuming single company or default)
    Company = Model.get('company.company')
    companies = Company.find([])
    if not companies:
        print("✗ Error: No company found in the system")
        return
    company = companies[0]
    print(f"Using company: {company.rec_name}\n")

    # List available currencies for debugging
    #available_currencies = list_available_currencies()

    # List available payment terms for debugging
    #available_payment_terms = list_available_payment_terms()

    print(f"{'='*70}")
    print(f"Importing sales from: {csv_file}")
    print(f"{'='*70}\n")

    try:
        # Open with utf-8-sig to handle BOM
        with open(csv_file, 'r', encoding='utf-8-sig') as file:
            reader = csv.DictReader(file)

            # Debug: Show detected columns
            print(f"Detected CSV columns: {reader.fieldnames}\n")

            # Track current sale by number
            sales_by_number = {}

            # start=2 because file row 1 is the CSV header line
            for row_num, row in enumerate(reader, start=2):
                try:
                    # Clean up values
                    number = row.get('number', '').strip()
                    reference = row.get('reference', '').strip()
                    party_name = row.get('party_name', '').strip()

                    # Skip empty rows
                    if not number and not party_name:
                        continue

                    print(f"Processing Row {row_num}: Number '{number}', Reference '{reference}'")

                    # Check if we need to create a new sale for this number
                    if number and number not in sales_by_number:
                        # New sale header

                        # Find required entities
                        party = find_party_by_name(party_name)
                        if not party:
                            error_msg = f"Row {row_num}: Party '{party_name}' not found"
                            errors.append(error_msg)
                            error_count += 1
                            continue

                        # Debug: Show party details
                        print(f"  Found party: {party.rec_name}")
                        if hasattr(party, 'code'):
                            print(f"    Code: {party.code}")
                        if hasattr(party, 'id'):
                            print(f"    ID: {party.id}")

                        if hasattr(party, 'categories') and party.categories:
                            category_names = [cat.name for cat in party.categories]
                            print(f"    Categories: {', '.join(category_names)}")
                            has_client = any(cat.name.upper() == 'CLIENT' for cat in party.categories)
                            print(f"    Has CLIENT category: {has_client}")
                        else:
                            print(f"    Categories: None")

                        # Ensure party has CLIENT category
                        if not ensure_party_is_client(party, auto_enable=AUTO_ENABLE_CLIENT):
                            if SKIP_NON_CLIENTS:
                                print(f"  ⚠ Skipping sale - party does not have CLIENT category\n")
                                skipped_count += 1
                                continue
                            else:
                                error_msg = f"Row {row_num}: Party '{party.rec_name}' does not have CLIENT category"
                                errors.append(error_msg)
                                error_count += 1
                                continue

                        # Reload party after category addition to get fresh data
                        Party = Model.get('party.party')
                        party = Party(party.id)

                        # Check if sale already exists
                        existing_sale = find_sale_contract_by_number(number)
                        if existing_sale:
                            print(f"  ⚠ Sale with number '{number}' for party '{party.rec_name}' already exists (ID: {existing_sale.id})")

                            skipped_count += 1
                            continue  # Skip creating new sale if it already exists

                            # Continue to add line to existing sale or not or update existing one....
                            # To be decided based on requirements
                            # e.g: below will add lines to existing sale, but we need to check if that line already exists or not to avoid duplicates, or we can skip adding lines to existing sale to avoid complexity, etc.
                            # print(f"  Using existing sale...\n")
                            # sales_by_number[number] = existing_sale
                        else:

                            # Find currency
                            currency_code = row.get('currency_code', '').strip() or 'USD'
                            print(f"  Looking for currency: '{currency_code}'")
                            currency = find_currency_by_code(currency_code)
                            if not currency:
                                error_msg = f"Row {row_num}: Currency '{currency_code}' not found"
                                errors.append(error_msg)
                                error_count += 1
                                continue

                            # Optional fields
                            warehouse = None
                            warehouse_code = row.get('warehouse_code', '').strip()
                            if warehouse_code:
                                warehouse = find_warehouse(warehouse_code)

                            payment_term = None
                            payment_term_name = row.get('payment_term', '').strip()
                            if payment_term_name:
                                payment_term = find_payment_term_by_name(payment_term_name)
                                # Payment term is optional, so continue even if not found
                                if not payment_term:
                                    print(f"    Continuing without payment term")

                            # Parse dates
                            sale_date = parse_date(row.get('sale_date', ''))

                            # Find weight basis
                            wb = None
                            weight_basis_abbr = row.get('wb', '').strip()
                            print(f"  Looking for weight basis: '{weight_basis_abbr}'")
                            if weight_basis_abbr:
                                wb = find_weight_basis_by_name(weight_basis_abbr)
                                if not wb:
                                    print(f"    Continuing without weight basis")

                            # Parse custom numeric fields
                            #number = parse_decimal(row.get('number', ''), 'number')
                            number = row.get('number', '').strip()

                            tol_min = parse_decimal(row.get('tol_min', ''), 'tol_min')
                            tol_max = parse_decimal(row.get('tol_max', ''), 'tol_max')

                            # Get locations by name
                            from_location = None
                            from_location_name = row.get('from_location_name', '').strip()
                            if from_location_name:
                                from_location_obj = find_location(from_location_name)
                                if from_location_obj:
                                    from_location = from_location_obj

                            to_location = None
                            to_location_name = row.get('to_location_name', '').strip()
                            if to_location_name:
                                to_location_obj = find_location(to_location_name)
                                if to_location_obj:
                                    to_location = to_location_obj

                            # Get incoterm 2025 by code
                            incoterm = None
                            incoterm_code = row.get('incoterm_name', '').strip()
                            if incoterm_code:
                                incoterm = find_incoterm_by_code(incoterm_code, 2025)

                            # Prepare sale data
                            sale_data = {
                                'reference': reference,  # Keep reference field
                                'number': number,  # Add number field (stored via SQL)
                                'party': party,
                                'company': company,
                                'currency': currency,
                                'sale_date': sale_date,
                                'warehouse': warehouse,
                                'payment_term': payment_term,
                                'invoice_method': row.get('invoice_method', '').strip() or DEFAULT_INVOICE_METHOD,
                                'description': row.get('description', '').strip(),
                                'comment': row.get('comment', '').strip(),
                                'state': row.get('state', '').strip() or DEFAULT_STATE,
                                'wb': wb,
                                'tol_min': tol_min,
                                'tol_max': tol_max,
                                'from_location': from_location,
                                'to_location': to_location,
                                'incoterm': incoterm,
                            }

                            # Create the sale
                            current_sale = create_sale(sale_data)
                            sales_by_number[number] = current_sale

                            print(f"  ✓ Created sale header")
                            print(f"    Sale ID: {current_sale.id}")
                            print(f"    Number: {number}")
                            print(f"    Reference: {reference}")
                            print(f"    Party: {party.rec_name}")
                            print(f"    Currency: {currency.name if hasattr(currency, 'name') else currency.code}")
                            if sale_date:
                                print(f"    Sale Date: {sale_date}")
                            if wb is not None:
                                print(f"    WB: {wb.name}")
                            if tol_min is not None or tol_max is not None:
                                print(f"    Tolerances: Min={tol_min}, Max={tol_max}")
                            if from_location:
                                print(f"    Loading: {from_location.name}")
                            if to_location:
                                print(f"    Destination: {to_location.name}")
                            if incoterm:
                                print(f"    Incoterm: {incoterm.code}")

                            imported_count += 1

                    # Create sale line if we have a current sale and product data
                    current_sale = sales_by_number.get(number)
                    line_product_code = row.get('line_product_code', '').strip()
                    line_type = row.get('line_type', '').strip() or 'line'

                    if current_sale and (line_product_code or line_type != 'line'):

                        if line_type == 'line':
                            # Product line
                            product = find_product_by_code(line_product_code)
                            if not product:
                                print(f"  ⚠ Warning: Product '{line_product_code}' not found, skipping line")
                                continue

                            # Parse line data
                            quantity = parse_decimal(row.get('line_quantity', ''), 'quantity')
                            if quantity is None:
                                print(f"  ⚠ Warning: Invalid quantity, skipping line")
                                continue

                            unit_price = parse_decimal(row.get('line_unit_price', ''), 'unit_price')
                            if unit_price is None:
                                unit_price = Decimal('0')

                            # Parse shipping dates
                            from_del = parse_date(row.get('line_from_del', ''))
                            to_del = parse_date(row.get('line_to_del', ''))

                            # Parse pricing estimate data
                            pricing_trigger = row.get('pricing_trigger', '').strip()
                            pricing_estimated_date = parse_date(row.get('pricing_estimated_date', ''))

                            # Find UOM if specified
                            unit = None
                            line_unit_code = row.get('line_unit_code', '').strip()
                            if line_unit_code:
                                unit = find_uom_by_code(line_unit_code)

                            line_data = {
                                'type': 'line',
                                'product': product,
                                'quantity': quantity,
                                'unit': unit,
                                'unit_price': unit_price,
                                'description': row.get('line_description', '').strip(),
                                'from_del': from_del,
                                'to_del': to_del,
                                'pricing_trigger': pricing_trigger,
                                'pricing_estimated_date': pricing_estimated_date,
                            }

                        else:
                            # Non-product line (comment, title, subtotal)
                            line_data = {
                                'type': line_type,
                                'description': row.get('line_description', '').strip(),
                            }

                        # Create the line
                        line = create_sale_line(current_sale, line_data)

                        print(f"  ✓ Added line")
                        if line_type == 'line':
                            print(f"    Product: {product.rec_name}")
                            print(f"    Quantity: {quantity}")
                            if unit:
                                print(f"    Unit: {unit.symbol}")
                            print(f"    Unit Price: {unit_price}")
                            if from_del or to_del:
                                print(f"    Shipping: {from_del} to {to_del}")
                            if pricing_trigger:
                                print(f"    Pricing: {pricing_trigger} ({pricing_estimated_date})")
                        else:
                            print(f"    Type: {line_type}")

                    # Blank line separates per-row console output
                    print()

                except Exception as e:
                    error_msg = f"Row {row_num} - {number if 'number' in locals() else 'Unknown'}: {str(e)}"
                    errors.append(error_msg)
                    error_count += 1
                    print(f"✗ Error on row {row_num}: {e}\n")
                    import traceback
                    traceback.print_exc()

        # Summary
        print(f"{'='*70}")
        print("IMPORT SUMMARY")
        print(f"{'='*70}")
        print(f"Successfully imported: {imported_count} sales")
        print(f"Skipped (already exist): {skipped_count} sales")
        print(f"Errors: {error_count}")

        if errors:
            print(f"\nError details:")
            for error in errors:
                print(f"  - {error}")

        print(f"\n{'='*70}")

    except FileNotFoundError:
        print(f"✗ Error: CSV file not found at {csv_file}")
        print(f"Please update CSV_FILE_PATH in the script with the correct path.")
    except Exception as e:
        print(f"✗ Fatal error: {e}")
        import traceback
        traceback.print_exc()
|
||||
|
||||
|
||||
def _fetch_sale_number(sale_id):
    """Read the custom 'number' column for one sale directly from the DB.

    The column is not exposed through the sale model, so it has to be read
    with SQL.  Best effort: returns the value truncated to 14 characters,
    or 'N/A' when the connection fails or the column is empty.
    """
    conn = get_db_connection()
    number = 'N/A'
    if conn:
        try:
            cursor = conn.cursor()
            try:
                cursor.execute("SELECT number FROM sale_sale WHERE id = %s", (sale_id,))
                result = cursor.fetchone()
                if result and result[0]:
                    number = str(result[0])[:14]
            finally:
                cursor.close()
        except Exception:
            # Verification display only: fall back to 'N/A' on any DB error
            pass
        finally:
            conn.close()
    return number


def verify_import():
    """Verify imported sales by printing the 10 most recent sale headers.

    For each sale it shows id, custom number (via SQL), reference, party,
    state, total, and up to three of its lines.
    """
    Sale = Model.get('sale.sale')

    print(f"\n{'='*70}")
    print("VERIFICATION - Sales")
    print(f"{'='*70}\n")

    # Find all sales (or limit to recently created ones)
    sales = Sale.find([], order=[('id', 'DESC')])

    if sales:
        print(f"Found {len(sales)} sales (showing last 10):\n")
        print(f"{'ID':<8} {'Number':<15} {'Reference':<15} {'Party':<25} {'State':<12} {'Total':<15}")
        print("-" * 95)

        for sale in sales[:10]:  # Show last 10 created
            sale_id = sale.id

            # Get number from database since it's a custom field
            number = _fetch_sale_number(sale_id)

            reference = sale.reference[:14] if sale.reference else 'N/A'
            party = sale.party.rec_name[:24] if sale.party else 'N/A'
            state = sale.state if sale.state else 'N/A'
            total = f"{sale.total_amount:.2f}" if sale.total_amount else 'N/A'

            print(f"{sale_id:<8} {number:<15} {reference:<15} {party:<25} {state:<12} {total:<15}")

            # Show lines
            if sale.lines:
                print(f"  Lines: {len(sale.lines)}")
                for line in sale.lines[:3]:  # Show first 3 lines
                    if line.type == 'line' and line.product:
                        print(f"    - {line.product.rec_name[:40]} | Qty: {line.quantity} | Price: {line.unit_price}")
                    else:
                        print(f"    - [{line.type}] {(line.description or '')[:40]}")
    else:
        print("No sales found")

    print()
|
||||
|
||||
|
||||
def prepare_parties_as_clients(csv_file):
    """Pre-process: add the CLIENT category to every party named in the CSV.

    Reads the unique 'party_name' values from ``csv_file``, finds each
    party (by name, then by code) and appends the CLIENT category through
    Proteus when missing.  Prints a summary of updated / already-client /
    not-found parties.

    Args:
        csv_file: path to the sales CSV (UTF-8; a BOM is tolerated).

    Returns:
        True when the preparation ran to completion, False on any fatal
        error (missing CLIENT category, unreadable file, ...).
    """
    Party = Model.get('party.party')
    Category = Model.get('party.category')

    print(f"{'='*70}")
    print(f"PREPARING PARTIES AS CLIENTS (via Categories)")
    print(f"{'='*70}\n")

    # Find CLIENT category
    client_categories = Category.find([('name', '=', 'CLIENT')])
    if not client_categories:
        # Try case-insensitive
        all_categories = Category.find([])
        for cat in all_categories:
            if cat.name.upper() == 'CLIENT':
                client_categories = [cat]
                break

    if not client_categories:
        print(f"✗ CLIENT category not found in the system!")
        print(f"Please create a party category named 'CLIENT' first.\n")
        return False

    client_category = client_categories[0]
    print(f"Found CLIENT category (ID: {client_category.id})\n")

    # Unique party names collected from the CSV
    party_names = set()

    try:
        with open(csv_file, 'r', encoding='utf-8-sig') as file:
            reader = csv.DictReader(file)
            for row in reader:
                party_name = row.get('party_name', '').strip()
                if party_name:
                    party_names.add(party_name)

        print(f"Found {len(party_names)} unique parties in CSV\n")

        updated_count = 0
        already_client_count = 0
        not_found_count = 0

        for party_name in party_names:
            print(f"Processing party: {party_name}")

            # Find party (first by name, then by code as a fallback)
            parties = Party.find([('name', '=', party_name)])
            if not parties:
                parties = Party.find([('code', '=', party_name)])

            if not parties:
                print(f"  ✗ Not found\n")
                not_found_count += 1
                continue

            party = parties[0]

            # Check if already has CLIENT category
            has_client = False
            if party.categories:
                for cat in party.categories:
                    if cat.name.upper() == 'CLIENT':
                        has_client = True
                        break

            if has_client:
                print(f"  ✓ Already has CLIENT category\n")
                already_client_count += 1
                continue

            # Add CLIENT category using Proteus
            try:
                # Reload party and category in same context
                party_to_update = Party(party.id)
                client_cat = Category(client_category.id)

                party_to_update.categories.append(client_cat)
                party_to_update.save()
                print(f"  ✓ CLIENT category added\n")
                updated_count += 1
            except Exception as e:
                # Non-fatal: report and move on to the next party
                print(f"  ✗ Failed: {e}\n")

        print(f"{'='*70}")
        print(f"PREPARATION SUMMARY")
        print(f"{'='*70}")
        print(f"Already have CLIENT category: {already_client_count}")
        print(f"CLIENT category added: {updated_count}")
        print(f"Not found: {not_found_count}")
        print(f"{'='*70}\n")

        return True

    except FileNotFoundError:
        print(f"✗ Error: CSV file not found at {csv_file}\n")
        return False
    except Exception as e:
        print(f"✗ Error: {e}\n")
        import traceback
        traceback.print_exc()
        return False
|
||||
|
||||
|
||||
def main():
    """Entry point: connect to Tryton, import the sales CSV, then verify.

    Returns:
        0 on success, 1 when the Tryton connection could not be made.
    """
    banner = "=" * 70
    for header_line in (
        banner,
        "TRYTON SALE IMPORT SCRIPT",
        "Using Proteus with XML-RPC Connection",
        banner,
        "",
    ):
        print(header_line)

    # Connect to Tryton using XML-RPC; nothing can run without it
    if not connect_to_tryton():
        return 1

    # Optional pre-processing: uncomment to add the CLIENT category to all
    # parties found in the CSV before the import runs.
    # prepare_parties_as_clients(CSV_FILE_PATH)

    # Import the sales, then print a verification listing
    import_sales(CSV_FILE_PATH)
    verify_import()

    return 0
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Raise SystemExit instead of calling exit(): exit() is injected by the
    # site module and is not guaranteed to exist in every interpreter
    # environment (e.g. `python -S`).  Behavior is otherwise identical.
    raise SystemExit(main())
|
||||
@@ -0,0 +1,807 @@
|
||||
# Add parent directory to Python path so we can import helpers
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
parent_dir = Path(__file__).parent.parent
|
||||
sys.path.insert(0, str(parent_dir))
|
||||
|
||||
import psycopg2
|
||||
import csv
|
||||
from decimal import Decimal
|
||||
from proteus import Model
|
||||
|
||||
from helpers.config import (
|
||||
SALE_CONTRACTS_CSV,
|
||||
connect_to_tryton,
|
||||
get_db_connection,
|
||||
DB_CONFIG # Add this to your config
|
||||
)
|
||||
|
||||
from helpers.tryton_helpers import (
|
||||
parse_decimal,
|
||||
parse_date,
|
||||
ensure_party_is_client,
|
||||
find_party_by_name,
|
||||
find_uom_by_code,
|
||||
find_currency_by_code,
|
||||
find_warehouse,
|
||||
find_location,
|
||||
find_payment_term_by_name,
|
||||
find_product_by_code,
|
||||
find_incoterm_by_code,
|
||||
find_weight_basis_by_name,
|
||||
get_party_invoice_address,
|
||||
find_sale_contract_by_number
|
||||
)
|
||||
|
||||
# Import migration mapping helper
|
||||
from helpers.migration_mapping import MigrationMapper
|
||||
|
||||
|
||||
# CSV Configuration: the sale-contracts source file (path supplied by helpers.config)
CSV_FILE_PATH = SALE_CONTRACTS_CSV


# Default values applied when the CSV does not supply a value
DEFAULT_STATE = 'draft'            # initial workflow state for imported sales
DEFAULT_INVOICE_METHOD = 'manual'  # invoicing is handled manually after import
DEFAULT_INVOICE_STATE = 'none'     # no invoice generated at import time
DEFAULT_SHIPMENT_STATE = 'none'    # no shipment generated at import time

# Import options
AUTO_ENABLE_CLIENT = True  # Set to False to skip auto-enabling client flag
SKIP_NON_CLIENTS = False  # Set to True to skip parties that aren't clients
|
||||
|
||||
|
||||
def update_sale_custom_fields(sale_id, custom_data):
    """Update custom columns on the sale_sale table via direct SQL.

    Proteus does not expose these custom columns, so the write bypasses the
    ORM.  Column names come from a fixed whitelist checked against
    information_schema; values are always bound parameters.

    Args:
        sale_id: id of the sale_sale row to update.
        custom_data: dict of custom values; currently only 'number' is
            supported.

    Returns:
        True on success (including "nothing to update"), False on failure.
    """
    if not custom_data:
        return True

    conn = get_db_connection()
    if not conn:
        print(f"  ⚠ Could not update custom fields - database connection failed")
        return False

    try:
        cursor = conn.cursor()
        try:
            # First, check what columns exist in sale_sale table
            cursor.execute("""
                SELECT column_name, data_type
                FROM information_schema.columns
                WHERE table_name = 'sale_sale'
                AND column_name IN ('number')
                ORDER BY column_name
            """)
            existing_columns = {row[0]: row[1] for row in cursor.fetchall()}

            print(f"  Available custom columns in sale_sale:")
            for col_name, col_type in existing_columns.items():
                print(f"    - {col_name} ({col_type})")

            # Build UPDATE query for custom fields; only whitelisted column
            # names are interpolated, values go through placeholders.
            set_clauses = []
            values = []

            if 'number' in existing_columns and custom_data.get('number'):
                set_clauses.append("number = %s")
                values.append(custom_data['number'])
                print(f"  Adding number = {custom_data['number']}")

            if set_clauses:
                values.append(sale_id)
                update_query = f"""
                    UPDATE sale_sale
                    SET {', '.join(set_clauses)}
                    WHERE id = %s
                """

                print(f"  Executing UPDATE with fields: {', '.join([c.split('=')[0].strip() for c in set_clauses])}")

                cursor.execute(update_query, values)
                rows_affected = cursor.rowcount
                conn.commit()

                if rows_affected > 0:
                    print(f"  ✓ {rows_affected} row(s) updated successfully")
                else:
                    print(f"  ⚠ No rows updated (sale_id={sale_id} not found?)")
            else:
                print(f"  No custom fields to update (either no data provided or columns not found)")
        finally:
            # Always release the cursor, even when the UPDATE raised
            cursor.close()

        return True

    except Exception as e:
        print(f"  ⚠ Error updating custom fields: {e}")
        import traceback
        traceback.print_exc()
        try:
            conn.rollback()
        except Exception:
            pass  # rollback failure must not mask the original error
        return False
    finally:
        # Close the connection exactly once on every path
        conn.close()
|
||||
|
||||
|
||||
def update_line_custom_fields(line_id, custom_data):
    """Update custom columns on the sale_line table via direct SQL.

    Proteus does not expose these custom columns, so the write bypasses
    the ORM.  Column names are fixed literals; values are always bound
    parameters.

    Args:
        line_id: id of the sale_line row to update.
        custom_data: dict with optional 'from_del' and 'to_del' values.

    Returns:
        True on success (including "nothing to update"), False on failure.
    """
    if not custom_data:
        return True

    conn = get_db_connection()
    if not conn:
        print(f"  ⚠ Could not update line custom fields - database connection failed")
        return False

    try:
        cursor = conn.cursor()
        try:
            # Build UPDATE query for custom fields
            set_clauses = []
            values = []

            if custom_data.get('from_del'):
                set_clauses.append("from_del = %s")
                values.append(custom_data['from_del'])

            if custom_data.get('to_del'):
                set_clauses.append("to_del = %s")
                values.append(custom_data['to_del'])

            if set_clauses:
                values.append(line_id)
                update_query = f"""
                    UPDATE sale_line
                    SET {', '.join(set_clauses)}
                    WHERE id = %s
                """

                cursor.execute(update_query, values)
                conn.commit()
        finally:
            # Always release the cursor, even when the UPDATE raised
            cursor.close()

        return True

    except Exception as e:
        print(f"  ⚠ Error updating line custom fields: {e}")
        import traceback
        traceback.print_exc()
        try:
            conn.rollback()
        except Exception:
            pass  # rollback failure must not mask the original error
        return False
    finally:
        # Close the connection exactly once on every path
        conn.close()
|
||||
|
||||
|
||||
def create_pricing_estimated(line_id, pricing_data):
    """Create a pricing_estimated record via direct SQL.

    The pricing_estimated table is not exposed through the sale model, so
    the row is inserted directly.  A missing table or a foreign-key
    rejection is treated as non-fatal so the import can continue.

    Args:
        line_id: id of the sale_line the estimate belongs to.
        pricing_data: dict with required 'trigger' and optional
            'estimated_date'.

    Returns:
        True when the record was created or legitimately skipped,
        False on unexpected errors.
    """
    if not pricing_data or not pricing_data.get('trigger'):
        return True

    conn = get_db_connection()
    if not conn:
        print(f"  ⚠ Could not create pricing estimate - database connection failed")
        return False

    try:
        cursor = conn.cursor()
        try:
            # Check if pricing_estimated table exists and what its structure is
            cursor.execute("""
                SELECT column_name
                FROM information_schema.columns
                WHERE table_name = 'pricing_estimated'
                ORDER BY ordinal_position
            """)

            columns = [row[0] for row in cursor.fetchall()]

            if not columns:
                print(f"  Info: pricing_estimated table does not exist, skipping pricing estimate")
                return True

            # Insert into pricing_estimated table (uid 1 = admin for audit columns)
            insert_query = """
                INSERT INTO pricing_estimated (sale_line, trigger, estimated_date, create_date, write_date, create_uid, write_uid)
                VALUES (%s, %s, %s, NOW(), NOW(), 1, 1)
            """

            cursor.execute(insert_query, (
                line_id,
                pricing_data['trigger'],
                pricing_data.get('estimated_date')
            ))

            conn.commit()
            print(f"  ✓ Pricing estimate created successfully")
            return True
        finally:
            cursor.close()

    except psycopg2.errors.ForeignKeyViolation:
        # Non-fatal: the table's foreign key rejected this sale_line id
        # (original message wrongly claimed the table references sale_line
        # only — this script inserts a sale_line reference).
        print(f"  Info: Pricing estimate skipped (foreign key constraint rejected the sale_line reference)")
        conn.rollback()
        return True  # Return True to continue processing

    except Exception as e:
        print(f"  ⚠ Error creating pricing estimate: {e}")
        import traceback
        traceback.print_exc()
        try:
            conn.rollback()
        except Exception:
            pass  # rollback failure must not mask the original error
        return False
    finally:
        # Close the connection exactly once on every path
        conn.close()
|
||||
|
||||
|
||||
def import_sales(csv_file):
|
||||
"""Import sales from CSV file with migration mapping tracking"""
|
||||
Sale = Model.get('sale.sale')
|
||||
SaleLine = Model.get('sale.line')
|
||||
|
||||
print(f"{'='*70}")
|
||||
print(f"IMPORTING SALES FROM CSV")
|
||||
print(f"{'='*70}\n")
|
||||
print(f"Reading from: {csv_file}\n")
|
||||
|
||||
imported_count = 0
|
||||
skipped_count = 0
|
||||
error_count = 0
|
||||
errors = []
|
||||
|
||||
# Get company (assuming single company or default)
|
||||
Company = Model.get('company.company')
|
||||
companies = Company.find([])
|
||||
if not companies:
|
||||
print("✗ Error: No company found in the system")
|
||||
return
|
||||
company = companies[0]
|
||||
print(f"Using company: {company.rec_name}\n")
|
||||
|
||||
# Collect all mappings for batch insert at the end
|
||||
sale_mappings = []
|
||||
line_mappings = []
|
||||
|
||||
try:
|
||||
# Initialize migration mapper
|
||||
with MigrationMapper(DB_CONFIG) as mapper:
|
||||
|
||||
with open(csv_file, 'r', encoding='utf-8-sig') as file:
|
||||
reader = csv.DictReader(file)
|
||||
row_num = 0
|
||||
|
||||
for row in reader:
|
||||
row_num += 1
|
||||
|
||||
try:
|
||||
# Extract fields from CSV
|
||||
number = row.get('number', '').strip()
|
||||
reference = row.get('reference', '').strip()
|
||||
source_sale_id = row.get('source_id', '').strip() # Source system ID
|
||||
|
||||
if not number:
|
||||
print(f"Row {row_num}: Skipping - no number\n")
|
||||
continue
|
||||
|
||||
print(f"{'='*70}")
|
||||
print(f"Row {row_num}: Processing sale {number}")
|
||||
print(f"{'='*70}")
|
||||
|
||||
# CHECK IF ALREADY IMPORTED using migration mapper
|
||||
if source_sale_id:
|
||||
existing_tryton_id = mapper.get_tryton_id('sale_contract', source_sale_id)
|
||||
if existing_tryton_id:
|
||||
print(f" ⏭ Sale already imported (Source ID: {source_sale_id} -> Tryton ID: {existing_tryton_id})")
|
||||
skipped_count += 1
|
||||
print()
|
||||
continue
|
||||
|
||||
# Alternative: Check by number using existing helper
|
||||
existing_sale = find_sale_contract_by_number(number)
|
||||
if existing_sale:
|
||||
print(f" ⏭ Sale {number} already exists (ID: {existing_sale.id})")
|
||||
# Save mapping even if it already exists (for reconciliation)
|
||||
if source_sale_id:
|
||||
sale_mappings.append({
|
||||
'object_type': 'sale_contract',
|
||||
'source_id': source_sale_id,
|
||||
'tryton_model': 'sale.sale',
|
||||
'tryton_id': existing_sale.id,
|
||||
'recon_key': number
|
||||
})
|
||||
skipped_count += 1
|
||||
print()
|
||||
continue
|
||||
|
||||
# Parse other fields
|
||||
sale_date = parse_date(row.get('sale_date'))
|
||||
party_name = row.get('party_name', '').strip()
|
||||
|
||||
# Find related records
|
||||
party = find_party_by_name(party_name)
|
||||
if not party:
|
||||
raise ValueError(f"Party not found: {party_name}")
|
||||
|
||||
# Check party is client
|
||||
if not ensure_party_is_client(party, auto_enable=AUTO_ENABLE_CLIENT):
|
||||
if SKIP_NON_CLIENTS:
|
||||
print(f" ⏭ Skipping - party {party_name} is not a client\n")
|
||||
skipped_count += 1
|
||||
continue
|
||||
else:
|
||||
raise ValueError(f"Party {party_name} is not a client")
|
||||
|
||||
# Reload party after category addition to get fresh data
|
||||
Party = Model.get('party.party')
|
||||
party = Party(party.id)
|
||||
|
||||
# Find invoice address
|
||||
invoice_address = get_party_invoice_address(party)
|
||||
if not invoice_address:
|
||||
raise ValueError(f"No invoice address found for party {party_name}")
|
||||
|
||||
# Parse additional fields
|
||||
currency = find_currency_by_code(row.get('currency', 'USD'))
|
||||
warehouse = find_warehouse(row.get('warehouse'))
|
||||
payment_term = find_payment_term_by_name(row.get('payment_term'))
|
||||
weight_basis_abbr = find_weight_basis_by_name(row.get('weight_basis'))
|
||||
tol_min = parse_decimal(row.get('tol_min', ''), 'tol_min')
|
||||
tol_max = parse_decimal(row.get('tol_max', ''), 'tol_max')
|
||||
|
||||
from_location_name = row.get('from_location_name', '').strip()
|
||||
from_location = find_location(from_location_name)
|
||||
|
||||
to_location_name = row.get('to_location_name', '').strip()
|
||||
to_location = find_location(to_location_name)
|
||||
|
||||
incoterm_code = row.get('incoterm_name', '').strip()
|
||||
incoterm = find_incoterm_by_code(incoterm_code, 2025)
|
||||
|
||||
description = row.get('description', '').strip()
|
||||
comment = row.get('comment', '').strip()
|
||||
|
||||
# CREATE SALE
|
||||
print(f" Creating sale...")
|
||||
sale = Sale()
|
||||
sale.company = company
|
||||
sale.reference = reference
|
||||
sale.party = party
|
||||
sale.invoice_address = invoice_address
|
||||
sale.shipment_address = invoice_address # Default to invoice address
|
||||
sale.sale_date = sale_date
|
||||
sale.currency = currency
|
||||
if warehouse:
|
||||
sale.warehouse = warehouse
|
||||
sale.payment_term = payment_term
|
||||
sale.wb = weight_basis_abbr
|
||||
sale.tol_min = tol_min
|
||||
sale.tol_max = tol_max
|
||||
sale.incoterm = incoterm
|
||||
sale.from_location = from_location
|
||||
sale.to_location = to_location
|
||||
sale.description = description
|
||||
sale.comment = comment
|
||||
sale.state = DEFAULT_STATE
|
||||
sale.invoice_method = DEFAULT_INVOICE_METHOD
|
||||
|
||||
# Save the sale
|
||||
sale.save()
|
||||
print(f" ✓ Sale created (ID: {sale.id})")
|
||||
|
||||
# Update custom fields (like 'number')
|
||||
custom_fields = {'number': number}
|
||||
update_sale_custom_fields(sale.id, custom_fields)
|
||||
|
||||
# SAVE MIGRATION MAPPING for sale
|
||||
if source_sale_id:
|
||||
sale_mappings.append({
|
||||
'object_type': 'sale_contract',
|
||||
'source_id': source_sale_id,
|
||||
'tryton_model': 'sale.sale',
|
||||
'tryton_id': sale.id,
|
||||
'recon_key': number
|
||||
})
|
||||
print(f" 📝 Mapping queued: Source {source_sale_id} -> Tryton {sale.id}")
|
||||
|
||||
# Process sale lines
|
||||
line_type = row.get('line_type', 'line').strip()
|
||||
source_line_id = row.get('source_line_id', '').strip()
|
||||
|
||||
if line_type == 'line':
|
||||
# Regular product line
|
||||
product_code = row.get('line_product_code', '').strip()
|
||||
quantity = parse_decimal(row.get('line_quantity', ''), 'quantity')
|
||||
unit_price = parse_decimal(row.get('line_price', ''), 'unit_price')
|
||||
|
||||
product = find_product_by_code(product_code)
|
||||
if not product:
|
||||
raise ValueError(f"Product not found: {product_code}")
|
||||
|
||||
unit = find_uom_by_code(row.get('line_unit_code', ''))
|
||||
|
||||
# Parse shipping dates
|
||||
from_del = parse_date(row.get('line_from_del', ''))
|
||||
to_del = parse_date(row.get('line_to_del', ''))
|
||||
|
||||
# Create line
|
||||
line = SaleLine()
|
||||
line.sale = sale
|
||||
line.type = 'line'
|
||||
sequence = 1 # Default sequence, can be enhanced to handle multiple lines
|
||||
line.sequence = sequence
|
||||
line.product = product
|
||||
line.quantity = quantity
|
||||
line.unit = unit if unit else product.sale_uom
|
||||
line.unit_price = unit_price
|
||||
line.from_del = from_del
|
||||
line.to_del = to_del
|
||||
|
||||
# Optional fields
|
||||
description = row.get('description', '').strip()
|
||||
if description:
|
||||
line.description = description
|
||||
|
||||
line.save()
|
||||
|
||||
# # Update line custom fields
|
||||
# line_custom = {}
|
||||
# from_del = row.get('from_del', '').strip()
|
||||
# to_del = row.get('to_del', '').strip()
|
||||
# if from_del:
|
||||
# line_custom['from_del'] = from_del
|
||||
# if to_del:
|
||||
# line_custom['to_del'] = to_del
|
||||
|
||||
# if line_custom:
|
||||
# update_line_custom_fields(line.id, line_custom)
|
||||
|
||||
# Create pricing estimate if applicable
|
||||
pricing_trigger = row.get('pricing_trigger', '').strip()
|
||||
pricing_estimated_date = parse_date(row.get('pricing_estimated_date', ''))
|
||||
if pricing_trigger:
|
||||
pricing_data = {
|
||||
'trigger': pricing_trigger,
|
||||
'estimated_date': pricing_estimated_date
|
||||
}
|
||||
create_pricing_estimated(line.id, pricing_data)
|
||||
|
||||
# SAVE MIGRATION MAPPING for line
|
||||
if source_line_id:
|
||||
line_mappings.append({
|
||||
'object_type': 'sale_line',
|
||||
'source_id': source_line_id,
|
||||
'tryton_model': 'sale.line',
|
||||
'tryton_id': line.id,
|
||||
'recon_key': f"{number}-Line {sequence}-{product_code}"
|
||||
})
|
||||
|
||||
print(f" ✓ Added line (ID: {line.id})")
|
||||
print(f" Product: {product.rec_name}")
|
||||
print(f" Quantity: {quantity}")
|
||||
|
||||
else:
|
||||
# Comment, subtitle, or other line types
|
||||
line = SaleLine()
|
||||
line.sale = sale
|
||||
line.type = line_type
|
||||
line.description = row.get('description', '').strip()
|
||||
line.save()
|
||||
|
||||
print(f" ✓ Added {line_type} line (ID: {line.id})")
|
||||
|
||||
imported_count += 1
|
||||
print(f"✓ Successfully imported sale {number}\n")
|
||||
|
||||
except Exception as e:
|
||||
error_msg = f"Row {row_num} - {number if 'number' in locals() else 'Unknown'}: {str(e)}"
|
||||
errors.append(error_msg)
|
||||
error_count += 1
|
||||
print(f"✗ Error on row {row_num}: {e}\n")
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
|
||||
# BATCH SAVE ALL MAPPINGS at the end
|
||||
print(f"\n{'='*70}")
|
||||
print("SAVING MIGRATION MAPPINGS")
|
||||
print(f"{'='*70}\n")
|
||||
|
||||
if sale_mappings:
|
||||
print(f"Saving {len(sale_mappings)} sale mappings...")
|
||||
mapper.save_mappings_batch(sale_mappings)
|
||||
print(f"✓ Sale mappings saved\n")
|
||||
|
||||
if line_mappings:
|
||||
print(f"Saving {len(line_mappings)} line mappings...")
|
||||
mapper.save_mappings_batch(line_mappings)
|
||||
print(f"✓ Line mappings saved\n")
|
||||
|
||||
# Summary
|
||||
print(f"{'='*70}")
|
||||
print("IMPORT SUMMARY")
|
||||
print(f"{'='*70}")
|
||||
print(f"Successfully imported: {imported_count} sales")
|
||||
print(f"Skipped (already exist): {skipped_count} sales")
|
||||
print(f"Errors: {error_count}")
|
||||
print(f"Migration mappings saved: {len(sale_mappings)} sales, {len(line_mappings)} lines")
|
||||
|
||||
if errors:
|
||||
print(f"\nError details:")
|
||||
for error in errors:
|
||||
print(f" - {error}")
|
||||
|
||||
print(f"\n{'='*70}")
|
||||
|
||||
except FileNotFoundError:
|
||||
print(f"✗ Error: CSV file not found at {csv_file}")
|
||||
print(f"Please update CSV_FILE_PATH in the script with the correct path.")
|
||||
except Exception as e:
|
||||
print(f"✗ Fatal error: {e}")
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
|
||||
|
||||
def verify_import():
    """Print a verification report of imported sales and their migration mappings.

    Shows the 10 most recently created sales with the custom ``number`` field
    (read straight from the ``sale_sale`` table, since proteus does not expose
    custom fields) and the source id recorded in
    ``public.os_migration_mapping``, followed by up to 3 lines per sale.
    Read-only; intended as a post-import sanity check.
    """
    Sale = Model.get('sale.sale')

    print(f"\n{'='*70}")
    print("VERIFICATION - Sales and Migration Mappings")
    print(f"{'='*70}\n")

    # Find all sales, newest first (only the first 10 are displayed).
    sales = Sale.find([], order=[('id', 'DESC')])

    if not sales:
        print("No sales found")
        print()
        return

    print(f"Found {len(sales)} sales (showing last 10):\n")
    print(f"{'ID':<8} {'Number':<15} {'Reference':<15} {'Party':<25} {'State':<12} {'Source ID':<15}")
    print("-" * 100)

    # One mapper connection is reused for every source-id lookup.
    with MigrationMapper(DB_CONFIG) as mapper:
        for sale in sales[:10]:  # Show last 10 created
            sale_id = sale.id

            # 'number' is a custom field, so read it directly from the DB.
            # Fix: close the connection in a finally block instead of the old
            # bare `except:` which leaked the connection whenever the query
            # succeeded partially, and hid programming errors.
            number = 'N/A'
            conn = get_db_connection()
            if conn:
                try:
                    cursor = conn.cursor()
                    cursor.execute("SELECT number FROM sale_sale WHERE id = %s", (sale_id,))
                    result = cursor.fetchone()
                    if result and result[0]:
                        number = str(result[0])[:14]
                    cursor.close()
                except Exception:
                    pass  # best-effort: fall back to 'N/A'
                finally:
                    conn.close()

            # Look up source ID from migration mapping (latest entry wins).
            source_id = 'N/A'
            try:
                cursor = mapper.connection.cursor()
                cursor.execute("""
                    SELECT source_id[1]
                    FROM public.os_migration_mapping
                    WHERE tryton_id = %s
                    AND 'sale_contract' = ANY(object_type)
                    ORDER BY write_date DESC
                    LIMIT 1
                """, (sale_id,))
                result = cursor.fetchone()
                if result and result[0]:
                    source_id = str(result[0])[:14]
                cursor.close()
            except Exception:
                pass  # best-effort: a mapping row may simply not exist

            reference = sale.reference[:14] if sale.reference else 'N/A'
            party = sale.party.rec_name[:24] if sale.party else 'N/A'
            state = sale.state if sale.state else 'N/A'

            print(f"{sale_id:<8} {number:<15} {reference:<15} {party:<25} {state:<12} {source_id:<15}")

            # Show up to the first 3 lines of each sale.
            if sale.lines:
                print(f" Lines: {len(sale.lines)}")
                for line in sale.lines[:3]:  # Show first 3 lines
                    if line.type == 'line' and line.product:
                        print(f" - {line.product.rec_name[:40]} | Qty: {line.quantity} | Price: {line.unit_price}")
                    else:
                        print(f" - [{line.type}] {(line.description or '')[:40]}")

    print()
|
||||
|
||||
|
||||
def check_mapping_stats():
    """Print a per-object-type summary of the migration mapping table."""
    print(f"\n{'='*70}")
    print("MIGRATION MAPPING STATISTICS")
    print(f"{'='*70}\n")

    try:
        with MigrationMapper(DB_CONFIG) as mapper:
            cursor = mapper.connection.cursor()

            # Aggregate mapping rows by their (first) object type.
            cursor.execute("""
                SELECT
                    object_type[1] as obj_type,
                    COUNT(*) as count,
                    MIN(write_date) as first_import,
                    MAX(write_date) as last_import
                FROM public.os_migration_mapping
                GROUP BY object_type[1]
                ORDER BY count DESC
            """)

            stats = cursor.fetchall()

            if stats:
                print(f"{'Object Type':<25} {'Count':<10} {'First Import':<15} {'Last Import':<15}")
                print("-" * 70)
                for obj_type, count, first_dt, last_dt in stats:
                    label = obj_type or 'N/A'
                    first = first_dt.strftime('%Y-%m-%d') if first_dt else 'N/A'
                    last = last_dt.strftime('%Y-%m-%d') if last_dt else 'N/A'
                    print(f"{label:<25} {count:<10} {first:<15} {last:<15}")

                # Grand total across every object type.
                cursor.execute("SELECT COUNT(*) FROM public.os_migration_mapping")
                total = cursor.fetchone()[0]
                print(f"\nTotal mappings: {total}")
            else:
                print("No migration mappings found")

            cursor.close()

    except Exception as e:
        print(f"Error retrieving mapping statistics: {e}")
        import traceback
        traceback.print_exc()

    print()
|
||||
|
||||
|
||||
def prepare_parties_as_clients(csv_file):
    """Pre-process: add the CLIENT category to every party referenced in the CSV.

    Reads the ``party_name`` column, looks each party up by name then by code,
    and appends the CLIENT party category where it is missing.

    Returns True when the CSV was processed (even with per-party failures),
    False when the CLIENT category or the CSV file is missing.

    Fixes vs. original: per-party update failures are now counted and shown
    in the summary (previously only printed, so the totals didn't add up),
    and a None ``party_name`` value from a short CSV row no longer crashes.
    """
    Party = Model.get('party.party')
    Category = Model.get('party.category')

    print(f"{'='*70}")
    print(f"PREPARING PARTIES AS CLIENTS (via Categories)")
    print(f"{'='*70}\n")

    # Find CLIENT category (exact match first, then case-insensitive scan).
    client_categories = Category.find([('name', '=', 'CLIENT')])
    if not client_categories:
        all_categories = Category.find([])
        for cat in all_categories:
            if cat.name.upper() == 'CLIENT':
                client_categories = [cat]
                break

    if not client_categories:
        print(f"✗ CLIENT category not found in the system!")
        print(f"Please create a party category named 'CLIENT' first.\n")
        return False

    client_category = client_categories[0]
    print(f"Found CLIENT category (ID: {client_category.id})\n")

    party_names = set()

    try:
        with open(csv_file, 'r', encoding='utf-8-sig') as file:
            reader = csv.DictReader(file)
            for row in reader:
                # `or ''` guards against None values for short rows.
                party_name = (row.get('party_name') or '').strip()
                if party_name:
                    party_names.add(party_name)

        print(f"Found {len(party_names)} unique parties in CSV\n")

        updated_count = 0
        already_client_count = 0
        not_found_count = 0
        failed_count = 0

        for party_name in party_names:
            print(f"Processing party: {party_name}")

            # Find party by name, falling back to code.
            parties = Party.find([('name', '=', party_name)])
            if not parties:
                parties = Party.find([('code', '=', party_name)])

            if not parties:
                print(f" ✗ Not found\n")
                not_found_count += 1
                continue

            party = parties[0]

            # Check if the party already carries the CLIENT category.
            has_client = False
            if party.categories:
                for cat in party.categories:
                    if cat.name.upper() == 'CLIENT':
                        has_client = True
                        break

            if has_client:
                print(f" ✓ Already has CLIENT category\n")
                already_client_count += 1
                continue

            # Add CLIENT category using Proteus.
            try:
                # Reload party and category in the same context so the
                # Many2Many append is applied against fresh records.
                party_to_update = Party(party.id)
                client_cat = Category(client_category.id)

                party_to_update.categories.append(client_cat)
                party_to_update.save()
                print(f" ✓ CLIENT category added\n")
                updated_count += 1
            except Exception as e:
                print(f" ✗ Failed: {e}\n")
                failed_count += 1

        print(f"{'='*70}")
        print(f"PREPARATION SUMMARY")
        print(f"{'='*70}")
        print(f"Already have CLIENT category: {already_client_count}")
        print(f"CLIENT category added: {updated_count}")
        print(f"Not found: {not_found_count}")
        print(f"Failed to update: {failed_count}")
        print(f"{'='*70}\n")

        return True

    except FileNotFoundError:
        print(f"✗ Error: CSV file not found at {csv_file}\n")
        return False
    except Exception as e:
        print(f"✗ Error: {e}\n")
        import traceback
        traceback.print_exc()
        return False
|
||||
|
||||
|
||||
def main():
    """Entry point: connect, import sales, then verify and report mappings."""
    banner = "=" * 70
    print(banner)
    print("TRYTON SALE IMPORT SCRIPT WITH MIGRATION MAPPING")
    print("Using Proteus with XML-RPC Connection")
    print(banner)
    print()

    # Abort early if the XML-RPC connection cannot be established.
    if not connect_to_tryton():
        return 1

    # Optional pre-processing step: mark every party in the CSV as a client
    # before importing.  Uncomment to enable.
    # prepare_parties_as_clients(CSV_FILE_PATH)

    # Import sales with migration mapping, then verify and summarize.
    import_sales(CSV_FILE_PATH)
    verify_import()
    check_mapping_stats()

    return 0


if __name__ == '__main__':
    exit(main())
|
||||
@@ -0,0 +1,356 @@
|
||||
import csv
import os
from proteus import config, Model
from decimal import Decimal

# --- XML-RPC connection settings -------------------------------------------
# SECURITY: credentials were hard-coded in this file.  They can now be
# overridden via environment variables (TRYTON_USERNAME / TRYTON_PASSWORD);
# the defaults preserve the original behaviour but the literal values should
# be rotated and removed from source control.
HTTPS = 'https://'
SERVER_URL = 'itsa.open-squared.tech'
DATABASE_NAME = 'tradon'
USERNAME = os.environ.get('TRYTON_USERNAME', 'admin')
PASSWORD = os.environ.get('TRYTON_PASSWORD', 'dsproject')

# --- CSV input ---------------------------------------------------------------
CSV_FILE_PATH = r'C:\Users\SylvainDUVERNAY\Open Squared\Production - Documents\TRADON Implementation\ITSA\Reference Data\Loaders\Services.csv'  # UPDATE THIS PATH!

# --- Product configuration ---------------------------------------------------
PRODUCT_TYPE = 'service'       # Tryton template type used for all imported rows
DEFAULT_CATEGORY = 'SERVICES'  # fallback category name when the CSV omits one
DEFAULT_UOM = 'Mt'             # fallback unit of measure when lookup fails
|
||||
|
||||
def connect_to_tryton():
    """Establish a connection to Tryton via XML-RPC.

    Builds the proteus connection URL from the module-level settings and
    returns True on success, False otherwise (printing troubleshooting hints).
    Commented-out debug fragments from the original were removed.
    """
    print(f"Connecting to Tryton server: {SERVER_URL}")
    print(f"Database: {DATABASE_NAME}")
    print(f"Username: {USERNAME}")

    try:
        # proteus expects the credentials embedded in the XML-RPC URL.
        config.set_xmlrpc(f'{HTTPS}{USERNAME}:{PASSWORD}@{SERVER_URL}/{DATABASE_NAME}/')

        print("✓ Connected successfully!\n")
        return True
    except Exception as e:
        print(f"✗ Connection failed: {e}")
        print("\nTroubleshooting:")
        print(" - Verify the server URL is correct and accessible")
        print(" - Check that the Tryton server is running")
        print(" - Verify username and password are correct")
        print(" - Make sure you can access the server in a browser")
        return False
|
||||
|
||||
def get_or_create_category(category_name):
    """Return the product category named *category_name*, creating it if absent."""
    Category = Model.get('product.category')

    existing = Category.find([('name', '=', category_name)])
    if existing:
        print(f" Found existing category: {category_name}")
        return existing[0]

    # No match: create and persist a fresh category record.
    category = Category()
    category.name = category_name
    category.save()
    print(f" ✓ Created new category: {category_name}")
    return category
|
||||
|
||||
def get_uom(uom_name):
    """Find a Unit of Measure by name.

    Lookup order: exact name match, case-insensitive match, the configured
    DEFAULT_UOM, then the first UOM in the database.

    Raises:
        ValueError: when the database holds no UOM at all.

    Fix vs. original: the full UOM list is fetched once and reused; the old
    code issued a second identical ``Uom.find([])`` round-trip for the
    last-resort fallback.
    """
    Uom = Model.get('product.uom')

    # Exact match first — cheapest server-side query.
    uoms = Uom.find([('name', '=', uom_name)])
    if uoms:
        return uoms[0]

    # Case-insensitive fallback: fetch all UOMs once and compare locally.
    all_uoms = Uom.find([])
    for uom in all_uoms:
        if uom.name.lower() == uom_name.lower():
            return uom

    # Fall back to the configured default (names are assumed unique here).
    print(f" ⚠ Warning: UOM '{uom_name}' not found, using '{DEFAULT_UOM}'")
    default_uom = next((u for u in all_uoms if u.name == DEFAULT_UOM), None)
    if default_uom:
        return default_uom

    # Last resort: the first UOM available, reusing the already-fetched list.
    if all_uoms:
        print(f" ⚠ Using first available UOM: {all_uoms[0].name}")
        return all_uoms[0]

    raise ValueError("No UOM found in database!")
|
||||
|
||||
def check_product_exists(code):
    """Return the existing product carrying *code*, or None if there is none."""
    Product = Model.get('product.product')
    matches = Product.find([('code', '=', code)])
    if matches:
        return matches[0]
    return None
|
||||
|
||||
def create_service_product(row, categories, uom):
    """Create a new service product template and return its variant.

    Args:
        row: dict with keys 'name', 'code', 'sale_price', 'cost_price' and
            optionally 'description' (price values are strings; empty means 0).
        categories: a single category record or a list of them (Many2Many).
        uom: the default unit-of-measure record.

    Returns:
        The product variant Tryton auto-created for the new template, with
        its suffix code and cost price set.

    Raises:
        ValueError: if no variant was auto-created with the template.

    Fix vs. original: removed the committed commented-out ``product.code``
    assignment; behaviour is otherwise unchanged.
    """
    Template = Model.get('product.template')

    # Create template
    template = Template()
    template.name = row['name']
    template.code = row['code']
    template.type = PRODUCT_TYPE
    template.list_price = Decimal(row['sale_price']) if row['sale_price'] else Decimal('0.00')
    template.cost_price_method = 'fixed'  # Services use fixed cost price
    template.default_uom = uom

    # Link to categories (Many2Many relationship): append/extend, never
    # direct assignment, so proteus tracks the relation changes.
    if isinstance(categories, list):
        template.categories.extend(categories)
    else:
        template.categories.append(categories)

    template.salable = False      # Services are not salable products by default
    template.purchasable = True   # Services are purchasable

    if row.get('description'):
        template.description = row['description']

    # Save the template first; Tryton then auto-creates a default variant.
    template.save()

    if template.products:
        product = template.products[0]
        product.suffix_code = row['code']  # Use suffix_code to set product code

        # Set cost price on the variant (not the template).
        product.cost_price = Decimal(row['cost_price']) if row['cost_price'] else Decimal('0.00')

        product.save()
        return product
    else:
        raise ValueError("No product was created automatically with template")
|
||||
|
||||
def import_services(csv_file):
    """Import service products from a CSV file.

    Expected columns: code, name, category, uom, sale_price, cost_price,
    description.  Rows missing code or name count as errors; rows whose code
    already exists are skipped.  Prints a summary at the end.

    Fixes vs. original: ``code``/``name`` are initialised before the per-row
    try so the except handler can never hit a NameError, and every
    ``row.get(...)`` is guarded with ``or`` because csv.DictReader yields
    None values for short rows, which crashed ``.strip()``.
    """
    imported_count = 0
    skipped_count = 0
    error_count = 0
    errors = []

    print(f"{'='*70}")
    print(f"Importing service products from: {csv_file}")
    print(f"{'='*70}\n")

    try:
        # Open with utf-8-sig to transparently strip a BOM if present.
        with open(csv_file, 'r', encoding='utf-8-sig') as file:
            reader = csv.DictReader(file)

            # Debug: Show detected columns
            print(f"Detected columns: {reader.fieldnames}\n")

            for row_num, row in enumerate(reader, start=2):
                # Initialise before the try so the except handler below can
                # always reference them safely.
                code = ''
                name = ''
                try:
                    # Clean up values; `or` guards against None for short rows.
                    code = (row.get('code') or '').strip()
                    name = (row.get('name') or '').strip()
                    category_name = (row.get('category') or DEFAULT_CATEGORY).strip() or DEFAULT_CATEGORY
                    uom_name = (row.get('uom') or DEFAULT_UOM).strip() or DEFAULT_UOM
                    sale_price = (row.get('sale_price') or '0.00').strip() or '0.00'
                    cost_price = (row.get('cost_price') or '0.00').strip() or '0.00'
                    description = (row.get('description') or '').strip()

                    # Skip empty rows
                    if not code and not name:
                        continue

                    # Validate required fields
                    if not code or not name:
                        errors.append(f"Row {row_num}: Missing code or name")
                        error_count += 1
                        print(f"✗ Row {row_num}: Missing required fields")
                        continue

                    print(f"Processing Row {row_num}: {code} - {name}")

                    # Check if product already exists
                    existing_product = check_product_exists(code)

                    if existing_product:
                        print(f" ⚠ Product code '{code}' already exists: {existing_product.template.name}")
                        print(f" Skipping...\n")
                        skipped_count += 1
                        continue

                    # Resolve category and UOM before creating anything.
                    category = get_or_create_category(category_name)
                    uom = get_uom(uom_name)
                    print(f" Using UOM: {uom.name}")

                    # Create the product
                    row_data = {
                        'code': code,
                        'name': name,
                        'sale_price': sale_price,
                        'cost_price': cost_price,
                        'description': description
                    }

                    product = create_service_product(row_data, category, uom)

                    print(f" ✓ Created service product")
                    print(f" Product ID: {product.id}, Template ID: {product.template.id}")
                    print(f" Code: {code}")
                    print(f" Category: {category.name}")
                    print(f" Sale Price: {sale_price}")
                    print(f" Cost Price: {cost_price}")
                    if description:
                        print(f" Description: {description[:50]}...")
                    print()

                    imported_count += 1

                except Exception as e:
                    error_msg = f"Row {row_num} - {code} ({name}): {str(e)}"
                    errors.append(error_msg)
                    error_count += 1
                    print(f"✗ Error on row {row_num}: {e}\n")
                    import traceback
                    traceback.print_exc()

        # Summary
        print(f"{'='*70}")
        print("IMPORT SUMMARY")
        print(f"{'='*70}")
        print(f"Successfully imported: {imported_count} service products")
        print(f"Skipped (already exist): {skipped_count} products")
        print(f"Errors: {error_count}")

        if errors:
            print(f"\nError details:")
            for error in errors:
                print(f" - {error}")

        print(f"\n{'='*70}")

    except FileNotFoundError:
        print(f"✗ Error: CSV file not found at {csv_file}")
        print(f"Please update CSV_FILE_PATH in the script with the correct path.")
    except Exception as e:
        print(f"✗ Fatal error: {e}")
        import traceback
        traceback.print_exc()
|
||||
|
||||
def verify_import():
    """Print a table of every service-type product for post-import review."""
    Product = Model.get('product.product')

    print(f"\n{'='*70}")
    print("VERIFICATION - Service Products")
    print(f"{'='*70}\n")

    # All products whose template is of type 'service'.
    products = Product.find([('template.type', '=', 'service')])

    if not products:
        print("No service products found")
        print()
        return

    print(f"Found {len(products)} service products:\n")
    print(f"{'Code':<12} {'Name':<30} {'Categories':<25} {'Sale Price':<12}")
    print("-" * 85)

    for product in products:
        tmpl = product.template
        code = product.code or 'N/A'
        name = tmpl.name[:29] if tmpl.name else 'N/A'

        # Categories form a Many2Many relation; join and truncate for display.
        if tmpl.categories:
            categories = ', '.join([cat.name for cat in tmpl.categories])[:24]
        else:
            categories = 'N/A'

        sale_price = f"{tmpl.list_price:.2f}" if tmpl.list_price else '0.00'

        print(f"{code:<12} {name:<30} {categories:<25} {sale_price:<12}")

    print()
|
||||
|
||||
def list_available_uoms():
    """Print every Unit of Measure defined in the database."""
    print(f"\n{'='*70}")
    print("AVAILABLE UNITS OF MEASURE")
    print(f"{'='*70}\n")

    records = Model.get('product.uom').find([])

    if not records:
        print("No UOMs found")
        print()
        return

    print(f"Found {len(records)} UOMs:\n")
    for record in records:
        # Symbol is optional on some setups; show it only when present.
        symbol = f"({record.symbol})" if hasattr(record, 'symbol') and record.symbol else ""
        print(f" - {record.name} {symbol}")

    print()
|
||||
|
||||
def list_available_categories():
    """Print every product category defined in the database."""
    print(f"\n{'='*70}")
    print("AVAILABLE PRODUCT CATEGORIES")
    print(f"{'='*70}\n")

    records = Model.get('product.category').find([])

    if not records:
        print("No categories found")
        print()
        return

    print(f"Found {len(records)} categories:\n")
    for record in records:
        print(f" - {record.name}")

    print()
|
||||
|
||||
def main():
    """Entry point: connect to Tryton, import service products, verify."""
    banner = "=" * 70
    print(banner)
    print("TRYTON SERVICE PRODUCT IMPORT SCRIPT")
    print("Using Proteus with XML-RPC Connection")
    print(banner)
    print()

    # Abort early if the XML-RPC connection cannot be established.
    if not connect_to_tryton():
        return 1

    # Optional diagnostics — uncomment to inspect what the database offers:
    # list_available_uoms()
    # list_available_categories()

    # Import service products, then verify.
    import_services(CSV_FILE_PATH)
    verify_import()

    return 0


if __name__ == '__main__':
    exit(main())
|
||||
@@ -0,0 +1,310 @@
|
||||
import csv
from proteus import config, Model
from decimal import Decimal

# Target database and input file ---------------------------------------------
DATABASE_NAME = 'tradon'
CSV_FILE_PATH = r'C:\Users\SylvainDUVERNAY\Open Squared\Production - Documents\TRADON Implementation\ITSA\Reference Data\Loaders\Services.csv'  # UPDATE THIS PATH!

# Product defaults ------------------------------------------------------------
PRODUCT_TYPE = 'service'       # Tryton template type used for imported rows
DEFAULT_CATEGORY = 'Services'  # fallback category name when the CSV omits one
DEFAULT_UOM = 'Mt'             # fallback unit of measure when lookup fails
|
||||
|
||||
def connect_to_tryton():
    """Establish a connection to the Tryton database over XML-RPC.

    Returns True on success, False otherwise.  The dead commented-out
    ``config.set_trytond`` line from the original was removed.
    """
    print(f"Connecting to Tryton database: {DATABASE_NAME}")
    try:
        # SECURITY NOTE(review): credentials are embedded in this URL and
        # committed to source control — rotate them and move them to
        # environment variables or a config file outside the repository.
        config.set_xmlrpc('https://admin:dsproject@itsa.open-squared.tech/tradon/')

        print("✓ Connected successfully!\n")
        return True
    except Exception as e:
        print(f"✗ Connection failed: {e}")
        return False
|
||||
|
||||
def get_or_create_category(category_name):
    """Look up a product category by name, creating and saving it if missing."""
    Category = Model.get('product.category')

    matches = Category.find([('name', '=', category_name)])
    if matches:
        print(f" Found existing category: {category_name}")
        return matches[0]

    # Not found: persist a brand-new category record with this name.
    record = Category()
    record.name = category_name
    record.save()
    print(f" ✓ Created new category: {category_name}")
    return record
|
||||
|
||||
def get_uom(uom_name):
    """Find a Unit of Measure by name.

    Lookup order: exact name match, case-insensitive match, the 'Unit'
    default, then the first UOM in the database.

    Raises:
        ValueError: when the database holds no UOM at all.

    Fix vs. original: the full UOM list is fetched once and reused for both
    fallbacks; the old code issued a second identical ``Uom.find([])``.
    """
    Uom = Model.get('product.uom')

    # Exact match first — cheapest server-side query.
    uoms = Uom.find([('name', '=', uom_name)])
    if uoms:
        return uoms[0]

    # Case-insensitive fallback: fetch all UOMs once, compare locally.
    all_uoms = Uom.find([])
    for uom in all_uoms:
        if uom.name.lower() == uom_name.lower():
            return uom

    # Fall back to the 'Unit' default (UOM names assumed unique here).
    print(f" ⚠ Warning: UOM '{uom_name}' not found, using 'Unit'")
    fallback = next((u for u in all_uoms if u.name == 'Unit'), None)
    if fallback:
        return fallback

    # Last resort: the first UOM available, reusing the fetched list.
    if all_uoms:
        print(f" ⚠ Using first available UOM: {all_uoms[0].name}")
        return all_uoms[0]

    raise ValueError("No UOM found in database!")
|
||||
|
||||
def check_product_exists(code):
    """Return the product with this code if one exists, otherwise None."""
    matches = Model.get('product.product').find([('code', '=', code)])
    if not matches:
        return None
    return matches[0]
|
||||
|
||||
def create_service_product(row, category, uom):
    """Create a service product (template plus variant) from a CSV row dict.

    Saves the template first, then creates the variant that carries the code.
    """
    Product = Model.get('product.product')
    Template = Model.get('product.template')

    def as_price(raw):
        # Empty string / None -> 0.00, otherwise exact decimal conversion.
        return Decimal(raw) if raw else Decimal('0.00')

    # Build and persist the template first.
    tmpl = Template()
    tmpl.name = row['name']
    tmpl.type = PRODUCT_TYPE
    tmpl.list_price = as_price(row['sale_price'])
    tmpl.cost_price = as_price(row['cost_price'])
    tmpl.default_uom = uom
    tmpl.category = category
    tmpl.salable = True
    tmpl.purchasable = False  # Services typically not purchased

    description = row.get('description')
    if description:
        tmpl.description = description

    tmpl.save()

    # Create the product variant that carries the code.
    variant = Product()
    variant.template = tmpl
    variant.code = row['code']
    variant.save()

    return variant
|
||||
|
||||
def import_services(csv_file):
    """Import service products from a CSV file.

    Expected columns: code, name, category, uom, sale_price, cost_price,
    description.  Rows missing code or name count as errors; rows whose code
    already exists are skipped.  Prints a summary at the end.

    Fixes vs. original: ``code``/``name`` are initialised before the per-row
    try so the except handler can never hit a NameError; ``row.get(...)`` is
    guarded with ``or`` because csv.DictReader yields None values for short
    rows, which crashed ``.strip()``; the unused ``Product`` local was removed.
    """
    imported_count = 0
    skipped_count = 0
    error_count = 0
    errors = []

    print(f"{'='*70}")
    print(f"Importing service products from: {csv_file}")
    print(f"{'='*70}\n")

    try:
        # Open with utf-8-sig to transparently strip a BOM if present.
        with open(csv_file, 'r', encoding='utf-8-sig') as file:
            reader = csv.DictReader(file)

            # Debug: Show detected columns
            print(f"Detected columns: {reader.fieldnames}\n")

            for row_num, row in enumerate(reader, start=2):
                # Initialise before the try so the except handler below can
                # always reference them safely.
                code = ''
                name = ''
                try:
                    # Clean up values; `or` guards against None for short rows.
                    code = (row.get('code') or '').strip()
                    name = (row.get('name') or '').strip()
                    category_name = (row.get('category') or DEFAULT_CATEGORY).strip() or DEFAULT_CATEGORY
                    uom_name = (row.get('uom') or DEFAULT_UOM).strip() or DEFAULT_UOM
                    sale_price = (row.get('sale_price') or '0.00').strip()
                    cost_price = (row.get('cost_price') or '0.00').strip()
                    description = (row.get('description') or '').strip()

                    # Skip empty rows
                    if not code and not name:
                        continue

                    # Validate required fields
                    if not code or not name:
                        errors.append(f"Row {row_num}: Missing code or name")
                        error_count += 1
                        print(f"✗ Row {row_num}: Missing required fields")
                        continue

                    print(f"Processing Row {row_num}: {code} - {name}")

                    # Check if product already exists
                    existing_product = check_product_exists(code)

                    if existing_product:
                        print(f" ⚠ Product code '{code}' already exists: {existing_product.template.name}")
                        print(f" Skipping...")
                        skipped_count += 1
                        continue

                    # Resolve category and UOM before creating anything.
                    category = get_or_create_category(category_name)
                    uom = get_uom(uom_name)
                    print(f" Using UOM: {uom.name}")

                    # Create the product
                    row_data = {
                        'code': code,
                        'name': name,
                        'sale_price': sale_price,
                        'cost_price': cost_price,
                        'description': description
                    }

                    product = create_service_product(row_data, category, uom)

                    print(f" ✓ Created service product: {name}")
                    print(f" Code: {code}")
                    print(f" Category: {category.name}")
                    print(f" Sale Price: {sale_price}")
                    print(f" Cost Price: {cost_price}")
                    if description:
                        print(f" Description: {description[:50]}...")
                    print()

                    imported_count += 1

                except Exception as e:
                    error_msg = f"Row {row_num} - {code} ({name}): {str(e)}"
                    errors.append(error_msg)
                    error_count += 1
                    print(f"✗ Error on row {row_num}: {e}\n")

        # Summary
        print(f"{'='*70}")
        print("IMPORT SUMMARY")
        print(f"{'='*70}")
        print(f"Successfully imported: {imported_count} service products")
        print(f"Skipped (already exist): {skipped_count} products")
        print(f"Errors: {error_count}")

        if errors:
            print(f"\nError details:")
            for error in errors:
                print(f" - {error}")

        print(f"\n{'='*70}")

    except FileNotFoundError:
        print(f"✗ Error: CSV file not found at {csv_file}")
        print(f"Please update CSV_FILE_PATH in the script with the correct path.")
    except Exception as e:
        print(f"✗ Fatal error: {e}")
        import traceback
        traceback.print_exc()
|
||||
|
||||
def verify_import():
    """Print a formatted report of every service-type product in Tryton."""
    Product = Model.get('product.product')

    banner = '=' * 70
    print(f"\n{banner}")
    print("VERIFICATION - Service Products")
    print(f"{banner}\n")

    # Only products whose template is of type 'service' are relevant here.
    products = Product.find([('template.type', '=', 'service')])

    if not products:
        print("No service products found")
        print()
        return

    print(f"Found {len(products)} service products:\n")
    print(f"{'Code':<12} {'Name':<35} {'Category':<20} {'Sale Price':<12}")
    print("-" * 80)

    for product in products:
        tmpl = product.template
        code = product.code or 'N/A'
        name = tmpl.name[:34] if tmpl.name else 'N/A'
        category = tmpl.category.name if tmpl.category else 'N/A'
        sale_price = f"{tmpl.list_price:.2f}" if tmpl.list_price else '0.00'
        print(f"{code:<12} {name:<35} {category:<20} {sale_price:<12}")

    print()
|
||||
|
||||
def list_available_uoms():
    """Dump every unit of measure defined in the connected database."""
    banner = '=' * 70
    print(f"\n{banner}")
    print("AVAILABLE UNITS OF MEASURE")
    print(f"{banner}\n")

    uoms = Model.get('product.uom').find([])

    if uoms:
        print(f"Found {len(uoms)} UOMs:\n")
        for uom in uoms:
            # Some Tryton versions may not expose a 'symbol' attribute.
            symbol = uom.symbol if hasattr(uom, 'symbol') else 'N/A'
            print(f" - {uom.name} (Symbol: {symbol})")
    else:
        print("No UOMs found")

    print()
|
||||
|
||||
def list_available_categories():
    """Dump every product category defined in the connected database."""
    banner = '=' * 70
    print(f"\n{banner}")
    print("AVAILABLE PRODUCT CATEGORIES")
    print(f"{banner}\n")

    categories = Model.get('product.category').find([])

    if categories:
        print(f"Found {len(categories)} categories:\n")
        for cat in categories:
            print(f" - {cat.name}")
    else:
        print("No categories found")

    print()
|
||||
|
||||
def main():
    """Entry point: connect to Tryton, import service products, verify."""
    banner = "=" * 70
    print(banner)
    print("TRYTON SERVICE PRODUCT IMPORT SCRIPT (using Proteus)")
    print(banner)
    print()

    # Abort early when the XML-RPC connection cannot be established.
    if not connect_to_tryton():
        return 1

    # Optional discovery helpers; uncomment to inspect the target database.
    # list_available_uoms()
    # list_available_categories()

    # Import service products, then report what landed in the database.
    import_services(CSV_FILE_PATH)
    verify_import()

    return 0


if __name__ == '__main__':
    exit(main())
|
||||
@@ -0,0 +1,397 @@
|
||||
import csv
import psycopg2
from proteus import config, Model


# Input file with one stock location per row.
CSV_FILE_PATH = r'C:\Users\SylvainDUVERNAY\Open Squared\Production - Documents\TRADON Implementation\ITSA\Reference Data\Loaders\Customer_Stock_Locations.csv'

# Tryton XML-RPC endpoint.
# NOTE(review): credentials are hard-coded in source; consider moving them
# to environment variables before sharing or committing this script.
HTTPS = 'https://'
SERVER_URL = 'itsa.open-squared.tech'
DATABASE_NAME = 'tradon'
USERNAME = 'admin'
PASSWORD = 'dsproject'

# Direct PostgreSQL access, used to write the lat/lon columns that the
# Tryton model does not expose.
DB_HOST = '72.61.163.139'
DB_PORT = 5433
DB_USER = 'postgres'
DB_PASSWORD = 'dsproject'

# Location type applied when the CSV row does not specify one.
DEFAULT_TYPE = 'storage'
|
||||
|
||||
|
||||
def connect_to_tryton():
    """Open the global proteus XML-RPC connection.

    Returns True on success; on failure prints troubleshooting hints for
    the operator and returns False.
    """
    print(f"Connecting to Tryton server: {SERVER_URL}")
    print(f"Database: {DATABASE_NAME}")
    print(f"Username: {USERNAME}")

    endpoint = f'{HTTPS}{USERNAME}:{PASSWORD}@{SERVER_URL}/{DATABASE_NAME}/'
    try:
        config.set_xmlrpc(endpoint)
    except Exception as e:
        print(f"✗ Connection failed: {e}")
        print("\nTroubleshooting:")
        print(" - Verify the server URL is correct and accessible")
        print(" - Check that the Tryton server is running")
        print(" - Verify username and password are correct")
        print(" - Make sure you can access the server in a browser")
        return False

    print("✓ Connected successfully!\n")
    return True
|
||||
|
||||
def get_db_connection():
    """Return a new psycopg2 connection, or None if it cannot be opened."""
    try:
        return psycopg2.connect(
            host=DB_HOST,
            port=DB_PORT,
            database=DATABASE_NAME,
            user=DB_USER,
            password=DB_PASSWORD,
        )
    except Exception as e:
        print(f"✗ Database connection failed: {e}")
        return None
|
||||
|
||||
def update_location_coordinates(location_id, latitude, longitude):
    """Write lat/lon for a stock location directly in PostgreSQL.

    The Tryton model does not expose these columns, so they are updated
    with raw SQL. Returns True when the targeted row was updated, False
    on any failure.

    Fix: cursor and connection cleanup now happens in a ``finally`` block,
    so the connection is no longer leaked when execute()/commit() raises.
    """
    conn = get_db_connection()
    if not conn:
        print(f" ⚠ Could not update coordinates - database connection failed")
        return False

    cursor = None
    try:
        cursor = conn.cursor()
        # Parameterized statement keeps values properly escaped.
        cursor.execute(
            """
            UPDATE stock_location
            SET lat = %s, lon = %s
            WHERE id = %s
            """,
            (latitude, longitude, location_id),
        )
        rows_affected = cursor.rowcount
        conn.commit()
        return rows_affected > 0
    except Exception as e:
        print(f" ⚠ Error updating coordinates: {e}")
        import traceback
        traceback.print_exc()
        conn.rollback()
        return False
    finally:
        if cursor is not None:
            cursor.close()
        conn.close()
|
||||
|
||||
def check_location_exists_by_name(name):
    """Return the first stock.location with this exact name, or None."""
    matches = Model.get('stock.location').find([('name', '=', name)])
    return matches[0] if matches else None
|
||||
|
||||
def validate_location_type(loc_type):
    """Normalize a CSV-provided location type to a valid lowercase value.

    Falls back to DEFAULT_TYPE (with a printed warning) when the value is
    missing or not one of the recognized stock.location types.
    """
    valid_types = [
        'supplier', 'customer', 'lost_found', 'warehouse',
        'storage', 'production', 'drop', 'rental', 'view',
    ]

    normalized = loc_type.lower() if loc_type else None
    if normalized in valid_types:
        return normalized

    print(f" ⚠ Warning: Invalid type '{loc_type}', using default '{DEFAULT_TYPE}'")
    return DEFAULT_TYPE
|
||||
|
||||
def parse_coordinate(value, coord_name):
    """Parse a CSV cell into a float coordinate, or None when unusable.

    ``coord_name`` is 'latitude' or 'longitude' and selects the valid
    range (±90 / ±180). Blank cells, 'NULL'-like placeholder text,
    non-numeric text, and out-of-range numbers all yield None (with a
    printed warning where appropriate).
    """
    # Blank / falsy cells carry no coordinate.
    if not value or value == '':
        return None

    # Spreadsheet exports often spell missing values out as text.
    if isinstance(value, str) and value.strip().upper() in ['NULL', 'NONE', 'N/A', '']:
        return None

    try:
        coord = float(value)
    except (ValueError, TypeError) as e:
        print(f" ⚠ Warning: Invalid {coord_name} value '{value}' - {e}")
        return None

    # Range check depends on which axis we are parsing.
    if coord_name == 'latitude' and not -90 <= coord <= 90:
        print(f" ⚠ Warning: Latitude {coord} out of range (-90 to 90)")
        return None
    if coord_name == 'longitude' and not -180 <= coord <= 180:
        print(f" ⚠ Warning: Longitude {coord} out of range (-180 to 180)")
        return None

    return coord
|
||||
|
||||
def create_location(row):
    """Create a stock.location from a prepared row dict.

    Saves the record through proteus first, then pushes lat/lon straight
    into PostgreSQL (the Tryton model has no coordinate fields).
    Returns a (location, latitude, longitude) tuple.
    """
    Location = Model.get('stock.location')

    location = Location()
    location.name = row['name']
    location.type = row['type']
    # Save first so an id exists for the direct SQL coordinate update.
    location.save()

    latitude = row.get('latitude')
    longitude = row.get('longitude')

    if latitude is not None or longitude is not None:
        if not update_location_coordinates(location.id, latitude, longitude):
            print(f" ⚠ Location created but coordinates not saved")

    return location, latitude, longitude
|
||||
|
||||
def import_locations(csv_file):
    """Import stock locations from a CSV file (name, type, lat, lon).

    Skips rows whose name already exists in the database or was already
    seen earlier in the same file, validates the location type, parses
    coordinates, and prints a summary at the end.

    Fix: CSV field access is now None-safe — csv.DictReader yields None
    for fields missing from short rows, and ``row.get('name', '')`` does
    not protect against that, so ``.strip()`` used to raise
    AttributeError. ``(row.get(...) or '')`` handles both cases.
    """
    imported_count = 0
    skipped_count = 0
    error_count = 0
    errors = []

    # Track names we've already processed in this run.
    processed_names = set()

    print(f"{'='*70}")
    print(f"Importing locations from: {csv_file}")
    print(f"{'='*70}\n")

    try:
        # utf-8-sig transparently strips a BOM from Excel-generated CSVs.
        with open(csv_file, 'r', encoding='utf-8-sig') as file:
            reader = csv.DictReader(file)

            # Debug: show the columns actually detected in the file.
            print(f"Detected columns: {reader.fieldnames}\n")

            # start=2 so row numbers match the spreadsheet (header = row 1).
            for row_num, row in enumerate(reader, start=2):
                name = ''
                try:
                    # None-safe cleanup (see fix note in the docstring).
                    name = (row.get('name') or '').strip()
                    loc_type = (row.get('type') or '').strip() or DEFAULT_TYPE
                    lat_raw = (row.get('lat') or '').strip()
                    lon_raw = (row.get('lon') or '').strip()

                    # Skip empty rows.
                    if not name:
                        continue

                    print(f"Processing Row {row_num}: {name}")
                    print(f" CSV Raw values - lat: '{lat_raw}', lon: '{lon_raw}'")

                    # Duplicate within this same CSV file?
                    if name in processed_names:
                        print(f" ⚠ Duplicate name in CSV: '{name}'")
                        print(f" Skipping duplicate entry...\n")
                        skipped_count += 1
                        continue

                    # Already present in the database?
                    existing_location = check_location_exists_by_name(name)
                    if existing_location:
                        print(f" ⚠ Location '{name}' already exists (ID: {existing_location.id})")
                        print(f" Type: {existing_location.type}")
                        print(f" Skipping...\n")
                        skipped_count += 1
                        processed_names.add(name)
                        continue

                    loc_type = validate_location_type(loc_type)
                    latitude = parse_coordinate(lat_raw, 'latitude')
                    longitude = parse_coordinate(lon_raw, 'longitude')
                    print(f" Parsed values - lat: {latitude}, lon: {longitude}")

                    location_data = {
                        'name': name,
                        'type': loc_type,
                        'latitude': latitude,
                        'longitude': longitude
                    }
                    location, saved_lat, saved_lon = create_location(location_data)
                    processed_names.add(name)

                    print(f" ✓ Created location")
                    print(f" Location ID: {location.id}")
                    print(f" Name: {name}")
                    print(f" Type: {loc_type}")
                    if saved_lat is not None:
                        print(f" Latitude: {saved_lat}")
                    if saved_lon is not None:
                        print(f" Longitude: {saved_lon}")
                    print()

                    imported_count += 1

                except Exception as e:
                    error_msg = f"Row {row_num} - {name if name else 'Unknown'}: {str(e)}"
                    errors.append(error_msg)
                    error_count += 1
                    print(f"✗ Error on row {row_num}: {e}\n")
                    import traceback
                    traceback.print_exc()

        # Summary
        print(f"{'='*70}")
        print("IMPORT SUMMARY")
        print(f"{'='*70}")
        print(f"Successfully imported: {imported_count} locations")
        print(f"Skipped (already exist or duplicates): {skipped_count} locations")
        print(f"Errors: {error_count}")

        if errors:
            print(f"\nError details:")
            for error in errors:
                print(f" - {error}")

        print(f"\n{'='*70}")

    except FileNotFoundError:
        print(f"✗ Error: CSV file not found at {csv_file}")
        print(f"Please update CSV_FILE_PATH in the script with the correct path.")
    except Exception as e:
        print(f"✗ Fatal error: {e}")
        import traceback
        traceback.print_exc()
|
||||
|
||||
def verify_import():
    """Verify imported locations, reading coordinates from PostgreSQL.

    Locations are read through proteus; their lat/lon come straight from
    the stock_location table because the Tryton model does not expose
    those columns.

    Fix: the PostgreSQL connection is now closed in a ``finally`` block —
    previously it was only closed when at least one location was found,
    leaking the connection on the empty path and on exceptions.
    """
    Location = Model.get('stock.location')

    print(f"\n{'='*70}")
    print("VERIFICATION - Stock Locations")
    print(f"{'='*70}\n")

    conn = get_db_connection()
    if not conn:
        print("Cannot verify - database connection failed")
        return

    try:
        # Newest first so freshly imported rows show up in the top 20.
        locations = Location.find([], order=[('id', 'DESC')])

        if locations:
            print(f"Found {len(locations)} locations (showing last 20):\n")
            print(f"{'ID':<8} {'Name':<35} {'Type':<12} {'Lat':<12} {'Lon':<12}")
            print("-" * 85)

            for location in locations[:20]:
                loc_id = location.id
                name = location.name[:34] if location.name else 'N/A'
                loc_type = location.type if location.type else 'N/A'

                # Coordinates come from the database, not the model.
                lat = 'N/A'
                lon = 'N/A'
                try:
                    cursor = conn.cursor()
                    cursor.execute(
                        "SELECT lat, lon FROM stock_location WHERE id = %s",
                        (loc_id,)
                    )
                    result = cursor.fetchone()
                    if result:
                        lat = f"{result[0]:.6f}" if result[0] is not None else 'N/A'
                        lon = f"{result[1]:.6f}" if result[1] is not None else 'N/A'
                    cursor.close()
                except Exception as e:
                    print(f"Error reading coordinates for location {loc_id}: {e}")

                print(f"{loc_id:<8} {name:<35} {loc_type:<12} {lat:<12} {lon:<12}")
        else:
            print("No locations found")
    finally:
        conn.close()

    print()
|
||||
|
||||
def main():
    """Entry point: sanity-check both connections, import, then verify."""
    banner = "=" * 70
    print(banner)
    print("TRYTON STOCK LOCATION IMPORT SCRIPT")
    print("Using Proteus with XML-RPC Connection")
    print("Using Direct PostgreSQL for lat/lon coordinates")
    print(banner)
    print()

    # Connect to Tryton using XML-RPC.
    if not connect_to_tryton():
        return 1

    # Coordinates are written via raw SQL, so the direct DB connection
    # (and the presence of the lat/lon columns) is checked up front.
    print("Testing PostgreSQL connection...")
    conn = get_db_connection()
    if not conn:
        print("✗ PostgreSQL connection failed")
        print("Coordinates will not be saved!\n")
        return 1

    print("✓ PostgreSQL connection successful")
    try:
        cursor = conn.cursor()
        cursor.execute("""
            SELECT column_name, data_type
            FROM information_schema.columns
            WHERE table_name = 'stock_location'
            AND column_name IN ('lat', 'lon')
        """)
        columns = cursor.fetchall()
        cursor.close()

        if columns:
            print("✓ Found lat/lon columns in stock_location table:")
            for col in columns:
                print(f" - {col[0]}: {col[1]}")
        else:
            print("✗ WARNING: lat/lon columns NOT found in stock_location table!")
            print(" Coordinates will not be saved!")
    except Exception as e:
        print(f" Could not verify columns: {e}")

    conn.close()
    print()

    # Import locations, then report what landed in the database.
    import_locations(CSV_FILE_PATH)
    verify_import()

    return 0


if __name__ == '__main__':
    exit(main())
|
||||
@@ -0,0 +1,165 @@
|
||||
import csv
import psycopg2
from datetime import datetime

# PostgreSQL connection parameters for the Tryton database.
# NOTE(review): credentials are hard-coded in source; consider moving them
# to environment variables before sharing or committing this script.
DB_CONFIG = {
    'host': '72.61.163.139',
    'port': 5433,
    'database': 'tradon',
    'user': 'postgres',
    'password': 'dsproject'
}

# Source CSV; the importer reads vessel_name, vessel_year and vessel_imo
# columns from it.
CSV_FILE = r'C:\Users\SylvainDUVERNAY\Open Squared\Production - Documents\TRADON Implementation\ITSA\Reference Data\Loaders\Vessels.csv'
|
||||
|
||||
def import_vessels():
    """Import vessel data from CSV into the trade_vessel table.

    Rows already present in the table (same name and IMO) are skipped;
    everything else is inserted in a single transaction that is committed
    at the end or rolled back on error.

    Fix: the duplicate check used ``vessel_imo = %s``, which — under SQL
    three-valued logic — never matches when the IMO is NULL, so vessels
    without an IMO were re-inserted on every run. The check now uses the
    null-safe ``IS NOT DISTINCT FROM`` comparison (PostgreSQL).
    """
    print("=" * 60)
    print("VESSEL IMPORT PROCESS STARTED")
    print("=" * 60)

    # Initialized up front so except/finally blocks can reference them
    # even when the connection itself failed.
    conn = None
    cursor = None

    try:
        # Connect to PostgreSQL database.
        print(f"\n[1/4] Connecting to database...")
        print(f" Host: {DB_CONFIG['host']}:{DB_CONFIG['port']}")
        print(f" Database: {DB_CONFIG['database']}")
        conn = psycopg2.connect(**DB_CONFIG)
        cursor = conn.cursor()
        print(" ✓ Database connection established")

        # utf-8-sig transparently strips a BOM from Excel-generated CSVs.
        print(f"\n[2/4] Reading CSV file...")
        print(f" File: {CSV_FILE}")
        with open(CSV_FILE, 'r', encoding='utf-8-sig') as file:
            csv_reader = csv.DictReader(file)

            insert_count = 0
            skip_count = 0

            print(" ✓ CSV file opened successfully")
            print(f"\n[3/4] Processing vessel records...")
            print("-" * 60)

            for row_num, row in enumerate(csv_reader, start=1):
                vessel_name = row['vessel_name'].strip()
                # Empty strings become None so the DB stores real NULLs.
                vessel_year = row['vessel_year'].strip() if row['vessel_year'].strip() else None
                # 'NULL' placeholder text from the export also becomes None.
                vessel_imo = row['vessel_imo'].strip() if row['vessel_imo'].strip() and row['vessel_imo'].upper() != 'NULL' else None

                print(f"\nRow {row_num}: Processing '{vessel_name}'")
                print(f" Year: {vessel_year if vessel_year else 'N/A'}")
                print(f" IMO: {vessel_imo if vessel_imo else 'N/A'}")

                # Duplicate check; IS NOT DISTINCT FROM treats NULL = NULL
                # as a match (see fix note in the docstring).
                cursor.execute("""
                    SELECT id FROM trade_vessel
                    WHERE vessel_name = %s
                    AND vessel_imo IS NOT DISTINCT FROM %s
                """, (vessel_name, vessel_imo))
                existing = cursor.fetchone()

                if existing:
                    print(f" ⚠ SKIPPED - Duplicate found (ID: {existing[0]})")
                    skip_count += 1
                    continue

                # Insert new vessel record.
                cursor.execute("""
                    INSERT INTO trade_vessel
                    (vessel_name, vessel_year, vessel_imo, active, create_date, create_uid, write_date, write_uid)
                    VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
                    RETURNING id
                """, (
                    vessel_name,      # Vessel name from CSV
                    vessel_year,      # Year vessel was built
                    vessel_imo,       # IMO number (international maritime identifier)
                    True,             # Set active flag to True
                    datetime.now(),   # Record creation timestamp
                    1,                # User ID who created the record
                    datetime.now(),   # Record last modification timestamp
                    1                 # User ID who last modified the record
                ))
                new_id = cursor.fetchone()[0]

                insert_count += 1
                print(f" ✓ INSERTED successfully (New ID: {new_id})")

            print("-" * 60)

        # Commit all inserts as one transaction.
        print(f"\n[4/4] Committing transaction to database...")
        conn.commit()
        print(" ✓ Transaction committed successfully")

        # Display import summary statistics.
        print("\n" + "=" * 60)
        print("IMPORT SUMMARY")
        print("=" * 60)
        print(f"✓ Records inserted: {insert_count}")
        print(f"⚠ Records skipped: {skip_count}")
        print(f" Total processed: {insert_count + skip_count}")
        print("=" * 60)

    except psycopg2.Error as e:
        # Roll back the transaction if a database error occurs.
        print("\n" + "!" * 60)
        print("DATABASE ERROR")
        print("!" * 60)
        if conn:
            conn.rollback()
            print("✓ Transaction rolled back")
        print(f"Error details: {e}")
        print("!" * 60)

    except FileNotFoundError:
        print("\n" + "!" * 60)
        print("FILE NOT FOUND ERROR")
        print("!" * 60)
        print(f"CSV file not found: {CSV_FILE}")
        print("Please check the file path and try again.")
        print("!" * 60)

    except Exception as e:
        # Any other unexpected error: report and roll back.
        print("\n" + "!" * 60)
        print("UNEXPECTED ERROR")
        print("!" * 60)
        if conn:
            conn.rollback()
            print("✓ Transaction rolled back")
        print(f"Error details: {e}")
        print("!" * 60)

    finally:
        # Clean up database resources in every case.
        print(f"\n[CLEANUP] Closing database connection...")
        if cursor:
            cursor.close()
            print(" ✓ Cursor closed")
        if conn:
            conn.close()
            print(" ✓ Connection closed")

    print("\n" + "=" * 60)
    print("VESSEL IMPORT PROCESS COMPLETED")
    print("=" * 60 + "\n")
|
||||
|
||||
# Run the import only when executed as a script, not on import.
if __name__ == "__main__":
    import_vessels()
|
||||
@@ -0,0 +1,76 @@
|
||||
"""
|
||||
Diagnostic Script - Discover Analytic Dimension Assignment Fields
|
||||
|
||||
This script will show you the actual field names in your Tryton instance.
|
||||
Run this to see what fields are available.
|
||||
"""
|
||||
|
||||
from proteus import config, Model
|
||||
|
||||
# XML-RPC Configuration
|
||||
HTTPS = 'https://'
|
||||
SERVER_URL = 'itsa.open-squared.tech'
|
||||
DATABASE_NAME = 'tradon'
|
||||
USERNAME = 'admin'
|
||||
PASSWORD = 'dsproject'
|
||||
|
||||
def discover_assignment_fields():
    """Print the fields available on the analytic.dimension.assignment model.

    Tries the model's ``_fields`` metadata first, falls back to ``dir()``
    introspection, then dumps the attributes of one existing record (if
    any) so the real runtime structure is visible.

    Fix: the bare ``except:`` around the per-attribute read is narrowed to
    ``except Exception`` so SystemExit/KeyboardInterrupt still propagate.
    """
    print("="*70)
    print("DISCOVERING ANALYTIC DIMENSION ASSIGNMENT FIELDS")
    print("="*70)

    try:
        AnalyticDimensionAssignment = Model.get('analytic.dimension.assignment')

        # A fresh, unsaved instance is enough for field introspection.
        test_assignment = AnalyticDimensionAssignment()

        print("\nAvailable fields on analytic.dimension.assignment:\n")

        # Method 1: _fields metadata, when proteus exposes it.
        if hasattr(test_assignment, '_fields'):
            for field_name in sorted(test_assignment._fields.keys()):
                field = test_assignment._fields[field_name]
                print(f" - {field_name}: {type(field).__name__}")
        else:
            # Method 2: plain dir() scan for non-callable public attributes.
            print("Fields found via dir():")
            for attr in sorted(dir(test_assignment)):
                if not attr.startswith('_') and not callable(getattr(test_assignment, attr, None)):
                    print(f" - {attr}")

        print("\n" + "="*70)
        print("CHECKING EXISTING ASSIGNMENTS (if any)")
        print("="*70)

        # Inspect one real record, if the database has any.
        existing = AnalyticDimensionAssignment.find([], limit=1)
        if existing:
            print(f"\nFound existing assignment (ID: {existing[0].id})")
            print("\nAttributes of existing assignment:")
            for attr in dir(existing[0]):
                if not attr.startswith('_') and not callable(getattr(existing[0], attr, None)):
                    try:
                        value = getattr(existing[0], attr)
                        print(f" - {attr}: {type(value).__name__} = {value}")
                    except Exception:
                        # Some related fields raise on access; keep going.
                        print(f" - {attr}: <unable to read>")
        else:
            print("\nNo existing assignments found in database")

    except Exception as e:
        print(f"\nError: {e}")
        import traceback
        traceback.print_exc()
|
||||
|
||||
if __name__ == '__main__':
    # The XML-RPC connection must be established before any Model.get().
    config.set_xmlrpc(f'{HTTPS}{USERNAME}:{PASSWORD}@{SERVER_URL}/{DATABASE_NAME}/')

    print(f"Connected to Tryton database '{DATABASE_NAME}' successfully!")

    discover_assignment_fields()
|
||||
@@ -0,0 +1,239 @@
|
||||
from proteus import config, Model
import psycopg2

# XML-RPC Configuration (default connection method)
# NOTE(review): credentials are hard-coded in source; consider moving them
# to environment variables before sharing or committing this script.
HTTPS = 'https://'
SERVER_URL = 'itsa.open-squared.tech'
DATABASE_NAME = 'tradon'
USERNAME = 'admin'
PASSWORD = 'dsproject'

# PostgreSQL Configuration (for direct database inspection)
DB_HOST = '72.61.163.139'
DB_PORT = 5433
DB_USER = 'postgres'
DB_PASSWORD = 'dsproject'

print("=" * 80)
print("CUSTOM FIELDS IDENTIFICATION FOR purchase.purchase")
print("=" * 80)

# Connect to Tryton via XML-RPC.
print(f"\nConnecting via XML-RPC to {SERVER_URL}...")
config.set_xmlrpc(f'{HTTPS}{USERNAME}:{PASSWORD}@{SERVER_URL}/{DATABASE_NAME}/')
print("✓ Connected successfully\n")

Purchase = Model.get('purchase.purchase')

# Names proteus exposes on the model, minus its CRUD helper methods.
_crud_helpers = ['create', 'delete', 'save', 'find',
                 'copy', 'read', 'write', 'search']
proteus_fields = sorted(
    key for key in dir(Purchase)
    if not key.startswith('_') and key not in _crud_helpers
)

print(f"1. FIELDS VISIBLE TO PROTEUS: {len(proteus_fields)} fields")
print("-" * 80)

# Standard Tryton purchase.purchase fields (from the base module).
standard_purchase_fields = {
    'id', 'create_date', 'create_uid', 'write_date', 'write_uid',
    'company', 'party', 'invoice_party', 'invoice_address',
    'payment_term', 'warehouse', 'currency', 'description',
    'comment', 'state', 'purchase_date', 'invoice_method',
    'lines', 'invoices', 'invoices_ignored', 'invoices_recreated',
    'invoice_lines', 'invoice_lines_ignored', 'moves',
    'shipment_state', 'invoice_state', 'number', 'reference',
    'shipments', 'shipment_returns', 'rec_name', 'origin',
    'untaxed_amount', 'tax_amount', 'total_amount',
    'untaxed_amount_cache', 'tax_amount_cache', 'total_amount_cache',
    'delivery_date', 'party_lang', 'contact', 'xml_id'
}

# Anything not in the standard set is a candidate custom field.
potential_custom_fields = [f for f in proteus_fields
                           if f not in standard_purchase_fields]

print(f"\n2. POTENTIAL CUSTOM FIELDS: {len(potential_custom_fields)} fields")
print("-" * 80)
for field in potential_custom_fields:
    print(f" - {field}")
|
||||
|
||||
# Connect to PostgreSQL and compare the physical table against what
# proteus exposes on the model.
print(f"\n3. COLUMNS IN POSTGRESQL TABLE 'purchase_purchase'")
print("-" * 80)

try:
    conn = psycopg2.connect(
        dbname=DATABASE_NAME,
        host=DB_HOST,
        port=DB_PORT,
        user=DB_USER,
        password=DB_PASSWORD,
    )
    cursor = conn.cursor()

    # Full column list for purchase_purchase, in table order.
    cursor.execute("""
        SELECT
            column_name,
            data_type,
            character_maximum_length,
            is_nullable,
            column_default
        FROM information_schema.columns
        WHERE table_schema = 'public'
        AND table_name = 'purchase_purchase'
        ORDER BY ordinal_position;
    """)
    db_columns = cursor.fetchall()

    print(f"Total columns in database: {len(db_columns)}\n")

    # Standard columns that typically exist in purchase_purchase.
    standard_db_columns = {
        'id', 'create_date', 'create_uid', 'write_date', 'write_uid',
        'company', 'party', 'invoice_party', 'invoice_address',
        'payment_term', 'warehouse', 'currency', 'description',
        'comment', 'state', 'purchase_date', 'invoice_method',
        'number', 'reference', 'delivery_date', 'contact',
        'shipment_state', 'invoice_state', 'origin',
        'untaxed_amount_cache', 'tax_amount_cache', 'total_amount_cache'
    }

    db_column_names = [col[0] for col in db_columns]
    custom_db_columns = [col for col in db_columns
                         if col[0] not in standard_db_columns]

    print("Custom columns in database:")
    for col_name, data_type, max_length, nullable, default in custom_db_columns:
        length_info = f"({max_length})" if max_length else ""
        print(f" - {col_name:<30} {data_type}{length_info:<15} NULL: {nullable}")

    print(f"\n4. COMPARISON: PROTEUS vs DATABASE")
    print("-" * 80)

    # Proteus-only names are usually relational or function fields that
    # have no direct column of their own.
    proteus_only = set(potential_custom_fields) - set(db_column_names)
    if proteus_only:
        print(f"\nFields in Proteus but NOT as columns in DB ({len(proteus_only)}):")
        print("(These might be Many2One, One2Many, Function fields, etc.)")
        for field in sorted(proteus_only):
            print(f" - {field}")

    # DB-only names mean the Python model is missing field definitions.
    db_only = set(col[0] for col in custom_db_columns) - set(proteus_fields)
    if db_only:
        print(f"\n⚠️ COLUMNS IN DATABASE BUT NOT VISIBLE IN PROTEUS ({len(db_only)}):")
        print("(These fields MUST be added to the Python model!)")
        for field in sorted(db_only):
            print(f" - {field}")

    # Names present on both sides are correctly wired custom fields.
    both = set(potential_custom_fields) & set(col[0] for col in custom_db_columns)
    if both:
        print(f"\n✓ Custom fields properly defined in BOTH Proteus and DB ({len(both)}):")
        for field in sorted(both):
            print(f" - {field}")

    cursor.close()
    conn.close()

except Exception as e:
    print(f"Error connecting to PostgreSQL: {e}")
|
||||
|
||||
# Test persistence of custom fields.
# NOTE(review): this section writes probe values into a live draft purchase
# and restores the originals afterwards — run against production with care.
print(f"\n5. TESTING FIELD PERSISTENCE")
print("-" * 80)

try:
    # Use an existing draft purchase as the scratch record.
    drafts = Purchase.find([('state', '=', 'draft')], limit=1)

    if drafts:
        test_purchase = drafts[0]
        test_id = test_purchase.id

        print(f"Testing with purchase ID: {test_id}")
        print("\nTesting custom fields (attempting to set and save):\n")

        # Candidate fields paired with throwaway probe values; only those
        # actually visible on the model are tested.
        probes = {
            'reference': 'TEST_REF',
            'crop': 'TEST_CROP',
            'forex': 'TEST_FOREX',
            'broker': 'TEST_BROKER',
            'certif': 'TEST_CERT',
            'wb': 'TEST_WB',
        }
        test_fields = {
            field: value for field, value in probes.items()
            if field in potential_custom_fields
        }

        for field_name, test_value in test_fields.items():
            try:
                original_value = getattr(test_purchase, field_name, None)
                setattr(test_purchase, field_name, test_value)
                test_purchase.save()

                # Re-read from the server to see what actually persisted.
                reloaded = Purchase(test_id)
                new_value = getattr(reloaded, field_name, None)

                if new_value == test_value:
                    print(f" ✓ {field_name}: PERSISTS correctly")
                    # Restore the original value.
                    setattr(reloaded, field_name, original_value)
                    reloaded.save()
                else:
                    print(f" ✗ {field_name}: Does NOT persist (expected: '{test_value}', got: '{new_value}')")

            except Exception as e:
                print(f" ✗ {field_name}: Error - {str(e)[:60]}")
    else:
        print("No draft purchases found for testing")

except Exception as e:
    print(f"Error during persistence testing: {e}")

print("\n" + "=" * 80)
print("SUMMARY & RECOMMENDATIONS")
print("=" * 80)
print("""
Next steps for your colleague:

1. Review the "⚠️ COLUMNS IN DATABASE BUT NOT VISIBLE IN PROTEUS" section
   → These fields exist in PostgreSQL but are missing from the Python model

2. Review fields that "Does NOT persist" in the testing section
   → These fields are visible but not working correctly

3. Add missing fields to your custom Tryton module:

   File: modules/your_custom_module/purchase.py

   from trytond.pool import PoolMeta
   from trytond.model import fields

   class Purchase(metaclass=PoolMeta):
       __name__ = 'purchase.purchase'

       # Add each missing field with appropriate type:
       custom_field = fields.Char('Custom Field')
       custom_number = fields.Integer('Custom Number')
       custom_date = fields.Date('Custom Date')
       custom_many2one = fields.Many2One('other.model', 'Reference')
       # etc...

4. Increment module version in tryton.cfg

5. Update module: trytond-admin -d tradon -u your_custom_module

6. Restart Tryton server

7. Re-run this script to verify all fields work correctly
""")
|
||||
@@ -0,0 +1,46 @@
|
||||
from proteus import config, Model

# XML-RPC Configuration
HTTPS = 'https://'
SERVER_URL = 'itsa.open-squared.tech'
DATABASE_NAME = 'tradon'
USERNAME = 'admin'
PASSWORD = 'dsproject'

# Connect via XML-RPC, then enumerate the fields exposed by the model below.
try:
    config.set_xmlrpc(f'{HTTPS}{USERNAME}:{PASSWORD}@{SERVER_URL}/{DATABASE_NAME}/')

    print(f"Connected to Tryton database '{DATABASE_NAME}' successfully!")

    # Model under inspection; change this name to inspect a different model.
    MODEL_NAME = 'purchase.purchase'
    Purchase = Model.get(MODEL_NAME)

    try:
        # Reuse an existing record when one exists; otherwise instantiate an
        # unsaved one — either is enough to enumerate its attributes.
        purchases = Purchase.find([], limit=1)
        if purchases:
            sample = purchases[0]
        else:
            sample = Purchase()

        # Field names: public attributes minus proteus' CRUD helper methods.
        field_names = sorted([key for key in dir(sample)
                              if not key.startswith('_')
                              and key not in ['create', 'delete', 'save', 'find']])

        # BUG FIX: the message previously said 'analytic.dimension' although
        # the inspected model is purchase.purchase — report the actual model.
        print(f"\nTotal fields in {MODEL_NAME}: {len(field_names)}")
        print("\nField list:")
        for field in field_names:
            print(f"{field}")

    except Exception as e:
        print(f"Could not inspect fields via instance: {e}")

except Exception as e:
    print(f"Connection or operation failed: {e}")
    print("\nPlease verify:")
    print(f" - Tryton server is running on {SERVER_URL}")
    print(f" - Database '{DATABASE_NAME}' exists")
    print(f" - Username and password are correct")
|
||||
@@ -0,0 +1,35 @@
|
||||
from proteus import config, Model

# Connect to the Tryton server over XML-RPC.
config.set_xmlrpc(f'https://admin:dsproject@itsa.open-squared.tech/tradon/')

Purchase = Model.get('purchase.purchase')

# --- Does a manually assigned 'number' survive a save/reload cycle? ---
print("=== Testing Number Field Persistence ===")

TARGET_ID = 682  # The ID from your previous test
EXPECTED = "MANUAL_TEST_001"

record = Purchase(TARGET_ID)
print(f"Before: number = {record.number}, state = {record.state}")

# Assign and persist the candidate value.
record.number = EXPECTED
record.save()
print(f"After save: number = {record.number}")

# Fetch a fresh copy from the database to bypass any client-side state.
fresh = Purchase(TARGET_ID)
print(f"After reload: number = {fresh.number}")

if fresh.number == EXPECTED:
    print("✓ SUCCESS: Number WAS persisted via Proteus!")
else:
    print(f"✗ FAILED: Number NOT persisted. Got: {fresh.number}")
    print("\nThis means the 'number' field is likely:")
    print(" 1. Read-only (controlled by Tryton workflow)")
    print(" 2. Auto-generated by a sequence")
    print(" 3. Overwritten by server-side logic")

# --- Cross-check the stored value directly in PostgreSQL ---
print("\n=== Verify in PostgreSQL ===")
print("Run this SQL query to confirm:")
print("SELECT id, number, state FROM purchase_purchase WHERE id = 682;")
|
||||
@@ -0,0 +1,44 @@
|
||||
from proteus import config, Model
from decimal import getcontext, Decimal, ROUND_HALF_UP

# XML-RPC Configuration
HTTPS = 'https://'
SERVER_URL = 'itsa.open-squared.tech'
DATABASE_NAME = 'tradon'
USERNAME = 'admin'
PASSWORD = 'dsproject'

# BUG FIX: do not rebind the name 'config' to set_xmlrpc()'s return value —
# that shadows the proteus.config module for the rest of the script.
config.set_xmlrpc(f'{HTTPS}{USERNAME}:{PASSWORD}@{SERVER_URL}/{DATABASE_NAME}/')

# Proxy classes for the Tryton models used below.
Company = Model.get('company.company')
Party = Model.get('party.party')
Currency = Model.get('currency.currency')
Purchase = Model.get('purchase.purchase')
Product = Model.get('product.product')
Wb = Model.get('purchase.weight.basis')

# Fetch referenced records (IDs are environment-specific).
company = Company(6)
party = Party(2776)

# Build the purchase order header.
purchase = Purchase()
purchase.company = company
purchase.party = party
purchase.currency = company.currency
purchase.tol_min = Decimal(1)
purchase.wb = Wb(1)

# Purchase line.
product = Product(12)  # product id
line = purchase.lines.new()
line.product = product
line.quantity = 10
line.unit_price = product.cost_price

# Persist header and line together.
purchase.save()

print(f"Purchase créée : {purchase.id}")
|
||||
@@ -0,0 +1,45 @@
|
||||
from proteus import config, Model
from decimal import getcontext, Decimal, ROUND_HALF_UP

# XML-RPC Configuration
HTTPS = 'https://'
SERVER_URL = 'itsa.open-squared.tech'
DATABASE_NAME = 'tradon'
USERNAME = 'admin'
PASSWORD = 'dsproject'

# BUG FIX: do not rebind the name 'config' to set_xmlrpc()'s return value —
# that shadows the proteus.config module.
config.set_xmlrpc(f'{HTTPS}{USERNAME}:{PASSWORD}@{SERVER_URL}/{DATABASE_NAME}/')

Company = Model.get('company.company')
Party = Model.get('party.party')
Currency = Model.get('currency.currency')
# BUG FIX: the model class was bound to lowercase 'sale' and then shadowed
# by the instance ('sale = sale()'), which broke any further use of the
# class. Bind it to a capitalized name instead.
Sale = Model.get('sale.sale')
Product = Model.get('product.product')
# NOTE(review): weight basis comes from the purchase module
# ('purchase.weight.basis') even though this script creates a sale —
# confirm this is intended.
Wb = Model.get('purchase.weight.basis')
Location = Model.get('stock.location')

# Fetch referenced records (IDs are environment-specific).
company = Company(6)
party = Party(2789)
fromLocation = Location(1247)

# Build the sale order header.
sale = Sale()
sale.company = company
sale.party = party
sale.currency = company.currency
sale.tol_min = Decimal(1)
sale.wb = Wb(1)
sale.from_location = fromLocation

# Sale line (left disabled in the original script).
# product = Product(12)  # product id
# line = sale.lines.new()
# line.product = product
# line.quantity = 10
# line.unit_price = product.cost_price

# Persist the sale.
sale.save()

print(f"sale créée : {sale.id}")
|
||||
11
Reference Data/python_project/scripts/debug_config.py
Normal file
11
Reference Data/python_project/scripts/debug_config.py
Normal file
@@ -0,0 +1,11 @@
|
||||
import sys
from pathlib import Path

# Make the project root (one level above this script) importable so the
# helpers package resolves regardless of the working directory.
_project_root = Path(__file__).parent.parent
sys.path.insert(0, str(_project_root))

# Debug helper: dump everything helpers.config exposes, so a missing or
# misnamed setting can be diagnosed quickly.
import helpers.config as cfg

print("Available in config:", dir(cfg))
print("PURCHASE_FEES_CSV value:", getattr(cfg, 'PURCHASE_FEES_CSV', 'NOT FOUND'))
|
||||
398
Reference Data/python_project/scripts/import_parties.py
Normal file
398
Reference Data/python_project/scripts/import_parties.py
Normal file
@@ -0,0 +1,398 @@
|
||||
import csv
|
||||
from proteus import config, Model
|
||||
|
||||
# XML-RPC Configuration
|
||||
HTTPS = 'https://'
|
||||
SERVER_URL = 'itsa.open-squared.tech'
|
||||
DATABASE_NAME = 'tradon'
|
||||
USERNAME = 'admin'
|
||||
PASSWORD = 'dsproject'
|
||||
|
||||
# CSV Configuration
|
||||
CSV_FILE_PATH = r'C:\Users\SylvainDUVERNAY\Open Squared\Production - Documents\TRADON Implementation\ITSA\Reference Data\Loaders\Parties.csv'
|
||||
|
||||
# Default values
|
||||
DEFAULT_COUNTRY = 'US' # Default country code if not specified
|
||||
|
||||
def connect_to_tryton():
    """Open the XML-RPC connection to the Tryton server.

    Returns True on success; prints troubleshooting hints and returns
    False when the connection cannot be established.
    """
    print(f"Connecting to Tryton server: {SERVER_URL}")
    print(f"Database: {DATABASE_NAME}")
    print(f"Username: {USERNAME}")

    try:
        config.set_xmlrpc(f'{HTTPS}{USERNAME}:{PASSWORD}@{SERVER_URL}/{DATABASE_NAME}/')
    except Exception as exc:
        print(f"✗ Connection failed: {exc}")
        print("\nTroubleshooting:")
        for hint in (
            " - Verify the server URL is correct and accessible",
            " - Check that the Tryton server is running",
            " - Verify username and password are correct",
            " - Make sure you can access the server in a browser",
        ):
            print(hint)
        return False

    print("✓ Connected successfully!\n")
    return True
|
||||
|
||||
def get_country(country_code):
    """Resolve a country record from an ISO code.

    Falls back, in order: requested code -> DEFAULT_COUNTRY -> first
    country in the database. Raises ValueError only when the country
    table is completely empty.
    """
    Country = Model.get('country.country')

    code = country_code if country_code else DEFAULT_COUNTRY

    matches = Country.find([('code', '=', code.upper())])
    if matches:
        return matches[0]

    print(f" ⚠ Warning: Country '{code}' not found, using '{DEFAULT_COUNTRY}'")
    fallback = Country.find([('code', '=', DEFAULT_COUNTRY)])
    if fallback:
        return fallback[0]

    # Last resort: pick whatever country exists.
    everything = Country.find([])
    if everything:
        print(f" ⚠ Using first available country: {everything[0].name}")
        return everything[0]

    raise ValueError("No countries found in database!")
|
||||
|
||||
def get_subdivision(country, subdivision_code):
    """Look up a state/province of *country* by its subdivision code.

    Tries the canonical 'CC-SUB' code first, then a suffix match in case
    the stored codes use a different prefix. Returns None (after a
    warning) when nothing matches or no code was given.
    """
    if not subdivision_code:
        return None

    Subdivision = Model.get('country.subdivision')

    search_domains = (
        # Attempt 1: exact ISO-3166-2 style code, e.g. 'US-CA'.
        [('code', '=', f"{country.code}-{subdivision_code}"),
         ('country', '=', country.id)],
        # Attempt 2: anything ending in the bare code.
        [('code', 'ilike', f"%{subdivision_code}"),
         ('country', '=', country.id)],
    )
    for domain in search_domains:
        found = Subdivision.find(domain)
        if found:
            return found[0]

    print(f" ⚠ Warning: Subdivision '{subdivision_code}' not found for country {country.code}")
    return None
|
||||
|
||||
def check_party_exists_by_name(name):
    """Return the first party whose name matches exactly, else None."""
    Party = Model.get('party.party')
    matches = Party.find([('name', '=', name)])
    if matches:
        return matches[0]
    return None
|
||||
|
||||
|
||||
|
||||
def create_party_with_addresses(row):
    """Create a new party with its address (if any) via proteus.

    The order of operations matters and must be preserved:
      1. save the party alone (Tryton auto-generates its code),
      2. create and save the address linked to it,
      3. reload the party and delete any empty addresses the server
         may have auto-created,
      4. reload once more and return the clean record.

    :param row: dict with keys 'name', 'tax_identifier', 'vat_code',
        'address_name', 'street', 'city', 'postal_code', 'country_code',
        'subdivision_code' (all optional except 'name').
    :return: the saved, reloaded party record.
    """
    Party = Model.get('party.party')
    Address = Model.get('party.address')

    # Create party - let Tryton auto-generate the code.
    party = Party()
    party.name = row['name']

    if row.get('tax_identifier'):
        party.tax_identifier = row['tax_identifier']

    if row.get('vat_code'):
        party.vat_code = row['vat_code']

    # Save the party FIRST (without addresses) so the address can link to it.
    party.save()

    # Check which address components are actually present (non-empty).
    has_street = bool(row.get('street'))
    has_city = bool(row.get('city'))
    has_postal_code = bool(row.get('postal_code'))
    has_country = bool(row.get('country_code'))

    # Create an address only if we have at least a street OR a city;
    # a lone country/zip is not considered meaningful address data.
    if has_street or has_city:
        address = Address()

        # Link to the party we just created.
        address.party = party

        if row.get('address_name'):
            address.name = row['address_name']

        if has_street:
            address.street = row['street']

        if has_city:
            address.city = row['city']

        # Tryton's field is 'postal_code' (not 'zip').
        if has_postal_code:
            address.postal_code = row['postal_code']

        # Country falls back to DEFAULT_COUNTRY when none is provided.
        if has_country:
            country_code = row['country_code']
            country = get_country(country_code)
        else:
            country = get_country(DEFAULT_COUNTRY)

        address.country = country

        # Subdivision (state/province) is optional and may resolve to None.
        if row.get('subdivision_code'):
            subdivision = get_subdivision(country, row['subdivision_code'])
            if subdivision:
                address.subdivision = subdivision

        # Save the address separately from the party.
        address.save()

    # Clean up any empty addresses that might have been auto-created
    # server-side; reload first to get fresh data.
    party = Party(party.id)

    # An address is "empty" when street, city and postal code are all
    # missing or whitespace-only.
    addresses_to_delete = []
    for addr in party.addresses:
        is_empty = (
            (not addr.street or not addr.street.strip()) and
            (not addr.city or not addr.city.strip()) and
            (not addr.postal_code or not addr.postal_code.strip())
        )
        if is_empty:
            addresses_to_delete.append(addr)

    if addresses_to_delete:
        Address.delete(addresses_to_delete)
        print(f" ℹ Cleaned up {len(addresses_to_delete)} empty address(es)")

    # Reload one more time so the caller sees the post-cleanup state.
    party = Party(party.id)

    return party
|
||||
|
||||
|
||||
def import_parties(csv_file):
    """Import parties from a CSV file.

    For each row: skip blanks, in-file duplicates, and parties that
    already exist in the database; otherwise create the party (with its
    address) via create_party_with_addresses(). Prints a per-row log and
    a final summary. Errors on individual rows are collected and do not
    abort the import.

    :param csv_file: path to a CSV with columns name, tax_identifier,
        vat_code, address_name, street, city, postal_code (or zip),
        country_code, subdivision_code.
    """
    imported_count = 0
    skipped_count = 0
    error_count = 0
    errors = []

    # Names already handled during this run (detects in-file duplicates).
    processed_names = set()

    print(f"{'='*70}")
    print(f"Importing parties from: {csv_file}")
    print(f"{'='*70}\n")

    try:
        # utf-8-sig transparently strips a leading BOM.
        with open(csv_file, 'r', encoding='utf-8-sig') as file:
            reader = csv.DictReader(file)

            # Debug: show detected columns.
            print(f"Detected columns: {reader.fieldnames}\n")

            # start=2: row 1 of the file is the header.
            for row_num, row in enumerate(reader, start=2):
                # BUG FIX: define 'name' before the try so the except
                # handler can never hit a NameError on a first-row failure.
                name = ''
                try:
                    # BUG FIX: csv.DictReader fills short rows with None
                    # (not the .get default), so guard with 'or' before
                    # calling .strip().
                    name = (row.get('name') or '').strip()
                    tax_identifier = (row.get('tax_identifier') or '').strip()
                    vat_code = (row.get('vat_code') or '').strip()

                    # Address fields.
                    address_name = (row.get('address_name') or '').strip()
                    street = (row.get('street') or '').strip()
                    city = (row.get('city') or '').strip()

                    # Handle both 'zip' and 'postal_code' column names.
                    postal_code = (row.get('postal_code') or '').strip() or (row.get('zip') or '').strip()

                    country_code = (row.get('country_code') or '').strip()
                    subdivision_code = (row.get('subdivision_code') or '').strip()

                    # Skip empty rows.
                    if not name:
                        continue

                    # Treat 'NULL' / '0' placeholders as no postal code.
                    if postal_code and postal_code.upper() in ['NULL', '0']:
                        postal_code = ''

                    print(f"Processing Row {row_num}: {name}")

                    # In-file duplicate?
                    if name in processed_names:
                        print(f" ⚠ Duplicate name in CSV: '{name}'")
                        print(f" Skipping duplicate entry...\n")
                        skipped_count += 1
                        continue

                    # Already in the database?
                    existing_party = check_party_exists_by_name(name)
                    if existing_party:
                        print(f" ⚠ Party '{name}' already exists with code: {existing_party.code}")
                        print(f" Skipping...\n")
                        skipped_count += 1
                        processed_names.add(name)
                        continue

                    # Create the party with address.
                    row_data = {
                        'name': name,
                        'tax_identifier': tax_identifier,
                        'vat_code': vat_code,
                        'address_name': address_name,
                        'street': street,
                        'city': city,
                        'postal_code': postal_code,
                        'country_code': country_code,
                        'subdivision_code': subdivision_code
                    }

                    party = create_party_with_addresses(row_data)

                    processed_names.add(name)

                    print(f" ✓ Created party")
                    print(f" Party ID: {party.id}")
                    print(f" Auto-generated Code: {party.code}")
                    print(f" Name: {name}")
                    if tax_identifier:
                        print(f" Tax Identifier: {tax_identifier}")
                    if vat_code:
                        print(f" VAT Code: {vat_code}")
                    if party.addresses:
                        print(f" Addresses: {len(party.addresses)}")
                        for addr in party.addresses:
                            addr_street = (addr.street[:50] + '...') if addr.street and len(addr.street) > 50 else (addr.street or 'N/A')
                            addr_city = addr.city if addr.city else 'N/A'
                            addr_postal = addr.postal_code if addr.postal_code else 'N/A'
                            print(f" - {addr_street}")
                            print(f" {addr_city}, {addr_postal}")
                    else:
                        print(f" Addresses: 0 (no address data provided)")
                    print()

                    imported_count += 1

                except Exception as e:
                    error_msg = f"Row {row_num} - {name}: {str(e)}"
                    errors.append(error_msg)
                    error_count += 1
                    print(f"✗ Error on row {row_num}: {e}\n")
                    import traceback
                    traceback.print_exc()

        # Summary.
        print(f"{'='*70}")
        print("IMPORT SUMMARY")
        print(f"{'='*70}")
        print(f"Successfully imported: {imported_count} parties")
        print(f"Skipped (already exist or duplicates): {skipped_count} parties")
        print(f"Errors: {error_count}")

        if errors:
            print(f"\nError details:")
            for error in errors:
                print(f" - {error}")

        print(f"\n{'='*70}")

    except FileNotFoundError:
        print(f"✗ Error: CSV file not found at {csv_file}")
        print(f"Please update CSV_FILE_PATH in the script with the correct path.")
    except Exception as e:
        print(f"✗ Fatal error: {e}")
        import traceback
        traceback.print_exc()
|
||||
|
||||
def verify_import():
    """Print a short report of the most recently created parties."""
    Party = Model.get('party.party')

    print(f"\n{'='*70}")
    print("VERIFICATION - Parties")
    print(f"{'='*70}\n")

    # Newest first, so slicing the head shows the latest imports.
    parties = Party.find([], order=[('id', 'DESC')])

    if not parties:
        print("No parties found")
        print()
        return

    print(f"Found {len(parties)} parties (showing last 20):\n")
    print(f"{'Code':<15} {'Name':<40} {'Addresses':<10}")
    print("-" * 70)

    for party in parties[:20]:  # Show last 20 created.
        code = party.code or 'N/A'
        name = party.name[:39] if party.name else 'N/A'
        addr_count = len(party.addresses) if party.addresses else 0
        print(f"{code:<15} {name:<40} {addr_count:<10}")

    print()
|
||||
|
||||
def list_available_countries():
    """Print the first 20 countries known to the database."""
    print(f"\n{'='*70}")
    print("AVAILABLE COUNTRIES (first 20)")
    print(f"{'='*70}\n")

    Country = Model.get('country.country')
    countries = Country.find([])

    if not countries:
        print("No countries found")
        print()
        return

    print(f"Found {len(countries)} countries:\n")
    for country in countries[:20]:  # Show first 20.
        print(f" - {country.code}: {country.name}")
    if len(countries) > 20:
        print(f" ... and {len(countries) - 20} more")

    print()
|
||||
|
||||
def main():
    """Entry point: connect, import the CSV, then verify the result.

    Returns 0 on success, 1 when the server connection fails.
    """
    banner = "=" * 70
    print(banner)
    print("TRYTON PARTY IMPORT SCRIPT")
    print("Using Proteus with XML-RPC Connection")
    print("Party codes will be auto-generated by Tryton")
    print(banner)
    print()

    # Abort early if the XML-RPC connection cannot be established.
    if not connect_to_tryton():
        return 1

    # Optional discovery helper — uncomment to see what countries the
    # database knows about:
    # list_available_countries()

    import_parties(CSV_FILE_PATH)
    verify_import()

    return 0
|
||||
|
||||
if __name__ == '__main__':
    # Use SystemExit directly: the exit() builtin is injected by the
    # site module for interactive use and is not guaranteed in every
    # runtime (e.g. embedded interpreters or python -S).
    raise SystemExit(main())
|
||||
@@ -0,0 +1,879 @@
|
||||
# Add parent directory to Python path so we can import helpers
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
parent_dir = Path(__file__).parent.parent
|
||||
sys.path.insert(0, str(parent_dir))
|
||||
|
||||
import psycopg2
|
||||
import csv
|
||||
from decimal import Decimal
|
||||
from proteus import Model
|
||||
|
||||
from helpers.config import (
|
||||
PURCHASE_CONTRACTS_CSV,
|
||||
connect_to_tryton,
|
||||
get_db_connection,
|
||||
DB_CONFIG # Add this to your config
|
||||
)
|
||||
|
||||
from helpers.tryton_helpers import (
|
||||
|
||||
parse_decimal,
|
||||
parse_date,
|
||||
ensure_party_is_supplier,
|
||||
find_party_by_name,
|
||||
find_uom_by_code,
|
||||
find_currency_by_code,
|
||||
find_warehouse,
|
||||
find_location,
|
||||
find_payment_term_by_name,
|
||||
find_product_by_code,
|
||||
find_incoterm_by_code,
|
||||
find_weight_basis_by_name,
|
||||
get_party_invoice_address,
|
||||
find_purchase_contract_by_number,
|
||||
find_or_create_analytic_dimension_value,
|
||||
link_analytic_dimensions_to_purchase
|
||||
)
|
||||
|
||||
# Import migration mapping helper
|
||||
from helpers.migration_mapping import MigrationMapper
|
||||
|
||||
|
||||
# CSV Configuration
|
||||
CSV_FILE_PATH = PURCHASE_CONTRACTS_CSV
|
||||
|
||||
|
||||
# Default values
|
||||
DEFAULT_STATE = 'draft'
|
||||
DEFAULT_INVOICE_METHOD = 'manual'
|
||||
DEFAULT_INVOICE_STATE = 'none'
|
||||
DEFAULT_SHIPMENT_STATE = 'none'
|
||||
|
||||
# Import options
|
||||
AUTO_ENABLE_SUPPLIERS = True # Set to False to skip auto-enabling supplier flag
|
||||
SKIP_NON_SUPPLIERS = False # Set to True to skip parties that aren't suppliers
|
||||
|
||||
|
||||
def update_purchase_custom_fields(purchase_id, custom_data):
    """Update migration-only columns on purchase_purchase via direct SQL.

    Tryton's ORM does not expose these columns, so they are written
    straight to PostgreSQL. Only columns that actually exist in the
    table (from a hard-coded whitelist) are touched; values are always
    bound as parameters.

    :param purchase_id: id of the purchase_purchase row to update.
    :param custom_data: dict of candidate column values (may be empty).
    :return: True on success or nothing to do, False on failure.
    """
    if not custom_data:
        return True

    conn = get_db_connection()
    if not conn:
        print(f" ⚠ Could not update custom fields - database connection failed")
        return False

    try:
        cursor = conn.cursor()

        # Discover which of the candidate custom columns exist.
        cursor.execute("""
            SELECT column_name, data_type
            FROM information_schema.columns
            WHERE table_name = 'purchase_purchase'
            AND column_name IN ('number')
            ORDER BY column_name
        """)
        existing_columns = {row[0]: row[1] for row in cursor.fetchall()}

        print(f" Available custom columns in purchase_purchase:")
        for col_name, col_type in existing_columns.items():
            print(f" - {col_name} ({col_type})")

        # Build the UPDATE dynamically; column names come from the
        # whitelist above, values are parameterized.
        set_clauses = []
        values = []

        if 'number' in existing_columns and custom_data.get('number'):
            set_clauses.append("number = %s")
            values.append(custom_data['number'])
            print(f" Adding number = {custom_data['number']}")

        if set_clauses:
            values.append(purchase_id)
            update_query = f"""
                UPDATE purchase_purchase
                SET {', '.join(set_clauses)}
                WHERE id = %s
            """

            print(f" Executing UPDATE with fields: {', '.join([c.split('=')[0].strip() for c in set_clauses])}")

            cursor.execute(update_query, values)
            rows_affected = cursor.rowcount
            conn.commit()

            if rows_affected > 0:
                print(f" ✓ {rows_affected} row(s) updated successfully")
            else:
                print(f" ⚠ No rows updated (purchase_id={purchase_id} not found?)")
        else:
            print(f" No custom fields to update (either no data provided or columns not found)")

        cursor.close()
        return True

    except Exception as e:
        print(f" ⚠ Error updating custom fields: {e}")
        import traceback
        traceback.print_exc()
        conn.rollback()
        return False

    finally:
        # BUG FIX: the connection previously leaked when an exception
        # escaped between open and close; always release it exactly once.
        conn.close()
|
||||
|
||||
|
||||
def update_line_custom_fields(line_id, custom_data):
    """Update migration-only columns on purchase_line via direct SQL.

    Writes the delivery-window columns (from_del / to_del) straight to
    PostgreSQL, since Tryton's ORM does not expose them. Values are
    always bound as parameters; column names are hard-coded.

    :param line_id: id of the purchase_line row to update.
    :param custom_data: dict possibly containing 'from_del' / 'to_del'.
    :return: True on success or nothing to do, False on failure.
    """
    if not custom_data:
        return True

    conn = get_db_connection()
    if not conn:
        print(f" ⚠ Could not update line custom fields - database connection failed")
        return False

    try:
        cursor = conn.cursor()

        # Build the UPDATE from whichever custom values were provided.
        set_clauses = []
        values = []

        if custom_data.get('from_del'):
            set_clauses.append("from_del = %s")
            values.append(custom_data['from_del'])

        if custom_data.get('to_del'):
            set_clauses.append("to_del = %s")
            values.append(custom_data['to_del'])

        if set_clauses:
            values.append(line_id)
            update_query = f"""
                UPDATE purchase_line
                SET {', '.join(set_clauses)}
                WHERE id = %s
            """

            cursor.execute(update_query, values)
            conn.commit()

        cursor.close()
        return True

    except Exception as e:
        print(f" ⚠ Error updating line custom fields: {e}")
        import traceback
        traceback.print_exc()
        conn.rollback()
        return False

    finally:
        # BUG FIX: guarantee the connection is released even when the
        # cursor work or commit raises (it previously leaked).
        conn.close()
|
||||
|
||||
|
||||
def create_pricing_estimated(line_id, pricing_data):
    """Create a pricing_estimated record via direct SQL.

    Skips silently (returning True) when there is no trigger to record,
    when the pricing_estimated table does not exist, or when the insert
    hits a foreign-key violation (expected when the table only
    references purchase_line, not sale_line).

    :param line_id: id of the line the estimate belongs to.
    :param pricing_data: dict with 'trigger' (required to proceed) and
        optional 'estimated_date'.
    :return: True on success or any expected skip, False on failure.
    """
    if not pricing_data or not pricing_data.get('trigger'):
        return True

    conn = get_db_connection()
    if not conn:
        print(f" ⚠ Could not create pricing estimate - database connection failed")
        return False

    try:
        cursor = conn.cursor()

        # Check whether the pricing_estimated table exists at all;
        # an empty column list means it does not.
        cursor.execute("""
            SELECT column_name
            FROM information_schema.columns
            WHERE table_name = 'pricing_estimated'
            ORDER BY ordinal_position
        """)

        columns = [row[0] for row in cursor.fetchall()]

        if not columns:
            print(f" Info: pricing_estimated table does not exist, skipping pricing estimate")
            cursor.close()
            conn.close()
            return True

        # Insert the estimate; create/write audit columns are filled
        # with NOW() and uid 1 (admin) since this bypasses the ORM.
        insert_query = """
            INSERT INTO pricing_estimated (line, trigger, estimated_date, create_date, write_date, create_uid, write_uid)
            VALUES (%s, %s, %s, NOW(), NOW(), 1, 1)
        """

        cursor.execute(insert_query, (
            line_id,
            pricing_data['trigger'],
            pricing_data.get('estimated_date')
        ))

        conn.commit()
        cursor.close()
        conn.close()
        print(f" ✓ Pricing estimate created successfully")
        return True

    except psycopg2.errors.ForeignKeyViolation as e:
        # Expected when pricing_estimated references purchase_line only;
        # treated as a skip, not a failure.
        print(f" Info: Pricing estimate skipped (table references purchase_line only, not sale_line)")
        if conn:
            conn.rollback()
            conn.close()
        return True  # Return True to continue processing

    except Exception as e:
        print(f" ⚠ Error creating pricing estimate: {e}")
        import traceback
        traceback.print_exc()
        if conn:
            conn.rollback()
            conn.close()
        return False
|
||||
|
||||
|
||||
def import_purchases(csv_file):
    """Import purchase contracts (and their lines) from a CSV file.

    For every CSV row: skip rows already imported (checked via the migration
    mapper and by contract number), resolve all referenced master data
    (party, currency, warehouse, locations, incoterm, ...), create the
    purchase and one line, optionally create a pricing estimate, and link
    analytic dimensions.  Source->Tryton ID mappings are collected in memory
    and batch-saved at the end.

    Bug fix vs. the original: the trader/operator lookups previously tested
    ``if not party:`` instead of the freshly looked-up ``trader``/``operator``,
    so a missing trader or operator was silently accepted.

    :param csv_file: path to the CSV file to import (one purchase per row).
    """
    Purchase = Model.get('purchase.purchase')
    PurchaseLine = Model.get('purchase.line')

    print(f"{'='*70}")
    print(f"IMPORTING PURCHASES FROM CSV")
    print(f"{'='*70}\n")
    print(f"Reading from: {csv_file}\n")

    imported_count = 0
    skipped_count = 0
    error_count = 0
    errors = []

    # Get company (assuming single company or default)
    Company = Model.get('company.company')
    companies = Company.find([])
    if not companies:
        print("✗ Error: No company found in the system")
        return
    company = companies[0]
    print(f"Using company: {company.rec_name}\n")

    # Collect all mappings for batch insert at the end
    purchase_mappings = []
    line_mappings = []

    try:
        # Initialize migration mapper (context manager owns the DB connection)
        with MigrationMapper(DB_CONFIG) as mapper:

            with open(csv_file, 'r', encoding='utf-8-sig') as file:
                reader = csv.DictReader(file)
                row_num = 0

                for row in reader:
                    row_num += 1

                    try:
                        # Extract fields from CSV
                        source_purchase_id = row.get('source_id', '').strip()  # Source system ID
                        number = row.get('number', '').strip()
                        reference = row.get('reference', '').strip()
                        our_reference = row.get('our_reference', '').strip()

                        if not number:
                            print(f"Row {row_num}: Skipping - no number\n")
                            continue

                        print(f"{'='*70}")
                        print(f"Row {row_num}: Processing purchase {number}")
                        print(f"{'='*70}")

                        # CHECK IF ALREADY IMPORTED using migration mapper
                        if source_purchase_id:
                            existing_tryton_id = mapper.get_tryton_id('purchase_contract', source_purchase_id)
                            if existing_tryton_id:
                                print(f" ⏭ Purchase already imported (Source ID: {source_purchase_id} -> Tryton ID: {existing_tryton_id})")
                                skipped_count += 1
                                print()
                                continue

                        # Alternative: Check by number using existing helper
                        existing_purchase = find_purchase_contract_by_number(number)
                        if existing_purchase:
                            print(f" ⏭ Purchase {number} already exists (ID: {existing_purchase.id})")
                            # Save mapping even if it already exists (for reconciliation)
                            if source_purchase_id:
                                purchase_mappings.append({
                                    'object_type': 'purchase_contract',
                                    'source_id': source_purchase_id,
                                    'tryton_model': 'purchase.purchase',
                                    'tryton_id': existing_purchase.id,
                                    'recon_key': number,
                                })
                            skipped_count += 1
                            print()
                            continue

                        # Parse other fields
                        purchase_date = parse_date(row.get('purchase_date'))
                        party_name = row.get('party_name', '').strip()

                        # Find related records
                        party = find_party_by_name(party_name)
                        if not party:
                            raise ValueError(f"Party not found: {party_name}")

                        # Check party is supplier (may add the SUPPLIER category)
                        if not ensure_party_is_supplier(party, auto_enable=AUTO_ENABLE_SUPPLIERS):
                            if SKIP_NON_SUPPLIERS:
                                print(f" ⏭ Skipping - party {party_name} is not a supplier\n")
                                skipped_count += 1
                                continue
                            else:
                                raise ValueError(f"Party {party_name} is not a supplier")

                        # Reload party after category addition to get fresh data
                        Party = Model.get('party.party')
                        party = Party(party.id)

                        # Find invoice address
                        invoice_address = get_party_invoice_address(party)
                        if not invoice_address:
                            raise ValueError(f"No invoice address found for party {party_name}")

                        # Parse additional fields
                        currency = find_currency_by_code(row.get('currency', 'USD'))
                        warehouse = find_warehouse(row.get('warehouse'))
                        payment_term = find_payment_term_by_name(row.get('payment_term'))
                        weight_basis_abbr = find_weight_basis_by_name(row.get('weight_basis'))
                        tol_min_pct = parse_decimal(row.get('tol_min_pct', ''), 'tol_min_pct')
                        tol_max_pct = parse_decimal(row.get('tol_max_pct', ''), 'tol_max_pct')
                        tol_min_qty = parse_decimal(row.get('tol_min_qty', ''), 'tol_min_qty')
                        tol_max_qty = parse_decimal(row.get('tol_max_qty', ''), 'tol_max_qty')

                        from_location_name = row.get('from_location_name', '').strip()
                        from_location = find_location(from_location_name)

                        to_location_name = row.get('to_location_name', '').strip()
                        to_location = find_location(to_location_name)

                        # NOTE(review): incoterm edition year 2025 is hard-coded — confirm
                        incoterm_code = row.get('incoterm_name', '').strip()
                        incoterm = find_incoterm_by_code(incoterm_code, 2025)

                        description = row.get('description', '').strip()
                        comment = row.get('comment', '').strip()
                        trader_name = row.get('trader', '').strip()
                        operator_name = row.get('operator', '').strip()

                        # Parsed but not yet assigned to the purchase — kept for parity
                        # with the source CSV; TODO confirm whether these should be set.
                        demurrage = parse_decimal(row.get('demurrage', ''), 'demurrage')
                        laytime_hours = parse_decimal(row.get('laytime_hours', ''), 'laytime_hours')
                        nor_extra_hours = parse_decimal(row.get('nor_extra_hours', ''), 'nor_extra_hours')
                        pumping_rate = parse_decimal(row.get('pumping_rate', ''), 'pumping_rate')

                        # CREATE PURCHASE
                        print(f" Creating purchase...")
                        purchase = Purchase()
                        purchase.company = company
                        purchase.reference = reference
                        purchase.our_reference = our_reference
                        purchase.party = party
                        purchase.invoice_address = invoice_address

                        purchase.purchase_date = purchase_date
                        purchase.currency = currency
                        if warehouse:
                            purchase.warehouse = warehouse
                        purchase.payment_term = payment_term
                        purchase.wb = weight_basis_abbr
                        purchase.tol_min = tol_min_pct
                        purchase.tol_max = tol_max_pct
                        purchase.tol_min_qt = tol_min_qty
                        purchase.tol_max_qt = tol_max_qty
                        purchase.incoterm = incoterm
                        purchase.from_location = from_location
                        purchase.to_location = to_location
                        purchase.description = description
                        purchase.comment = comment
                        purchase.state = DEFAULT_STATE
                        purchase.invoice_method = DEFAULT_INVOICE_METHOD

                        # Retrieve trader (BUG FIX: guard used to test `party`)
                        trader = find_party_by_name(trader_name)
                        if not trader:
                            raise ValueError(f"Trader not found: {trader_name}")
                        purchase.trader = trader

                        # Retrieve operator (BUG FIX: guard used to test `party`)
                        operator = find_party_by_name(operator_name)
                        if not operator:
                            raise ValueError(f"Operator not found: {operator_name}")
                        purchase.operator = operator

                        # Save the purchase
                        purchase.save()
                        print(f" ✓ Purchase created (ID: {purchase.id})")

                        # Update custom fields (like 'number')
                        custom_fields = {'number': number}
                        update_purchase_custom_fields(purchase.id, custom_fields)

                        # SAVE MIGRATION MAPPING for purchase
                        if source_purchase_id:
                            purchase_mappings.append({
                                'object_type': 'purchase_contract',
                                'source_id': source_purchase_id,
                                'tryton_model': 'purchase.purchase',
                                'tryton_id': purchase.id,
                                'recon_key': number,
                            })
                            print(f" 📝 Mapping queued: Source {source_purchase_id} -> Tryton {purchase.id}")

                        # Process purchase lines
                        line_type = row.get('line_type', 'line').strip()
                        source_line_id = row.get('source_line_id', '').strip()

                        if line_type == 'line':
                            # Regular product line
                            product_code = row.get('line_product_code', '').strip()
                            quantity = parse_decimal(row.get('line_quantity', ''), 'quantity')
                            unit_price = parse_decimal(row.get('line_price', ''), 'unit_price')
                            period_at = row.get('period_at', '').strip().lower()
                            concentration = parse_decimal(row.get('concentration', ''), 'concentration')

                            product = find_product_by_code(product_code)
                            if not product:
                                raise ValueError(f"Product not found: {product_code}")

                            unit = find_uom_by_code(row.get('line_unit_code', ''))

                            # Parse shipping dates
                            from_del = parse_date(row.get('line_from_del', ''))
                            to_del = parse_date(row.get('line_to_del', ''))

                            # Create line
                            line = PurchaseLine()
                            line.purchase = purchase
                            line.type = 'line'
                            sequence = 1  # Default sequence; enhance to support multiple lines
                            line.sequence = sequence
                            line.product = product
                            line.quantity = quantity
                            line.unit = unit if unit else product.purchase_uom
                            line.unit_price = unit_price
                            line.from_del = from_del
                            line.to_del = to_del
                            line.period_at = period_at
                            line.concentration = concentration

                            # Optional fields
                            description = row.get('description', '').strip()
                            if description:
                                line.description = description

                            line.save()

                            # Create pricing estimate if applicable
                            pricing_trigger = row.get('pricing_trigger', '').strip()
                            pricing_estimated_date = parse_date(row.get('pricing_estimated_date', ''))
                            if pricing_trigger:
                                pricing_data = {
                                    'trigger': pricing_trigger,
                                    'estimated_date': pricing_estimated_date,
                                }
                                create_pricing_estimated(line.id, pricing_data)

                            # SAVE MIGRATION MAPPING for line
                            if source_line_id:
                                line_mappings.append({
                                    'object_type': 'purchase_line',
                                    'source_id': source_line_id,
                                    'tryton_model': 'purchase.line',
                                    'tryton_id': line.id,
                                    'recon_key': f"{number}-Line {sequence}-{product_code}",
                                })

                            print(f" ✓ Added line (ID: {line.id})")
                            print(f" Product: {product.rec_name}")
                            print(f" Quantity: {quantity}")

                        else:
                            # Comment, subtitle, or other line types
                            line = PurchaseLine()
                            line.purchase = purchase
                            line.type = line_type
                            line.description = row.get('description', '').strip()
                            line.save()

                            print(f" ✓ Added {line_type} line (ID: {line.id})")

                        # Handle analytic dimensions (Book / Strategy)
                        book_name = row.get('book', '').strip()
                        strategy_name = row.get('strategy', '').strip()

                        print(f"\nProcessing analytic dimensions...")

                        # Collect all dimension values in one dictionary
                        dimension_values = {}

                        # Handle Book dimension
                        if book_name:
                            book_dimension_value = find_or_create_analytic_dimension_value('Book', book_name)
                            if book_dimension_value:
                                dimension_values['Book'] = book_dimension_value
                            else:
                                print(f" ⚠ Could not find/create Book dimension value: {book_name}")

                        # Handle Strategy dimension
                        if strategy_name:
                            strategy_dimension_value = find_or_create_analytic_dimension_value('Strategy', strategy_name)
                            if strategy_dimension_value:
                                dimension_values['Strategy'] = strategy_dimension_value
                            else:
                                print(f" ⚠ Could not find/create Strategy dimension value: {strategy_name}")

                        # Link ALL dimensions to purchase in ONE call
                        if dimension_values:
                            link_analytic_dimensions_to_purchase(purchase, dimension_values)
                        else:
                            print(f" No analytic dimensions to link")

                        imported_count += 1
                        print(f"✓ Successfully imported purchase {number}\n")

                    except Exception as e:
                        # Per-row failure: record and continue with the next row
                        error_msg = f"Row {row_num} - {number if 'number' in locals() else 'Unknown'}: {str(e)}"
                        errors.append(error_msg)
                        error_count += 1
                        print(f"✗ Error on row {row_num}: {e}\n")
                        import traceback
                        traceback.print_exc()

            # BATCH SAVE ALL MAPPINGS at the end
            print(f"\n{'='*70}")
            print("SAVING MIGRATION MAPPINGS")
            print(f"{'='*70}\n")

            if purchase_mappings:
                print(f"Saving {len(purchase_mappings)} purchase mappings...")
                mapper.save_mappings_batch(purchase_mappings)
                print(f"✓ Purchase mappings saved\n")

            if line_mappings:
                print(f"Saving {len(line_mappings)} line mappings...")
                mapper.save_mappings_batch(line_mappings)
                print(f"✓ Line mappings saved\n")

        # Summary
        print(f"{'='*70}")
        print("IMPORT SUMMARY")
        print(f"{'='*70}")
        print(f"Successfully imported: {imported_count} purchases")
        print(f"Skipped (already exist): {skipped_count} purchases")
        print(f"Errors: {error_count}")
        print(f"Migration mappings saved: {len(purchase_mappings)} purchases, {len(line_mappings)} lines")

        if errors:
            print(f"\nError details:")
            for error in errors:
                print(f" - {error}")

        print(f"\n{'='*70}")

    except FileNotFoundError:
        print(f"✗ Error: CSV file not found at {csv_file}")
        print(f"Please update CSV_FILE_PATH in the script with the correct path.")
    except Exception as e:
        print(f"✗ Fatal error: {e}")
        import traceback
        traceback.print_exc()
|
||||
|
||||
|
||||
def verify_import():
    """Verify imported purchases and their migration mappings.

    Prints the 10 most recent purchases with their custom ``number`` (read
    directly from the database), reference, party, state, and — via the
    migration mapping table — the source-system ID each one came from.
    Read-only; intended as a post-import sanity check.
    """
    Purchase = Model.get('purchase.purchase')

    print(f"\n{'='*70}")
    print("VERIFICATION - Purchases and Migration Mappings")
    print(f"{'='*70}\n")

    # Find all purchases (or limit to recently created ones)
    purchases = Purchase.find([], order=[('id', 'DESC')])

    if purchases:
        print(f"Found {len(purchases)} purchases (showing last 10):\n")
        print(f"{'ID':<8} {'Number':<15} {'Reference':<15} {'Party':<25} {'State':<12} {'Source ID':<15}")
        print("-" * 100)

        # Initialize mapper to look up source IDs
        with MigrationMapper(DB_CONFIG) as mapper:
            for purchase in purchases[:10]:  # Show last 10 created
                purchase_id = purchase.id

                # Get number from database since it's a custom field
                # (not exposed through the proteus model).
                conn = get_db_connection()
                number = 'N/A'
                if conn:
                    try:
                        cursor = conn.cursor()
                        cursor.execute("SELECT number FROM purchase_purchase WHERE id = %s", (purchase_id,))
                        result = cursor.fetchone()
                        if result and result[0]:
                            number = str(result[0])[:14]
                        cursor.close()
                        conn.close()
                    except:
                        # NOTE(review): bare except hides DB errors and leaves
                        # the cursor open; display still falls back to 'N/A'.
                        if conn:
                            conn.close()

                # Look up source ID from migration mapping
                # (source_id is a Postgres array; [1] is its first element).
                source_id = 'N/A'
                try:
                    cursor = mapper.connection.cursor()
                    cursor.execute("""
                        SELECT source_id[1]
                        FROM public.os_migration_mapping
                        WHERE tryton_id = %s
                        AND 'purchase_contract' = ANY(object_type)
                        ORDER BY write_date DESC
                        LIMIT 1
                    """, (purchase_id,))
                    result = cursor.fetchone()
                    if result and result[0]:
                        source_id = str(result[0])[:14]
                    cursor.close()
                except:
                    # NOTE(review): silently keeps 'N/A' on any failure.
                    pass

                # Truncate display values to keep the table columns aligned.
                reference = purchase.reference[:14] if purchase.reference else 'N/A'
                party = purchase.party.rec_name[:24] if purchase.party else 'N/A'
                state = purchase.state if purchase.state else 'N/A'

                print(f"{purchase_id:<8} {number:<15} {reference:<15} {party:<25} {state:<12} {source_id:<15}")

                # Show lines
                if purchase.lines:
                    print(f" Lines: {len(purchase.lines)}")
                    for line in purchase.lines[:3]:  # Show first 3 lines
                        if line.type == 'line' and line.product:
                            print(f" - {line.product.rec_name[:40]} | Qty: {line.quantity} | Price: {line.unit_price}")
                        else:
                            print(f" - [{line.type}] {(line.description or '')[:40]}")
    else:
        print("No purchases found")

    print()
|
||||
|
||||
|
||||
def check_mapping_stats():
    """Display statistics about migration mappings.

    Groups ``os_migration_mapping`` rows by object type and prints the count
    plus first/last import dates per type, then the overall total.
    Read-only; errors are reported but never raised.
    """
    print(f"\n{'='*70}")
    print("MIGRATION MAPPING STATISTICS")
    print(f"{'='*70}\n")

    try:
        with MigrationMapper(DB_CONFIG) as mapper:
            cursor = mapper.connection.cursor()

            # Count mappings by object type
            # (object_type is a Postgres array; [1] is its first element).
            cursor.execute("""
                SELECT
                    object_type[1] as obj_type,
                    COUNT(*) as count,
                    MIN(write_date) as first_import,
                    MAX(write_date) as last_import
                FROM public.os_migration_mapping
                GROUP BY object_type[1]
                ORDER BY count DESC
            """)

            results = cursor.fetchall()

            if results:
                print(f"{'Object Type':<25} {'Count':<10} {'First Import':<15} {'Last Import':<15}")
                print("-" * 70)
                for row in results:
                    obj_type = row[0] or 'N/A'
                    count = row[1]
                    # write_date columns are datetimes (may be NULL)
                    first = row[2].strftime('%Y-%m-%d') if row[2] else 'N/A'
                    last = row[3].strftime('%Y-%m-%d') if row[3] else 'N/A'
                    print(f"{obj_type:<25} {count:<10} {first:<15} {last:<15}")

                # Total count
                cursor.execute("SELECT COUNT(*) FROM public.os_migration_mapping")
                total = cursor.fetchone()[0]
                print(f"\nTotal mappings: {total}")
            else:
                print("No migration mappings found")

            cursor.close()

    except Exception as e:
        print(f"Error retrieving mapping statistics: {e}")
        import traceback
        traceback.print_exc()

    print()
|
||||
|
||||
|
||||
def prepare_parties_as_supplier(csv_file):
    """Pre-process: add the SUPPLIER category to all parties named in the CSV.

    Reads the unique ``party_name`` values from ``csv_file``, finds each party
    (by name, falling back to code) and appends the SUPPLIER category when it
    is missing.  Prints a summary of updated / already-tagged / not-found
    parties.

    :param csv_file: CSV file whose ``party_name`` column lists the parties.
    :return: True on success, False when the category or file is missing or
        a fatal error occurs.
    """
    Party = Model.get('party.party')
    Category = Model.get('party.category')

    print(f"{'='*70}")
    print(f"PREPARING PARTIES AS SUPPLIERS (via Categories)")
    print(f"{'='*70}\n")

    # Find SUPPLIER category
    supplier_categories = Category.find([('name', '=', 'SUPPLIER')])
    if not supplier_categories:
        # Try case-insensitive (linear scan over all categories)
        all_categories = Category.find([])
        for cat in all_categories:
            if cat.name.upper() == 'SUPPLIER':
                supplier_categories = [cat]
                break

    if not supplier_categories:
        print(f"✗ SUPPLIER category not found in the system!")
        print(f"Please create a party category named 'SUPPLIER' first.\n")
        return False

    supplier_category = supplier_categories[0]
    print(f"Found SUPPLIER category (ID: {supplier_category.id})\n")

    # Unique party names collected from the CSV
    party_names = set()

    try:
        with open(csv_file, 'r', encoding='utf-8-sig') as file:
            reader = csv.DictReader(file)
            for row in reader:
                party_name = row.get('party_name', '').strip()
                if party_name:
                    party_names.add(party_name)

        print(f"Found {len(party_names)} unique parties in CSV\n")

        updated_count = 0
        already_supplier_count = 0
        not_found_count = 0

        for party_name in party_names:
            print(f"Processing party: {party_name}")

            # Find party by name, then fall back to code
            parties = Party.find([('name', '=', party_name)])
            if not parties:
                parties = Party.find([('code', '=', party_name)])

            if not parties:
                print(f" ✗ Not found\n")
                not_found_count += 1
                continue

            party = parties[0]

            # Check if already has SUPPLIER category (case-insensitive)
            has_supplier = False
            if party.categories:
                for cat in party.categories:
                    if cat.name.upper() == 'SUPPLIER':
                        has_supplier = True
                        break

            if has_supplier:
                print(f" ✓ Already has SUPPLIER category\n")
                already_supplier_count += 1
                continue

            # Add SUPPLIER category using Proteus
            try:
                # Reload party and category in same context so the
                # append/save operates on fresh proteus instances.
                party_to_update = Party(party.id)
                supplier_cat = Category(supplier_category.id)

                party_to_update.categories.append(supplier_cat)
                party_to_update.save()
                print(f" ✓ SUPPLIER category added\n")
                updated_count += 1
            except Exception as e:
                # Per-party failure is reported but not counted separately
                print(f" ✗ Failed: {e}\n")

        print(f"{'='*70}")
        print(f"PREPARATION SUMMARY")
        print(f"{'='*70}")
        print(f"Already have SUPPLIER category: {already_supplier_count}")
        print(f"SUPPLIER category added: {updated_count}")
        print(f"Not found: {not_found_count}")
        print(f"{'='*70}\n")

        return True

    except FileNotFoundError:
        print(f"✗ Error: CSV file not found at {csv_file}\n")
        return False
    except Exception as e:
        print(f"✗ Error: {e}\n")
        import traceback
        traceback.print_exc()
        return False
|
||||
|
||||
|
||||
def main():
    """Entry point: connect to Tryton, prepare suppliers, import purchases,
    then verify the import and show mapping statistics.

    :return: process exit code (0 on success, 1 when the connection fails).
    """
    print("="*70)
    print("TRYTON PURCHASE IMPORT SCRIPT WITH MIGRATION MAPPING")
    print("Using Proteus with XML-RPC Connection")
    print("="*70)
    print()

    # Connect to Tryton using XML-RPC
    if not connect_to_tryton():
        return 1

    # Prepare all parties in the CSV as suppliers before importing.
    # NOTE(review): the original comment said "Uncomment the following line",
    # but the call is active — comment it out to skip this step.
    prepare_parties_as_supplier(CSV_FILE_PATH)

    # Import purchases with migration mapping
    import_purchases(CSV_FILE_PATH)

    # Verify import
    verify_import()

    # Show mapping statistics
    check_mapping_stats()

    return 0
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Propagate main()'s return value as the process exit code.
    # NOTE(review): sys.exit() is preferred over the site-provided exit().
    exit(main())
|
||||
364
Reference Data/python_project/scripts/import_purchase_fees.py
Normal file
364
Reference Data/python_project/scripts/import_purchase_fees.py
Normal file
@@ -0,0 +1,364 @@
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
# Add parent directory to Python path so we can import helpers
|
||||
parent_dir = Path(__file__).parent.parent
|
||||
sys.path.insert(0, str(parent_dir))
|
||||
|
||||
import csv
|
||||
from decimal import Decimal
|
||||
from proteus import config, Model
|
||||
|
||||
from helpers.config import (
|
||||
PURCHASE_FEES_CSV,
|
||||
connect_to_tryton)
|
||||
|
||||
from helpers.tryton_helpers import (
|
||||
find_party_by_name,
|
||||
find_product_by_code,
|
||||
find_purchase_contract_by_ref,
|
||||
find_contract_line_by_sequence,
|
||||
find_currency_by_code,
|
||||
parse_decimal,
|
||||
find_supplier_category,
|
||||
ensure_party_is_supplier,
|
||||
find_fee_mode_by_name,
|
||||
find_payable_receivable_by_name,
|
||||
get_existing_fees_for_line,
|
||||
fee_already_exists)
|
||||
|
||||
|
||||
# CSV Configuration
|
||||
CSV_FILE_PATH = PURCHASE_FEES_CSV
|
||||
|
||||
|
||||
# Import options
|
||||
AUTO_ENABLE_SUPPLIER = True # Set to False to skip auto-enabling supplier flag
|
||||
SKIP_NON_SUPPLIERS = False # Set to True to skip parties that aren't suppliers
|
||||
|
||||
|
||||
def import_purchase_contract_fees(csv_file):
    """Import purchase contract line fees from a CSV file.

    Each CSV row names a contract (``contract_ref``), a line on that contract
    (``line_sequence``) and the fee details (product, supplier, currency,
    price, mode, payable/receivable, unit).  Contract and line lookups are
    cached across consecutive rows that reference the same contract/line.
    Rows with missing data, unresolvable references, or already-existing fees
    are skipped; per-row exceptions are recorded and processing continues.

    Fix vs. the original: removed the dead ``current_purchase = None``
    assignments — that name was never read anywhere (copy-paste residue).

    :param csv_file: path to the fees CSV file.
    """
    print(f"{'='*70}")
    print("IMPORTING PURCHASE CONTRACT LINE FEES")
    print(f"{'='*70}\n")

    # Get models
    try:
        PurchaseLineFee = Model.get('fee.fee')
    except Exception as e:
        print(f"✗ Error: Could not load fee.fee model - {e}")
        print("Please ensure the model name is correct for your Tryton customization")
        return

    imported_count = 0
    skipped_count = 0
    error_count = 0
    errors = []

    try:
        with open(csv_file, 'r', encoding='utf-8-sig') as file:
            reader = csv.DictReader(file)

            # Cache of the most recently resolved contract/line, so runs of
            # rows for the same contract don't repeat the lookups.
            current_contract_ref = None
            current_contract = None
            current_line_sequence = None
            current_line = None

            for row_num, row in enumerate(reader, start=2):  # Start at 2 (header is row 1)
                try:
                    # Extract data from CSV
                    contract_ref = row.get('contract_ref', '').strip()
                    line_sequence = row.get('line_sequence', '').strip()
                    product_code = row.get('product', '').strip()
                    supplier_name = row.get('supplier', '').strip()
                    currency_code = row.get('currency', '').strip()
                    p_r_value = row.get('p_r', '').strip()
                    mode_name = row.get('mode', '').strip()
                    price_value = row.get('price', '').strip()
                    unit_value = row.get('unit', '').strip()

                    print(f"Processing row {row_num}: {contract_ref} - Line {line_sequence} - {product_code}")

                    # Validate required fields
                    if not contract_ref:
                        print(f" ✗ Skipping: Missing contract_ref\n")
                        skipped_count += 1
                        continue

                    if not line_sequence:
                        print(f" ✗ Skipping: Missing line_sequence\n")
                        skipped_count += 1
                        continue

                    if not product_code:
                        print(f" ✗ Skipping: Missing product\n")
                        skipped_count += 1
                        continue

                    # Cache contract and line if same as previous row
                    if contract_ref != current_contract_ref:
                        current_contract = find_purchase_contract_by_ref(contract_ref)
                        current_contract_ref = contract_ref
                        current_line_sequence = None
                        current_line = None

                    if not current_contract:
                        print(f" ✗ Skipping: Contract not found\n")
                        skipped_count += 1
                        continue

                    # Cache line if same as previous row
                    if line_sequence != current_line_sequence:
                        current_line = find_contract_line_by_sequence(current_contract, line_sequence)
                        current_line_sequence = line_sequence

                    if not current_line:
                        print(f" ✗ Skipping: Contract line not found\n")
                        skipped_count += 1
                        continue

                    # Find related records
                    product = find_product_by_code(product_code)
                    if not product:
                        print(f" ✗ Skipping: Product not found\n")
                        skipped_count += 1
                        continue

                    supplier = find_party_by_name(supplier_name)
                    if not supplier:
                        print(f" ✗ Skipping: Supplier not found\n")
                        skipped_count += 1
                        continue

                    # Ensure party has SUPPLIER category
                    supplier, is_supplier = ensure_party_is_supplier(supplier, auto_enable=AUTO_ENABLE_SUPPLIER)

                    if not is_supplier:
                        if SKIP_NON_SUPPLIERS:
                            print(f" ⚠ Skipping purchase - party does not have SUPPLIER category\n")
                            skipped_count += 1
                            continue
                        else:
                            error_msg = f"Row {row_num}: Party '{supplier.rec_name}' does not have SUPPLIER category"
                            errors.append(error_msg)
                            error_count += 1
                            continue

                    currency = find_currency_by_code(currency_code)
                    if not currency:
                        print(f" ✗ Skipping: Currency not found\n")
                        skipped_count += 1
                        continue

                    # Parse price
                    price = parse_decimal(price_value, 'price')
                    if price is None:
                        print(f" ✗ Skipping: Invalid price\n")
                        skipped_count += 1
                        continue

                    # Determine payable/receivable
                    payable_receivable = find_payable_receivable_by_name(p_r_value)

                    # Find fee mode
                    mode = find_fee_mode_by_name(mode_name)

                    # Check if fee already exists (idempotent re-runs)
                    existing_fees = get_existing_fees_for_line(current_line)
                    if fee_already_exists(existing_fees, product, supplier, price):
                        print(f" ○ Fee already exists for this line\n")
                        skipped_count += 1
                        continue

                    # Create the fee
                    fee = PurchaseLineFee()
                    fee.line = current_line
                    fee.product = product
                    fee.supplier = supplier
                    fee.currency = currency
                    fee.price = price

                    # NOTE(review): the following attributes are only set when
                    # `mode` was found, even though type/weight_type/p_r look
                    # independent of mode — confirm this coupling is intended.
                    if mode and hasattr(fee, 'type'):
                        fee.type = 'ordered'  # Assuming all imported fees are 'ordered'

                    if mode and hasattr(fee, 'weight_type'):
                        fee.weight_type = 'brut'

                    if mode and hasattr(fee, 'p_r'):
                        fee.p_r = payable_receivable

                    if mode and hasattr(fee, 'mode'):
                        fee.mode = mode

                    # Set unit if field exists (lookup by symbol, then by name)
                    if unit_value and hasattr(fee, 'unit'):
                        Unit = Model.get('product.uom')
                        units = Unit.find([('symbol', '=', unit_value)])
                        if not units:
                            units = Unit.find([('name', '=', unit_value)])
                        if units:
                            fee.unit = units[0]

                    # Save the fee
                    fee.save()

                    print(f" ✓ Fee created successfully")
                    print(f" Product: {product.rec_name}")
                    print(f" Supplier: {supplier.rec_name}")
                    print(f" Price: {price} {currency.code}")
                    print(f" Type: {payable_receivable}")
                    print()

                    imported_count += 1

                except Exception as e:
                    # Per-row failure: record and continue with the next row
                    error_msg = f"Row {row_num} - {contract_ref}: {str(e)}"
                    errors.append(error_msg)
                    error_count += 1
                    print(f"✗ Error on row {row_num}: {e}\n")
                    import traceback
                    traceback.print_exc()

        # Summary
        print(f"{'='*70}")
        print("IMPORT SUMMARY")
        print(f"{'='*70}")
        print(f"Successfully imported: {imported_count} fees")
        print(f"Skipped (missing data or already exist): {skipped_count} fees")
        print(f"Errors: {error_count}")

        if errors:
            print(f"\nError details:")
            for error in errors:
                print(f" - {error}")

        print(f"\n{'='*70}")

    except FileNotFoundError:
        print(f"✗ Error: CSV file not found at {csv_file}")
        print(f"Please update CSV_FILE_PATH in the script with the correct path.")
    except Exception as e:
        print(f"✗ Fatal error: {e}")
        import traceback
        traceback.print_exc()
|
||||
|
||||
|
||||
def verify_import():
    """Verify imported purchase contract fees.

    Prints the 50 most recent ``fee.fee`` records with their contract
    reference, product, supplier, price, and type.  Uses ``hasattr`` guards
    throughout because the fee model's field set varies per customization.
    Read-only; errors are reported but never raised.
    """
    print(f"\n{'='*70}")
    print("VERIFICATION - Purchase Contract Line Fees")
    print(f"{'='*70}\n")

    try:
        PurchaseLineFee = Model.get('fee.fee')

        # Find all fees (or limit to recently created ones)
        fees = PurchaseLineFee.find([], order=[('id', 'DESC')])

        if fees:
            print(f"Found {len(fees)} fees (showing last 50):\n")
            print(f"{'ID':<8} {'Contract':<15} {'Product':<25} {'Supplier':<25} {'Price':<12} {'Type':<12}")
            print("-" * 105)

            for fee in fees[:50]:  # Show last 50 created
                fee_id = fee.id

                # Get contract reference by walking fee -> line -> purchase
                contract_ref = 'N/A'
                if hasattr(fee, 'line') and fee.line:
                    line = fee.line
                    if hasattr(line, 'purchase') and line.purchase:
                        contract = line.purchase
                        if hasattr(contract, 'reference') and contract.reference:
                            contract_ref = str(contract.reference)[:14]

                # Truncate display values to keep the table columns aligned.
                product = fee.product.rec_name[:24] if hasattr(fee, 'product') and fee.product else 'N/A'
                supplier = fee.supplier.rec_name[:24] if hasattr(fee, 'supplier') and fee.supplier else 'N/A'
                price = f"{fee.price:.2f}" if hasattr(fee, 'price') and fee.price else 'N/A'

                # Get type (payable/receivable); field name differs per schema
                fee_type = 'N/A'
                if hasattr(fee, 'type'):
                    fee_type = fee.type
                elif hasattr(fee, 'payable_receivable'):
                    fee_type = fee.payable_receivable

                print(f"{fee_id:<8} {contract_ref:<15} {product:<25} {supplier:<25} {price:<12} {fee_type:<12}")
        else:
            print("No fees found")

        print()

    except Exception as e:
        print(f"✗ Error during verification: {e}")
        import traceback
        traceback.print_exc()
|
||||
|
||||
|
||||
def list_purchase_contracts():
    """List the most recent purchase contracts, as a debugging aid."""
    purchase_model = Model.get('purchase.purchase')

    print(f"\n{'='*70}")
    print("AVAILABLE PURCHASE CONTRACTS (first 20)")
    print(f"{'='*70}")

    # Limit server-side; newest contracts first.
    recent_contracts = purchase_model.find([], order=[('id', 'DESC')], limit=20)

    if recent_contracts:
        print(f"{'ID':<8} {'Reference':<20} {'Party':<30} {'State':<12}")
        print("-" * 70)

        for contract in recent_contracts:
            ref_display = contract.reference[:19] if contract.reference else 'N/A'
            party_display = contract.party.rec_name[:29] if contract.party else 'N/A'
            state_display = contract.state if contract.state else 'N/A'
            print(f"{contract.id:<8} {ref_display:<20} {party_display:<30} {state_display:<12}")

            # Show number of lines, when the model exposes them.
            if hasattr(contract, 'lines') and contract.lines:
                print(f" Lines: {len(contract.lines)}")
    else:
        print("No purchase contracts found")

    print(f"{'='*70}\n")
||||
|
||||
def main():
    """Entry point: connect to Tryton, import the fees, then verify them.

    Returns 0 on success, 1 when the connection could not be established.
    """
    banner = (
        "=" * 70,
        "TRYTON PURCHASE CONTRACT FEE IMPORT SCRIPT",
        "Using Proteus with XML-RPC Connection",
        "=" * 70,
        "",
    )
    for banner_line in banner:
        print(banner_line)

    # Connect to Tryton using XML-RPC; abort with a non-zero status on failure.
    if not connect_to_tryton():
        return 1

    # Optional debugging aid — uncomment to see available contracts first:
    # list_purchase_contracts()

    # Import purchase contract fees, then verify what landed.
    import_purchase_contract_fees(CSV_FILE_PATH)
    verify_import()

    return 0
||||
|
||||
# Script entry point: the process exit status is main()'s return value.
if __name__ == '__main__':
    exit(main())
|
||||
@@ -0,0 +1,879 @@
|
||||
# Add parent directory to Python path so we can import helpers
import sys
from pathlib import Path

parent_dir = Path(__file__).parent.parent
sys.path.insert(0, str(parent_dir))

import psycopg2  # used to catch ForeignKeyViolation in create_pricing_estimated
import csv
from decimal import Decimal  # NOTE(review): appears unused here — confirm before removing
from proteus import Model

from helpers.config import (
    SALE_CONTRACTS_CSV,
    connect_to_tryton,
    get_db_connection,
    DB_CONFIG  # Add this to your config
)

from helpers.tryton_helpers import (
    parse_decimal,
    parse_date,
    ensure_party_is_client,
    find_party_by_name,
    find_uom_by_code,
    find_currency_by_code,
    find_warehouse,
    find_location,
    find_payment_term_by_name,
    find_product_by_code,
    find_incoterm_by_code,
    find_weight_basis_by_name,
    get_party_invoice_address,
    find_sale_contract_by_number,
    find_or_create_analytic_dimension_value,
    link_analytic_dimensions_to_sale
)

# Import migration mapping helper (tracks source-system id -> Tryton id)
from helpers.migration_mapping import MigrationMapper


# CSV Configuration
CSV_FILE_PATH = SALE_CONTRACTS_CSV


# Default values applied to every imported sale
DEFAULT_STATE = 'draft'
DEFAULT_INVOICE_METHOD = 'manual'
DEFAULT_INVOICE_STATE = 'none'
DEFAULT_SHIPMENT_STATE = 'none'

# Import options
AUTO_ENABLE_CLIENT = True  # Set to False to skip auto-enabling client flag
SKIP_NON_CLIENTS = False  # Set to True to skip parties that aren't clients
|
||||
def update_sale_custom_fields(sale_id, custom_data):
    """Update custom fields in sale_sale using direct SQL.

    Proteus does not expose columns added outside the Tryton model, so
    custom values (currently only 'number') are written with psycopg2.

    :param sale_id: id of the sale_sale row to update
    :param custom_data: dict of custom field values, e.g. {'number': 'S-001'}
    :return: True on success (or nothing to do), False on failure
    """
    if not custom_data:
        return True

    conn = get_db_connection()
    if not conn:
        print(f" ⚠ Could not update custom fields - database connection failed")
        return False

    cursor = None
    try:
        cursor = conn.cursor()

        # Only target columns that actually exist, so the UPDATE cannot fail
        # on a database that was not customized with the 'number' column.
        cursor.execute("""
            SELECT column_name, data_type
            FROM information_schema.columns
            WHERE table_name = 'sale_sale'
            AND column_name IN ('number')
            ORDER BY column_name
        """)
        existing_columns = {row[0]: row[1] for row in cursor.fetchall()}

        print(f" Available custom columns in sale_sale:")
        for col_name, col_type in existing_columns.items():
            print(f" - {col_name} ({col_type})")

        # Build UPDATE query for custom fields
        set_clauses = []
        values = []

        if 'number' in existing_columns and custom_data.get('number'):
            set_clauses.append("number = %s")
            values.append(custom_data['number'])
            print(f" Adding number = {custom_data['number']}")

        if set_clauses:
            values.append(sale_id)
            update_query = f"""
                UPDATE sale_sale
                SET {', '.join(set_clauses)}
                WHERE id = %s
            """

            print(f" Executing UPDATE with fields: {', '.join([c.split('=')[0].strip() for c in set_clauses])}")

            cursor.execute(update_query, values)
            rows_affected = cursor.rowcount
            conn.commit()

            if rows_affected > 0:
                print(f" ✓ {rows_affected} row(s) updated successfully")
            else:
                print(f" ⚠ No rows updated (sale_id={sale_id} not found?)")
        else:
            print(f" No custom fields to update (either no data provided or columns not found)")

        return True

    except Exception as e:
        print(f" ⚠ Error updating custom fields: {e}")
        import traceback
        traceback.print_exc()
        conn.rollback()
        return False
    finally:
        # BUGFIX: the cursor (and on some paths the connection) was leaked
        # when an exception fired after cursor creation; always release both.
        if cursor is not None:
            cursor.close()
        conn.close()
||||
|
||||
def update_line_custom_fields(line_id, custom_data):
    """Update custom fields in sale_line using direct SQL.

    Writes the delivery-window columns (from_del / to_del), which are not
    exposed through the proteus model, straight into the database.

    :param line_id: id of the sale_line row to update
    :param custom_data: dict with optional 'from_del' and 'to_del' values
    :return: True on success (or nothing to do), False on failure
    """
    if not custom_data:
        return True

    conn = get_db_connection()
    if not conn:
        print(f" ⚠ Could not update line custom fields - database connection failed")
        return False

    cursor = None
    try:
        cursor = conn.cursor()

        # Build UPDATE query only from the fields that were supplied.
        set_clauses = []
        values = []

        if custom_data.get('from_del'):
            set_clauses.append("from_del = %s")
            values.append(custom_data['from_del'])

        if custom_data.get('to_del'):
            set_clauses.append("to_del = %s")
            values.append(custom_data['to_del'])

        if set_clauses:
            values.append(line_id)
            update_query = f"""
                UPDATE sale_line
                SET {', '.join(set_clauses)}
                WHERE id = %s
            """

            cursor.execute(update_query, values)
            conn.commit()

        return True

    except Exception as e:
        print(f" ⚠ Error updating line custom fields: {e}")
        import traceback
        traceback.print_exc()
        conn.rollback()
        return False
    finally:
        # BUGFIX: cursor/connection were leaked on the exception path;
        # always release both, mirroring update_sale_custom_fields.
        if cursor is not None:
            cursor.close()
        conn.close()
|
||||
def create_pricing_estimated(line_id, pricing_data):
    """Create a pricing_estimated record using direct SQL.

    :param line_id: id of the sale line the estimate belongs to
    :param pricing_data: dict with required 'trigger' and optional 'estimated_date'
    :return: True on success or a benign skip (missing table, wrong FK target),
             False on an unexpected failure
    """
    if not pricing_data or not pricing_data.get('trigger'):
        return True

    conn = get_db_connection()
    if not conn:
        print(f" ⚠ Could not create pricing estimate - database connection failed")
        return False

    cursor = None
    try:
        cursor = conn.cursor()

        # Probe for the table first: the module providing pricing_estimated
        # may not be installed on this database.
        cursor.execute("""
            SELECT column_name
            FROM information_schema.columns
            WHERE table_name = 'pricing_estimated'
            ORDER BY ordinal_position
        """)
        columns = [row[0] for row in cursor.fetchall()]

        if not columns:
            print(f" Info: pricing_estimated table does not exist, skipping pricing estimate")
            return True

        # Insert into pricing_estimated; create/write audit columns are set
        # to NOW() / uid 1 (admin) since this bypasses the ORM.
        insert_query = """
            INSERT INTO pricing_estimated (sale_line, trigger, estimated_date, create_date, write_date, create_uid, write_uid)
            VALUES (%s, %s, %s, NOW(), NOW(), 1, 1)
        """

        cursor.execute(insert_query, (
            line_id,
            pricing_data['trigger'],
            pricing_data.get('estimated_date')
        ))

        conn.commit()
        print(f" ✓ Pricing estimate created successfully")
        return True

    except psycopg2.errors.ForeignKeyViolation:
        # Expected when pricing_estimated's foreign key targets purchase_line
        # rather than sale_line — treat as a benign skip.
        # BUGFIX: the message previously stated the relationship backwards
        # ("references sale_line only, not purchase_line").
        print(f" Info: Pricing estimate skipped (table references purchase_line only, not sale_line)")
        conn.rollback()
        return True  # Return True to continue processing

    except Exception as e:
        print(f" ⚠ Error creating pricing estimate: {e}")
        import traceback
        traceback.print_exc()
        conn.rollback()
        return False
    finally:
        # BUGFIX: cursor/connection were leaked on some paths; always release.
        if cursor is not None:
            cursor.close()
        conn.close()
|
||||
def import_sales(csv_file):
    """Import sales from CSV file with migration mapping tracking.

    Each CSV row describes one sale plus one line. For every row this:
    creates the sale, writes the custom 'number' column via SQL, creates the
    line (product line or comment/subtitle), optionally creates a pricing
    estimate, links analytic dimensions (Book/Strategy), and queues
    source-id -> Tryton-id mappings which are batch-saved at the end.

    :param csv_file: path to the CSV export of the source system
    """
    Sale = Model.get('sale.sale')
    SaleLine = Model.get('sale.line')

    print(f"{'='*70}")
    print(f"IMPORTING SALES FROM CSV")
    print(f"{'='*70}\n")
    print(f"Reading from: {csv_file}\n")

    imported_count = 0
    skipped_count = 0
    error_count = 0
    errors = []

    # Get company (assuming single company or default)
    Company = Model.get('company.company')
    companies = Company.find([])
    if not companies:
        print("✗ Error: No company found in the system")
        return
    company = companies[0]
    print(f"Using company: {company.rec_name}\n")

    # Collect all mappings for batch insert at the end
    sale_mappings = []
    line_mappings = []

    try:
        # Initialize migration mapper (context manager owns the DB connection)
        with MigrationMapper(DB_CONFIG) as mapper:

            with open(csv_file, 'r', encoding='utf-8-sig') as file:
                reader = csv.DictReader(file)
                row_num = 0

                for row in reader:
                    row_num += 1

                    try:
                        # Extract identifying fields from the CSV row
                        source_sale_id = row.get('source_id', '').strip()  # Source system ID
                        number = row.get('number', '').strip()
                        reference = row.get('reference', '').strip()
                        our_reference = row.get('our_reference', '').strip()

                        if not number:
                            print(f"Row {row_num}: Skipping - no number\n")
                            continue

                        print(f"{'='*70}")
                        print(f"Row {row_num}: Processing sale {number}")
                        print(f"{'='*70}")

                        # CHECK IF ALREADY IMPORTED using migration mapper
                        if source_sale_id:
                            existing_tryton_id = mapper.get_tryton_id('sale_contract', source_sale_id)
                            if existing_tryton_id:
                                print(f" ⏭ Sale already imported (Source ID: {source_sale_id} -> Tryton ID: {existing_tryton_id})")
                                skipped_count += 1
                                print()
                                continue

                        # Alternative: check by number using existing helper
                        existing_sale = find_sale_contract_by_number(number)
                        if existing_sale:
                            print(f" ⏭ Sale {number} already exists (ID: {existing_sale.id})")
                            # Save mapping even if it already exists (for reconciliation)
                            if source_sale_id:
                                sale_mappings.append({
                                    'object_type': 'sale_contract',
                                    'source_id': source_sale_id,
                                    'tryton_model': 'sale.sale',
                                    'tryton_id': existing_sale.id,
                                    'recon_key': number
                                })
                            skipped_count += 1
                            print()
                            continue

                        # Parse other fields
                        sale_date = parse_date(row.get('sale_date'))
                        party_name = row.get('party_name', '').strip()

                        # Find related records
                        party = find_party_by_name(party_name)
                        if not party:
                            raise ValueError(f"Party not found: {party_name}")

                        # Check party is client (optionally auto-enabling the flag)
                        if not ensure_party_is_client(party, auto_enable=AUTO_ENABLE_CLIENT):
                            if SKIP_NON_CLIENTS:
                                print(f" ⏭ Skipping - party {party_name} is not a client\n")
                                skipped_count += 1
                                continue
                            else:
                                raise ValueError(f"Party {party_name} is not a client")

                        # Reload party after category addition to get fresh data
                        Party = Model.get('party.party')
                        party = Party(party.id)

                        # Find invoice address
                        invoice_address = get_party_invoice_address(party)
                        if not invoice_address:
                            raise ValueError(f"No invoice address found for party {party_name}")

                        # Parse additional fields
                        currency = find_currency_by_code(row.get('currency', 'USD'))
                        warehouse = find_warehouse(row.get('warehouse'))
                        payment_term = find_payment_term_by_name(row.get('payment_term'))
                        weight_basis_abbr = find_weight_basis_by_name(row.get('weight_basis'))
                        tol_min_pct = parse_decimal(row.get('tol_min_pct', ''), 'tol_min_pct')
                        tol_max_pct = parse_decimal(row.get('tol_max_pct', ''), 'tol_max_pct')
                        tol_min_qty = parse_decimal(row.get('tol_min_qty', ''), 'tol_min_qty')
                        tol_max_qty = parse_decimal(row.get('tol_max_qty', ''), 'tol_max_qty')

                        from_location_name = row.get('from_location_name', '').strip()
                        from_location = find_location(from_location_name)

                        to_location_name = row.get('to_location_name', '').strip()
                        to_location = find_location(to_location_name)

                        incoterm_code = row.get('incoterm_name', '').strip()
                        incoterm = find_incoterm_by_code(incoterm_code, 2025)

                        description = row.get('description', '').strip()
                        comment = row.get('comment', '').strip()
                        trader_name = row.get('trader', '').strip()
                        operator_name = row.get('operator', '').strip()

                        # NOTE(review): the four values below are parsed but
                        # never assigned to the sale — confirm whether they
                        # should map to fields on sale.sale.
                        demurrage = parse_decimal(row.get('demurrage', ''), 'demurrage')
                        laytime_hours = parse_decimal(row.get('laytime_hours', ''), 'laytime_hours')
                        nor_extra_hours = parse_decimal(row.get('nor_extra_hours', ''), 'nor_extra_hours')
                        pumping_rate = parse_decimal(row.get('pumping_rate', ''), 'pumping_rate')

                        # CREATE SALE
                        print(f" Creating sale...")
                        sale = Sale()
                        sale.company = company
                        sale.reference = reference
                        sale.our_reference = our_reference
                        sale.party = party
                        sale.invoice_address = invoice_address
                        sale.shipment_address = invoice_address  # Default to invoice address

                        sale.sale_date = sale_date
                        sale.currency = currency
                        if warehouse:
                            sale.warehouse = warehouse
                        sale.payment_term = payment_term
                        sale.wb = weight_basis_abbr
                        sale.tol_min = tol_min_pct
                        sale.tol_max = tol_max_pct
                        sale.tol_min_qt = tol_min_qty
                        sale.tol_max_qt = tol_max_qty
                        sale.incoterm = incoterm
                        sale.from_location = from_location
                        sale.to_location = to_location
                        sale.description = description
                        sale.comment = comment
                        sale.state = DEFAULT_STATE
                        sale.invoice_method = DEFAULT_INVOICE_METHOD

                        # Retrieve trader
                        trader = find_party_by_name(trader_name)
                        # BUGFIX: this previously tested `party` (copy-paste),
                        # so a missing trader was silently assigned as None.
                        if not trader:
                            raise ValueError(f"Trader not found: {trader_name}")
                        sale.trader = trader

                        # Retrieve operator
                        operator = find_party_by_name(operator_name)
                        # BUGFIX: this previously tested `party` (copy-paste),
                        # so a missing operator was silently assigned as None.
                        if not operator:
                            raise ValueError(f"Operator not found: {operator_name}")
                        sale.operator = operator

                        # Save the sale
                        sale.save()
                        print(f" ✓ Sale created (ID: {sale.id})")

                        # Update custom fields (like 'number') via direct SQL
                        custom_fields = {'number': number}
                        update_sale_custom_fields(sale.id, custom_fields)

                        # SAVE MIGRATION MAPPING for sale
                        if source_sale_id:
                            sale_mappings.append({
                                'object_type': 'sale_contract',
                                'source_id': source_sale_id,
                                'tryton_model': 'sale.sale',
                                'tryton_id': sale.id,
                                'recon_key': number
                            })
                            print(f" 📝 Mapping queued: Source {source_sale_id} -> Tryton {sale.id}")

                        # Process sale lines
                        line_type = row.get('line_type', 'line').strip()
                        source_line_id = row.get('source_line_id', '').strip()

                        if line_type == 'line':
                            # Regular product line
                            product_code = row.get('line_product_code', '').strip()
                            quantity = parse_decimal(row.get('line_quantity', ''), 'quantity')
                            unit_price = parse_decimal(row.get('line_price', ''), 'unit_price')
                            period_at = row.get('period_at', '').strip().lower()
                            concentration = parse_decimal(row.get('concentration', ''), 'concentration')

                            product = find_product_by_code(product_code)
                            if not product:
                                raise ValueError(f"Product not found: {product_code}")

                            unit = find_uom_by_code(row.get('line_unit_code', ''))

                            # Parse shipping (delivery window) dates
                            from_del = parse_date(row.get('line_from_del', ''))
                            to_del = parse_date(row.get('line_to_del', ''))

                            # Create line
                            line = SaleLine()
                            line.sale = sale
                            line.type = 'line'
                            sequence = 1  # Default sequence, can be enhanced to handle multiple lines
                            line.sequence = sequence
                            line.product = product
                            line.quantity = quantity
                            line.unit = unit if unit else product.sale_uom
                            line.unit_price = unit_price
                            line.from_del = from_del
                            line.to_del = to_del
                            line.period_at = period_at
                            line.concentration = concentration

                            # Optional fields
                            description = row.get('description', '').strip()
                            if description:
                                line.description = description

                            line.save()

                            # Create pricing estimate if applicable
                            pricing_trigger = row.get('pricing_trigger', '').strip()
                            pricing_estimated_date = parse_date(row.get('pricing_estimated_date', ''))
                            if pricing_trigger:
                                pricing_data = {
                                    'trigger': pricing_trigger,
                                    'estimated_date': pricing_estimated_date
                                }
                                create_pricing_estimated(line.id, pricing_data)

                            # SAVE MIGRATION MAPPING for line
                            if source_line_id:
                                line_mappings.append({
                                    'object_type': 'sale_line',
                                    'source_id': source_line_id,
                                    'tryton_model': 'sale.line',
                                    'tryton_id': line.id,
                                    'recon_key': f"{number}-Line {sequence}-{product_code}"
                                })

                            print(f" ✓ Added line (ID: {line.id})")
                            print(f" Product: {product.rec_name}")
                            print(f" Quantity: {quantity}")

                        else:
                            # Comment, subtitle, or other line types
                            line = SaleLine()
                            line.sale = sale
                            line.type = line_type
                            line.description = row.get('description', '').strip()
                            line.save()

                            print(f" ✓ Added {line_type} line (ID: {line.id})")

                        # Handle analytic dimensions (Book / Strategy)
                        book_name = row.get('book', '').strip()
                        strategy_name = row.get('strategy', '').strip()

                        print(f"\nProcessing analytic dimensions...")

                        # Collect all dimension values in one dictionary
                        dimension_values = {}

                        # Handle Book dimension
                        if book_name:
                            book_dimension_value = find_or_create_analytic_dimension_value('Book', book_name)
                            if book_dimension_value:
                                dimension_values['Book'] = book_dimension_value
                            else:
                                print(f" ⚠ Could not find/create Book dimension value: {book_name}")

                        # Handle Strategy dimension
                        if strategy_name:
                            strategy_dimension_value = find_or_create_analytic_dimension_value('Strategy', strategy_name)
                            if strategy_dimension_value:
                                dimension_values['Strategy'] = strategy_dimension_value
                            else:
                                print(f" ⚠ Could not find/create Strategy dimension value: {strategy_name}")

                        # Link ALL dimensions to the sale in ONE call
                        if dimension_values:
                            link_analytic_dimensions_to_sale(sale, dimension_values)
                        else:
                            print(f" No analytic dimensions to link")

                        imported_count += 1
                        print(f"✓ Successfully imported sale {number}\n")

                    except Exception as e:
                        error_msg = f"Row {row_num} - {number if 'number' in locals() else 'Unknown'}: {str(e)}"
                        errors.append(error_msg)
                        error_count += 1
                        print(f"✗ Error on row {row_num}: {e}\n")
                        import traceback
                        traceback.print_exc()

            # BATCH SAVE ALL MAPPINGS at the end (while mapper is still open)
            print(f"\n{'='*70}")
            print("SAVING MIGRATION MAPPINGS")
            print(f"{'='*70}\n")

            if sale_mappings:
                print(f"Saving {len(sale_mappings)} sale mappings...")
                mapper.save_mappings_batch(sale_mappings)
                print(f"✓ Sale mappings saved\n")

            if line_mappings:
                print(f"Saving {len(line_mappings)} line mappings...")
                mapper.save_mappings_batch(line_mappings)
                print(f"✓ Line mappings saved\n")

        # Summary
        print(f"{'='*70}")
        print("IMPORT SUMMARY")
        print(f"{'='*70}")
        print(f"Successfully imported: {imported_count} sales")
        print(f"Skipped (already exist): {skipped_count} sales")
        print(f"Errors: {error_count}")
        print(f"Migration mappings saved: {len(sale_mappings)} sales, {len(line_mappings)} lines")

        if errors:
            print(f"\nError details:")
            for error in errors:
                print(f" - {error}")

        print(f"\n{'='*70}")

    except FileNotFoundError:
        print(f"✗ Error: CSV file not found at {csv_file}")
        print(f"Please update CSV_FILE_PATH in the script with the correct path.")
    except Exception as e:
        print(f"✗ Fatal error: {e}")
        import traceback
        traceback.print_exc()
|
||||
def verify_import():
    """Verify imported sales and their migration mappings.

    Lists the ten most recent sales with their custom 'number' column (read
    via SQL, since proteus does not expose it) and the source-system id from
    the migration mapping table.
    """
    Sale = Model.get('sale.sale')

    print(f"\n{'='*70}")
    print("VERIFICATION - Sales and Migration Mappings")
    print(f"{'='*70}\n")

    # Find all sales, newest first, so the slice below shows recent imports
    sales = Sale.find([], order=[('id', 'DESC')])

    if sales:
        print(f"Found {len(sales)} sales (showing last 10):\n")
        print(f"{'ID':<8} {'Number':<15} {'Reference':<15} {'Party':<25} {'State':<12} {'Source ID':<15}")
        print("-" * 100)

        # Initialize mapper to look up source IDs
        with MigrationMapper(DB_CONFIG) as mapper:
            for sale in sales[:10]:  # Show last 10 created
                sale_id = sale.id

                # Get number from database since it's a custom field
                conn = get_db_connection()
                number = 'N/A'
                if conn:
                    try:
                        cursor = conn.cursor()
                        cursor.execute("SELECT number FROM sale_sale WHERE id = %s", (sale_id,))
                        result = cursor.fetchone()
                        if result and result[0]:
                            number = str(result[0])[:14]
                        cursor.close()
                        conn.close()
                    # BUGFIX: was a bare `except:` that also swallowed
                    # SystemExit/KeyboardInterrupt.
                    except Exception:
                        if conn:
                            conn.close()

                # Look up source ID from migration mapping
                source_id = 'N/A'
                try:
                    cursor = mapper.connection.cursor()
                    cursor.execute("""
                        SELECT source_id[1]
                        FROM public.os_migration_mapping
                        WHERE tryton_id = %s
                        AND 'sale_contract' = ANY(object_type)
                        ORDER BY write_date DESC
                        LIMIT 1
                    """, (sale_id,))
                    result = cursor.fetchone()
                    if result and result[0]:
                        source_id = str(result[0])[:14]
                    cursor.close()
                # BUGFIX: was a bare `except:` — narrowed; a missing mapping
                # row is best-effort and leaves source_id as 'N/A'.
                except Exception:
                    pass

                reference = sale.reference[:14] if sale.reference else 'N/A'
                party = sale.party.rec_name[:24] if sale.party else 'N/A'
                state = sale.state if sale.state else 'N/A'

                print(f"{sale_id:<8} {number:<15} {reference:<15} {party:<25} {state:<12} {source_id:<15}")

                # Show lines
                if sale.lines:
                    print(f" Lines: {len(sale.lines)}")
                    for line in sale.lines[:3]:  # Show first 3 lines
                        if line.type == 'line' and line.product:
                            print(f" - {line.product.rec_name[:40]} | Qty: {line.quantity} | Price: {line.unit_price}")
                        else:
                            print(f" - [{line.type}] {(line.description or '')[:40]}")
    else:
        print("No sales found")

    print()
|
||||
def check_mapping_stats():
    """Display statistics about migration mappings, grouped by object type."""
    print(f"\n{'='*70}")
    print("MIGRATION MAPPING STATISTICS")
    print(f"{'='*70}\n")

    try:
        with MigrationMapper(DB_CONFIG) as mapper:
            cursor = mapper.connection.cursor()

            # Count mappings per object type (object_type is an array column;
            # the first element carries the type label).
            cursor.execute("""
                SELECT
                    object_type[1] as obj_type,
                    COUNT(*) as count,
                    MIN(write_date) as first_import,
                    MAX(write_date) as last_import
                FROM public.os_migration_mapping
                GROUP BY object_type[1]
                ORDER BY count DESC
            """)
            stat_rows = cursor.fetchall()

            if stat_rows:
                print(f"{'Object Type':<25} {'Count':<10} {'First Import':<15} {'Last Import':<15}")
                print("-" * 70)
                for obj_type, mapping_count, first_dt, last_dt in stat_rows:
                    type_label = obj_type or 'N/A'
                    first_label = first_dt.strftime('%Y-%m-%d') if first_dt else 'N/A'
                    last_label = last_dt.strftime('%Y-%m-%d') if last_dt else 'N/A'
                    print(f"{type_label:<25} {mapping_count:<10} {first_label:<15} {last_label:<15}")

                # Grand total across all object types
                cursor.execute("SELECT COUNT(*) FROM public.os_migration_mapping")
                total = cursor.fetchone()[0]
                print(f"\nTotal mappings: {total}")
            else:
                print("No migration mappings found")

            cursor.close()

    except Exception as e:
        print(f"Error retrieving mapping statistics: {e}")
        import traceback
        traceback.print_exc()

    print()
|
||||
def prepare_parties_as_clients(csv_file):
    """Pre-process: Add CLIENT category to all parties in CSV using Proteus.

    Reads every unique 'party_name' from the CSV and ensures the matching
    party carries the CLIENT category, so the subsequent sale import does
    not reject them.

    :param csv_file: path to the sales CSV (same file used by import_sales)
    :return: True when processing completed, False on missing category/file/error
    """
    Party = Model.get('party.party')
    Category = Model.get('party.category')

    print(f"{'='*70}")
    print(f"PREPARING PARTIES AS CLIENTS (via Categories)")
    print(f"{'='*70}\n")

    # Find CLIENT category (exact match first)
    client_categories = Category.find([('name', '=', 'CLIENT')])
    if not client_categories:
        # Try case-insensitive: scan all categories and compare upper-cased
        all_categories = Category.find([])
        for cat in all_categories:
            if cat.name.upper() == 'CLIENT':
                client_categories = [cat]
                break

    # Without the CLIENT category there is nothing to assign — bail out
    if not client_categories:
        print(f"✗ CLIENT category not found in the system!")
        print(f"Please create a party category named 'CLIENT' first.\n")
        return False

    client_category = client_categories[0]
    print(f"Found CLIENT category (ID: {client_category.id})\n")

    # Unique party names collected from the CSV
    party_names = set()

    try:
        # utf-8-sig tolerates a BOM written by Excel exports
        with open(csv_file, 'r', encoding='utf-8-sig') as file:
            reader = csv.DictReader(file)
            for row in reader:
                party_name = row.get('party_name', '').strip()
                if party_name:
                    party_names.add(party_name)

        print(f"Found {len(party_names)} unique parties in CSV\n")

        updated_count = 0
        already_client_count = 0
        not_found_count = 0

        for party_name in party_names:
            print(f"Processing party: {party_name}")

            # Find party by name, then fall back to matching by code
            parties = Party.find([('name', '=', party_name)])
            if not parties:
                parties = Party.find([('code', '=', party_name)])

            if not parties:
                print(f" ✗ Not found\n")
                not_found_count += 1
                continue

            party = parties[0]

            # Check if already has CLIENT category (case-insensitive)
            has_client = False
            if party.categories:
                for cat in party.categories:
                    if cat.name.upper() == 'CLIENT':
                        has_client = True
                        break

            if has_client:
                print(f" ✓ Already has CLIENT category\n")
                already_client_count += 1
                continue

            # Add CLIENT category using Proteus
            try:
                # Reload party and category in same context so proteus can
                # track the many2many append on a fresh record
                party_to_update = Party(party.id)
                client_cat = Category(client_category.id)

                party_to_update.categories.append(client_cat)
                party_to_update.save()
                print(f" ✓ CLIENT category added\n")
                updated_count += 1
            except Exception as e:
                # Best-effort: report and move on to the next party
                print(f" ✗ Failed: {e}\n")

        print(f"{'='*70}")
        print(f"PREPARATION SUMMARY")
        print(f"{'='*70}")
        print(f"Already have CLIENT category: {already_client_count}")
        print(f"CLIENT category added: {updated_count}")
        print(f"Not found: {not_found_count}")
        print(f"{'='*70}\n")

        return True

    except FileNotFoundError:
        print(f"✗ Error: CSV file not found at {csv_file}\n")
        return False
    except Exception as e:
        print(f"✗ Error: {e}\n")
        import traceback
        traceback.print_exc()
        return False
|
||||
def main():
    """Entry point: connect, prepare parties, import sales, verify, report.

    Returns 0 on success, 1 when the Tryton connection fails.
    """
    print("="*70)
    print("TRYTON SALE IMPORT SCRIPT WITH MIGRATION MAPPING")
    print("Using Proteus with XML-RPC Connection")
    print("="*70)
    print()

    # Connect to Tryton using XML-RPC
    if not connect_to_tryton():
        return 1

    # Pre-process: mark all parties in the CSV as clients before importing.
    # NOTE(review): this runs unconditionally — the previous "uncomment the
    # following line" comment was stale; comment the call out to disable it.
    prepare_parties_as_clients(CSV_FILE_PATH)

    # Import sales with migration mapping
    import_sales(CSV_FILE_PATH)

    # Verify import
    verify_import()

    # Show mapping statistics
    check_mapping_stats()

    return 0
|
||||
# Script entry point: the process exit status is main()'s return value.
if __name__ == '__main__':
    exit(main())
|
||||
356
Reference Data/python_project/scripts/import_services v2.py
Normal file
356
Reference Data/python_project/scripts/import_services v2.py
Normal file
@@ -0,0 +1,356 @@
|
||||
import csv
from proteus import config, Model
from decimal import Decimal  # NOTE(review): appears unused in the visible code — confirm before removing

# XML-RPC Configuration
# NOTE(review): credentials are hard-coded in source and were committed.
# Move them to environment variables or a config file and rotate the password.
HTTPS = 'https://'
SERVER_URL = 'itsa.open-squared.tech'
DATABASE_NAME = 'tradon'
USERNAME = 'admin'
PASSWORD = 'dsproject'

# CSV Configuration
CSV_FILE_PATH = r'C:\Users\SylvainDUVERNAY\Open Squared\Production - Documents\TRADON Implementation\ITSA\Reference Data\Loaders\Services.csv'  # UPDATE THIS PATH!

# Product configuration
PRODUCT_TYPE = 'service'  # Service type products
DEFAULT_CATEGORY = 'SERVICES'  # Default category name if not found
DEFAULT_UOM = 'Mt'  # Default UOM if not found
|
||||
def connect_to_tryton():
    """Establish the proteus XML-RPC connection; return True on success."""
    print(f"Connecting to Tryton server: {SERVER_URL}")
    print(f"Database: {DATABASE_NAME}")
    print(f"Username: {USERNAME}")

    try:
        # proteus expects credentials embedded in the URL:
        # https://user:password@host/database/
        config.set_xmlrpc(f'{HTTPS}{USERNAME}:{PASSWORD}@{SERVER_URL}/{DATABASE_NAME}/')
        print("✓ Connected successfully!\n")
        return True
    except Exception as exc:
        print(f"✗ Connection failed: {exc}")
        print("\nTroubleshooting:")
        troubleshooting_hints = (
            "Verify the server URL is correct and accessible",
            "Check that the Tryton server is running",
            "Verify username and password are correct",
            "Make sure you can access the server in a browser",
        )
        for hint in troubleshooting_hints:
            print(f" - {hint}")
        return False
|
||||
def get_or_create_category(category_name):
    """Return the product category named `category_name`, creating it if absent."""
    Category = Model.get('product.category')

    existing = Category.find([('name', '=', category_name)])
    if existing:
        print(f" Found existing category: {category_name}")
        return existing[0]

    # Not found: create and persist a fresh category record.
    created = Category()
    created.name = category_name
    created.save()
    print(f" ✓ Created new category: {category_name}")
    return created
|
||||
|
||||
def get_uom(uom_name):
    """Find a Unit of Measure by name, with progressively looser fallbacks.

    Lookup order:
      1. exact name match;
      2. case-insensitive match against all UOMs;
      3. the configured DEFAULT_UOM;
      4. the first UOM in the database.

    Raises ValueError if the database contains no UOM at all.
    """
    Uom = Model.get('product.uom')

    # Try exact match first
    uoms = Uom.find([('name', '=', uom_name)])
    if uoms:
        return uoms[0]

    # Case-insensitive fallback: fetch everything once and compare lowered names.
    all_uoms = Uom.find([])
    wanted = uom_name.lower()
    for uom in all_uoms:
        if uom.name.lower() == wanted:
            return uom

    # Fall back to the configured default UOM.
    print(f" ⚠ Warning: UOM '{uom_name}' not found, using '{DEFAULT_UOM}'")
    default_uoms = Uom.find([('name', '=', DEFAULT_UOM)])
    if default_uoms:
        return default_uoms[0]

    # Last resort: reuse the list already fetched above instead of issuing
    # a second redundant Uom.find([]) round-trip (fix: original re-queried).
    if all_uoms:
        print(f" ⚠ Using first available UOM: {all_uoms[0].name}")
        return all_uoms[0]

    raise ValueError("No UOM found in database!")
|
||||
|
||||
def check_product_exists(code):
    """Return the product whose code equals `code`, or None when none exists."""
    matches = Model.get('product.product').find([('code', '=', code)])
    if not matches:
        return None
    return matches[0]
|
||||
|
||||
def create_service_product(row, categories, uom):
    """Create a service product template and return its (auto-created) variant.

    row: dict with keys 'name', 'code', 'sale_price', 'cost_price',
         'description' (prices are strings; empty strings become 0.00).
    categories: a single category record or a list of them (Many2Many).
    uom: a product.uom record used as the default unit of measure.

    Raises ValueError if saving the template did not auto-create a variant.
    """
    Template = Model.get('product.template')

    # Create template
    template = Template()
    template.name = row['name']
    template.code = row['code']
    template.type = PRODUCT_TYPE
    # Empty price strings are falsy, so they safely default to 0.00.
    template.list_price = Decimal(row['sale_price']) if row['sale_price'] else Decimal('0.00')
    template.cost_price_method = 'fixed'  # Services use a fixed cost price
    template.default_uom = uom

    # Link to categories (Many2Many relationship).
    # proteus Many2Many fields must be mutated via append/extend,
    # not replaced by direct assignment.
    if isinstance(categories, list):
        template.categories.extend(categories)  # Use extend for lists
    else:
        template.categories.append(categories)  # Use append for single category

    template.salable = False  # Services are not salable products by default
    template.purchasable = True  # Services are purchasable

    if row.get('description'):
        template.description = row['description']

    # Save the template first so the server creates the default variant.
    template.save()

    # Now update the product that was auto-created.
    # When a template is created, Tryton automatically creates a default product.
    if template.products:
        product = template.products[0]
        #product.code = row['code']
        # NOTE(review): full product code is composed server-side from the
        # template code plus this suffix — confirm against the Tryton setup.
        product.suffix_code = row['code']  # Use suffix_code to set product code
        # Cost price lives on the variant, not the template, in this schema.
        product.cost_price = Decimal(row['cost_price']) if row['cost_price'] else Decimal('0.00')
        product.save()
        return product
    else:
        raise ValueError("No product was created automatically with template")
|
||||
|
||||
def import_services(csv_file):
    """Import service products from a CSV file, printing a per-row log.

    Expected CSV columns: code, name, category, uom, sale_price,
    cost_price, description. Rows with an existing product code are
    skipped; per-row failures are collected and reported in the summary
    without aborting the whole run.
    """
    imported_count = 0
    skipped_count = 0
    error_count = 0
    errors = []

    print(f"{'='*70}")
    print(f"Importing service products from: {csv_file}")
    print(f"{'='*70}\n")

    try:
        # utf-8-sig transparently strips a BOM that Excel exports often add.
        with open(csv_file, 'r', encoding='utf-8-sig') as file:
            reader = csv.DictReader(file)

            # Debug: Show detected columns
            print(f"Detected columns: {reader.fieldnames}\n")

            # start=2: row 1 of the file is the header, so data starts at 2.
            for row_num, row in enumerate(reader, start=2):
                try:
                    # Clean up values; the `or DEFAULT` handles present-but-empty cells.
                    code = row.get('code', '').strip()
                    name = row.get('name', '').strip()
                    category_name = row.get('category', DEFAULT_CATEGORY).strip() or DEFAULT_CATEGORY
                    uom_name = row.get('uom', DEFAULT_UOM).strip() or DEFAULT_UOM
                    sale_price = row.get('sale_price', '0.00').strip() or '0.00'
                    cost_price = row.get('cost_price', '0.00').strip() or '0.00'
                    description = row.get('description', '').strip()

                    # Skip entirely empty rows silently.
                    if not code and not name:
                        continue

                    # Rows with only one of code/name are errors, not blanks.
                    if not code or not name:
                        errors.append(f"Row {row_num}: Missing code or name")
                        error_count += 1
                        print(f"✗ Row {row_num}: Missing required fields")
                        continue

                    print(f"Processing Row {row_num}: {code} - {name}")

                    # Idempotency: never create a second product for a known code.
                    existing_product = check_product_exists(code)

                    if existing_product:
                        print(f" ⚠ Product code '{code}' already exists: {existing_product.template.name}")
                        print(f" Skipping...\n")
                        skipped_count += 1
                        continue

                    # Get or create category
                    category = get_or_create_category(category_name)

                    # Get UOM (falls back to DEFAULT_UOM inside get_uom)
                    uom = get_uom(uom_name)
                    print(f" Using UOM: {uom.name}")

                    # Create the product from the cleaned values only.
                    row_data = {
                        'code': code,
                        'name': name,
                        'sale_price': sale_price,
                        'cost_price': cost_price,
                        'description': description
                    }

                    product = create_service_product(row_data, category, uom)

                    print(f" ✓ Created service product")
                    print(f" Product ID: {product.id}, Template ID: {product.template.id}")
                    print(f" Code: {code}")
                    print(f" Category: {category.name}")
                    print(f" Sale Price: {sale_price}")
                    print(f" Cost Price: {cost_price}")
                    if description:
                        print(f" Description: {description[:50]}...")
                    print()

                    imported_count += 1

                except Exception as e:
                    # Per-row failures are logged with traceback but do not
                    # stop the import of the remaining rows.
                    error_msg = f"Row {row_num} - {code} ({name}): {str(e)}"
                    errors.append(error_msg)
                    error_count += 1
                    print(f"✗ Error on row {row_num}: {e}\n")
                    import traceback
                    traceback.print_exc()

        # Summary
        print(f"{'='*70}")
        print("IMPORT SUMMARY")
        print(f"{'='*70}")
        print(f"Successfully imported: {imported_count} service products")
        print(f"Skipped (already exist): {skipped_count} products")
        print(f"Errors: {error_count}")

        if errors:
            print(f"\nError details:")
            for error in errors:
                print(f" - {error}")

        print(f"\n{'='*70}")

    except FileNotFoundError:
        print(f"✗ Error: CSV file not found at {csv_file}")
        print(f"Please update CSV_FILE_PATH in the script with the correct path.")
    except Exception as e:
        print(f"✗ Fatal error: {e}")
        import traceback
        traceback.print_exc()
|
||||
|
||||
def verify_import():
    """Print a table of all service-type products for visual verification."""
    Product = Model.get('product.product')

    print(f"\n{'='*70}")
    print("VERIFICATION - Service Products")
    print(f"{'='*70}\n")

    # All products whose template is typed 'service', imported or not.
    products = Product.find([('template.type', '=', 'service')])

    if products:
        print(f"Found {len(products)} service products:\n")
        print(f"{'Code':<12} {'Name':<30} {'Categories':<25} {'Sale Price':<12}")
        print("-" * 85)

        for product in products:
            code = product.code or 'N/A'
            # Truncate to keep the fixed-width columns aligned.
            name = product.template.name[:29] if product.template.name else 'N/A'

            # Categories is a Many2Many; join names, then truncate for the column.
            if product.template.categories:
                categories = ', '.join([cat.name for cat in product.template.categories])
                categories = categories[:24]
            else:
                categories = 'N/A'

            sale_price = f"{product.template.list_price:.2f}" if product.template.list_price else '0.00'

            print(f"{code:<12} {name:<30} {categories:<25} {sale_price:<12}")
    else:
        print("No service products found")

    print()
|
||||
|
||||
def list_available_uoms():
    """Print every unit of measure currently defined in the database."""
    print(f"\n{'='*70}")
    print("AVAILABLE UNITS OF MEASURE")
    print(f"{'='*70}\n")

    records = Model.get('product.uom').find([])
    if not records:
        print("No UOMs found")
    else:
        print(f"Found {len(records)} UOMs:\n")
        for record in records:
            # Show the symbol in parentheses only when the field exists and is set.
            symbol = f"({record.symbol})" if hasattr(record, 'symbol') and record.symbol else ""
            print(f" - {record.name} {symbol}")

    print()
|
||||
|
||||
def list_available_categories():
    """Print every product category currently defined in the database."""
    print(f"\n{'='*70}")
    print("AVAILABLE PRODUCT CATEGORIES")
    print(f"{'='*70}\n")

    records = Model.get('product.category').find([])
    if not records:
        print("No categories found")
    else:
        print(f"Found {len(records)} categories:\n")
        for record in records:
            print(f" - {record.name}")

    print()
|
||||
|
||||
def main():
    """Run the full import workflow: connect, import, verify.

    Returns a process exit code: 0 on success, 1 when the connection fails.
    """
    print("="*70)
    print("TRYTON SERVICE PRODUCT IMPORT SCRIPT")
    print("Using Proteus with XML-RPC Connection")
    print("="*70)
    print()

    # Connect to Tryton using XML-RPC; nothing else works without it.
    if not connect_to_tryton():
        return 1

    # Optional: List available UOMs and categories.
    # Uncomment these if you want to see what's available in your database.
    # list_available_uoms()
    # list_available_categories()

    # Import service products
    import_services(CSV_FILE_PATH)

    # Verify import
    verify_import()

    return 0
|
||||
|
||||
if __name__ == '__main__':
|
||||
exit(main())
|
||||
310
Reference Data/python_project/scripts/import_services.py
Normal file
310
Reference Data/python_project/scripts/import_services.py
Normal file
@@ -0,0 +1,310 @@
|
||||
import csv
from proteus import config, Model
from decimal import Decimal

# Configuration
# SECURITY NOTE(review): this script hardcodes admin credentials in
# connect_to_tryton(); move them out of source before distribution.
DATABASE_NAME = 'tradon'
CSV_FILE_PATH = r'C:\Users\SylvainDUVERNAY\Open Squared\Production - Documents\TRADON Implementation\ITSA\Reference Data\Loaders\Services.csv'  # UPDATE THIS PATH!

# Product configuration
PRODUCT_TYPE = 'service'  # Tryton product.template type for services
DEFAULT_CATEGORY = 'Services'  # Default category name if not found
# NOTE(review): DEFAULT_UOM is defined but get_uom() below falls back to
# the literal 'Unit' instead — confirm which default is intended.
DEFAULT_UOM = 'Mt'  # Default UOM if not found
|
||||
|
||||
def connect_to_tryton():
    """Establish the global proteus connection to the Tryton database.

    Returns True on success, False on failure (error printed).
    NOTE(review): credentials are hardcoded in the URL below — move to
    configuration before sharing this script.
    """
    print(f"Connecting to Tryton database: {DATABASE_NAME}")
    try:
        #config.set_trytond(DATABASE_NAME)
        config.set_xmlrpc('https://admin:dsproject@itsa.open-squared.tech/tradon/')

        print("✓ Connected successfully!\n")
        return True
    except Exception as e:
        print(f"✗ Connection failed: {e}")
        return False
|
||||
|
||||
def get_or_create_category(category_name):
    """Return the product.category named `category_name`, creating it when absent."""
    Category = Model.get('product.category')

    # Try to find existing category (exact name match).
    categories = Category.find([('name', '=', category_name)])

    if categories:
        print(f" Found existing category: {category_name}")
        return categories[0]
    else:
        # Create and persist a new category record.
        new_category = Category()
        new_category.name = category_name
        new_category.save()
        print(f" ✓ Created new category: {category_name}")
        return new_category
|
||||
|
||||
def get_uom(uom_name):
    """Find a Unit of Measure by name, with progressively looser fallbacks.

    Lookup order: exact name match; case-insensitive match; the UOM named
    'Unit'; finally the first available UOM. Raises ValueError when the
    database contains no UOM at all.

    NOTE(review): the fallback is the literal 'Unit' while the module
    defines DEFAULT_UOM = 'Mt' — confirm which default is intended.
    """
    Uom = Model.get('product.uom')

    # Try exact match first
    uoms = Uom.find([('name', '=', uom_name)])
    if uoms:
        return uoms[0]

    # Case-insensitive search: fetch all UOMs once.
    all_uoms = Uom.find([])
    wanted = uom_name.lower()
    for uom in all_uoms:
        if uom.name.lower() == wanted:
            return uom

    # If not found, fall back to 'Unit'.
    print(f" ⚠ Warning: UOM '{uom_name}' not found, using 'Unit'")
    default_uoms = Uom.find([('name', '=', 'Unit')])
    if default_uoms:
        return default_uoms[0]

    # Last resort: reuse the list fetched above instead of issuing a
    # second redundant Uom.find([]) round-trip (fix: original re-queried).
    if all_uoms:
        print(f" ⚠ Using first available UOM: {all_uoms[0].name}")
        return all_uoms[0]

    raise ValueError("No UOM found in database!")
|
||||
|
||||
def check_product_exists(code):
    """Return the existing product carrying `code`, or None if there is none."""
    Product = Model.get('product.product')
    found = Product.find([('code', '=', code)])
    return found[0] if found else None
|
||||
|
||||
def create_service_product(row, category, uom):
    """Create a service product template plus an explicit variant.

    row: dict with keys 'name', 'code', 'sale_price', 'cost_price',
    'description' (prices as strings; empty strings become 0.00).

    NOTE(review): Tryton typically auto-creates a default variant when a
    template is saved; creating a second Product() here may yield two
    variants per template — verify against the target Tryton version
    (the v2 script reuses template.products[0] instead).
    """
    Product = Model.get('product.product')
    Template = Model.get('product.template')

    # Create template first
    template = Template()
    template.name = row['name']
    template.type = PRODUCT_TYPE
    # Empty price strings are falsy, so they safely default to 0.00.
    template.list_price = Decimal(row['sale_price']) if row['sale_price'] else Decimal('0.00')
    # NOTE(review): newer Tryton versions keep cost_price on the variant,
    # not the template — confirm this assignment is honored.
    template.cost_price = Decimal(row['cost_price']) if row['cost_price'] else Decimal('0.00')
    template.default_uom = uom
    template.category = category
    template.salable = True
    template.purchasable = False  # Services typically not purchased

    if row.get('description'):
        template.description = row['description']

    template.save()

    # Create product variant
    product = Product()
    product.template = template
    product.code = row['code']
    product.save()

    return product
|
||||
|
||||
def import_services(csv_file):
    """Import service products from a CSV file, printing a per-row log.

    Expected CSV columns: code, name, category, uom, sale_price,
    cost_price, description. Rows whose code already exists are skipped;
    per-row failures are collected and reported in the summary.
    """
    # NOTE(review): this local is never used below — candidate for removal.
    Product = Model.get('product.product')

    imported_count = 0
    skipped_count = 0
    error_count = 0
    errors = []

    print(f"{'='*70}")
    print(f"Importing service products from: {csv_file}")
    print(f"{'='*70}\n")

    try:
        # utf-8-sig transparently strips a BOM that Excel exports often add.
        with open(csv_file, 'r', encoding='utf-8-sig') as file:
            reader = csv.DictReader(file)

            # Debug: Show detected columns
            print(f"Detected columns: {reader.fieldnames}\n")

            # start=2: row 1 of the file is the header, so data starts at 2.
            for row_num, row in enumerate(reader, start=2):
                try:
                    # Clean up values. Unlike the v2 script there is no
                    # `or '0.00'` fallback, but empty price strings are
                    # still handled downstream in create_service_product.
                    code = row.get('code', '').strip()
                    name = row.get('name', '').strip()
                    category_name = row.get('category', DEFAULT_CATEGORY).strip() or DEFAULT_CATEGORY
                    uom_name = row.get('uom', DEFAULT_UOM).strip() or DEFAULT_UOM
                    sale_price = row.get('sale_price', '0.00').strip()
                    cost_price = row.get('cost_price', '0.00').strip()
                    description = row.get('description', '').strip()

                    # Skip entirely empty rows silently.
                    if not code and not name:
                        continue

                    # Rows with only one of code/name are errors, not blanks.
                    if not code or not name:
                        errors.append(f"Row {row_num}: Missing code or name")
                        error_count += 1
                        print(f"✗ Row {row_num}: Missing required fields")
                        continue

                    print(f"Processing Row {row_num}: {code} - {name}")

                    # Idempotency: never create a second product for a known code.
                    existing_product = check_product_exists(code)

                    if existing_product:
                        print(f" ⚠ Product code '{code}' already exists: {existing_product.template.name}")
                        print(f" Skipping...")
                        skipped_count += 1
                        continue

                    # Get or create category
                    category = get_or_create_category(category_name)

                    # Get UOM
                    uom = get_uom(uom_name)
                    print(f" Using UOM: {uom.name}")

                    # Create the product from the cleaned values only.
                    row_data = {
                        'code': code,
                        'name': name,
                        'sale_price': sale_price,
                        'cost_price': cost_price,
                        'description': description
                    }

                    product = create_service_product(row_data, category, uom)

                    print(f" ✓ Created service product: {name}")
                    print(f" Code: {code}")
                    print(f" Category: {category.name}")
                    print(f" Sale Price: {sale_price}")
                    print(f" Cost Price: {cost_price}")
                    if description:
                        print(f" Description: {description[:50]}...")
                    print()

                    imported_count += 1

                except Exception as e:
                    # Per-row failures do not abort the remaining rows.
                    error_msg = f"Row {row_num} - {code} ({name}): {str(e)}"
                    errors.append(error_msg)
                    error_count += 1
                    print(f"✗ Error on row {row_num}: {e}\n")

        # Summary
        print(f"{'='*70}")
        print("IMPORT SUMMARY")
        print(f"{'='*70}")
        print(f"Successfully imported: {imported_count} service products")
        print(f"Skipped (already exist): {skipped_count} products")
        print(f"Errors: {error_count}")

        if errors:
            print(f"\nError details:")
            for error in errors:
                print(f" - {error}")

        print(f"\n{'='*70}")

    except FileNotFoundError:
        print(f"✗ Error: CSV file not found at {csv_file}")
        print(f"Please update CSV_FILE_PATH in the script with the correct path.")
    except Exception as e:
        print(f"✗ Fatal error: {e}")
        import traceback
        traceback.print_exc()
|
||||
|
||||
def verify_import():
    """Print a table of all service-type products for visual verification."""
    Product = Model.get('product.product')

    print(f"\n{'='*70}")
    print("VERIFICATION - Service Products")
    print(f"{'='*70}\n")

    # All products whose template is typed 'service', imported or not.
    products = Product.find([('template.type', '=', 'service')])

    if products:
        print(f"Found {len(products)} service products:\n")
        print(f"{'Code':<12} {'Name':<35} {'Category':<20} {'Sale Price':<12}")
        print("-" * 80)

        for product in products:
            code = product.code or 'N/A'
            # Truncate to keep the fixed-width columns aligned.
            name = product.template.name[:34] if product.template.name else 'N/A'
            category = product.template.category.name if product.template.category else 'N/A'
            sale_price = f"{product.template.list_price:.2f}" if product.template.list_price else '0.00'

            print(f"{code:<12} {name:<35} {category:<20} {sale_price:<12}")
    else:
        print("No service products found")

    print()
|
||||
|
||||
def list_available_uoms():
    """Print all units of measure defined in the database, with symbols."""
    print(f"\n{'='*70}")
    print("AVAILABLE UNITS OF MEASURE")
    print(f"{'='*70}\n")

    found = Model.get('product.uom').find([])
    if not found:
        print("No UOMs found")
    else:
        print(f"Found {len(found)} UOMs:\n")
        for entry in found:
            print(f" - {entry.name} (Symbol: {entry.symbol if hasattr(entry, 'symbol') else 'N/A'})")

    print()
|
||||
|
||||
def list_available_categories():
    """Print all product categories defined in the database."""
    print(f"\n{'='*70}")
    print("AVAILABLE PRODUCT CATEGORIES")
    print(f"{'='*70}\n")

    found = Model.get('product.category').find([])
    if not found:
        print("No categories found")
    else:
        print(f"Found {len(found)} categories:\n")
        for entry in found:
            print(f" - {entry.name}")

    print()
|
||||
|
||||
def main():
    """Run the full import workflow: connect, import, verify.

    Returns a process exit code: 0 on success, 1 when the connection fails.
    """
    print("="*70)
    print("TRYTON SERVICE PRODUCT IMPORT SCRIPT (using Proteus)")
    print("="*70)
    print()

    # Connect to Tryton; nothing else works without the global connection.
    if not connect_to_tryton():
        return 1

    # Optional: List available UOMs and categories.
    # Uncomment these if you want to see what's available in your database.
    # list_available_uoms()
    # list_available_categories()

    # Import service products
    import_services(CSV_FILE_PATH)

    # Verify import
    verify_import()

    return 0
|
||||
|
||||
if __name__ == '__main__':
    # Use sys.exit rather than the builtin exit(): exit() is an interactive
    # helper injected by the site module and may be absent under `python -S`
    # or in frozen executables; sys.exit is the documented API.
    import sys
    sys.exit(main())
|
||||
@@ -0,0 +1,397 @@
|
||||
import csv
import psycopg2
from proteus import config, Model


# CSV Configuration
CSV_FILE_PATH = r'C:\Users\SylvainDUVERNAY\Open Squared\Production - Documents\TRADON Implementation\ITSA\Reference Data\Loaders\Customer_Stock_Locations.csv'

# XML-RPC Configuration
# SECURITY NOTE(review): Tryton and PostgreSQL credentials below are
# hardcoded in source, including a public IP; move them to environment
# variables or a config file before sharing/committing this script.
HTTPS = 'https://'
SERVER_URL = 'itsa.open-squared.tech'
DATABASE_NAME = 'tradon'
USERNAME = 'admin'
PASSWORD = 'dsproject'

# PostgreSQL Configuration (for direct database access — used because the
# lat/lon columns are not exposed through the Tryton model layer)
DB_HOST = '72.61.163.139'
DB_PORT = 5433
DB_USER = 'postgres'
DB_PASSWORD = 'dsproject'

# Default values
DEFAULT_TYPE = 'storage'  # Default stock.location type if not specified
|
||||
|
||||
|
||||
def connect_to_tryton():
    """Open the global proteus XML-RPC connection; return True on success."""
    print(f"Connecting to Tryton server: {SERVER_URL}")
    print(f"Database: {DATABASE_NAME}")
    print(f"Username: {USERNAME}")

    try:
        config.set_xmlrpc(f'{HTTPS}{USERNAME}:{PASSWORD}@{SERVER_URL}/{DATABASE_NAME}/')
        print("✓ Connected successfully!\n")
        return True
    except Exception as exc:
        print(f"✗ Connection failed: {exc}")
        print("\nTroubleshooting:")
        for hint in (
            "Verify the server URL is correct and accessible",
            "Check that the Tryton server is running",
            "Verify username and password are correct",
            "Make sure you can access the server in a browser",
        ):
            print(f" - {hint}")
        return False
|
||||
|
||||
def get_db_connection():
    """Return a psycopg2 connection to the Tryton database, or None on failure."""
    params = {
        'host': DB_HOST,
        'port': DB_PORT,
        'database': DATABASE_NAME,
        'user': DB_USER,
        'password': DB_PASSWORD,
    }
    try:
        return psycopg2.connect(**params)
    except Exception as exc:
        print(f"✗ Database connection failed: {exc}")
        return None
|
||||
|
||||
def update_location_coordinates(location_id, latitude, longitude):
    """Write lat/lon for one stock_location row directly via PostgreSQL.

    The lat/lon columns are not exposed through the Tryton model layer,
    hence the direct SQL. Returns True when exactly the targeted row was
    updated, False on connection failure, error, or when no row matched.
    """
    conn = get_db_connection()
    if not conn:
        print(f" ⚠ Could not update coordinates - database connection failed")
        return False

    try:
        # Parameterized statement: values never interpolated into the SQL.
        # The cursor context manager guarantees cursor cleanup (the original
        # leaked the cursor — and the connection — on exception paths).
        with conn.cursor() as cursor:
            cursor.execute(
                """
                UPDATE stock_location
                SET lat = %s, lon = %s
                WHERE id = %s
                """,
                (latitude, longitude, location_id),
            )
            rows_affected = cursor.rowcount
        conn.commit()
        return rows_affected > 0

    except Exception as e:
        print(f" ⚠ Error updating coordinates: {e}")
        import traceback
        traceback.print_exc()
        conn.rollback()
        return False
    finally:
        # Always release the connection, on success and on failure alike.
        conn.close()
|
||||
|
||||
def check_location_exists_by_name(name):
    """Return the stock.location with this exact name, or None when absent."""
    matches = Model.get('stock.location').find([('name', '=', name)])
    if not matches:
        return None
    return matches[0]
|
||||
|
||||
def validate_location_type(loc_type):
    """Normalize a stock.location type to lowercase.

    Unknown or empty values fall back to DEFAULT_TYPE with a warning.
    """
    allowed = [
        'supplier', 'customer', 'lost_found', 'warehouse',
        'storage', 'production', 'drop', 'rental', 'view',
    ]

    normalized = loc_type.lower() if loc_type else None
    if normalized in allowed:
        return normalized

    print(f" ⚠ Warning: Invalid type '{loc_type}', using default '{DEFAULT_TYPE}'")
    return DEFAULT_TYPE
|
||||
|
||||
def parse_coordinate(value, coord_name):
    """Convert a raw CSV cell to a float coordinate, or None when missing/invalid.

    coord_name selects the range check: 'latitude' -> [-90, 90],
    'longitude' -> [-180, 180]. Empty, placeholder ('NULL'/'NONE'/'N/A'),
    unparsable, or out-of-range input yields None (with a warning printed).
    """
    if not value:
        return None

    # Placeholder strings exported by some tools mean "no coordinate".
    if isinstance(value, str) and value.strip().upper() in ('NULL', 'NONE', 'N/A', ''):
        return None

    try:
        coord = float(value)
    except (ValueError, TypeError) as e:
        print(f" ⚠ Warning: Invalid {coord_name} value '{value}' - {e}")
        return None

    # Range check depends on which coordinate this is.
    limit = {'latitude': 90, 'longitude': 180}.get(coord_name)
    if limit is not None and not (-limit <= coord <= limit):
        if coord_name == 'latitude':
            print(f" ⚠ Warning: Latitude {coord} out of range (-90 to 90)")
        else:
            print(f" ⚠ Warning: Longitude {coord} out of range (-180 to 180)")
        return None

    return coord
|
||||
|
||||
def create_location(row):
    """Create a stock.location via proteus, then write coordinates via SQL.

    row: dict with 'name', 'type' (validated), and optional 'latitude' /
    'longitude' floats (None when absent).

    Returns (location, latitude, longitude). Coordinate write failure is
    reported but does not undo the created location.
    """
    Location = Model.get('stock.location')

    # Create the location record through the Tryton model layer.
    location = Location()
    location.name = row['name']
    location.type = row['type']

    # Save first so the record gets an id for the direct SQL update below.
    location.save()

    # Coordinates cannot go through proteus (columns not on the model),
    # so they are written directly to PostgreSQL when provided.
    latitude = row.get('latitude')
    longitude = row.get('longitude')

    if latitude is not None or longitude is not None:
        success = update_location_coordinates(location.id, latitude, longitude)
        if not success:
            print(f" ⚠ Location created but coordinates not saved")

    return location, latitude, longitude
|
||||
|
||||
def import_locations(csv_file):
    """Import stock locations from a CSV file, printing a per-row log.

    Expected CSV columns: name, type, lat, lon. Rows whose name already
    exists (in the database or earlier in this run) are skipped; per-row
    failures are collected and reported in the summary without aborting
    the whole run.
    """
    imported_count = 0
    skipped_count = 0
    error_count = 0
    errors = []

    # Track names we've already processed in this run (CSV may repeat names).
    processed_names = set()

    print(f"{'='*70}")
    print(f"Importing locations from: {csv_file}")
    print(f"{'='*70}\n")

    try:
        # utf-8-sig transparently strips a BOM that Excel exports often add.
        with open(csv_file, 'r', encoding='utf-8-sig') as file:
            reader = csv.DictReader(file)

            # Debug: Show detected columns
            print(f"Detected columns: {reader.fieldnames}\n")

            # start=2: row 1 of the file is the header, so data starts at 2.
            for row_num, row in enumerate(reader, start=2):
                try:
                    # Clean up values - get directly from CSV columns
                    name = row.get('name', '').strip()
                    loc_type = row.get('type', '').strip() or DEFAULT_TYPE
                    lat_raw = row.get('lat', '').strip()
                    lon_raw = row.get('lon', '').strip()

                    # Skip rows with no name silently.
                    if not name:
                        continue

                    print(f"Processing Row {row_num}: {name}")
                    print(f" CSV Raw values - lat: '{lat_raw}', lon: '{lon_raw}'")

                    # Skip names already seen earlier in this CSV.
                    if name in processed_names:
                        print(f" ⚠ Duplicate name in CSV: '{name}'")
                        print(f" Skipping duplicate entry...\n")
                        skipped_count += 1
                        continue

                    # Idempotency: skip names already present in the database.
                    existing_location = check_location_exists_by_name(name)

                    if existing_location:
                        print(f" ⚠ Location '{name}' already exists (ID: {existing_location.id})")
                        print(f" Type: {existing_location.type}")
                        print(f" Skipping...\n")
                        skipped_count += 1
                        processed_names.add(name)
                        continue

                    # Normalize/validate the location type (falls back to DEFAULT_TYPE).
                    loc_type = validate_location_type(loc_type)

                    # Parse coordinates (None when missing/invalid/out of range).
                    latitude = parse_coordinate(lat_raw, 'latitude')
                    longitude = parse_coordinate(lon_raw, 'longitude')

                    print(f" Parsed values - lat: {latitude}, lon: {longitude}")

                    # Create the location with parsed data only.
                    location_data = {
                        'name': name,
                        'type': loc_type,
                        'latitude': latitude,
                        'longitude': longitude
                    }

                    location, saved_lat, saved_lon = create_location(location_data)

                    # Mark this name as processed
                    processed_names.add(name)

                    print(f" ✓ Created location")
                    print(f" Location ID: {location.id}")
                    print(f" Name: {name}")
                    print(f" Type: {loc_type}")
                    if saved_lat is not None:
                        print(f" Latitude: {saved_lat}")
                    if saved_lon is not None:
                        print(f" Longitude: {saved_lon}")
                    print()

                    imported_count += 1

                except Exception as e:
                    # `name` may be unbound if the failure happened while
                    # reading the row, hence the locals() guard.
                    error_msg = f"Row {row_num} - {name if 'name' in locals() else 'Unknown'}: {str(e)}"
                    errors.append(error_msg)
                    error_count += 1
                    print(f"✗ Error on row {row_num}: {e}\n")
                    import traceback
                    traceback.print_exc()

        # Summary
        print(f"{'='*70}")
        print("IMPORT SUMMARY")
        print(f"{'='*70}")
        print(f"Successfully imported: {imported_count} locations")
        print(f"Skipped (already exist or duplicates): {skipped_count} locations")
        print(f"Errors: {error_count}")

        if errors:
            print(f"\nError details:")
            for error in errors:
                print(f" - {error}")

        print(f"\n{'='*70}")

    except FileNotFoundError:
        print(f"✗ Error: CSV file not found at {csv_file}")
        print(f"Please update CSV_FILE_PATH in the script with the correct path.")
    except Exception as e:
        print(f"✗ Fatal error: {e}")
        import traceback
        traceback.print_exc()
|
||||
|
||||
def verify_import():
    """Print the 20 most recent stock locations, with lat/lon read via SQL.

    Coordinates come from direct PostgreSQL reads because the lat/lon
    columns are not exposed through the Tryton model layer.
    """
    Location = Model.get('stock.location')

    print(f"\n{'='*70}")
    print("VERIFICATION - Stock Locations")
    print(f"{'='*70}\n")

    # Get database connection to read coordinates
    conn = get_db_connection()

    if not conn:
        print("Cannot verify - database connection failed")
        return

    try:
        # Newest first, so slicing gives the most recently created rows.
        locations = Location.find([], order=[('id', 'DESC')])

        if locations:
            print(f"Found {len(locations)} locations (showing last 20):\n")
            print(f"{'ID':<8} {'Name':<35} {'Type':<12} {'Lat':<12} {'Lon':<12}")
            print("-" * 85)

            for location in locations[:20]:  # Show last 20 created
                loc_id = location.id
                name = location.name[:34] if location.name else 'N/A'
                loc_type = location.type if location.type else 'N/A'

                # Defaults when the row has no stored coordinates.
                lat = 'N/A'
                lon = 'N/A'

                # NOTE: one SELECT per location (N+1); acceptable for 20 rows.
                try:
                    with conn.cursor() as cursor:
                        cursor.execute(
                            "SELECT lat, lon FROM stock_location WHERE id = %s",
                            (loc_id,)
                        )
                        result = cursor.fetchone()
                    if result:
                        lat = f"{result[0]:.6f}" if result[0] is not None else 'N/A'
                        lon = f"{result[1]:.6f}" if result[1] is not None else 'N/A'
                except Exception as e:
                    print(f"Error reading coordinates for location {loc_id}: {e}")

                print(f"{loc_id:<8} {name:<35} {loc_type:<12} {lat:<12} {lon:<12}")
        else:
            print("No locations found")
    finally:
        # Fix: the original only closed the connection on the non-empty
        # branch (and leaked the cursor on per-row errors).
        conn.close()

    print()
|
||||
|
||||
def main():
    """Entry point: connect to Tryton and PostgreSQL, import, then verify.

    Returns 0 on success, 1 when either connection cannot be established.
    """
    banner = "=" * 70
    for line in (
        banner,
        "TRYTON STOCK LOCATION IMPORT SCRIPT",
        "Using Proteus with XML-RPC Connection",
        "Using Direct PostgreSQL for lat/lon coordinates",
        banner,
        "",
    ):
        print(line)

    # The XML-RPC session is required for all Proteus model access.
    if not connect_to_tryton():
        return 1

    # Coordinates are written via direct SQL, so PostgreSQL must be reachable.
    print("Testing PostgreSQL connection...")
    conn = get_db_connection()
    if not conn:
        print("✗ PostgreSQL connection failed")
        print("Coordinates will not be saved!\n")
        return 1

    print("✓ PostgreSQL connection successful")

    # Sanity-check that the custom lat/lon columns exist before importing.
    try:
        cursor = conn.cursor()
        cursor.execute("""
            SELECT column_name, data_type
            FROM information_schema.columns
            WHERE table_name = 'stock_location'
            AND column_name IN ('lat', 'lon')
        """)
        columns = cursor.fetchall()
        cursor.close()

        if columns:
            print("✓ Found lat/lon columns in stock_location table:")
            for column_name, data_type in columns:
                print(f"  - {column_name}: {data_type}")
        else:
            print("✗ WARNING: lat/lon columns NOT found in stock_location table!")
            print("  Coordinates will not be saved!")
    except Exception as e:
        print(f"  Could not verify columns: {e}")

    conn.close()
    print()

    # Import, then report what landed in the database.
    import_locations(CSV_FILE_PATH)
    verify_import()

    return 0
|
||||
|
||||
# Run only when executed as a script, not when imported.
if __name__ == '__main__':
    # FIX: use SystemExit directly — the bare `exit()` builtin is injected by
    # the `site` module and is not guaranteed outside interactive sessions.
    raise SystemExit(main())
|
||||
165
Reference Data/python_project/scripts/import_vessels.py
Normal file
165
Reference Data/python_project/scripts/import_vessels.py
Normal file
@@ -0,0 +1,165 @@
|
||||
import csv
|
||||
import psycopg2
|
||||
from datetime import datetime
|
||||
|
||||
# Database connection parameters
# NOTE(review): credentials are hard-coded and committed to source control —
# move host/user/password to environment variables or a config file.
DB_CONFIG = {
    'host': '72.61.163.139',
    'port': 5433,
    'database': 'tradon',
    'user': 'postgres',
    'password': 'dsproject'
}

# CSV file path (raw string because of the backslashes in the Windows path)
CSV_FILE = r'C:\Users\SylvainDUVERNAY\Open Squared\Production - Documents\TRADON Implementation\ITSA\Reference Data\Loaders\Vessels.csv'
|
||||
|
||||
def import_vessels():
    """Import vessel data from CSV into trade_vessel table.

    Reads rows from ``CSV_FILE`` (expected columns: vessel_name,
    vessel_year, vessel_imo), skips vessels already present in the
    database, and inserts the rest in a single transaction.  Progress is
    logged to stdout; any database error rolls back the whole batch.
    """
    print("=" * 60)
    print("VESSEL IMPORT PROCESS STARTED")
    print("=" * 60)

    # Initialize handles first so the finally block can test them safely
    # even when the connection attempt itself fails.
    conn = None
    cursor = None

    try:
        # Connect to PostgreSQL database
        print(f"\n[1/4] Connecting to database...")
        print(f" Host: {DB_CONFIG['host']}:{DB_CONFIG['port']}")
        print(f" Database: {DB_CONFIG['database']}")
        conn = psycopg2.connect(**DB_CONFIG)
        cursor = conn.cursor()
        print(" ✓ Database connection established")

        # utf-8-sig strips the BOM that Excel prepends to exported CSVs.
        print(f"\n[2/4] Reading CSV file...")
        print(f" File: {CSV_FILE}")
        with open(CSV_FILE, 'r', encoding='utf-8-sig') as file:
            csv_reader = csv.DictReader(file)

            # Counters for the final summary.
            insert_count = 0
            skip_count = 0

            print(" ✓ CSV file opened successfully")
            print(f"\n[3/4] Processing vessel records...")
            print("-" * 60)

            # Process each row from the CSV file.
            for row_num, row in enumerate(csv_reader, start=1):
                vessel_name = row['vessel_name'].strip()
                # Convert empty strings to None for vessel_year.
                vessel_year = row['vessel_year'].strip() or None
                # Empty string or the literal text 'NULL' means "no IMO".
                # FIX: compare the *stripped* value — the original tested
                # row['vessel_imo'].upper(), so values like 'NULL ' leaked
                # through as the string 'NULL'.
                imo_raw = row['vessel_imo'].strip()
                vessel_imo = imo_raw if imo_raw and imo_raw.upper() != 'NULL' else None

                print(f"\nRow {row_num}: Processing '{vessel_name}'")
                print(f" Year: {vessel_year if vessel_year else 'N/A'}")
                print(f" IMO: {vessel_imo if vessel_imo else 'N/A'}")

                # Duplicate check.
                # FIX: use IS NOT DISTINCT FROM so rows with a NULL IMO are
                # also detected; `vessel_imo = %s` with None never matches
                # (NULL = NULL evaluates to unknown in SQL).
                cursor.execute("""
                    SELECT id FROM trade_vessel
                    WHERE vessel_name = %s
                      AND vessel_imo IS NOT DISTINCT FROM %s
                """, (vessel_name, vessel_imo))
                existing = cursor.fetchone()

                # Skip insertion if the vessel already exists.
                if existing:
                    print(f" ⚠ SKIPPED - Duplicate found (ID: {existing[0]})")
                    skip_count += 1
                    continue

                # Insert new vessel record into trade_vessel table.
                cursor.execute("""
                    INSERT INTO trade_vessel
                    (vessel_name, vessel_year, vessel_imo, active, create_date, create_uid, write_date, write_uid)
                    VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
                    RETURNING id
                """, (
                    vessel_name,     # Vessel name from CSV
                    vessel_year,     # Year vessel was built
                    vessel_imo,      # IMO number (international maritime identifier)
                    True,            # Active flag
                    datetime.now(),  # Record creation timestamp
                    1,               # User ID who created the record
                    datetime.now(),  # Record last modification timestamp
                    1                # User ID who last modified the record
                ))

                new_id = cursor.fetchone()[0]
                insert_count += 1
                print(f" ✓ INSERTED successfully (New ID: {new_id})")

        print("-" * 60)

        # Commit all inserts in one transaction.
        print(f"\n[4/4] Committing transaction to database...")
        conn.commit()
        print(" ✓ Transaction committed successfully")

        # Display import summary statistics.
        print("\n" + "=" * 60)
        print("IMPORT SUMMARY")
        print("=" * 60)
        print(f"✓ Records inserted: {insert_count}")
        print(f"⚠ Records skipped: {skip_count}")
        print(f" Total processed: {insert_count + skip_count}")
        print("=" * 60)

    except psycopg2.Error as e:
        # Roll back the whole batch on any database error.
        print("\n" + "!" * 60)
        print("DATABASE ERROR")
        print("!" * 60)
        if conn:
            conn.rollback()
            print("✓ Transaction rolled back")
        print(f"Error details: {e}")
        print("!" * 60)

    except FileNotFoundError:
        # The CSV file doesn't exist at CSV_FILE.
        print("\n" + "!" * 60)
        print("FILE NOT FOUND ERROR")
        print("!" * 60)
        print(f"CSV file not found: {CSV_FILE}")
        print("Please check the file path and try again.")
        print("!" * 60)

    except Exception as e:
        # Any other unexpected error: roll back and report.
        print("\n" + "!" * 60)
        print("UNEXPECTED ERROR")
        print("!" * 60)
        if conn:
            conn.rollback()
            print("✓ Transaction rolled back")
        print(f"Error details: {e}")
        print("!" * 60)

    finally:
        # Clean up database resources regardless of outcome.
        print(f"\n[CLEANUP] Closing database connection...")
        if cursor:
            cursor.close()
            print(" ✓ Cursor closed")
        if conn:
            conn.close()
            print(" ✓ Connection closed")

    print("\n" + "=" * 60)
    print("VESSEL IMPORT PROCESS COMPLETED")
    print("=" * 60 + "\n")
|
||||
|
||||
# Entry point: run the import only when invoked as a script, not on import.
if __name__ == "__main__":
    import_vessels()
|
||||
Reference in New Issue
Block a user