# Tables backing the contracts (contratos) pipeline.
# NOTE(review): the original list contained "CustInvoiceTrans" twice; the
# duplicate was removed. This list is only used for case-insensitive
# membership filtering below, so behavior is unchanged.
contratos = [
    "InventTable",
    "EcoResProductTranslation",
    "EcoResProductCategory",
    "EcoResCategory",
    "AgreementHeader",
    "AgreementClassification",
    "SalesLine",
    "CustInvoiceTrans",
    "CustInvoiceJour",
    "CustInvoiceJour_BR",
    "SalesPurchOperationType_BR",
    "FiscalDocument_BR",
    "PurchLine",
    "VendInvoiceTrans",
    "VendInvoiceJour",
    "InventDim",
    "AgreementLine",
    "VendTable",
    "DirPartyTable",
    "CustTable",
    "PurchTable_BR",
]
# Tables backing the inventory (estoque) pipeline.
estoque = [
    "InventSumHistorical",
    "InventBatch",
    "InventDim",
    "InventSum",
    "InventTable",
    "EcoResProductTranslation",
    "EcoResProductCategory",
    "EcoResCategory",
]
# Tables backing the receivables (recebiveis) pipeline.
# FIX: "DimensionAttributeAalueSetItem" was a typo for
# "DimensionAttributeValueSetItem" (the standard Dynamics table, also
# spelled DIMENSIONATTRIBUTEVALUESETITEM elsewhere in this script), so
# that table could never pass the membership filter below.
recebiveis = [
    "FiscalEstablishment_BR",
    "DimensionAttributeValueSet",
    "DimensionAttributeValueSetItem",
    "DimensionAttributeValue",
    "DimensionAttribute",
    "DirPartyLocation",
    "CustLedger",
    "CustSettlement",
    "CustTrans",
    "CustTransOpen",
]
# Tables backing the profit-and-loss (perdas e ganhos) pipeline.
# NOTE: mixed casing is harmless — the filter below compares lowercase.
perdaseganhos = [
    "dimensionattributevalueset",
    "generaljournalaccountentry",
    "generaljournalentry",
    "UserInfo",
]
# Full set of tables to ingest: the per-pipeline groups plus extras that
# belong to no single pipeline. Duplicates and mixed casing are harmless
# because membership is checked case-insensitively below.
todas = (
    estoque
    + ["InventTransferTable", "InventTransferLine"]
    + ["UNITOFMEASURE", "UnitOfMeasureConversion"]
    + [
        "INVENTTABLE",
        "DIMENSIONATTRIBUTEVALUESET",
        "DIMENSIONATTRIBUTEVALUESETITEM",
        "DIMENSIONATTRIBUTEVALUE",
        "DIMENSIONATTRIBUTE",
        "DIMENSIONFINANCIALTAG",
    ]
    + [
        "PurchLine",
        "VendInvoiceTrans",
        "VendInvoiceJour",
        "AgreementHeader",
        "AgreementLine",
        "AgreementClassification",
    ]
    + perdaseganhos
    + recebiveis
    + contratos
    + ["PurchTable", "EcoResProductVariantDimensionValue", "EcoResConfiguration"]
)
# Root of the Dynamics 365 F&O CDM export in ADLS; used to build the
# paths of tables to skip and to shorten the paths printed on success.
_BASE = (
    "abfss://dynamics365-financeandoperations@datalakename.dfs.core.windows.net"
    "/name.operations.dynamics.com/Tables/"
)

sucess = []  # CDM json paths ingested successfully (name kept for compatibility)
fails = []   # CDM json paths whose ingestion raised an error

# Custom tables that must not be ingested. The original wrapped all three
# removes in one bare `try: ... except: pass`, so a missing first entry
# silently skipped the remaining removes; remove each one independently
# and only swallow the specific "not in list" error.
for _skip in ("INVENTSUMHISTORICAL", "UNITOFMEASURE", "UNITOFMEASURECONVERSION"):
    try:
        allcdm.remove(_BASE + "Custom/" + _skip + ".cdm.json")
    except ValueError:
        pass  # path not present in the listing; nothing to skip

# Build the membership set once; the original rebuilt a lowered list on
# every iteration, making the filter O(len(todas)) per file.
_allowed = {x.lower() for x in todas}

for item in allcdm:
    # Skip manifest/resolved metadata entries; only table definitions remain.
    if "manifest" in item or "resolved" in item:
        continue
    tname = item.split("/")[-1].replace(".cdm.json", "").lower()
    if tname not in _allowed:
        continue
    try:
        dftemp = leTabelaDynamics(item)
        try:
            # Drop first so the overwrite starts from a clean definition.
            spark.sql("drop table main.dynamics." + tname)
        except Exception:
            pass  # table did not exist yet — nothing to drop
        dftemp.write.mode("overwrite").saveAsTable("main.dynamics." + tname)
        sucess.append(item)
        print("sucesso: " + item.replace(_BASE, ""))
    except Exception:
        print("error on " + item)
        fails.append(item)
As you can see, this code is quite hard to maintain.
So, what I was planning is:
1- to make this code easier to maintain, without hard-coding all the table names
2- instead of sending the tables to Databricks, save them in the lakehouse
I tried to adapt the code to a Fabric notebook (dbutils doesn't work on Fabric, by the way), but without success.
Even after I modified the whole code and got it running, it didn't ingest the tables into my lakehouse.
Has anyone tried something like this, or can anyone help me with this case?