-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathSankhyaExtractFromDateAlter.py
More file actions
202 lines (161 loc) · 6.35 KB
/
SankhyaExtractFromDateAlter.py
File metadata and controls
202 lines (161 loc) · 6.35 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
from dotenv import load_dotenv
from datetime import datetime, timedelta, date
import pandas as pd
import requests
import glob
import logging
import json
import os
# Configure root logging once at import time: INFO level, timestamped records.
logging.basicConfig(
level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
)
# Module-level logger named after this module, per stdlib convention.
logger = logging.getLogger(__name__)
class SankhyaAPI:
    """Minimal client for the Sankhya ERP web-service API.

    Supports logging in (obtaining a JSESSIONID), running DbExplorer SQL
    queries day by day over 2023, logging out, and merging the per-day CSV
    exports into a single file.
    """

    def __init__(self, user: str, password: str, host: str, port: str):
        self.user = user
        self.password = password
        self.host = host
        self.port = port
        # Session id returned by login(); required by dbExplorer().
        self.jsessionid = None

    def login(self):
        """Authenticate against the Sankhya API and store the JSESSIONID."""
        logger.info("Logging in to SankhyaAPI")
        params = {
            "serviceName": "MobileLoginSP.login",
            "requestBody": {
                "NOMUSU": {"$": self.user},
                "INTERNO": {"$": self.password},
            },
        }
        service_name = params["serviceName"]
        url = f"http://{self.host}:{self.port}/mge/service.sbr?serviceName={service_name}&outputType=json"
        try:
            response = requests.post(url, json=params, timeout=90)
            data = json.loads(response.content)
            data_jsession = data["responseBody"]["jsessionid"]
            self.jsessionid = str(data_jsession["$"])
            logger.info("Login successful")
        except requests.RequestException as e:
            # Best-effort: failure is logged, jsessionid stays None.
            logger.error(f"Login failed: {e}")

    def dbExplorer(self):
        """Extract TGFITE rows for every day of 2023, writing one CSV per day.

        Files are written as ./raw/Sankhya-Extract-<y>-<m>-<d>-<timestamp>.csv.
        """
        logger.info("Executing SQL query")

        def query_each_day(year, month):
            # Days in the month = first day of the next month minus the first
            # day of this month.  (Fix: the original called datetime.date(...),
            # which is an instance method, not the date constructor.)
            if month == 12:
                numero_de_dias_do_mes = (
                    date(year + 1, 1, 1) - date(year, month, 1)
                ).days
            else:
                numero_de_dias_do_mes = (
                    date(year, month + 1, 1) - date(year, month, 1)
                ).days
            # Loop through each day in the month, one query per day so each
            # response stays below MAXRESULTSIZE.
            for day in range(1, numero_de_dias_do_mes + 1):
                # Fix: the original was missing the comma after CUSTO, which
                # silently aliased CUSTO as "CODVOL" and dropped a column.
                query = f"""
                SELECT
                    NUNOTA,
                    CODEMP,
                    CODPROD,
                    USOPROD,
                    QTDNEG,
                    QTDENTREGUE,
                    VLRUNIT,
                    VLRTOT,
                    VLRCUS,
                    VLRIPI,
                    ALIQIPI,
                    PRECOBASE,
                    CODVEND,
                    CUSTO,
                    CODVOL,
                    STATUSNOTA,
                    DTALTER
                FROM TGFITE
                WHERE
                    EXTRACT(YEAR FROM "DTALTER") = {year}
                    AND EXTRACT(MONTH FROM "DTALTER") = {month}
                    AND EXTRACT(DAY FROM "DTALTER") = {day}
                """
                # Session cookie from login() authenticates the request.
                headers = {"Cookie": f"JSESSIONID={self.jsessionid}"}
                params = {
                    "serviceName": "DbExplorerSP.executeQuery",
                    "MAXRESULTSIZE": 10000,
                    "requestBody": {"sql": query},
                }
                service_name = params["serviceName"]
                url = f"http://{self.host}:{self.port}/mge/service.sbr?serviceName={service_name}&outputType=json&mgeSession={self.jsessionid}"
                # NOTE(review): a GET with a JSON body is unusual, but it is
                # what this DbExplorer endpoint expects — confirm before changing.
                response = requests.get(url, json=params, headers=headers, timeout=30)
                # The service answers in latin1, not utf-8.
                json_data = json.loads(response.content.decode("latin1"))
                # Column names and row data come in separate response sections.
                columns = [
                    column["name"]
                    for column in json_data["responseBody"]["fieldsMetadata"]
                ]
                rows = json_data["responseBody"]["rows"]
                # Creating the dataframe with the rows and columns of the json response
                df = pd.DataFrame(rows, columns=columns)
                # Format data from dataframe: CHANGE HERE
                df["DTALTER"] = pd.to_datetime(df["DTALTER"], format="%d%m%Y %H:%M:%S")
                # Fix: "datetime" is already the class (imported via
                # "from datetime import datetime"), so the original
                # datetime.datetime.now() raised AttributeError.
                dateNow = datetime.now().strftime("%Y%m%d_%H%M%S")
                df.to_csv(
                    f"./raw/Sankhya-Extract-{year}-{month}-{day}-{dateNow}.csv",
                    index=False,
                    mode="w",
                )
                logger.info("SQL query executed successfully")

        # Extract every day from January through December of 2023.
        for i in range(1, 13):
            query_each_day(2023, i)

    def logout(self):
        """Invalidate the current session on the server."""
        params = {
            "serviceName": "MobileLoginSP.logout",
            "status": "1",
            "pendingPrinting": "false",
        }
        service_name = params["serviceName"]
        url = f"http://{self.host}:{self.port}/mge/service.sbr?serviceName={service_name}&outputType=json"
        requests.post(url, params=params)
        logger.info("Logout successful")

    def getAlldata(self, path, export_path):
        """Concatenate every CSV under ``path`` into one file at ``export_path``."""
        arquivos_csv = glob.glob(f"{path}*.csv")
        lista_df = []
        for arquivo in arquivos_csv:
            df = pd.read_csv(arquivo, delimiter=",", header=0)
            lista_df.append(df)
        # Guard: pd.concat raises ValueError on an empty list.
        if not lista_df:
            logger.warning("No CSV files found in %s; nothing to export", path)
            return
        # Concatenate all DataFrames in the list into a single DataFrame.
        df_final = pd.concat(lista_df, ignore_index=True)
        df_final.to_csv(f"{export_path}", index=False)
# Point Mount
def main():
    """Run the full extraction pipeline using credentials from the .env file."""
    load_dotenv()
    env = os.environ.get
    # Build the API client from environment configuration.
    sankhya = SankhyaAPI(
        user=env("SANKHYA_USER"),
        password=env("SANKHYA_PASSWORD"),
        host=env("SANKHYA_HOST"),
        port=env("SANKHYA_PORT"),
    )
    # Authenticate, extract day by day, then close the session.
    for step in (sankhya.login, sankhya.dbExplorer, sankhya.logout):
        step()
    # Merge every per-day CSV into a single export.
    sankhya.getAlldata("./raw/", "./files.csv")


if __name__ == "__main__":
    main()
# query_to_get_day_a_day = """
# SELECT * FROM TGFITE
# WHERE
# EXTRACT(YEAR FROM DTALTER) = 2023
# AND EXTRACT(MONTH FROM DTALTER) = 1
# AND EXTRACT(DAY FROM DTALTER) = 23
# """