Issue implementing the python code around cx_Oracle library - python

I am getting an error while implementing the below code:
'''
from pandas.core.frame import DataFrame
import cx_Oracle
import pandas as pd
import sys
class IFTDataCore:
    """Fetch every procedure of Oracle package PK_IVZ_IFT_EXTRACT and keep
    each result set as a pandas DataFrame.

    NOTE(review): connection credentials are hard-coded; move them to
    configuration/secret management before production use.
    """

    def __init__(self, accountCode):
        """Connect to Oracle, call each extract procedure, cache the results.

        accountCode -- account identifier passed to every procedure except
                       SP_IVZ_IFT_EXTRACT_ACCOUNTS, which takes none.
        """
        all_procedures = []
        dns_tns = cx_Oracle.makedsn("gbhenora06vd.corp.amvescap.net", "1525", "INVU")
        db = cx_Oracle.connect("CORP-SVC-IFT", "C$Rp$vc1ftUat", dns_tns)
        try:
            cursor = db.cursor()
            try:
                cursor.execute("select procedure_name from all_procedures where object_name = 'PK_IVZ_IFT_EXTRACT' ")
                rows = cursor.fetchall()
                # row[0] is the bare procedure name; qualify it with the package.
                procedure_names = ['PK_IVZ_IFT_EXTRACT.' + str(row[0]) for row in rows]
                l_cur = cursor.var(cx_Oracle.CURSOR)
                for name in procedure_names:
                    # The ACCOUNTS procedure is the only one without an account argument.
                    if name == 'PK_IVZ_IFT_EXTRACT.SP_IVZ_IFT_EXTRACT_ACCOUNTS':
                        ret_cursor = cursor.callproc(name, (l_cur,))
                    else:
                        ret_cursor = cursor.callproc(name, (l_cur, accountCode))
                    all_procedures.append(pd.DataFrame(ret_cursor[0]))
                self.all_Procedures = all_procedures
            finally:
                cursor.close()
        finally:
            # Close the connection even if a procedure call fails — the
            # original leaked both cursor and connection on any exception.
            db.close()

    # NOTE(review): "#property" in the pasted source was probably a mangled
    # "@property"; it is kept as a plain method because the caller invokes it
    # with parentheses (all_Proc.getallProcedures()).
    def getallProcedures(self):
        """Return the list of DataFrames collected in __init__."""
        return self.all_Procedures
if __name__ == '__main__':
    # Fetch every extract for account 'TOUHI' and preview the first one.
    extractor = IFTDataCore('TOUHI')
    Procedures = extractor.getallProcedures()
    print(Procedures[0])
PS: The code works fine if I do not put the logic in __init__ and instead call the function logic directly. Please let me know why the method starts throwing an error when the class is initialized in main.

The solution works fine now as per the below code:
from pandas.core.frame import DataFrame
import cx_Oracle
import pandas as pd
import sys
import json
from pathlib import Path
import os
class IFTDataCore:
    """Load the ACCOUNTS extract of Oracle package PK_IVZ_IFT_EXTRACT into a
    pandas DataFrame (self.dfx_Account) plus its JSON rendering
    (self.result_Account)."""

    def __init__(self):
        # NOTE(review): the original wrapped this body in
        # "except BaseException as e: raise" — a no-op that only obscured the
        # code; removed. Credentials are hard-coded.
        db = cx_Oracle.connect('invest/invest#INVD.WORLD')
        try:
            cursor = db.cursor()
            cursor.execute("select procedure_name from all_procedures where object_name = 'PK_IVZ_IFT_EXTRACT' ")
            rows = cursor.fetchall()
            # NOTE(review): rows[:-1] reproduces the original "-1", which
            # silently drops the last procedure — confirm this is intentional.
            procedureName = ['PK_IVZ_IFT_EXTRACT.' + str(row[0]) for row in rows[:-1]]
            # Accounts procedure -> DataFrame -> JSON
            l_cur_Account = cursor.var(cx_Oracle.CURSOR)
            ret_cursor_Account = cursor.callproc(procedureName[1], (l_cur_Account,))
            self.dfx_Account = pd.DataFrame(ret_cursor_Account[0])
            # 'legalEntitiyIdentifier' (sic) kept as-is: downstream consumers may rely on it.
            self.dfx_Account.columns = ['fundCode', 'fundName', 'legalEntitiyIdentifier', 'isin']
            # Keep the JSON rendering on the instance instead of discarding it
            # (the original bound it to a local that was never used).
            self.result_Account = self.dfx_Account.to_json(orient='records')
        finally:
            # The original never closed the connection.
            db.close()
def lambda_handler(event, context):
    """AWS Lambda entry point: return the accounts extract as a JSON response."""
    positional_data = IFTDataCore()
    accounts = positional_data.dfx_Account.fillna("")
    body = accounts.to_json(orient='records')
    return __lambda_response__('200', body)
def __lambda_response__(status_code, response_body):
    """Wrap *response_body* in an API-Gateway-style response dict with CORS headers."""
    cors_headers = {
        'Access-Control-Allow-Headers': 'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token',
        'Access-Control-Allow-Origin': '*',
        'Access-Control-Allow-Methods': 'OPTIONS,GET',
    }
    return {
        'statusCode': status_code,
        'headers': cors_headers,
        'body': response_body,
    }

Related

Mocking sqlobject function call for test db

I am trying to mock sqlbuilder.func for test cases with pytest
I successfully mocked sqlbuilder.func.TO_BASE64 with correct output, but when I tried mocking sqlbuilder.func.FROM_UNIXTIME I didn't get any error — yet the resulting generated query is incorrect. Below is a minimal working example of the problem.
models.py
from sqlobject import (
sqlbuilder,
sqlhub,
SQLObject,
StringCol,
BLOBCol,
TimestampCol,
)
class Store(SQLObject):
    # SQLObject declarative model: each *Col attribute becomes a column of
    # table "store" (camelCase attributes map to snake_case columns,
    # e.g. createdAt -> created_at).
    name = StringCol()
    sample = BLOBCol()
    createdAt = TimestampCol()

# Date format handed to the FROM_UNIXTIME expressions in retrieve().
DATE_FORMAT = "%Y-%m-%d"
def retrieve(name):
    """Select TO_BASE64(sample) for rows matching *name* whose created_at is
    on/after 2018-10-12, printing the generated SQL before running it."""
    columns = [sqlbuilder.func.TO_BASE64(Store.q.sample)]
    created_filter = (
        sqlbuilder.func.FROM_UNIXTIME(Store.q.createdAt, DATE_FORMAT)
        >= sqlbuilder.func.FROM_UNIXTIME("2018-10-12", DATE_FORMAT)
    )
    select = sqlbuilder.Select(columns, sqlbuilder.AND(Store.q.name == name, created_filter))
    connection = sqlhub.getConnection()
    sql = connection.sqlrepr(select)
    print(sql)
    return connection.queryAll(sql)
conftest.py
import pytest
from models import Store
from sqlobject import sqlhub
from sqlobject.sqlite import sqliteconnection
@pytest.fixture(autouse=True, scope="session")
def sqlite_db_session(tmpdir_factory):
    """Session-wide SQLite connection registered as the sqlhub process connection.

    The decorator appeared as "#pytest.fixture(...)" in the pasted source —
    a mangled "@"; without it the fixture is a dead function and is never
    applied, which explains the mock test failing to see a database.
    """
    file = tmpdir_factory.mktemp("db").join("sqlite.db")
    conn = sqliteconnection.SQLiteConnection(str(file))
    sqlhub.processConnection = conn
    init_tables()
    yield conn
    conn.close()

def init_tables():
    # Create the Store table once per test session.
    Store.createTable(ifNotExists=True)
test_ex1.py
import pytest
from sqlobject import sqlbuilder
from models import retrieve
try:
import mock
from mock import MagicMock
except ImportError:
from unittest import mock
from unittest.mock import MagicMock
def TO_BASE64(x):
return x
def FROM_UNIXTIME(x, y):
return 'strftime("%Y%m%d", datetime({},"unixepoch", "localtime"))'.format(x)
# #mock.patch("sqlobject.sqlbuilder.func.TO_BASE64")
# #mock.patch("sqlobject.sqlbuilder.func.TO_BASE64", MagicMock(side_effect=lambda x: x))
# #mock.patch("sqlobject.sqlbuilder.func.TO_BASE64", new_callable=MagicMock(side_effect=lambda x: x))
#mock.patch("sqlobject.sqlbuilder.func.TO_BASE64", TO_BASE64)
#mock.patch("sqlobject.sqlbuilder.func.FROM_UNIXTIME", FROM_UNIXTIME)
def test_retrieve():
result = retrieve('Some')
assert result == []
Current SQL:
SELECT store.sample FROM store WHERE (((store.name) = ('Some')) AND (1))
Expected SQL:
SELECT
store.sample
FROM
store
WHERE
store.name = 'Some'
AND
strftime(
'%Y%m%d',
datetime(store.created_at, 'unixepoch', 'localtime')
) >= strftime(
'%Y%m%d',
datetime('2018-10-12', 'unixepoch', 'localtime')
)
Edit Example
#! /usr/bin/env python
from sqlobject import *

# Single in-memory SQLite DB with SQL echo enabled so the generated query is visible.
__connection__ = "sqlite:/:memory:?debug=1&debugOutput=1"

try:
    import mock
    from mock import MagicMock
except ImportError:
    from unittest import mock
    from unittest.mock import MagicMock

class Store(SQLObject):
    # Declarative model: columns name/sample/created_at on table "store".
    name = StringCol()
    sample = BLOBCol()
    createdAt = TimestampCol()

# Module-level side effect: create the table at import time.
Store.createTable()

DATE_FORMAT = "%Y-%m-%d"

def retrieve(name):
    # SELECT TO_BASE64(sample) FROM store WHERE name = :name AND
    # FROM_UNIXTIME(created_at, fmt) >= FROM_UNIXTIME('2018-10-12', fmt)
    query = sqlbuilder.Select([
        sqlbuilder.func.TO_BASE64(Store.q.sample),
        ],
        sqlbuilder.AND(
            Store.q.name == name,
            sqlbuilder.func.FROM_UNIXTIME(Store.q.createdAt, DATE_FORMAT) >= sqlbuilder.func.FROM_UNIXTIME("2018-10-12", DATE_FORMAT)
        )
    )
    connection = Store._connection
    query = connection.sqlrepr(query)
    queryResult = connection.queryAll(query)
    return queryResult

def TO_BASE64(x):
    # Identity mock for the SQL TO_BASE64 function.
    return x

def FROM_UNIXTIME(x, y):
    # Returns a plain string, so SQLObject reduces it instead of emitting it
    # verbatim — this is exactly the behavior the question demonstrates.
    return 'strftime("%Y%m%d", datetime({},"unixepoch", "localtime"))'.format(x)

# Patch the func namespace, run the query once, then undo all patches.
for p in [
    mock.patch("sqlobject.sqlbuilder.func.TO_BASE64", TO_BASE64),
    mock.patch("sqlobject.sqlbuilder.func.FROM_UNIXTIME", FROM_UNIXTIME),
]:
    p.start()
retrieve('Some')
mock.patch.stopall()
By default, sqlbuilder.func is an SQLExpression that passes its attribute (sqlbuilder.func.datetime, e.g.) to the SQL backend as a constant (sqlbuilder.func actually is an alias for sqlbuilder.ConstantSpace). See the docs about SQLExpression, the FAQ and the code for func.
When you mock an attribute in func namespace it's evaluated by SQLObject and passed to the backend in reduced form. If you want to return a string literal from the mocking function you need to tell SQLObject it's a value that has to be passed to the backend as is, unevaluated. The way to do it is to wrap the literal in SQLConstant like this:
def FROM_UNIXTIME(x, y):
    # SQLConstant tells SQLObject to pass the literal to the backend unevaluated.
    return sqlbuilder.SQLConstant('strftime("%Y%m%d", datetime({},"unixepoch", "localtime"))'.format(x))
See SQLConstant.
The entire test script now looks this
#! /usr/bin/env python3.7
from sqlobject import *

# Single in-memory SQLite DB with SQL echo enabled so the generated query is visible.
__connection__ = "sqlite:/:memory:?debug=1&debugOutput=1"

try:
    import mock
    from mock import MagicMock
except ImportError:
    from unittest import mock
    from unittest.mock import MagicMock

class Store(SQLObject):
    # Declarative model: columns name/sample/created_at on table "store".
    name = StringCol()
    sample = BLOBCol()
    createdAt = TimestampCol()

# Module-level side effect: create the table at import time.
Store.createTable()

DATE_FORMAT = "%Y-%m-%d"

def retrieve(name):
    # SELECT TO_BASE64(sample) FROM store WHERE name = :name AND
    # FROM_UNIXTIME(created_at, fmt) >= FROM_UNIXTIME('2018-10-12', fmt)
    query = sqlbuilder.Select([
        sqlbuilder.func.TO_BASE64(Store.q.sample),
        ],
        sqlbuilder.AND(
            Store.q.name == name,
            sqlbuilder.func.FROM_UNIXTIME(Store.q.createdAt, DATE_FORMAT) >= sqlbuilder.func.FROM_UNIXTIME("2018-10-12", DATE_FORMAT)
        )
    )
    connection = Store._connection
    query = connection.sqlrepr(query)
    queryResult = connection.queryAll(query)
    return queryResult

def TO_BASE64(x):
    # Identity mock for the SQL TO_BASE64 function.
    return x

def FROM_UNIXTIME(x, y):
    # SQLConstant makes SQLObject pass the string through to the backend
    # unevaluated — this is the fix described in the answer.
    return sqlbuilder.SQLConstant('strftime("%Y%m%d", datetime({},"unixepoch", "localtime"))'.format(x))

# Patch the func namespace, run the query once, then undo all patches.
for p in [
    mock.patch("sqlobject.sqlbuilder.func.TO_BASE64", TO_BASE64),
    mock.patch("sqlobject.sqlbuilder.func.FROM_UNIXTIME", FROM_UNIXTIME),
]:
    p.start()
retrieve('Some')
mock.patch.stopall()
The output is:
1/Query : CREATE TABLE store (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT,
sample TEXT,
created_at TIMESTAMP
)
1/QueryR : CREATE TABLE store (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT,
sample TEXT,
created_at TIMESTAMP
)
2/QueryAll: SELECT store.sample FROM store WHERE (((store.name) = ('Some')) AND ((strftime("%Y%m%d", datetime(store.created_at,"unixepoch", "localtime"))) >= (strftime("%Y%m%d", datetime(2018-10-12,"unixepoch", "localtime")))))
2/QueryR : SELECT store.sample FROM store WHERE (((store.name) = ('Some')) AND ((strftime("%Y%m%d", datetime(store.created_at,"unixepoch", "localtime"))) >= (strftime("%Y%m%d", datetime(2018-10-12,"unixepoch", "localtime")))))
2/QueryAll-> []
PS. Full disclosure: I'm the current maintainer of SQLObject.
As @phd pointed out, SQLObject evaluates the expression before passing it to the backend in reduced form.
So instead of passing a string literal, we can also pass an expression directly, which SQLObject will evaluate, as below:
def FROM_UNIXTIME(x, y):
    # Build the expression with sqlbuilder.func so SQLObject renders it itself.
    return sqlbuilder.func.strftime("%Y%m%d", sqlbuilder.func.datetime(x, "unixepoch", "localtime"))
Output:
SELECT store.sample FROM store WHERE (((store.name) = ('Some')) AND ((strftime("%Y%m%d", datetime(store.created_at,"unixepoch", "localtime"))) >= (strftime("%Y%m%d", datetime(2018-10-12,"unixepoch", "localtime")))))

List of chooses depending on user permissions

I'm building simple app on streamlit. I'm struggling with making list of chooses depending on user permissions.
utils.py
Dict with pages:
# Maps the label shown in the selectbox to the page-rendering callable.
pages = {
    "Start": start_page,
    "Test kabli": cable_test_page,
    "ARP": arp_page,
    "FGRestart": fg_restart_page,
    "MACTable": mac_table_page,
    "PingGateWAN": ping_gate_wan_page,
    "PInterface": psyhical_interface_page,
    "RoutingTable": routing_table_page,
    "TestŁączaWWAN(LTE)": wan_lte_test_page,
    "WanMAC": wan_mac_page,
    "TestŁączaWAN": wan_test_page,
    "ResetPortów": reset_poe_page,
    "RestartSwitcha": switch_restart_page,
}
Function for selecting items from a list
def select_page(ssh):
    """Let the user pick a page label and render that page with the SSH session."""
    choice = st.selectbox("Select item", tuple(pages.keys()))
    pages[choice](ssh)
Permissions to each item in a list are made like this:
# module name -> list of usernames allowed to access that page.
permissions = {
    'cable_diag': ["user1","user2","user3"],
    'ping':[ "user1","user2","user3"],
    'arp': ["user1","user2","user3"],
    'fgrestart':["user1","user2","user3"],
    'mactable':["user1","user2","user3"],
    'Pinterface':["user1","user2","user3"],
    'poe':["user1","user2","user3"],
    'routingtable':["user1","user3"],
    'srestart':["user1","user2","user3"],
    'lte':["user2","user3"],
    'wanmac':["user1","user2","user3"],
    'wan':["user2","user3"],}

# NOTE(review): this is an excerpt of has_role(module_name) — the enclosing
# "def has_role(module_name):" line is missing here (module_name comes from
# that missing scope, and the trailing "return decorator" is its return
# statement). "#wraps(func)" is presumably a mangled "@wraps(func)".
# See the full permissions.py further down for the complete definition.
def decorator(func):
    #wraps(func)
    def wrapper(*args, **kwargs):
        # Only run the page if the logged-in user is whitelisted for it.
        if st.session_state["username"] in permissions[module_name]:
            print('Accessed')
            return func(*args, **kwargs)
        else:
            st.text('ACCESS DENIED')
    return wrapper
return decorator
All file with page have assigned role like this:
#has_role('page_name')
It works, but if user1 doesn't have permission to the 'wan' page, I also want that page hidden from the list so he cannot see it. I really don't have an idea how to solve this.
Full utils.py
import streamlit as st
import pandas as pd
import paramiko
from permissions import permissions
from modules.streamlit.pages.cable_test import cable_test_page
from modules.streamlit.pages.arp import arp_page
from modules.streamlit.pages.fg_restart import fg_restart_page
from modules.streamlit.pages.mac_table import mac_table_page
from modules.streamlit.pages.ping_gate_wan import ping_gate_wan_page
from modules.streamlit.pages.psyhical_interface import psyhical_interface_page
from modules.streamlit.pages.routing_table import routing_table_page
from modules.streamlit.pages.switch_restart import switch_restart_page
from modules.streamlit.pages.wan_lte_test import wan_lte_test_page
from modules.streamlit.pages.wan_mac import wan_mac_page
from modules.streamlit.pages.wan_test import wan_test_page
from modules.streamlit.pages.reset_poe import reset_poe_page
from modules.streamlit.pages.start import start_page
from modules.streamlit.pages.test import test_page
# Maps the label shown in the selectbox to the page-rendering callable.
pages = {
    "Start": start_page,
    "Test kabli": cable_test_page,
    "ARP": arp_page,
    "FGRestart": fg_restart_page,
    "MACTable": mac_table_page,
    "PingGateWAN": ping_gate_wan_page,
    "PInterface": psyhical_interface_page,
    "RoutingTable": routing_table_page,
    "TestŁączaWWAN(LTE)": wan_lte_test_page,
    "WanMAC": wan_mac_page,
    "TestŁączaWAN": wan_test_page,
    "ResetPortów": reset_poe_page,
    "RestartSwitcha": switch_restart_page,
}

# Shared grid-widget options used by the pages.
custom_options = {"sideBar": False, "enableCellTextSelection": True}
# "#st.cache" in the pasted source was a mangled "@st.cache"; without the
# decorator the spreadsheet is re-read on every Streamlit rerun.
@st.cache
def load_markets_xls():
    """Load the markets spreadsheet once and cache it across reruns.

    NOTE(review): "path" is a placeholder — point it at the real .xlsx file.
    """
    df = pd.read_excel("path",
                       index_col=None, engine="openpyxl")
    return df
def choose_market(df):
    """Ask for a shop number and return the matching market IP, or None."""
    shop_number = st.text_input('Number', '')
    if not shop_number:
        return None
    # Only rows with a host name; normalize float-ish values to plain strings.
    df = df[df["Host_Name"].notna()]
    df['Host_Name'] = df['Host_Name'].astype(int).astype(str)
    try:
        match = df.loc[df['Host_Name'] == shop_number].to_dict('records')[0]
    except IndexError:
        # No row matched the entered number.
        st.text("No found ")
        return None
    return match['ip']
def connect_to_market(ssh, market_ip):
    """Open an SSH session to the market device; return True on success.

    NOTE(review): credentials are hard-coded placeholders.
    """
    print(market_ip)
    try:
        ssh.connect(hostname=str(market_ip), port=22, username='user',
                    password='password', allow_agent=False, timeout=None, compress=False)
    except Exception as e:
        # Surface the failure reason instead of the original opaque '----- '
        # placeholder, which swallowed the exception silently.
        st.text('Connection failed: {}'.format(e))
        return False
    st.text('Connected!')
    return True
def select_page(ssh):
    """Let the user pick a page label and render that page with the SSH session."""
    choice = st.selectbox("Choose", tuple(pages.keys()))
    pages[choice](ssh)
Full permissions.py:
from functools import wraps
import streamlit as st
import pandas as pd
import paramiko
# module name -> list of usernames allowed to access that page;
# consumed by has_role() below.
permissions = {
    'cable_diag': ["user1","user2","user3"],
    'ping':[ "user1","user2","user3"],
    'arp': ["user1","user2","user3"],
    'fgrestart':["user1","user2","user3"],
    'mactable':["user1","user2","user3"],
    'Pinterface':["user1","user2","user3"],
    'poe':["user1","user2","user3"],
    'routingtable':["user1","user3"],
    'srestart':["user1","user2","user3"],
    'lte':["user2","user3"],
    'wanmac':["user1","user2","user3"],
    'wan':["user2","user3"],}
def has_role(module_name):
    """Decorator factory: run the wrapped page only if the logged-in user is
    listed in permissions[module_name]; otherwise show an access-denied note.

    "#wraps(func)" in the pasted source was a mangled "@wraps(func)";
    restored so the wrapped page keeps its original name/docstring. A stray
    markdown fence fused onto the last line was also removed.
    """
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            if st.session_state["username"] in permissions[module_name]:
                print('jest dostep')  # Polish: "access granted"
                return func(*args, **kwargs)
            else:
                st.text('ACCESS DENIED!')
        return wrapper
    return decorator
app.py:
import streamlit as st
import pandas as pd
import paramiko
import streamlit_authenticator as stauth
from utils import load_markets_xls, choose_market, connect_to_market, select_page
from cred import hashed_passwords , names, usernames
from PIL import Image
import base64
# Shared SSH client; AutoAddPolicy avoids interactive host-key prompts
# (NOTE(review): it also accepts unknown host keys without verification).
ssh = paramiko.SSHClient()
# ssh.load_system_host_keys()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())

# CSS snippet that hides Streamlit's hamburger menu.
hide_menu_style = """
<style>
#MainMenu {visibility: hidden;}
</style>
"""

# Shared grid-widget options.
custom_options = {"sideBar": False, "enableCellTextSelection": True}
def main():
    """Streamlit entry point: login flow, market selection, page dispatch."""
    st.set_page_config(page_title='Market Tests', layout="wide")
    st.markdown(hide_menu_style, unsafe_allow_html=True)
    authenticator = stauth.Authenticate(names,usernames,hashed_passwords, 'cookie','some_key', cookie_expiry_days=0)
    # Three-column layout just to center the logo in the middle column.
    col1, col2, col3 = st.columns(3)
    with col1:
        st.write(' ')
    with col2:
        image = Image.open('path')  # NOTE(review): placeholder path
        st.image(image)
    with col3:
        st.write(' ')
    name, authentication_status, username = authenticator.login('Panel Logowania','main')
    if authentication_status:
        authenticator.logout('Logout', 'main')
        data_load_state = st.text('Loading data...')
        df = load_markets_xls()
        data_load_state.text('Markets loaded!')
        market_ip = choose_market(df)
        # Only dispatch to a page once a market is chosen and SSH connects.
        if (market_ip):
            if connect_to_market(ssh, market_ip):
                select_page(ssh)
    elif authentication_status == False:
        st.error('Username/password is incorrect')
    elif authentication_status == None:
        st.warning('Please enter your username and password')
def refresh_data():
    # Clear the cache and force a rerun of the script.
    # NOTE(review): st.legacy_caching / st.script_runner are private/legacy
    # Streamlit APIs that were removed in newer releases — confirm the
    # installed Streamlit version still provides them.
    st.legacy_caching.clear_cache()
    raise st.script_runner.RerunException(st.script_request_queue.RerunData(None))

if __name__ == "__main__":
    main()
sample page:
import streamlit as st
from permissions import has_role
custom_options = {
    "enableCellTextSelection": True,
    "sideBar": True
}

# "#has_role('arp')" in the pasted source was a mangled "@has_role('arp')";
# as a comment the permission check never ran for this page.
@has_role('arp')
def arp_page(ssh):
    """Render the device's ARP table fetched over SSH."""
    st.title('ARP')
    stdin, stdout, stderr = ssh.exec_command("get system arp")
    # Stream the command output line by line until EOF.
    for line in iter(stdout.readline, ""):
        st.text(line)

Selecting a Design Pattern in Python

The below code I modified from Multiple Inheritance to Facade Data Pattern , however it is still breaking the concept of the Facade Data pattern as my subsystems(ES) are sharing amongst themselves .
All the ES are working on a Structured data stream and enriching the data together , they all are run in Async and then gathered into a list (using async gather) . I wanted to know which data pattern suits this
Used case ,
Where I can keep adding ES as per my requirement.
Each ES can share data amongst itself like dictonary .
And I if I have to add new functionality I follow "Single Responsibility Principle"
Multiple Inheritance
import os
import asyncio
import psycopg2
import websockets
from datetime import datetime
from websockets.extensions import permessage_deflate
from structure import Structure
import sys
sys.path.append('..')
from event_stations.es3a import ES3A
from event_stations.es3b import ES3B
from event_stations.es3c import ES3C
from event_stations.es3d import ES3D
from event_stations.es1a import ES1A
from event_stations.es1b import ES1B
from event_stations.es2a import ES2A
from event_stations.es2b import ES2B
class FR_Server(ES1A, ES2A, ES2B, ES3A, ES3B, ES3C, ES3D, Structure):
    """Websocket receiver that enriches incoming CSV rows via the ES* mixin
    classes and forwards the enriched rows to a downstream websocket."""

    unique_id = "100"                              # server id embedded in event ids
    event_format_list = []
    fr_config_table = 'detail_event.app_fer_config'
    psql_db = None

    def __init__(self):
        print("Receiver Called INIT")
        self.psql_db = self.connect_to_psql()
        self._get_metadata()
        super(FR_Server, self).__init__()
        # self.start_receiver(self.mapped_dict)

    def connect_to_psql(self):
        """Open the Postgres connection used for ES configuration.

        NOTE(review): credentials are hard-coded.
        """
        db = psycopg2.connect("dbname=trimble user=postgres password=admin")
        return db

    def _get_parent_classes(self):
        """Return {class name: class} for the direct bases of FR_Server.

        Replaces the original eval("FR_Server") — the class object is directly
        available here, so eval is unnecessary.
        """
        return {base.__name__: base for base in FR_Server.__bases__}

    def _get_parent_classname(self):
        """Names of the direct base classes (kept for backward compatibility)."""
        return list(self._get_parent_classes())

    def _get_metadata(self):
        """Load per-ES cache/ignite settings from Postgres onto each ES class."""
        parents = self._get_parent_classes()
        print(list(parents), "pppp")
        cur = self.psql_db.cursor()
        cur.execute(f"Select * from {self.fr_config_table}")
        for result in cur.fetchall():
            event_station_id = result[0]
            # Safer than the original eval(event_station_id): only names that
            # really are bases of FR_Server can be configured.
            station_cls = parents.get(event_station_id)
            if station_cls is not None:
                setattr(station_cls, "cache_table_name", result[1])
                setattr(station_cls, "ignite_port", result[2])
                setattr(station_cls, "ignite_host", result[3])

    def get_port(self):
        return os.getenv('WS_PORT', '10011')

    def get_host(self):
        return os.getenv('WS_HOST', 'localhost')

    async def start(self):
        """Start the websocket server with permessage-deflate compression."""
        return await websockets.serve(self.handler, self.get_host(), self.get_port(), ping_interval=None, max_size=None,
                                      max_queue=None, close_timeout=None, extensions=[
                                          permessage_deflate.ServerPerMessageDeflateFactory(
                                              server_max_window_bits=11,
                                              client_max_window_bits=11,
                                              compress_settings={'memLevel': 4},
                                          ),
                                      ])

    def generate_event_id(self, index):
        """Timestamp-based event id: <ddMMyyyyHHMMSSus><unique_id><index>.

        The original called datetime.now() seven times, so the timestamp
        fields could straddle a second/minute boundary; snapshot one now().
        """
        now = datetime.now()
        stamp = '%02d%02d%d%d%d%d%d' % (now.day, now.month, now.year,
                                        now.hour, now.minute, now.second,
                                        now.microsecond)
        return "".join([stamp, self.unique_id, index])

    async def handler(self, websocket, path):
        """Per-connection loop: enrich each incoming row and forward it downstream."""
        async with websockets.connect('ws://localhost:10015', ping_interval=None, max_size=None,
                                      max_queue=None, close_timeout=None,
                                      extensions=[permessage_deflate.ClientPerMessageDeflateFactory(
                                          server_max_window_bits=11,
                                          client_max_window_bits=11,
                                          compress_settings={'memLevel': 4},
                                      ),
                                      ]) as websocket_rb:
            async for row in websocket:
                lst_row = row.decode().split(",")
                uid = self.generate_event_id(lst_row[0])
                lst_row = [uid] + lst_row
                # All four ES3* enrichments run concurrently on the same key.
                results = await asyncio.gather(self.enrich_column_es3a_dict(lst_row[1]),
                                               self.enrich_column_es3b_dict(lst_row[1]),
                                               self.enrich_column_es3c_dict(lst_row[1]),
                                               self.enrich_column_es3d_dict(lst_row[1]))
                await websocket_rb.send(str(lst_row + results).encode())

    def start_receiver(self, mapped_list):
        """Blocking entry point: remember the mapping and run the loop forever."""
        self.event_format_list = mapped_list
        asyncio.get_event_loop().run_until_complete(self.start())
        asyncio.get_event_loop().run_forever()
Facade Data pattern :
from __future__ import annotations
from event_stations.es1a import ES1A
from event_stations.es2a import ES2A
from event_stations.es2b import ES2B
import psycopg2
class Foundation_Facade(object):
    """Facade that sequences configuration and data loading across the ES
    subsystems behind a single operation() call."""

    psql_db = None
    client = None

    def __init__(self, es1a: ES1A = None, es2a: ES2A = None, es2b: ES2B = None) -> None:
        # BUG FIX: the original never assigned self._es2b although operation()
        # uses it, so operation() always raised AttributeError. The new es2b
        # parameter defaults to None, keeping existing two-argument callers working.
        self._es1a = es1a or ES1A()
        self._es2a = es2a or ES2A()
        self._es2b = es2b or ES2B()

    def operation(self):
        """Run the configuration/load sequence across the ES subsystems."""
        print("Called")
        results = []
        self.psql_db = self._es1a._connect_psql()
        self._es1a._get_metadata(self.psql_db.cursor())
        self.client = self._es1a.connect_ignite_client(self._es1a.ignite_host, self._es1a.ignite_port)
        self._es2a._get_metadata(self.psql_db.cursor())
        self._es2a.put_data(self.client)
        self._es2b._get_metadata(self.psql_db.cursor())
        self._es2b.put_data(self.client)
        print(self._es2b.static_df.head())
        # results.append(self._es1a._get_metadata())
        return results
if __name__ == '__main__':
    # Wire concrete ES instances into the facade and hand it to the server.
    es1a = ES1A()
    es2a = ES2A()
    es2b = ES2B()
    # NOTE(review): es2b is constructed but not passed to Foundation_Facade,
    # even though operation() accesses self._es2b — verify the facade
    # actually receives it.
    facade = Foundation_Facade(es1a, es2a)
    from fr_server_1 import Server
    Server(facade)

CherryPy WS is not returning string in UTF-8

I'm trying to build a REST Web Service with CherryPy and Python. It works, but when I access it through Chrome, it's not displaying in UTF-8.
This web service queries a MongoDB and then gives the results in a list of dictionaries. I did a print(ticketME) and it's showing the right characters:
But when it displays in Chrome, it is not displaying correctly (and I'm also realizing that "solucion" and "problema" are not showing):
As you can see in the code, I set the charset to UTF-8:
import cherrypy
import pymongo
import urllib
import pyodbc
import mysql.connector
from datetime import datetime
import time
import sys
import numpy as np
class HelloWorld(object):
    """CherryPy handler that serves MOICA tickets from MongoDB as JSON."""

    # "#cherrypy.expose" / "#cherrypy.tools.json_out()" in the pasted source
    # were mangled "@" decorators; without them the handler is not exposed and
    # the list is never JSON-serialized.
    @cherrypy.expose
    @cherrypy.tools.json_out()
    def index(self):
        password = "*password*"  # redacted in the question
        myclient = pymongo.MongoClient("*mongoDB connection string*")  # redacted
        mydb = myclient["moica2"]
        mycol = mydb["moicaTickets"]
        myquery = "*mongoDB aggregate query*"  # redacted aggregation pipeline
        mydoc = mycol.aggregate(myquery)
        mydb = None
        myclient.close()
        mycol = None
        resultadoTicketsME = []
        for x in mydoc:
            try:
                asunto = x['pAsunto']
                nrotkt = x['pTicket']
                estado = x['pEstado']
                fechaCreacion = x['pFechaCreacion']
                fechaCierre = x['pFechaCierre']
                nodoCRM = x['pNodoCRM']
                nodoCMTS = x['pNodoCMTS']
                if ('COMPLETO' in nodoCMTS):
                    nodoCMTS = "Completo"
                RTs = x['pRTs']
                notas = x['pNotas']
                asuntoCierre = x['pAsuntoCierre']
                estadoEtaClick = x['pEstadoEtaClick']
                afectacion = x['pAfectacion']
                problema = x['pProblema']
                solucion = x['pSolucion']
                arbolCreacion = x['pElArbolCreacion']
                arbolActual = x['pElarbolActual']
                idFuente = int(x['pFuente']['idFuente'])
                ticketME = {
                    'nrotkt': nrotkt,
                    'asunto': asunto,
                    'estado': estado,
                    'fechaCreacion': fechaCreacion,
                    'fechaCierre': fechaCierre,
                    'nodoCRM': nodoCRM,
                    'nodoCMTS': nodoCMTS,
                    'RTs': RTs,
                    'notas': notas,
                    'asuntoCierre': asuntoCierre,
                    'estadoEtaClick': estadoEtaClick,
                    'afectacion': afectacion,
                    'problema': problema,
                    'solucion': solucion,
                    'arbolCreacion': arbolCreacion,
                    'arbolActual': arbolActual,
                    'idFuente': idFuente
                }
                print(ticketME)
                resultadoTicketsME.append(ticketME)
            except Exception:
                # Narrowed from the original bare "except:", which also swallowed
                # KeyboardInterrupt/SystemExit; context manager closes the log.
                with open("error.log", "a+") as lf:
                    lf.write("MOICA2FUENTESME %s : No se pudo insertar el tkt %s\n" % (datetime.now().strftime("%Y-%m-%d %H:%M:%S"), x['pTicket']))
        # NOTE(review): with json_out restored, the tool sets application/json;
        # this manual text/html header overrides it — confirm which one you want.
        cherrypy.response.headers['Content-Type'] = "text/html;charset=utf-8"
        return resultadoTicketsME
# Username -> password table for HTTP Basic Auth.
USERS = {'ngabioud': 'password'}

def validate_password(realm, username, password):
    """Return True iff *username* exists and its password matches."""
    return username in USERS and USERS[username] == password
# Global CherryPy config: UTF-8 encoding/decoding of requests/responses plus
# HTTP Basic Auth backed by validate_password.
cherrypy.config.update({'tools.encode.on': True,
                        'tools.encode.encoding': 'utf-8',
                        'tools.decode.on': True,
                        'tools.auth_basic.on': True,
                        'tools.auth_basic.realm': 'localhost',
                        'tools.auth_basic.checkpassword': validate_password,
                        'tools.auth_basic.accept_charset': 'UTF-8',
                        })
# Mount the handler at / and start the server (blocking).
cherrypy.quickstart(HelloWorld())
Is there anything else I could try?
Thank you,
Best regards
As stated by snakecharmerb in the comment, it was a Chrome rendering issue. I made a .php page setting the encoding to UTF-8 and it displayed correctly.

key error while trying to pass data as a parameter using a json file

code is given below
import time
import unittest
import logging as log
from loggenerator import set_log_params,move_latest_log_to_persistent_file
from parse_test_json import get_test_params
from scrapping import SC
class ScrapeTest(unittest.TestCase):
def setup(self):
self.start_time=time.time()
def teardown(self):
self.end_time=time.time()
def test_SC_scrape(self):
try:
test_name="test scrape"
set_log_params(log_file=test_name,level=log.INFO)
log.info("step1:set all test params")
test_params = get_test_params(self.__class__.__name__, test_name=test_name)
log.info("step 2:set")
ik=SC()
log.info("step3:calling scrapper for")
log.debug(ik.parseURL(party_name=test_params["party_name"],start_date=test_params["start_date"],end_date=test_params["end_date"]))
except Exception as e:
raise e
move_latest_log_to_persistent_file(log_file=test_name)
####
import json, os
from builtins import *
def get_test_params(test_class_name=None, test_name=None):
    """Load <script dir>/test_param_jsons/<TestClass>params.json and return
    the parameters stored under [test_class_name][test_name].

    Raises KeyError when either key is missing from the JSON document, and
    FileNotFoundError when the params file does not exist.
    """
    script_dir = os.path.dirname(os.path.realpath(__file__))
    json_file = script_dir + "/test_param_jsons/" + test_class_name + "params.json"
    with open(json_file) as test_data:
        test_json = json.load(test_data)
    return test_json[test_class_name][test_name]
This function raises a KeyError.
this should work as long as you have a json file available at: <SCRIPT_PATH>/test_param_jsons/MyClass_params.json
Also, in order to avoid KeyError you'll need to ensure that your input json file contains key: value, test_class_name : test_name
import json, os
from builtins import *
class MyClass:
    """Demonstrates loading per-test parameters from a JSON file keyed by
    class name and test name."""

    def get_test_params(self, test_class_name=None, test_name=None):
        """Return params.json[test_class_name][test_name], or None (with a
        message) when either key is missing."""
        # BUG FIX: the original hard-coded a Windows "\\" separator inside the
        # joined segment ("test_param_jsons\\params.json"), which breaks on
        # POSIX systems; build the whole path with os.path.join instead.
        params_path = os.path.join(os.path.dirname(__file__), "test_param_jsons", "params.json")
        with open(params_path, 'r') as test_data:
            test_json = json.load(test_data)
        try:
            return test_json[test_class_name][test_name]
        except KeyError as e:
            print('KeyError: {}'.format(e))

    def mymain(self, test_name):
        '''mymain is defined to accommodate the requirement to use __class__ as a parameter.'''
        test_params = self.get_test_params(self.__class__.__name__, test_name=test_name)
        return test_params
if __name__ == '__main__':
    # Demo: look up the params recorded for MyClass / 'sth' and print them.
    test_name = 'sth'
    myclass = MyClass ()
    result = myclass.mymain (test_name)
    print (result)

Categories

Resources