Jupyter Notebook is stuck on "Loading..." - python

I am trying to run this PyMongo/Dash app through Jupyter Notebook, but all it gives me is a "Loading..." output and then the kernel goes idle. When I just print the df, it shows the data, so I know it is working data-wise. I have tried restarting the kernel; deleting the output, restarting the kernel, and re-entering the input; and restarting and clearing output, none of which had any effect. I suspect it may have something to do with my input data, but I am not sure what exactly.
This is the .ipynb:
from jupyter_plotly_dash import JupyterDash

import dash
import dash_leaflet as dl
import dash_core_components as dcc
import dash_html_components as html
import plotly.express as px
import dash_table
from dash.dependencies import Input, Output

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from pymongo import MongoClient

#### FIX ME #####
# change animal_shelter and AnimalShelter to match your CRUD Python module file name and class name
from AAC import AnimalShelter

###########################
# Data Manipulation / Model
###########################
# FIX ME update with your username and password and CRUD Python module name
username = "aacuser"
password = "monogoadmin"
shelter = AnimalShelter(username, password)

# class read method must support return of cursor object and accept projection json input
df = pd.DataFrame.from_records(shelter.read({}))

#########################
# Dashboard Layout / View
#########################
app = JupyterDash('Dash DataTable Only')

app.layout = html.Div([
    html.Div(id='hidden-div', style={'display': 'none'}),
    html.Center(html.B(html.H1('SNHU CS-340 Dashboard'))),
    html.Hr(),
    dash_table.DataTable(
        id='datatable-interactivity',
        columns=[
            {"name": i, "id": i, "deletable": False, "selectable": True} for i in df.columns
        ],
        data=df.to_dict('records'),
        # FIXME: Set up the features for your interactive data table to make it user-friendly for your client
        editable=False,
        filter_action="native",
        sort_action="native",
        sort_mode="multi",
        column_selectable=False,
        row_selectable=False,
        row_deletable=False,
        selected_columns=[],
        selected_rows=[],
        page_action="native",
        page_current=0,
        page_size=10,
    ),
    html.Br(),
    html.Hr(),
])

app
This is the .py file:
import pymongo
from pymongo import MongoClient
from bson.objectid import ObjectId


class AnimalShelter(object):
    """CRUD operations for Animal collection in Mongodatabase"""

    # Initializes MongoClient
    def __init__(self, username, password):
        self.client = MongoClient('mongodb://127.0.0.1:38574', username='aacuser', password='mongoadmin',
                                  authSource='AAC', authMechanism='SCRAM-SHA-1')
        self.database = self.client['AAC']

    # Implement create method
    def create(self, data):
        if data is not None:
            return self.database.animals.insert_one(data)
        else:
            raise Exception("Nothing to save, because data parameter is empty")

    # Implement read method
    def read(self, data):
        if data is not None:
            return self.database.animals.find(data)
        else:
            raise Exception("Nothing to read, because data parameter is empty")

    # Implement update method
    def update(self, data):
        if data is not None:
            return self.database.animals.update_one(data)
        else:
            raise Exception("Nothing to update, because data parameter is empty")
    # Implement delete method
    def delete(self, data):
        if data is not None:
            return self.database.animals.delete_one(data)
        else:
            raise Exception("Nothing to delete, because data parameter is empty")
Any help is greatly appreciated!
EDIT:
Here is the requested screenshot.
I also wanted to note that I cleared my cache and cookies, and this had no effect.

Related

create bootstrap drop down menu in plotly dash using one function to set it up

The logic behind the dash core components dropdown and the dash bootstrap components dropdown is a bit different, and I would like to have the best of both worlds: the nice style from dbc and the functionality from dcc. However, modifying the CSS of the dcc component to make it look nicer is complicated, and I could not find an existing solution. Setting up the dbc component takes some effort, because each element in the dropdown needs its own id. Also, you cannot directly ask the dropdown for its selected value (i.e. what it is currently showing).
I therefore wanted to set up a function that builds the dropdown and its callback automatically, but I ran into the problem that the callback is a nested function and is therefore not available globally. How can I change that? Or is there another way to build it?
What I want in the end is an easy way to set up the dbc dropdown such that it shows the selected value.
This is what I have so far (the non-working solution):
import dash
import numpy as np
import dash_bootstrap_components as dbc
import dash_html_components as html
from dash.dependencies import Input, Output


def selectable_dropdown(item_id="dropdown",
                        options=['option_1', 'option_2']):
    # create the items and the ids for each item
    dropdown_items = [dbc.DropdownMenuItem(item, id=item_id + '_' + item)
                      for item in options]
    # create the dropdown menu
    dropdown = dbc.DropdownMenu(
        dropdown_items,
        label="none",
        addon_type="prepend",
        bs_size="sm",
        id=item_id
    )

    output = Output(item_id, "label")
    inputs = [Input(item_id + '_' + item, "n_clicks") for item in options]

    @app.callback(output, inputs)
    def update_label(*args):
        # get the triggered item
        ctx = dash.callback_context
        triggered_id = ctx.triggered[0]["prop_id"].split(".")[0]
        # get the label for the triggered id or return no selection
        if (np.array([n == None for n in args]).all()) or not ctx.triggered:
            return "no selection"
        else:
            return [label for label in options if item_id + '_' + label == triggered_id]

    return dropdown


app = dash.Dash(
    external_stylesheets=[dbc.themes.BOOTSTRAP]
)
app.config['suppress_callback_exceptions'] = True

app.layout = \
    html.Div([selectable_dropdown(item_id="target_select",
                                  options=["option1 ", "option 2", "option3"])])

if __name__ == "__main__":
    app.run_server(debug=False, host='0.0.0.0', port=1234)
This is how it should look (a working example), but I want it in a more generalized way, ideally in just one function or class:
import dash
import numpy as np
import dash_bootstrap_components as dbc
import dash_html_components as html
from dash.dependencies import Input, Output

options = ["option1 ", "option 2", "option3"]
item_id = 'dropdown'

dropdown_items = [dbc.DropdownMenuItem(item, id=item_id + '_' + item)
                  for item in options]
# create the dropdown menu
dropdown = dbc.DropdownMenu(
    dropdown_items,
    label="none",
    addon_type="prepend",
    bs_size="sm",
    id=item_id)

output = Output(item_id, "label")
inputs = [Input(item_id + '_' + item, "n_clicks") for item in options]

app = dash.Dash(
    external_stylesheets=[dbc.themes.BOOTSTRAP, './assets/stylesheet.css']
)
app.config['suppress_callback_exceptions'] = True

app.layout = \
    html.Div([dropdown])


@app.callback(output, inputs)
def update_label(*args):
    # get the triggered item
    ctx = dash.callback_context
    triggered_id = ctx.triggered[0]["prop_id"].split(".")[0]
    # get the label for the triggered id or return no selection
    if (np.array([n == None for n in args]).all()) or not ctx.triggered:
        return "no selection"
    else:
        return [label for label in options if item_id + '_' + label == triggered_id]


if __name__ == "__main__":
    app.run_server(debug=False, host='0.0.0.0', port=1234)

Unable to access dataframe while uploading from plotly-dash app

I am new to Python and Plotly Dash.
I am trying to use a "hidden div" to store a data frame, as suggested in Dash tutorial 5, but I am not able to process the uploaded file.
import base64
import io
import dash
from dash.dependencies import Input, Output, State
import dash_core_components as dcc
import dash_html_components as html
import dash_table
import pandas as pd

# global_df = pd.read_csv('...')

app = dash.Dash(__name__)

app.layout = html.Div([
    dcc.Graph(id='graph'),
    html.Table(id='table'),
    dcc.Upload(
        id='datatable-upload',
        children=html.Div(['Drag and Drop or ', html.A('Select Files')]),
    ),
    # Hidden div inside the app that stores the intermediate value
    html.Div(id='intermediate-value', style={'display': 'none'})
])


def parse_contents(contents, filename):
    content_type, content_string = contents.split(',')  # line 28
    decoded = base64.b64decode(content_string)
    if 'csv' in filename:
        # Assume that the user uploaded a CSV file
        return pd.read_csv(
            io.StringIO(decoded.decode('utf-8')))
    elif 'xls' in filename:
        # Assume that the user uploaded an excel file
        return pd.read_excel(io.BytesIO(decoded))
    elif 'xlsx' in filename:
        # Assume that the user uploaded an excel file
        return pd.read_excel(io.BytesIO(decoded))


@app.callback(Output('intermediate-value', 'children'),
              [Input('datatable-upload', 'contents')],
              [State('datatable-upload', 'filename')])
def update_output(contents, filename):
    # some expensive clean data step
    cleaned_df = parse_contents(contents, filename)
    # more generally, this line would be
    # json.dumps(cleaned_df)
    return cleaned_df.to_json(date_format='iso', orient='split')


@app.callback(Output('graph', 'figure'), [Input('intermediate-value', 'children')])
def update_graph(jsonified_cleaned_data):
    # more generally, this line would be
    # json.loads(jsonified_cleaned_data)
    dff = pd.read_json(jsonified_cleaned_data, orient='split')
    figure = create_figure(dff)
    return figure


@app.callback(Output('table', 'children'), [Input('intermediate-value', 'children')])
def update_table(jsonified_cleaned_data):
    dff = pd.read_json(jsonified_cleaned_data, orient='split')
    table = create_table(dff)
    return table


if __name__ == '__main__':
    app.run_server(port=8050, host='0.0.0.0')
I am getting the following error while running the code:
File "ipython-input-12-4bd6fe1b7399", line 28, in parse_contents
content_type, content_string = contents.split(',')
AttributeError: 'NoneType' object has no attribute 'split'
The callback is likely running on initialization with empty values. You can prevent this by adding something like this at the top of your callback:
if contents is None:
    raise dash.exceptions.PreventUpdate
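For example, a guarded version of the upload callback from the question might look like the sketch below (same component ids and parse_contents helper as above; only the early-return guard is new):

@app.callback(Output('intermediate-value', 'children'),
              [Input('datatable-upload', 'contents')],
              [State('datatable-upload', 'filename')])
def update_output(contents, filename):
    # on the initial page load 'contents' is None, so skip the update entirely
    if contents is None:
        raise dash.exceptions.PreventUpdate
    cleaned_df = parse_contents(contents, filename)
    return cleaned_df.to_json(date_format='iso', orient='split')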

Plotly in Dash Python is not getting updated

When I run the code I get the graph, but the graph is not getting updated. I am pulling the data from SQL Server Management Studio on my laptop.
Kindly let me know what needs to be done. The X axis contains date and time, and the Y axis contains numeric data that is updated automatically.
Code:
import pandas as pd
import pyodbc
import numpy as np

server = 'LAPTOP-OO3V36UA\SQLEXPRESS'
db = 'addy'
conn = pyodbc.connect('DRIVER={SQL Server}; SERVER=' + server + ';DATABASE=' + db +
                      ';Trusted_connection=yes')

sql = """
SELECT * FROM Summry
"""
df = pd.read_sql(sql, conn)

import dash
from dash.dependencies import Output, Input
import dash_core_components as dcc
import dash_html_components as html
from random import random
import plotly
import plotly.graph_objs as go

app = dash.Dash(__name__)

app.layout = html.Div([
    dcc.Graph(id='live-update-graph-scatter', animate=True),
    dcc.Interval(
        id='interval-component',
        interval=1 * 1000
    )
])


@app.callback(Output('live-update-graph-scatter', 'figure'),
              [Input('interval-component', 'interval')])
def update_graph_scatter():
    df = pd.read_sql(sql, conn)
    trace1 = go.Scatter(
        y=df['ACL'],
        x=df['DateandnTime'],
        mode='lines',
        name='ACL'
    )
    layout = go.Layout(
        title='Daily Monitoring'
    )
    return {'data': trace1, 'layout': layout}


if __name__ == '__main__':
    app.run_server()
You've set your callback's input to
Input('interval-component', 'interval')
But you want
Input('interval-component', 'n_intervals')
The interval property sets how frequently n_intervals gets updated. The change in n_intervals is what can be used to trigger the callback.
Here's the documentation: https://dash.plotly.com/dash-core-components/interval
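A minimal sketch of the corrected callback, reusing the sql string, conn connection, and imports from the question; it accepts the n_intervals argument that Dash passes in, and wraps the trace in a list because a figure's 'data' entry should be a list of traces:

import plotly.graph_objs as go

@app.callback(Output('live-update-graph-scatter', 'figure'),
              [Input('interval-component', 'n_intervals')])
def update_graph_scatter(n_intervals):
    # re-query the database on every tick of the Interval component
    df = pd.read_sql(sql, conn)
    trace1 = go.Scatter(
        x=df['DateandnTime'],
        y=df['ACL'],
        mode='lines',
        name='ACL'
    )
    layout = go.Layout(title='Daily Monitoring')
    return {'data': [trace1], 'layout': layout}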

How can I cache my SQL result so I don't have to call SQL repeatedly to get data for Dash plots?

I am trying to build a dashboard that will generate several plots based on a single SQL data query. I want the query to be modifiable via the dashboard (e.g. to query a different order amount or similar) and then change all plots at once. The query may be expensive, so I don't want it to run N times for N different plots.
I have tried to do this using the Flask-Caching decorator @cache.memoize(), similar to the example given in the docs: https://dash.plotly.com/performance
Here is a stripped-back version of what I'm doing. I can tell that the query_data function is not doing what I intend because:
1. The resulting graphs show different data points on the x-axis. If they were using the same cached dataset, the data points in x should be the same.
2. The print statements in the query_data function come out twice every time I change an input cell.
Can anyone explain why this isn't working, or how I can achieve what I want?
import sys
import dash
import dash_core_components as dcc
import dash_html_components as html
import plotly.express as px
from dash.dependencies import Input, Output
from setup_redshift import setup_connection
from flask_caching import Cache
from datetime import datetime
import pandas as pd

conn = setup_connection()

app = dash.Dash(__name__)

cache = Cache(app.server, config={
    # 'CACHE_TYPE': 'filesystem',
    'CACHE_TYPE': 'memcached',
    'CACHE_DIR': 'cache-directory'
})

sql_query = '''select i.order_amount_in_usd, r.calibrated_score, r.score
from datalake.investigations i
inner join datalagoon.prod_model_decision r
    ON i.investigation_id = r.investigation_id
where i.team_id = {}
    AND i.order_amount_in_usd < {}
    AND r.calibrated_score >= 0
order by RANDOM()
limit 1000'''


@cache.memoize()
def query_data(team_id, max_usd):
    print("Calling data query now with team_id={} and max_usd={} at time {}".format(team_id, max_usd, datetime.now()))
    _sql = sql_query.format(team_id, max_usd)
    print(_sql)
    data = pd.read_sql(sql_query.format(team_id, max_usd), conn)
    print("data is {} rows ".format(len(data)))
    print("data max usd is {}".format(data['order_amount_in_usd'].max()))
    return data


@app.callback(Output(component_id='output-graph', component_property='figure'),
              [Input(component_id='data-select-team-id', component_property='value'),
               Input(component_id='data-select-max-usd', component_property='value')])
def plot_data(team_id, max_usd):
    print("calling query_data at from graph at {}".format(datetime.now()))
    in_data = query_data(team_id, max_usd)
    print("going to make graph1 now at {}".format(datetime.now()))
    fig = px.scatter(in_data,
                     x='order_amount_in_usd',
                     y='calibrated_score')
    return fig


@app.callback(Output(component_id='output-graph2', component_property='figure'),
              [Input(component_id='data-select-team-id', component_property='value'),
               Input(component_id='data-select-max-usd', component_property='value')])
def plot_second_data(team_id, max_usd):
    print("calling query_data at from graph2 at {}".format(datetime.now()))
    in_data = query_data(team_id, max_usd)
    print("going to make graph2 now at {}".format(datetime.now()))
    fig = px.scatter(in_data,
                     x='order_amount_in_usd',
                     y='score')
    return fig


app.layout = html.Div(  # style={'backgroundColor': colors['background']},
    children=[dcc.Input(id='data-select-team-id',
                        value=7625,
                        placeholder='Input Team ID',
                        type='number',
                        min=0,
                        max=1_000_000_000,
                        debounce=True
                        ),
              dcc.Input(id='data-select-max-usd',
                        value=5000,
                        type='number',
                        debounce=True),
              dcc.Graph(id='output-graph'),
              dcc.Graph(id='output-graph2')]
)

if __name__ == '__main__':
    app.run_server(debug=True)
In the past I've stored the results using dcc.Store (see here).
You could structure your app like this:
1. Run the SQL query and store the results with dcc.Store (local or memory, depending on your use case). This only runs once (per app load, interval timer, user button refresh, etc.).
2. Callbacks that generate different cuts of the data in Dash tables or charts then load from the store.
If the results of the query are large (see "Storage Limitations" in the above link), you should save the results to a local flat file such as JSON or CSV and read that each time.
An alternative is to use PostgreSQL and materialized views to make the SQL query cheap (with a trade-off on storage space).
These approaches make the Dash app appear very responsive to the user while allowing the analysis of large data.
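A minimal sketch of that pattern, assuming the sql_query string and conn connection from the question; the component id query-store is illustrative, not part of the original app. The expensive query runs in one callback that fills the store, and both figure callbacks read from the store instead of hitting the database:

import dash
import dash_core_components as dcc
import dash_html_components as html
import pandas as pd
import plotly.express as px
from dash.dependencies import Input, Output

app = dash.Dash(__name__)

app.layout = html.Div([
    dcc.Input(id='data-select-team-id', value=7625, type='number', debounce=True),
    dcc.Input(id='data-select-max-usd', value=5000, type='number', debounce=True),
    dcc.Store(id='query-store'),  # holds the query result once per input change
    dcc.Graph(id='output-graph'),
    dcc.Graph(id='output-graph2'),
])

@app.callback(Output('query-store', 'data'),
              [Input('data-select-team-id', 'value'),
               Input('data-select-max-usd', 'value')])
def run_query(team_id, max_usd):
    # the expensive SQL call happens only here
    df = pd.read_sql(sql_query.format(team_id, max_usd), conn)
    return df.to_dict('records')

@app.callback(Output('output-graph', 'figure'), [Input('query-store', 'data')])
def plot_calibrated(rows):
    return px.scatter(pd.DataFrame(rows), x='order_amount_in_usd', y='calibrated_score')

@app.callback(Output('output-graph2', 'figure'), [Input('query-store', 'data')])
def plot_score(rows):
    return px.scatter(pd.DataFrame(rows), x='order_amount_in_usd', y='score')

With debounce=True on the inputs, the store only refreshes when the user commits a new value, and each chart callback is then cheap.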

How to export IB position data to a data frame?

I am trying to export IB position/account values into a data frame for further processing in Python, but I failed to figure out how to achieve this. Can anyone help?
import pandas as pd
import numpy as np
import time

import ibapi
from ibapi.client import EClient
from ibapi.wrapper import EWrapper

import threading
import sys
import queue

from ibapi.contract import Contract


class MyWrapper(EWrapper):
    ##property
    def updatePortfolio(self, contract: Contract, position: float, marketPrice: float, marketValue: float,
                        averageCost: float, unrealizedPNL: float, realizedPNL: float, accountName: str):
        super().updatePortfolio(contract, position, marketPrice, marketValue, averageCost,
                                unrealizedPNL, realizedPNL, accountName)
        if (len(contract.symbol) < 5) & (contract.secType == 'STK'):
            new_symbol = contract.symbol.zfill(5)
        else:
            new_symbol = contract.symbol
        print(contract.secType, contract.exchange, new_symbol, "Position:", position, "MarketPrice:", marketPrice,
              "MarketValue:", marketValue, "AverageCost:", averageCost, "UnrealizedPNL:", unrealizedPNL,
              "RealizedPNL:", realizedPNL)


accountName = ''
callback = MyWrapper()  # wrapper = MyWrapper()
# Instantiate MyWrapper.callback
tws = EClient(callback)  # app = EClient(wrapper)
# Instantiate EClient and return data to the callback
host = '127.0.0.1'
port = 4001
clientID = 8
tws.connect(host, port, clientID)
print("serverVersion:%s connectionTime:%s" % (tws.serverVersion(), tws.twsConnectionTime()))
print(tws.isConnected())

tws.reqAccountUpdates(1, accountName)
time.sleep(2)
tws.run()

accvalue = pd.DataFrame(callback.updatePortfolio, columns=['Symbol', 'Position', 'MarketPrice', 'MarketValue',
                                                           'AverageCost', 'UnrealisedPnL', 'RealisedPnL'])
# accvalue = callback.updateAccountValue
print('Account: \n' + accvalue)
You are on the right track. You need to set up queue objects inside the wrapper to collect the responses from the client functions you are calling. Then you can do anything you want with the data. Take a look at this blog: https://qoppac.blogspot.com/2017/03/interactive-brokers-native-python-api.html
There is some code there you can reuse to help with the implementation.
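A minimal sketch of that idea, reusing the connection settings from the question; position_queue is an illustrative name, not part of the IB API. The wrapper pushes each updatePortfolio row onto a queue, accountDownloadEnd adds a sentinel, and the main thread drains the queue into a DataFrame:

import queue
import threading

import pandas as pd
from ibapi.client import EClient
from ibapi.wrapper import EWrapper
from ibapi.contract import Contract


class MyWrapper(EWrapper):
    def __init__(self):
        super().__init__()
        self.position_queue = queue.Queue()  # collects one tuple per portfolio row

    def updatePortfolio(self, contract: Contract, position: float, marketPrice: float,
                        marketValue: float, averageCost: float, unrealizedPNL: float,
                        realizedPNL: float, accountName: str):
        self.position_queue.put((contract.symbol, position, marketPrice, marketValue,
                                 averageCost, unrealizedPNL, realizedPNL))

    def accountDownloadEnd(self, accountName: str):
        self.position_queue.put(None)  # sentinel: no more rows in this snapshot


wrapper = MyWrapper()
tws = EClient(wrapper)
tws.connect('127.0.0.1', 4001, 8)

# run the client message loop in the background so the main thread can read the queue
threading.Thread(target=tws.run, daemon=True).start()
tws.reqAccountUpdates(True, '')

rows = []
while True:
    item = wrapper.position_queue.get(timeout=10)
    if item is None:
        break
    rows.append(item)

accvalue = pd.DataFrame(rows, columns=['Symbol', 'Position', 'MarketPrice', 'MarketValue',
                                       'AverageCost', 'UnrealisedPnL', 'RealisedPnL'])
print(accvalue)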
