How to reuse class from another file - python

I import technicals.py into bot.py and want to reuse the variable sl and tp from the class instance process_candles.
If a constant number is given to sl and tp in bot.py, the script is able to work. However, the desired result is to get the variables sl and tp, which are calculated in the process_candles method of the class in technicals.py.
snippet technicals.py as below:
df['PAIR'] = self.pair
decision = NONE
tp = 0
sl = 0
if c[-2]>o[-2]:
if ca[-1]>h[-2]+0.0010:
decision = BUY
tp = ca[-1]+0.010
sl = l[-2]-0.010
elif o[-2]>c[-2]:
if cb[-1]<l[-2]-0.0010:
decision = SELL
tp = cb[-1]-0.010
sl = h[-2]+0.010
else:
decision = NONE
snippet bot.py
def process_pairs(self):
trades_to_make = []
for pair in self.trade_pairs:
if self.timings[pair].ready == True:
self.log_message(f"Ready to trade {pair}")
techs = Technicals(self.settings[pair], self.api, pair, GRANULARITY, log=self.tech_log)
decision = techs.get_trade_decision(self.timings[pair].last_candle)
print ("process decision")
print (decision)
units = decision * self.settings[pair].units
#tp = "154"
#sl = "153"
if units != 0:
trades_to_make.append({'pair': pair, 'units': units,'take_profit':tp, 'stop_loss':sl})
Full script are as below:
technicals.py
import pandas as pd
import numpy as np
from defs import BUY, SELL, NONE
class Technicals():
    """Derives a trade decision (BUY/SELL/NONE) from candle data.

    After ``get_trade_decision()`` (or ``process_candles()``) runs, the
    computed take-profit and stop-loss levels are available on the instance
    as ``self.tp`` and ``self.sl`` so callers (e.g. the bot) can reuse them.
    """

    def __init__(self, settings, api, pair, granularity, log=None):
        self.settings = settings
        self.log = log
        self.api = api
        self.pair = pair
        self.granularity = granularity
        # FIX: expose tp/sl on the instance so callers can read them after a
        # decision is made; 0 means "no trade / not yet computed".
        self.tp = 0
        self.sl = 0

    def log_message(self, msg):
        # Logging is optional; stay silent when no logger was supplied.
        if self.log is not None:
            self.log.logger.debug(msg)

    def fetch_candles(self, row_count, candle_time):
        """Fetch candles for this pair; return the DataFrame, or None on
        error or when the newest candle is not the expected one (stale)."""
        status_code, df = self.api.fetch_candles(self.pair, count=row_count, granularity=self.granularity)
        if df is None:
            self.log_message(f"Error fetching candles for pair:{self.pair} {candle_time}, df None")
            return None
        elif df.iloc[-1].time != candle_time:
            # The latest candle does not match the candle we were told is
            # complete -> the API returned stale data.
            self.log_message(f"Error fetching candles for pair:{self.pair} {candle_time} vs {df.iloc[-1].time}")
            return None
        else:
            return df

    def process_candles(self, df):
        """Compute the trade decision from the candle DataFrame and store
        the matching take-profit/stop-loss levels on the instance.

        Returns BUY, SELL or NONE (from defs).
        """
        # Convert the relevant columns to float arrays once, up front.
        # (Renamed from `open` etc. to avoid shadowing the builtin.)
        o = np.array(df.mid_o, dtype='float')
        h = np.array(df.mid_h, dtype='float')
        l = np.array(df.mid_l, dtype='float')
        c = np.array(df.mid_c, dtype='float')
        ca = np.array(df.ask_c, dtype='float')
        cb = np.array(df.bid_c, dtype='float')
        df['PAIR'] = self.pair
        decision = NONE
        tp = 0
        sl = 0
        if c[-2] > o[-2]:
            # Previous candle bullish: a breakout above its high -> BUY.
            if ca[-1] > h[-2] + 0.0010:
                decision = BUY
                tp = ca[-1] + 0.010
                sl = l[-2] - 0.010
        elif o[-2] > c[-2]:
            # Previous candle bearish: a breakdown below its low -> SELL.
            if cb[-1] < l[-2] - 0.0010:
                decision = SELL
                tp = cb[-1] - 0.010
                sl = h[-2] + 0.010
        else:
            decision = NONE
        # FIX: persist tp/sl so bot.py can read them after the call;
        # previously they were locals and were silently discarded.
        self.tp = tp
        self.sl = sl
        log_cols = ['time','volume','PAIR','bid_c','ask_c','mid_o','mid_h','mid_l','mid_c']
        self.log_message(f"Processed_df\n{df[log_cols].tail(3)}")
        self.log_message(f"Trade_decision:{decision}")
        self.log_message("")
        return decision

    def get_trade_decision(self, candle_time):
        """Fetch fresh candles and return the trade decision.

        Returns NONE when candles could not be fetched.
        """
        max_rows = self.settings.long_ma + 2
        self.log_message("")
        self.log_message(f"get_trade_decision() pair:{self.pair} max_rows:{max_rows}")
        df = self.fetch_candles(max_rows, candle_time)
        if df is not None:
            return self.process_candles(df)
        # FIX: removed debug code that called process_candles(None) here,
        # which raised on every failed fetch.
        return NONE
bot.py
import pprint
import time
from settings import Settings
from log_wrapper import LogWrapper
from timing import Timing
from oanda_api import OandaAPI
from technicals import Technicals
from defs import NONE, BUY, SELL
from trade_manager import TradeManager
# Candle granularity used throughout the bot (one-minute candles).
GRANULARITY = "M1"
# Seconds to sleep between polling iterations of the main loop.
SLEEP = 10.0
class TradingBot():
    """Polls OANDA for newly completed candles and places trades through
    TradeManager based on each pair's Technicals decision."""

    def __init__(self):
        self.log = LogWrapper("Bot")
        self.tech_log = LogWrapper("Technicals")
        self.trade_log = LogWrapper("Trade")
        self.trade_pairs = Settings.get_pairs()
        self.settings = Settings.load_settings()
        self.api = OandaAPI()
        self.trade_manager = TradeManager(self.api, self.settings, self.trade_log)
        # Seed each pair with its last complete candle so we only react to
        # candles that complete after startup.
        self.timings = { p: Timing(self.api.last_complete_candle(p, GRANULARITY)) for p in self.trade_pairs }
        self.log_message(f"Bot started with\n{pprint.pformat(self.settings)}")
        self.log_message(f"Bot Timings\n{pprint.pformat(self.timings)}")

    def log_message(self, msg):
        self.log.logger.debug(msg)

    def update_timings(self):
        """Mark a pair ready exactly when a new complete candle appeared."""
        for pair in self.trade_pairs:
            current = self.api.last_complete_candle(pair, GRANULARITY)
            self.timings[pair].ready = False
            if current > self.timings[pair].last_candle:
                self.timings[pair].ready = True
                self.timings[pair].last_candle = current
                self.log_message(f"{pair} new candle {current}")

    def process_pairs(self):
        """Collect trades for every ready pair and place them in one batch."""
        trades_to_make = []
        for pair in self.trade_pairs:
            if self.timings[pair].ready:
                self.log_message(f"Ready to trade {pair}")
                techs = Technicals(self.settings[pair], self.api, pair, GRANULARITY, log=self.tech_log)
                decision = techs.get_trade_decision(self.timings[pair].last_candle)
                # decision is BUY(1)/SELL(-1)/NONE(0): sign sets direction.
                units = decision * self.settings[pair].units
                # FIX: tp/sl were previously undefined here (NameError).
                # Read the levels the Technicals instance computed during
                # get_trade_decision(); fall back to 0 if not exposed.
                tp = getattr(techs, 'tp', 0)
                sl = getattr(techs, 'sl', 0)
                if units != 0:
                    trades_to_make.append({'pair': pair, 'units': units, 'take_profit': tp, 'stop_loss': sl})
        if len(trades_to_make) > 0:
            self.log_message(f"Placing trades: {trades_to_make}")
            self.trade_manager.place_trades(trades_to_make)

    def run(self):
        """Main loop: refresh timings, trade ready pairs, sleep, repeat."""
        while True:
            self.update_timings()
            self.process_pairs()
            time.sleep(SLEEP)

if __name__ == "__main__":
    b = TradingBot()
    b.run()
defs.py
# OANDA practice-account credentials and endpoint (placeholder values).
API_KEY = "xxxx"
ACCOUNT_ID = "xyz"
OANDA_URL = 'https://api-fxpractice.oanda.com/v3'
# Default headers for authenticated JSON requests to the OANDA REST API.
SECURE_HEADER = {
    'Authorization': f'Bearer {API_KEY}',
    'Content-Type': 'application/json'
}
# Trade-decision codes: multiplied by a unit count, so the sign sets the
# trade direction and NONE yields zero units (no trade).
BUY = 1
SELL = -1
NONE = 0

Instead of just returning the decision, also return the take profit and stop loss values:
return decision, tp, sl
Then you can unpack the tuple in process_pairs:
decision, tp, sl = techs.get_trade_decision(self.timings[pair].last_candle)

You can define your tp and sl as class variables of Technicals.
class Technicals(object):
tp: int = 0
sl: int = 0
and use them within Technicals as:
cls.tp = ... # if you are inside class-method
self.tp = ... # if you are inside instance-method
And in the TradingBot you can then simply import Technicals and use the class variables like:
tp = Technicals.tp # you can use the class
tp = techs.tp # or the instance you already have

Related

Output more than one excel/csv file from a class. OOP. Class Inheritance

I am getting a AttributeError as follows:
self.filtered_df.to_excel(self.output_filepath, index=False)
AttributeError: 'tuple' object has no attribute 'to_excel'
I am inheriting a class for another class I am developing, currently the inheritance allows me to output one excel file, can I change the method in this class to allow me to output more than one excel file in the new class I am developing?
Here is the class, inherited:
class ReportQueryCommand(LogCommand):
    """Performs reports through a ReportQueryStrategy instance.

    It is possible to overwrite existing queries; this means that it is
    possible to perform subsequent filters by launching multiple commands
    having key_input = strategy.name.

    Attributes:
        state: DatasetState containing so-far computed DataFrames.
        strategy: defines how to perform the query.
        filepath: optional. Defines where to save the queried DataFrame.
        key_input: optional (None if df_to_filter is not None).
            Defines a key that allows to access a DataFrame in the State.
        df_to_filter: optional (None if key is not None).
            DataFrame to apply the query on.
    """

    def __init__(self,
                 strategy: ReportQueryStrategy,
                 base_path: Optional[str] = None,
                 key_input: Optional[str] = None,
                 key_output: Optional[str] = None,
                 df_to_filter: Optional[pd.DataFrame] = None,
                 filepath: Optional[str] = None,
                 directory_path: Optional[str] = None,
                 write_output: bool = True):
        super().__init__(base_path=base_path,
                         directory_path=directory_path,
                         copy_before_update=False,
                         write_output=write_output)
        self.state = DatasetState()
        self.strategy = strategy
        self.filepath = filepath
        self.key_input = key_input
        self._key_output = key_output
        self.df_to_filter = df_to_filter
        # Populated by execute(); consumed by write_to_file().
        self.filtered_df = None

    #property  # NOTE(review): presumably a mangled @property decorator -- confirm against the original source
    def _output_file(self) -> str:
        return self.strategy.output_filename

    def write_to_file(self):
        """Write the computed DataFrame to an Excel file.

        Raises ValueError when execute() has not populated filtered_df.
        NOTE(review): if the strategy's transform() returns a tuple (as
        TtestStrategy.transform does), execute() stores that tuple unchanged
        and this line raises AttributeError ('tuple' has no 'to_excel').
        """
        if self.filtered_df is None:
            raise ValueError("Missing computed dataframe")
        self.filtered_df.to_excel(self.output_filepath, index=False)

    #property  # NOTE(review): presumably a mangled @property decorator -- confirm against the original source
    def key_output(self) -> str:
        # first scenario: key was defined
        if self._key_output is not None:
            return self._key_output
        # second scenario: key was not defined, by default,
        # concatenate key_input and strategy
        strategy_output_name = self.strategy.output_name
        if self.key_input is not None:
            return f"{self.key_input}-{strategy_output_name}"
        # WCS: no key defined, just assign the strategy key
        return strategy_output_name

    def execute(self, output_from_previous: Any = None, **kwargs) -> Any:
        """Run the strategy, cache its result in the shared state under
        key_output, then defer to LogCommand.execute() for output handling."""
        self.filtered_df = self.strategy.transform(key=self.key_input,
                                                   df=self.df_to_filter)
        self.state.query_reports[self.key_output] = self.filtered_df
        super().execute(output_from_previous=output_from_previous, **kwargs)
As you can see here the method I need to update is 'def write_to_file(self):'
Here are snippets of the relevant code in development to show where this problem could be occurring. The following bits of code below are relevant; this might need to be updated to allow me to output two or more excel files:
class TtestStrategy(ReportQueryStrategy):
    """Runs per-chemical Welch's t-tests between two lots, applies a
    Benjamini-Hochberg FDR correction, and combines the adjusted p-values
    with Fisher's method.

    NOTE(review): transform() returns a 2-tuple
    (decisions, results_fishers_df), not a single DataFrame -- callers that
    expect a lone DataFrame (e.g. ReportQueryCommand.execute) must unpack it.
    """

    #staticmethod  # NOTE(review): presumably a mangled @staticmethod decorator -- confirm against the original source
    def _strategy_key():
        # Keys under which this strategy's outputs are registered.
        return 't-test', 'fisher-test'

    def __init__(self,
                 query_name: Optional[str] = None,
                 reference_query: str = None,
                 sample_query: str = None,
                 alpha: float = 0.05,
                 chemical_key: str = 'chemical',
                 plate_key: str = 'plate',
                 value_key: str = 'processed_relative_fp',
                 group_column: str = 'Lot',
                 return_pivoted: bool = True):
        # alpha drives both the FDR filter on the decisions table and the
        # mean-FDR threshold used by fishers_method().
        super().__init__(query_name=query_name)
        self.pvalues = []
        self.alpha = alpha
        self.chemical_key, self.plate_key, self.value_key, self.reference_query, self.sample_query = (chemical_key,
                                                                                                      plate_key,
                                                                                                      value_key,
                                                                                                      reference_query,
                                                                                                      sample_query)
        self.group_column = group_column
        self.return_pivoted = return_pivoted

    def fishers_method(self, pvalues) -> tuple[bool, float, float, float]:
        """Combine p-values with Fisher's method.

        Returns (decision, combined_z, combined_p, mean_FDR), where decision
        is True when the combined p-value is at or below the mean-FDR
        threshold (alpha * (k + 1)) / (2 * k) for k usable p-values.
        """
        # Drop NaNs before combining.
        pvalues = [item for item in pvalues if not pd.isnull(item)]
        comb_z, comb_pval = stats.combine_pvalues(pvalues, method="fisher")
        k = len(pvalues)
        mean_FDR = (self.alpha * (k + 1)) / (2 * k)
        if comb_pval > mean_FDR:
            decision = False
        else:
            decision = True
        return decision, comb_z, comb_pval, mean_FDR

    def transform(self,
                  key: Optional[str] = None,
                  df: Optional[pd.DataFrame] = None) -> tuple[Any, DataFrame]:
        """Return (decisions, results_fishers_df).

        decisions: per-chemical test table, filtered to adjusted p < alpha
        and sorted by adjusted p-value. results_fishers_df: one-row summary
        of Fisher's method over all (unfiltered) adjusted p-values.
        """
        counter_nosig = 0
        counter_sig_st_t = 0
        # Per-chemical result stores, later assembled into `decisions`.
        pval_store_st_t = {}
        pval_store_st_t_dec = {}
        pval_store_st_t_val = {}
        pval_store_st_t_val_adj = {}
        pval_store_norm_A = {}
        pval_store_norm_B = {}
        var_store_A = {}
        var_store_B = {}
        pval_store_lev_bart = {}
        decisions = []
        st_pval_arr = []
        df = super().transform(key=key, df=df)
        # Keep only rows belonging to the two lots being compared.
        df = df.loc[(df[self.group_column] == self.reference_query) | (df[self.group_column] == self.sample_query)]
        df_i = df.filter(['plate', 'chemical', 'processed_relative_fp'], axis=1)
        # NOTE(review): pivots `df`, not `df_i`, so the filter() above is
        # immediately overwritten -- confirm which was intended.
        df_i = df.pivot(columns='plate', values='processed_relative_fp', index='chemical')
        df_i.index.name = None
        df_i.columns.name = ''
        # Map plate name -> {'processed_relative_fp': per-chemical Series}.
        plates = {exps: {"processed_relative_fp": series}
                  for exps, series in df_i.to_dict("series").items()}
        chem_order = list(df['chemical'].unique())
        for chem in chem_order:
            if chem != 'empty':
                pool = []
                sample_l = []
                for q in [self.reference_query, self.sample_query]:
                    # Select the plate names belonging to this lot/query.
                    sample = pd.Series([sample for sample in plates])
                    sample = sample[sample.str.contains(q)]
                    for s in sample:
                        sample_l.append(s)
                        record = plates[s]
                        if record['processed_relative_fp'] is not None:
                            rel_fp = record['processed_relative_fp']
                            # Plate-name layout assumed: parts[1]=lot,
                            # parts[2]=cell, parts[4]=replicate -- TODO confirm.
                            lot = s.split("_")[1]
                            cell = s.split('_')[2]
                            rep = s.split("_")[4]
                            val = rel_fp[chem]
                            pool.append({"chems": chem, "lot": lot, "cell": cell, "replicate": rep,
                                         "key": chem + "::" + lot + "::" + rep + "::" + cell, "value": val})
                pool = pd.DataFrame(pool)
                massage = []   # per-lot value lists fed to the tests
                averages = []  # per-lot means, used for the difference measure
                sort_lots = list(pool['lot'].unique())
                for lot in sort_lots:
                    value = list(pool[pool.lot.eq(lot)]['value'].dropna())
                    averages.append(np.mean(np.array(value)))
                    massage.append(value)
                averages = np.array(averages)
                min_ = np.nanmin(averages)
                max_ = np.nanmax(averages)
                pos_min_ = np.where(averages == min_)[0][0]
                pos_max_ = np.where(averages == max_)[0][0]
                perc_diff = (max_ - min_) * 100
                # Welch's t-test (unequal variances) between the two lots.
                fvalue_st_t, pvalue_st_t = stats.ttest_ind(*massage, equal_var=False)
                st_pval_arr.append(pvalue_st_t)
                # NOTE(review): the BH correction is recomputed on the growing
                # p-value list INSIDE the chemical loop, so earlier chemicals'
                # adjusted values shift as later ones are added -- confirm intended.
                st_pval_array = np.asarray(st_pval_arr)
                mask = np.isfinite(st_pval_array)
                st_t_t_pval_adj = np.empty(st_pval_array.shape)
                st_t_t_pval_adj.fill(np.nan)
                rej_st_t, st_t_t_pval_adj[mask], _, _ = sm.stats.multipletests(st_pval_array[mask], method='fdr_bh')
                for v in st_t_t_pval_adj:
                    float(v)  # NOTE(review): result discarded; after this loop `v` holds the LAST adjusted p-value
                    pval_store_st_t_val_adj[chem] = v
                for z in rej_st_t:
                    float(z)  # NOTE(review): no-op; `z` is unused afterwards
                # Shapiro-Wilk normality check needs at least 3 observations.
                if len(massage[0]) >= 3:
                    test_stat_norm_A, pvalue_norm_A = stats.shapiro(np.array(massage[0]))
                    if pvalue_norm_A < 0.05:
                        pval_store_norm_A[chem] = 'No'
                    else:
                        pval_store_norm_A[chem] = 'Yes'
                else:
                    pval_store_norm_A[chem] = 'Not enough data'
                if len(massage[1]) >= 3:
                    test_stat_norm_B, pvalue_norm_B = stats.shapiro(np.array(massage[1]))
                    if pvalue_norm_B < 0.05:
                        pval_store_norm_B[chem] = 'No'
                    else:
                        pval_store_norm_B[chem] = 'Yes'
                else:
                    pval_store_norm_B[chem] = 'not enough data'
                var_stat_A = stats.variation(np.array(massage[0]))
                var_stat_B = stats.variation(np.array(massage[1]))
                # Levene's test for equality of variances between the lots.
                stat_lev_bart, pvalue_lev_bart = stats.levene(*massage, center='mean')
                pval_store_st_t[chem] = pvalue_st_t
                pval_store_st_t_val[chem] = pvalue_st_t
                var_store_A[chem] = var_stat_A
                var_store_B[chem] = var_stat_B
                pval_store_lev_bart[chem] = pvalue_lev_bart
                if pvalue_st_t < 0.05:
                    pval_store_st_t[chem] = 'diff'
                else:
                    pval_store_st_t[chem] = 'same'
                # Decision based on the adjusted p-value `v` left over from
                # the loop above (see NOTE there).
                if v > 0.05:
                    pval_store_st_t_dec[chem] = 'same'
                if v < 0.05 and perc_diff > 0.0:
                    pval_store_st_t_dec[chem] = 'diff'
                    counter_sig_st_t += 1
                else:
                    counter_nosig += 1
        # Assemble the per-chemical stores into one table (chemicals as rows).
        decisions.append(pval_store_st_t)
        decisions.append(pval_store_st_t_dec)
        decisions.append(pval_store_st_t_val)
        decisions.append(pval_store_st_t_val_adj)
        decisions.append(pval_store_norm_A)
        decisions.append(pval_store_norm_B)
        decisions.append(var_store_A)
        decisions.append(var_store_B)
        decisions.append(pval_store_lev_bart)
        decisions = pd.DataFrame(decisions)
        decisions = decisions.T
        decisions.columns = [
            f'Welchs t-test result for lot {self.reference_query} v lot {self.sample_query}',
            'Welchs t-test considering Multi-test Correction',
            'Welchs t-test pvalue unadjusted',
            'Welchs t-test pvalue adjusted',
            f'are lot {self.reference_query} chemicals normally distributed?',
            f'are lot {self.sample_query} chemicals normally distributed?',
            f'lot {self.reference_query} variance',
            f'lot {self.sample_query} variance',
            'levenes statistic']
        # Unfiltered copy kept for the Fisher's-method summary.
        decisions_before_filter = decisions.copy()
        decisions_before_filter.reset_index(inplace=True)
        decisions_before_filter.rename(columns={'index': 'chemicals'}, inplace=True)
        # Keep only chemicals still significant after FDR adjustment.
        decisions = decisions.loc[(decisions['Welchs t-test pvalue adjusted'] < self.alpha)]
        decisions.reset_index(inplace=True)
        decisions.rename(columns={'index': 'chemicals'}, inplace=True)
        decisions.sort_values(by=['Welchs t-test pvalue adjusted'], inplace=True)
        decisions_before_filter = decisions_before_filter.filter(items=['chemicals', 'Welchs t-test pvalue adjusted'])
        decisions_before_filter = decisions_before_filter.sort_values(by=['Welchs t-test pvalue adjusted'])
        results_fishers = self.fishers_method(decisions_before_filter['Welchs t-test pvalue adjusted'].tolist())
        results_fishers_df = pd.DataFrame(results_fishers,
                                          index=['decision',
                                                 'combined z-score',
                                                 'combined p-value',
                                                 'meanFDR'])
        results_fishers_df = results_fishers_df.T
        results_fishers_df['k'] = len(decisions_before_filter.dropna())
        results_fishers_df.insert(loc=0, column='comparison', value=(str(self.reference_query + '-' + 'vs'
                                                                         + '-' + self.sample_query)))
        return decisions, results_fishers_df
See all the information above
The error is telling you that filtered_df is NOT a pandas.DataFrame.
It's a tuple. In fact your TtestStrategy's transform method returns the tuple:
return decisions, results_fishers_df
If you change your code to:
def execute(self, output_from_previous: Any = None, **kwargs) -> Any:
    """Unpack the (decisions, fisher-summary) tuple returned by the
    strategy's transform() and keep only the second DataFrame, so
    write_to_file() receives a real DataFrame instead of a tuple."""
    _ignored, self.filtered_df = self.strategy.transform(key=self.key_input,
                                                         df=self.df_to_filter)
    self.state.query_reports[self.key_output] = self.filtered_df
    super().execute(output_from_previous=output_from_previous, **kwargs)
then you shouldn't see that AttributeError anymore.

Parallelized tasks are not distributed across available CPUs

As shown in the code posted below in section DecoupleGridCellsProfilerLoopsPool, run() is called as many times as there are items in self.__listOfLoopDecouplers, and it works as it is supposed to; I mean, the parallelization is working properly.
as shown in the same section, DecoupleGridCellsProfilerLoopsPool.pool.map returns results and i populate some lists,lets discuss the list names self.__iterablesOfZeroCoverageCell it contains number of objects of type gridCellInnerLoopsIteratorsForZeroCoverageModel.
After that, i created the pool ZeroCoverageCellsProcessingPool with the code as posted below as well.
The problem I am facing is that the parallelized code in ZeroCoverageCellsProcessingPool is very slow, and the visualisation of the CPU tasks shows that there are no processes working in parallel, as shown in the video at the URL posted below.
I was suspicious about pickling issues related to parallelizing the code in ZeroCoverageCellsProcessingPool, so I removed the entire body of run() in ZeroCoverageCellsProcessingPool; however, this showed no change in the behaviour of the parallelized code.
Also, the URL posted below shows how the parallelized method of ZeroCoverageCellsProcessingPool behaves.
given the code posted below, please let me know why the parallelization does not work for code in ZeroCoverageCellsProcessingPool
output url:please click the link
output url
DecoupleGridCellsProfilerLoopsPool
def postTask(self):
    """Fan run() out over all loop decouplers in a process pool, bucket each
    result into the none-zero-coverage / zero-coverage / no-data list, then
    hand the zero-coverage cells to ZeroCoverageCellsProcessingPool."""
    self.__postTaskStartTime = time.time()
    # NOTE(review): the pool is bound to a CLASS attribute; 'proceses_count'
    # spelling matches the key in the config file.
    with Pool(processes=int(config['MULTIPROCESSING']['proceses_count'])) as DecoupleGridCellsProfilerLoopsPool.pool:
        self.__chunkSize = PoolUtils.getChunkSize(lst=self.__listOfLoopDecouplers,cpuCount=int(config['MULTIPROCESSING']['cpu_count']))
        logger.info(f"DecoupleGridCellsProfilerLoopsPool.self.__chunkSize(task per processor):{self.__chunkSize}")
        # run() returns a 3-tuple with exactly one non-None element;
        # bucket each result by which slot is populated.
        for res in DecoupleGridCellsProfilerLoopsPool.pool.map(self.run,self.__listOfLoopDecouplers,chunksize=self.__chunkSize):
            if res[0] is not None and res[1] is None and res[2] is None:
                self.__iterablesOfNoneZeroCoverageCell.append(res[0])
            elif res[1] is not None and res[0] is None and res[2] is None:
                self.__iterablesOfZeroCoverageCell.append(res[1])
            elif res[2] is not None and res[0] is None and res[1] is None:
                self.__iterablesOfNoDataCells.append(res[2])
            else:
                raise Exception (f"WTF.")
        # NOTE(review): join() without a preceding close(); map() has already
        # waited for all tasks, so this call is presumably redundant -- confirm.
        DecoupleGridCellsProfilerLoopsPool.pool.join()
    # Every input must land in exactly one bucket.
    assert len(self.__iterablesOfNoneZeroCoverageCell)+len(self.__iterablesOfZeroCoverageCell)+len(self.__iterablesOfNoDataCells) == len(self.__listOfLoopDecouplers)
    zeroCoverageCellsProcessingPool = ZeroCoverageCellsProcessingPool(self.__devModeForWSAWANTIVer2,self.__iterablesOfZeroCoverageCell)
    zeroCoverageCellsProcessingPool.postTask()
def run(self,param:LoopDecoupler):
    """Classify one grid cell by counting pixels that satisfy the threshold,
    fall below it, or carry the no-data value, and return a 3-tuple
    (noneZeroModel, zeroModel, noDataModel) of which exactly one element is
    expected to be non-None (consumed by postTask())."""
    row = param.getRowValue()
    col = param.getColValue()
    elevationsTIFFWindowedSegmentContents = param.getElevationsTIFFWindowedSegment()
    verticalStep = param.getVericalStep()
    horizontalStep = param.getHorizontalStep()
    mainTIFFImageDatasetContents = param.getMainTIFFImageDatasetContents()
    NDVIsTIFFWindowedSegmentContentsInEPSG25832 = param.getNDVIsTIFFWindowedSegmentContentsInEPSG25832()
    URLOrFilePathForElevationsTIFFDatasetInEPSG25832 = param.getURLOrFilePathForElevationsTIFFDatasetInEPSG25832()
    threshold = param.getThreshold()
    rowsCnt = 0
    colsCnt = 0
    pixelsValuesSatisfyThresholdInTIFFImageDatasetCnt = 0
    # The "do not satisfy" and "no data" counters start at the window size
    # and are DECREMENTED per matching pixel.
    pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt = int(config['window']['width']) * int(config['window']['height'])
    pixelsWithNoDataValueInTIFFImageDatasetCnt = int(config['window']['width']) * int(config['window']['height'])
    _pixelsValuesSatisfyThresholdInNoneZeroCoverageCell = []
    _pixelsValuesDoNotSatisfyThresholdInZeroCoverageCell = []
    _pixelsValuesInNoDataCell = []
    gridCellInnerLoopsIteratorsForNoneZeroCoverageModel = None
    gridCellInnerLoopsIteratorsForZeroCoverageModel = None
    gridCellInnerLoopsIteratorsForNoDataCellsModel = None
    for x in range(row,row + verticalStep):
        if rowsCnt == verticalStep:
            rowsCnt = 0
        for y in range(col,col + horizontalStep):
            if colsCnt == horizontalStep:
                colsCnt = 0
            pixelValue = mainTIFFImageDatasetContents[0][x][y]
            # windowIOUtils.writeContentsToFile(windowIOUtils.getPathToOutputDir()+"/"+config['window']['file_name']+".{0}".format(config['window']['file_extension']), "pixelValue:{0}\n".format(pixelValue))
            if pixelValue >= float(threshold):
                # Pixel satisfies the coverage threshold.
                pixelsValuesSatisfyThresholdInTIFFImageDatasetCnt+=1
                _pixelsValuesSatisfyThresholdInNoneZeroCoverageCell.append(elevationsTIFFWindowedSegmentContents[0][rowsCnt][colsCnt])
            elif ((pixelValue < float(threshold)) and (pixelValue > float(config['TIFF']['no_data_value']))):
                # Valid pixel below the threshold.
                pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt-=1
                _pixelsValuesDoNotSatisfyThresholdInZeroCoverageCell.append(elevationsTIFFWindowedSegmentContents[0][rowsCnt][colsCnt])
            elif (pixelValue <= float(config['TIFF']['no_data_value'])):
                # No-data pixel.
                pixelsWithNoDataValueInTIFFImageDatasetCnt-=1
                _pixelsValuesInNoDataCell.append(elevationsTIFFWindowedSegmentContents[0][rowsCnt][colsCnt])
            else:
                raise Exception ("WTF.Exception: unhandled condition for pixel value: {0}".format(pixelValue))
            # _pixelCoordinatesInWindow.append([x,y])
            colsCnt+=1
        rowsCnt+=1
    '''Grid-cell classfication'''
    if (pixelsValuesSatisfyThresholdInTIFFImageDatasetCnt > 0):
        # At least one pixel above threshold -> none-zero-coverage cell.
        gridCellInnerLoopsIteratorsForNoneZeroCoverageModel = GridCellInnerLoopsIteratorsForNoneZeroCoverageModel()
        gridCellInnerLoopsIteratorsForNoneZeroCoverageModel.setRowValue(row)
        gridCellInnerLoopsIteratorsForNoneZeroCoverageModel.setColValue(col)
        gridCellInnerLoopsIteratorsForNoneZeroCoverageModel.setVericalStep(verticalStep)
        gridCellInnerLoopsIteratorsForNoneZeroCoverageModel.setHorizontalStep(horizontalStep)
        gridCellInnerLoopsIteratorsForNoneZeroCoverageModel.setMainTIFFImageDatasetContents(mainTIFFImageDatasetContents)
        gridCellInnerLoopsIteratorsForNoneZeroCoverageModel.setNDVIsTIFFWindowedSegmentContentsInEPSG25832(NDVIsTIFFWindowedSegmentContentsInEPSG25832)
        gridCellInnerLoopsIteratorsForNoneZeroCoverageModel.setURLOrFilePathForElevationsTIFFDatasetInEPSG25832(URLOrFilePathForElevationsTIFFDatasetInEPSG25832)
        gridCellInnerLoopsIteratorsForNoneZeroCoverageModel.setPixelsValuesSatisfyThresholdInTIFFImageDatasetCnt(pixelsValuesSatisfyThresholdInTIFFImageDatasetCnt)
        gridCellInnerLoopsIteratorsForNoneZeroCoverageModel.setPixelsValuesSatisfyThresholdInNoneZeroCoverageCell(_pixelsValuesSatisfyThresholdInNoneZeroCoverageCell)
    elif (pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt < (int(config['window']['width']) * int(config['window']['height'])) and pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt >= 0):
        # No pixel above threshold but some valid pixels -> zero-coverage cell.
        gridCellInnerLoopsIteratorsForZeroCoverageModel = GridCellInnerLoopsIteratorsForZeroCoverageModel()
        gridCellInnerLoopsIteratorsForZeroCoverageModel.setRowValue(row)
        gridCellInnerLoopsIteratorsForZeroCoverageModel.setColValue(col)
        gridCellInnerLoopsIteratorsForZeroCoverageModel.setVericalStep(verticalStep)
        gridCellInnerLoopsIteratorsForZeroCoverageModel.setHorizontalStep(horizontalStep)
        gridCellInnerLoopsIteratorsForZeroCoverageModel.setMainTIFFImageDatasetContents(mainTIFFImageDatasetContents)
        gridCellInnerLoopsIteratorsForZeroCoverageModel.setNDVIsTIFFWindowedSegmentContentsInEPSG25832(NDVIsTIFFWindowedSegmentContentsInEPSG25832)
        gridCellInnerLoopsIteratorsForZeroCoverageModel.setURLOrFilePathForElevationsTIFFDatasetInEPSG25832(URLOrFilePathForElevationsTIFFDatasetInEPSG25832)
        gridCellInnerLoopsIteratorsForZeroCoverageModel.setPixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt(pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt)
        gridCellInnerLoopsIteratorsForZeroCoverageModel.setPixelsWithNoDataValueInTIFFImageDatasetCnt(pixelsWithNoDataValueInTIFFImageDatasetCnt)
        gridCellInnerLoopsIteratorsForZeroCoverageModel.setPixelsValuesDoNotSatisfyThresholdInZeroCoverageCell(_pixelsValuesDoNotSatisfyThresholdInZeroCoverageCell)
    elif (pixelsWithNoDataValueInTIFFImageDatasetCnt == 0):
        # Entire window is no-data -> no-data cell.
        gridCellInnerLoopsIteratorsForNoDataCellsModel = GridCellInnerLoopsIteratorsForNoDataCellsModel()
        gridCellInnerLoopsIteratorsForNoDataCellsModel.setRowValue(row)
        gridCellInnerLoopsIteratorsForNoDataCellsModel.setColValue(col)
        gridCellInnerLoopsIteratorsForNoDataCellsModel.setVericalStep(verticalStep)
        gridCellInnerLoopsIteratorsForNoDataCellsModel.setHorizontalStep(horizontalStep)
        gridCellInnerLoopsIteratorsForNoDataCellsModel.setMainTIFFImageDatasetContents(mainTIFFImageDatasetContents)
        gridCellInnerLoopsIteratorsForNoDataCellsModel.setNDVIsTIFFWindowedSegmentContentsInEPSG25832(NDVIsTIFFWindowedSegmentContentsInEPSG25832)
        gridCellInnerLoopsIteratorsForNoDataCellsModel.setURLOrFilePathForElevationsTIFFDatasetInEPSG25832(URLOrFilePathForElevationsTIFFDatasetInEPSG25832)
        gridCellInnerLoopsIteratorsForNoDataCellsModel.setPixelsWithNoDataValueInTIFFImageDatasetCnt(pixelsWithNoDataValueInTIFFImageDatasetCnt)
        gridCellInnerLoopsIteratorsForNoDataCellsModel.setPixelsValuesInNoDataCell(_pixelsValuesInNoDataCell)
    if gridCellInnerLoopsIteratorsForZeroCoverageModel is not None:
        gridCellInnerLoopsIteratorsForZeroCoverageModel.setPixelsWithNoDataValueInTIFFImageDatasetCnt(pixelsWithNoDataValueInTIFFImageDatasetCnt)
    else:
        # NOTE(review): as pasted, this else binds to the is-not-None check
        # above, which would raise for every cell that is NOT zero-coverage --
        # yet postTask() expects all three cell kinds to be returned. The
        # original indentation was likely lost in the paste; confirm against
        # the real source before relying on this reconstruction.
        raise Exception (f"WTF.")
    return gridCellInnerLoopsIteratorsForNoneZeroCoverageModel,gridCellInnerLoopsIteratorsForZeroCoverageModel,gridCellInnerLoopsIteratorsForNoDataCellsModel
ZeroCoverageCellsProcessingPool:
def postTask(self):
    """Map run() over all zero-coverage cell models in a process pool and
    collect each component of the per-cell result tuple into its own list.

    NOTE(review): each item in self.__iterables appears to carry full TIFF
    array contents (set in DecoupleGridCellsProfilerLoopsPool.run), so
    pickling the inputs to the worker processes may dominate runtime and
    make the workers appear idle -- confirm by measuring serialization time.
    """
    self.__postTaskStartTime = time.time()
    """to collect results per each row
    """
    resAllCellsForGridCellsClassifications = []
    # NDVIs
    resAllCellsForNDVITIFFDetailsForZeroCoverageCell = []
    # area of coverage
    resAllCellsForAreaOfCoverageForZeroCoverageCell = []
    # interception
    resAllCellsForInterceptionForZeroCoverageCell = []
    # fourCornersOfWindowInEPSG25832
    resAllCellsForFourCornersOfWindowInEPSG25832ZeroCoverageCell = []
    # outFromEPSG25832ToEPSG4326-lists
    resAllCellsForOutFromEPSG25832ToEPSG4326ForZeroCoverageCells = []
    # fourCornersOfWindowsAsGeoJSON
    resAllCellsForFourCornersOfWindowsAsGeoJSONInEPSG4326ForZeroCoverageCell = []
    # calculatedCenterPointInEPSG25832
    resAllCellsForCalculatedCenterPointInEPSG25832ForZeroCoverageCell = []
    # centerPointsOfWindowInImageCoordinatesSystem
    resAllCellsForCenterPointsOfWindowInImageCoordinatesSystemForZeroCoverageCell = []
    # pixelValuesOfCenterPoints
    resAllCellsForPixelValuesOfCenterPointsForZeroCoverageCell = []
    # centerPointOfKeyWindowAsGeoJSONInEPSG4326
    resAllCellsForCenterPointOfKeyWindowAsGeoJSONInEPSG4326ForZeroCoverageCell = []
    # centerPointInEPSG4326
    resAllCellsForCenterPointInEPSG4326ForZeroCoveringCell = []
    # average heights
    resAllCellsForAverageHeightsForZeroCoverageCell = []
    # pixels values
    resAllCellsForPixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCell = []
    # area Of Coverage (NOTE(review): re-initialises the list bound above)
    resAllCellsForAreaOfCoverageForZeroCoverageCell = []
    resAllCellsForPixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt = []
    noneKeyWindowCnt=0
    # center points as string
    centerPointsAsStringForZeroCoverageCell = ""
    # NOTE(review): pool bound to a CLASS attribute, same pattern as
    # DecoupleGridCellsProfilerLoopsPool.postTask.
    with Pool(processes=int(config['MULTIPROCESSING']['proceses_count'])) as ZeroCoverageCellsProcessingPool.pool:
        self.__chunkSize = PoolUtils.getChunkSize(lst=self.__iterables,cpuCount=int(config['MULTIPROCESSING']['cpu_count']))
        logger.info(f"ZeroCoverageCellsProcessingPool.self.__chunkSize(task per processor):{self.__chunkSize}")
        # run() returns a 17-element tuple; unpack each slot into its list.
        for res in ZeroCoverageCellsProcessingPool.pool.map(func=self.run,iterable=self.__iterables,chunksize=self.__chunkSize):
            resAllCellsForGridCellsClassifications.append(res[0])
            # NDVIs
            resAllCellsForNDVITIFFDetailsForZeroCoverageCell.append(res[1])
            # area of coverage
            resAllCellsForAreaOfCoverageForZeroCoverageCell.append(res[2])
            # interception
            resAllCellsForInterceptionForZeroCoverageCell.append(res[3])
            # fourCornersOfWindowInEPSG25832
            resAllCellsForFourCornersOfWindowInEPSG25832ZeroCoverageCell.append(res[4])
            # outFromEPSG25832ToEPSG4326-lists
            resAllCellsForOutFromEPSG25832ToEPSG4326ForZeroCoverageCells.append(res[5])
            # fourCornersOfWindowsAsGeoJSONInEPSG4326
            resAllCellsForFourCornersOfWindowsAsGeoJSONInEPSG4326ForZeroCoverageCell.append(res[6])
            # calculatedCenterPointInEPSG25832
            resAllCellsForCalculatedCenterPointInEPSG25832ForZeroCoverageCell.append(res[7])
            # centerPointsOfWindowInImageCoordinatesSystem
            resAllCellsForCenterPointsOfWindowInImageCoordinatesSystemForZeroCoverageCell.append(res[8])
            # pixelValuesOfCenterPoints
            resAllCellsForPixelValuesOfCenterPointsForZeroCoverageCell.append(res[9])
            # centerPointInEPSG4326
            resAllCellsForCenterPointInEPSG4326ForZeroCoveringCell.append(res[10])
            # centerPointOfKeyWindowAsGeoJSONInEPSG4326
            resAllCellsForCenterPointOfKeyWindowAsGeoJSONInEPSG4326ForZeroCoverageCell.append(res[11])
            # average heights
            resAllCellsForAverageHeightsForZeroCoverageCell.append(res[12])
            # pixels values
            resAllCellsForPixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCell.append(res[13])
            # pixelsValues cnt
            resAllCellsForPixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt.append(res[14])
            noneKeyWindowCnt +=res[15]
            # centerPoints-As-String
            if (res[16] is not None):
                centerPointsAsStringForZeroCoverageCell+=str(res[16])
        # Every processed item must have counted itself exactly once.
        assert noneKeyWindowCnt == len(self.__iterables)
        # NOTE(review): terminate() after close() inside the with-block is
        # redundant; the context manager also terminates on exit -- confirm.
        ZeroCoverageCellsProcessingPool.pool.close()
        ZeroCoverageCellsProcessingPool.pool.terminate()
        ZeroCoverageCellsProcessingPool.pool.join()
    return
def run(self,params:GridCellInnerLoopsIteratorsForZeroCoverageModel):
    """Pool worker for a single zero-coverage grid cell.

    Computes, for the cell addressed by (row, col) in *params*: the NDVI
    value, area-of-coverage percentage, interception, the window's four
    corners in EPSG:25832 and EPSG:4326 (plus GeoJSON), center-point data in
    several coordinate systems, average heights, and bookkeeping counters.
    Returns an 18-element tuple in exactly the order the pool consumer
    unpacks (res[0]..res[16]; the last element is the NaN-NDVI counter).
    Returns None implicitly when *params* is None.
    """
    if params is not None:
        logger.info(f"Processing zero coverage cell #(row{params.getRowValue()},col:{params.getColValue()})")
        row = params.getRowValue()
        col = params.getColValue()
        mainTIFFImageDatasetContents = params.getMainTIFFImageDatasetContents()
        NDVIsTIFFWindowedSegmentContentsInEPSG25832 = params.getNDVIsTIFFWindowedSegmentContentsInEPSG25832()
        URLOrFilePathForElevationsTIFFDatasetInEPSG25832 = params.getURLOrFilePathForElevationsTIFFDatasetInEPSG25832()
        # NOTE(review): dataset handle is never closed; a `with` block would be safer.
        datasetElevationsTIFFInEPSG25832 = rasterio.open(URLOrFilePathForElevationsTIFFDatasetInEPSG25832,'r')
        _pixelsValuesDoNotSatisfyThresholdInZeroCoverageCell = params.getPixelsValuesDoNotSatisfyThresholdInZeroCoverageCell()
        pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt = params.getPixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt()
        countOfNoDataCells = params.getPixelsWithNoDataValueInTIFFImageDatasetCnt()
        outFromEPSG25832ToEPSG4326ForZeroCoverageCells = []
        fourCornersOfWindowsAsGeoJSONInEPSG4326ForZeroCoverageCell = []
        # NOTE(review): this NDVI value is overwritten unconditionally below,
        # so this call is dead work (as are several of the None defaults that
        # follow, and the duplicate list initializations).
        ndviTIFFDetailsForZeroCoverageCell = NDVITIFFDetails(None,None,None).getNDVIValuePer10mX10m()
        """area of coverage per grid-cell"""
        areaOfCoverageForZeroCoverageCell = None
        """"interception"""
        interceptionForZeroCoverageCell = None
        CntOfNDVIsWithNanValueInZeroCoverageCell = 0
        fourCornersOfWindowInEPSG25832ZeroCoverageCell = None
        outFromEPSG25832ToEPSG4326ForZeroCoverageCells = []
        fourCornersOfWindowsAsGeoJSONInEPSG4326ForZeroCoverageCell = []
        calculatedCenterPointInEPSG25832ForZeroCoverageCell = None
        centerPointsOfWindowInImageCoordinatesSystemForZeroCoverageCell = None
        pixelValuesOfCenterPointsOfZeroCoverageCell = None
        centerPointInEPSG4326ForZeroCoveringCell = None
        centerPointOfKeyWindowAsGeoJSONInEPSG4326ForZeroCoverageCell = None
        centerPointsAsStringForZeroCoverageCell = None
        """average heights"""
        averageHeightsForZeroCoverageCell = None
        gridCellClassifiedAs = GridCellClassifier.ZERO_COVERAGE_CELL.value
        # Each zero-coverage cell contributes one none-key window (the caller
        # asserts the sum equals the number of iterables).
        cntOfNoneKeyWindow = 1
        # NDVI sampled per 10m x 10m, window-aligned via integer division.
        ndviTIFFDetailsForZeroCoverageCell = NDVITIFFDetails(ulX=row//int(config['ndvi']['resolution_height']),ulY=col//int(config['ndvi']['resolution_width']),dataset=NDVIsTIFFWindowedSegmentContentsInEPSG25832).getNDVIValuePer10mX10m()
        """area of coverage per grid-cell"""
        areaOfCoverageForZeroCoverageCell = round(AreaOfCoverageDetails(pixelsCount=(int(config['window']['width']) * int(config['window']['height'])) - (pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt + countOfNoDataCells)).getPercentageOfAreaOfCoverage(),2)
        """"interception"""
        if math.isnan(ndviTIFFDetailsForZeroCoverageCell):
            # NDVI unavailable: flag it and fall back to the sentinel value.
            # ndviTIFFDetailsForZeroCoverageCell = 0
            CntOfNDVIsWithNanValueInZeroCoverageCell = 1
            interceptionForZeroCoverageCell = config['sentinel_values']['interception']
        else:
            Indvi = INDVI()
            Ic = Indvi.calcInterception(ndviTIFFDetailsForZeroCoverageCell)
            # NOTE(review): the trailing ,"""...""" on the next three lines
            # makes Pc/Pnc/Inc 2-tuples of (value, string) - hence the [0]
            # indexing below. Plain variables with comments would be clearer.
            Pc=areaOfCoverageForZeroCoverageCell,"""percentage of coverage"""
            Pnc=float((int(config['window']['width'])*int(config['window']['height'])) - areaOfCoverageForZeroCoverageCell),"""percentage of non-coverage"""
            Inc=float(config['interception']['noneCoverage']),"""interception of none-coverage"""
            I=(float(Pc[0])*(Ic))+float((Pnc[0]*Inc[0]))
            interceptionForZeroCoverageCell = round(I,2)
            # NOTE(review): `I != float('nan')` is always True (NaN compares
            # unequal to everything); use math.isnan(I) if that was intended.
            if I != 10 and I != float('nan'):
                logger.error(f"ndviTIFFDetailsForZeroCoverageCell:{ndviTIFFDetailsForZeroCoverageCell}")
                logger.error(f"I:{I}")
        # Window corners in EPSG:25832, then converted corner-by-corner to EPSG:4326.
        fourCornersOfWindowInEPSG25832ZeroCoverageCell = RasterIOPackageUtils.convertFourCornersOfWindowFromImageCoordinatesToCRSByCoordinatesOfCentersOfPixelsMethodFor(row,col,int(config['window']['height']),int(config['window']['width']),datasetElevationsTIFFInEPSG25832)
        for i in range(0,len(fourCornersOfWindowInEPSG25832ZeroCoverageCell)):
            # fourCornersOfKeyWindowInEPSG4326.append(RasterIOPackageUtils.convertCoordsToDestEPSGForDataset(fourCornersOfWindowInEPSG25832[i],datasetElevationsTIFFInEPSG25832,destEPSG=4326))
            outFromEPSG25832ToEPSG4326ForZeroCoverageCells.append(OSGEOUtils.fromEPSG25832ToEPSG4326(fourCornersOfWindowInEPSG25832ZeroCoverageCell[i])) # resultant coords order is in form of lat,lon and it must be in lon,lat.thus, out[1]-lat out[0]-lon
        """fourCornersOfWindowsAsGeoJSONInEPSG4326"""
        fourCornersOfWindowInEPSG4326 = []
        for i in range(0,len(outFromEPSG25832ToEPSG4326ForZeroCoverageCells)):
            # Reorder each corner to ([lat],[lon]) for the GeoJSON builder.
            fourCornersOfWindowInEPSG4326.append(([outFromEPSG25832ToEPSG4326ForZeroCoverageCells[i][1]],[outFromEPSG25832ToEPSG4326ForZeroCoverageCells[i][0]]))
        fourCornersOfWindowsAsGeoJSONInEPSG4326ForZeroCoverageCell.append(jsonUtils.buildFeatureCollectionAsGeoJSONForFourCornersOfKeyWindow(fourCornersOfWindowInEPSG4326[0],fourCornersOfWindowInEPSG4326[1],fourCornersOfWindowInEPSG4326[2],fourCornersOfWindowInEPSG4326[3],areaOfCoverageForZeroCoverageCell))
        # debugIOUtils.writeContentsToFile(debugIOUtils.getPathToOutputDir()+"/"+"NDVIsPer10mX10mForKeyWindow"+config['window']['file_name']+".{0}".format(config['window']['file_extension']),"{0}\n".format(NDVIsPer10mX10mForKeyWindow))
        """
        building geojson object for a point "center-point" to visualize it.
        """
        calculatedCenterPointInEPSG25832ForZeroCoverageCell = MiscUtils.calculateCenterPointsGivenLLOfGridCell(fourCornersOfWindowInEPSG25832ZeroCoverageCell[1])#lower-left corner
        centerPointsOfWindowInImageCoordinatesSystemForZeroCoverageCell = RasterIOPackageUtils.convertFromCRSToImageCoordinatesSystemFor(calculatedCenterPointInEPSG25832ForZeroCoverageCell[0],calculatedCenterPointInEPSG25832ForZeroCoverageCell[1],datasetElevationsTIFFInEPSG25832)
        pixelValuesOfCenterPointsOfZeroCoverageCell = mainTIFFImageDatasetContents[0][centerPointsOfWindowInImageCoordinatesSystemForZeroCoverageCell[0]][centerPointsOfWindowInImageCoordinatesSystemForZeroCoverageCell[1]]
        centerPointInEPSG4326ForZeroCoveringCell = RasterIOPackageUtils.convertCoordsToDestEPSGForDataset(calculatedCenterPointInEPSG25832ForZeroCoverageCell,datasetElevationsTIFFInEPSG25832,destEPSG=4326)
        centerPointOfKeyWindowAsGeoJSONInEPSG4326ForZeroCoverageCell = jsonUtils.buildGeoJSONForPointFor(centerPointInEPSG4326ForZeroCoveringCell)
        """average heights"""
        averageHeightsForZeroCoverageCell = round(MiscUtils.getAverageFor(_pixelsValuesDoNotSatisfyThresholdInZeroCoverageCell),2)
        # NOTE(review): assert runs after the value is already used above, and
        # is stripped under `python -O`.
        assert len(_pixelsValuesDoNotSatisfyThresholdInZeroCoverageCell) > 0 and (len(_pixelsValuesDoNotSatisfyThresholdInZeroCoverageCell) <= (int(config['window']['width']) * int(config['window']['height'])) )
        """the following code block is for assertion only"""
        if self.__devModeForWSAWANTIVer2 == config['DEVELOPMENT_MODE']['debug']:
            assert pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt >= 0 and (pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt < (int(config['window']['width']) * int(config['window']['height'])) )
            assert (pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt+countOfNoDataCells) == (int(config['window']['width']) * int(config['window']['height']))
            print(f"profiling for gridCellClassifiedAs:{gridCellClassifiedAs}....>pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt:{pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt}")
            print(f"profiling for gridCellClassifiedAs:{gridCellClassifiedAs}....>countOfNoDataCells:{countOfNoDataCells}")
            # NOTE(review): this reassignment changes the value returned below
            # whenever debug mode is on (it becomes 0 once the assert passes).
            pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt = (int(config['window']['width']) * int(config['window']['height'])) - (pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt + countOfNoDataCells)
            assert pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt == 0, (f"WTF.")
            print(f"profiling for gridCellClassifiedAs:{gridCellClassifiedAs}....>computed pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt:{pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt}")
            print(f"\n")
        # Center point serialized as "x<TAB>y" text via a WKT POINT round-trip.
        centerPointAsTextInWKTInEPSG3857 = CoordinatesUtils.buildWKTPointFormatForSinglePointFor(calculatedCenterPointInEPSG25832ForZeroCoverageCell[0],calculatedCenterPointInEPSG25832ForZeroCoverageCell[1])
        s = centerPointAsTextInWKTInEPSG3857.replace("POINT","")
        s = s.replace("(","")
        s = s.replace(")","")
        s = s.strip()
        s = s.split(" ")
        centerPointsAsStringForZeroCoverageCell = s[0] + "\t" + s[1] + "\n"
        centerPointsAsStringForZeroCoverageCell = centerPointsAsStringForZeroCoverageCell.replace('\'',"")
        return gridCellClassifiedAs,ndviTIFFDetailsForZeroCoverageCell,areaOfCoverageForZeroCoverageCell,interceptionForZeroCoverageCell,fourCornersOfWindowInEPSG25832ZeroCoverageCell,outFromEPSG25832ToEPSG4326ForZeroCoverageCells,fourCornersOfWindowsAsGeoJSONInEPSG4326ForZeroCoverageCell,calculatedCenterPointInEPSG25832ForZeroCoverageCell,centerPointsOfWindowInImageCoordinatesSystemForZeroCoverageCell,pixelValuesOfCenterPointsOfZeroCoverageCell,centerPointInEPSG4326ForZeroCoveringCell,centerPointOfKeyWindowAsGeoJSONInEPSG4326ForZeroCoverageCell,averageHeightsForZeroCoverageCell,np.array(_pixelsValuesDoNotSatisfyThresholdInZeroCoverageCell).tolist(),pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt,cntOfNoneKeyWindow,centerPointsAsStringForZeroCoverageCell,CntOfNDVIsWithNanValueInZeroCoverageCell

Opening and keeping the window on PySide6

I'm trying to turn a Python program into a Mac app using PySide6. The program takes assignment names, dates, scores, and the percentage each assignment counts toward the total, then calculates the total score. The plain-Python version works fine, but I'm stuck on the conversion: when I run the app, the window pops up and then closes immediately, so I never get a chance to type any input.
The input on my program is a bit strange, but it's because it's adjusted to my school's website.
The original program code:
# Grade-calculator script: reads assignment rows (pasted from the school
# website) from stdin, extracts each assignment's weight and score, and
# prints the weighted total grade.

# Accumulators shared by the parsing steps below.
raw_grade = []
indexes = []
index = 0
score = []
percent = []
fi_score = []
raw = []
add = 0
denom = 0
total = 0

import sys
import re
from operator import itemgetter

print("Enter the data")

def multi_input():
    """Yield lines typed on stdin until a blank line (or Ctrl-C)."""
    try:
        while True:
            data = input()
            if not data:
                break
            yield data
    except KeyboardInterrupt:
        return

data = list(multi_input())

# Every 4th pasted line, starting at index 3, is an assignment row.
for i in range(3, len(data), 4):
    index = i
    indexes.append(i)
raw_grade = [data[id] for id in indexes]

# Rows are tab-separated; column 3 is the weight ("xx%"), column 4 the
# raw score ("earned/possible").
for i in range(len(raw_grade)):
    gc = raw_grade[i].split('\t')
    raw.append(gc)
for i in range(len(raw)):
    percent.append(raw[i][3])
    score.append(raw[i][4])

# Split "earned/possible" pairs and drop empty (ungraded) entries.
for i in range(len(score)):
    use_score = score[i].split('/')
    fi_score.append(use_score)
score = fi_score
score = [value for value in score if value != ['']]
for i in range(len(score)):
    score[i] = [float(v) for v in score[i]]

# Convert each pair to a percentage.
# FIX: restored the '*' that the post had dropped (score[i][1]100).
for i in range(len(score)):
    try:
        score[i] = score[i][0] / score[i][1] * 100
    except ZeroDivisionError:
        result = 0

# Strip '%' signs and convert weights to floats.
# NOTE(review): str.replace cannot raise ZeroDivisionError; these handlers
# never fire, kept only to preserve the original structure.
for i in range(len(percent)):
    try:
        percent[i] = percent[i].replace('%', '')
    except ZeroDivisionError:
        result = 0
percent = [value for value in percent if value != '']
for i in range(len(percent)):
    try:
        percent = [float(v) for v in percent]
    except ZeroDivisionError:
        result = 0

print("graded assignments: ", len(score))

# Calculation part: weighted average of the graded assignments.
# FIX: restored the '*' that the post had dropped (score[i]percent[i]).
for i in range(len(score)):
    add = score[i] * percent[i] / 100
    total = total + add
    denom = denom + percent[i]
total = total / denom * 100
print(f"{total:05.2f}")
What I've worked on so far:
# GUI version of the grade calculator (PySide6 port of the console script).
from PySide6 import QtCore, QtWidgets
from PySide6.QtWidgets import QMainWindow, QWidget, QLabel, QLineEdit,QApplication
from PySide6.QtWidgets import QPushButton
from PySide6.QtCore import QSize

# Module-level accumulators.
# NOTE(review): MainWindow.__init__ re-creates all of these as locals of the
# same names, so these module-level copies are effectively unused there.
raw_grade = []
indexes = []
index = 0
score = []
percent = []
fi_score = []
raw = []
add = 0
denom = 0
total = 0

# NOTE(review): PEP 8 puts all imports together at the top of the file.
import sys
import re
from operator import itemgetter
def multi_input(self):
    """Generator intended to collect user input from QLineEdit widgets.

    NOTE(review): this cannot work as written - each iteration creates a
    brand-new QLabel/QLineEdit, and self.line.text() is read immediately,
    before the Qt event loop has run or the user could type anything, so the
    text is always empty and the loop breaks on the first pass (the generator
    yields nothing). Input collection in a GUI must be event-driven, e.g.
    read the line edit inside a button-click slot.
    """
    try:
        while True:
            self.nameLabel = QLabel(self)
            self.nameLabel.setText('Name:')
            self.line = QLineEdit(self)
            if not self.line.text(): break
            yield self.line.text()
    except KeyboardInterrupt:
        return
class MainWindow(QMainWindow):
    """GUI port of the console grade calculator.

    NOTE(review): all parsing and score math runs inside __init__, i.e.
    before app.exec() starts the event loop, so no user input can exist yet.
    multi_input() yields nothing, the loops run over empty lists, and
    `total / denom` divides by zero - the constructor raises before show()
    can keep the window alive, which is why the window closes immediately.
    The calculation belongs in a slot (e.g. clickMethod), reading input only
    after the user has typed it - TODO confirm this matches the observed
    behavior.
    """
    def __init__(self):
        QMainWindow.__init__(self)
        # Local accumulators (shadow the module-level ones of the same name).
        raw_grade = []
        indexes = []
        index = 0
        score = []
        percent = []
        fi_score = []
        raw = []
        add = 0
        denom = 0
        total = 0
        self.setMinimumSize(QSize(320, 140))
        self.setWindowTitle("PyQt Line Edit example (textfield) - pythonprogramminglanguage.com")
        # First call creates a generator that is never consumed; the second
        # is consumed by list() but yields [] (see multi_input's notes).
        multi_input(self)
        self.line = list(multi_input(self))
        pybutton = QPushButton('OK', self)
        pybutton.clicked.connect(self.clickMethod)
        pybutton.resize(200,32)
        pybutton.move(80, 60)
        # Every 4th collected line (starting at index 3) is an assignment row.
        for i in range(3, len(self.line), 4):
            index = i
            indexes.append(i)
        raw_grade = [self.line[id] for id in indexes]
        # NOTE(review): fi_score is still empty here; in the console version
        # `score = fi_score` came after the split loop - confirm intent.
        score = fi_score
        score = [value for value in score if value != ['']]
        for i in range(len(raw_grade)):
            gc = raw_grade[i].split('\t')
            raw.append(gc)
        for i in range(len(raw)):
            percent.append(raw[i][3])
            score.append(raw[i][4])
        for i in range(len(score)):
            use_score = score[i].split('/')
            fi_score.append(use_score)
        for i in range(len(score)):
            score[i] = [float(i) for i in score[i]]
        for i in range(len(score)):
            try:
                score[i] = score[i][0] / score[i][1] * 100
            except ZeroDivisionError:
                result = 0
        for i in range(len(percent)):
            try:
                percent[i] = percent[i].replace('%', '')
            except ZeroDivisionError:
                result = 0
        percent = [value for value in percent if value != '']
        for i in range(len(percent)):
            try:
                percent = [float(i) for i in percent]
            except ZeroDivisionError:
                result = 0
        print("graded assignments: ", len(score))
        # calculation part
        for i in range(len(score)):
            add = score[i] * percent[i] / 100
            total = total + add
            denom = denom + percent[i]
        # NOTE(review): self.line is rebound from a list to a float here, and
        # .text() is then called on it - AttributeError when reached.
        self.line = total / denom * 100
        print('Your name: ' + self.line.text())
    def clickMethod(self):
        # Reads self.line, which __init__ rebinds to a float - see note above.
        print('Your name: ' + self.line.text())
if __name__ == "__main__":
    # Standard Qt bootstrap: build the window, show it, run the event loop.
    # NOTE(review): if MainWindow.__init__ raises (see the question), this
    # never reaches app.exec() and the process exits immediately - confirm.
    app = QApplication(sys.argv)
    window = MainWindow()
    window.show()
    app.exec()

Python returns local variable referenced before error

I have edited this post based on your comments — thank you :-)
prev_fs_cell is a variable whose value can be either nan or a str (e.g. nan vs. "1,244,234").
When prev_fs_cell is nan, I want to skip self._strat(self, curr_year), but instead I get an error...
## GLOBAL & API ###
# Backtest window boundaries and indicator parameters.
STOCK_START="2015.01.01"  # price-data start
FS_START="2014.01.01"     # financial-statement start (one year earlier)
END="2021.09.01"
SHORT=10                  # short moving-average period - TODO confirm
LONG=60                   # long moving-average period - TODO confirm
CURR_YEAR=2021
API_key=dart_config.API_key          # DART open-API key (defined elsewhere)
DART=OpenDartReader(API_key)
# Financial-statement account names (Korean): current assets, non-current
# assets, current liabilities, non-current liabilities, total assets, total
# liabilities, revenue, operating profit, net income.
account_nm_list=["유동자산","비유동자산","유동부채","비유동부채","자산총계","부채총계","매출액","영업이익","당기순이익"]
# Presumably the pykrx ticker list for the KOSPI market - TODO confirm.
KOSPI_stock_code=stock.get_market_ticker_list(market="KOSPI")
class Strategy():
    """Long-only backtest strategy.

    Buys when RSI, revenue growth, and the current-asset ratio satisfy the
    rules in _strat; sells a quarter of the position on a dead cross with
    weak net-income growth. Trading only happens on days where both the
    current and previous year's financial statements are available.
    """

    def __init__(self):
        # Signal/trade logs, indexed later by date string (Series name).
        self.buy_signal = pd.DataFrame(columns=['open', 'unit'])
        self.sell_signal = pd.DataFrame(columns=['open', 'unit'])
        self.trade = pd.DataFrame(columns=['stock', 'cash'])
        self.position = 0        # shares currently held
        self.unit = 1            # shares bought per buy signal
        self.cash = 100000000    # starting cash (100M KRW)

    def set_data(self, indicator_data, finance_data):
        """Attach daily indicator data (first column = date) and yearly
        financial-statement data (first column = year)."""
        self.indicator_data = indicator_data
        self.indicator_data.rename(columns={self.indicator_data.columns[0]: 'date'}, inplace=True)
        self.indicator_data = self.indicator_data.set_index('date')
        self.indicator_data.index = pd.to_datetime(self.indicator_data.index, format="%Y-%m-%d")
        self.fs_data = finance_data
        self.fs_data.rename(columns={self.fs_data.columns[0]: 'year'}, inplace=True)
        self.fs_data = self.fs_data.set_index('year')
        self.min_year = int(self.fs_data.index.min())  # earliest FS year (int)

    def _buy(self, row):
        """Buy self.unit shares at the day's open price if cash allows."""
        if (row['open'] * self.unit) <= self.cash:
            new_buy_row = pd.Series([row['open'], self.unit], index=self.buy_signal.columns, name=str(row.name))
            # FIX: DataFrame.append() was removed in pandas 2.0; pd.concat of
            # the one-row frame is the equivalent operation.
            self.buy_signal = pd.concat([self.buy_signal, new_buy_row.to_frame().T])
            self.position += self.unit
            stock_amt = self.position * row['open']
            self.cash -= row['open'] * self.unit
            new_trade_row = pd.Series([stock_amt, self.cash], index=self.trade.columns, name=str(row.name))
            self.trade = pd.concat([self.trade, new_trade_row.to_frame().T])

    def _sell(self, row):
        """Sell a quarter of the position (rounded up) at the day's open."""
        sell_units = int(self.position / 4) + 1
        new_sell_row = pd.Series([row['open'], sell_units], index=self.sell_signal.columns, name=str(row.name))
        self.sell_signal = pd.concat([self.sell_signal, new_sell_row.to_frame().T])
        self.position -= sell_units
        stock_amt = self.position * row['open']
        # NOTE(review): cash is credited for self.unit shares rather than the
        # sell_units actually sold - confirm whether this is intended.
        self.cash += row['open'] * self.unit
        new_trade_row = pd.Series([stock_amt, self.cash], index=self.trade.columns, name=str(row.name))
        self.trade = pd.concat([self.trade, new_trade_row.to_frame().T])

    def _strat(self, row, curr_year):
        """Apply the buy/sell rules for one day using curr_year financials.

        FS cells are strings like "1,244,234"; commas are stripped before
        int() conversion.
        """
        fs = self.fs_data
        prev_year = curr_year - 1
        curr_rev = int(fs.loc[curr_year, '매출액'].replace(",", ""))
        prev_rev = int(fs.loc[prev_year, '매출액'].replace(",", ""))
        rev_growth = (curr_rev - prev_rev) / prev_rev
        curr_ni = int(fs.loc[curr_year, '당기순이익'].replace(",", ""))
        prev_ni = int(fs.loc[prev_year, '당기순이익'].replace(",", ""))
        ni_growth = (curr_ni - prev_ni) / prev_ni
        curr_asset = int(fs.loc[curr_year, '유동자산'].replace(",", ""))
        # NOTE(review): non-current assets are read from prev_year while
        # current assets come from curr_year - confirm intent.
        noncurr_asset = int(fs.loc[prev_year, "비유동자산"].replace(",", ""))
        curr_asset_rat = curr_asset / noncurr_asset
        if (row.rsi < 0.65) & (rev_growth > 0.005) & (1.3 < curr_asset_rat):  # & (curr_asset_rat<2.3):
            self._buy(row)
        elif (row.Golden == False):
            if ni_growth <= 0.001:
                if self.position:
                    self._sell(row)
        # a=1

    def run(self):
        """Walk the indicator dates and trade where FS data exists.

        A day is skipped when either the current or the previous year is
        missing from the financial-statement index (or the cell is nan, in
        which case .replace raises AttributeError and is caught below).
        """
        dates = self.indicator_data.index
        fs = self.fs_data
        for date in dates:
            curr_year = date.year
            row = self.indicator_data.loc[date]
            # FIX(UnboundLocalError): initialize BOTH cells before the
            # lookups. Previously, when fs.loc[curr_year] raised,
            # prev_fs_cell was never assigned and the availability check
            # below crashed with "local variable 'prev_fs_cell' referenced
            # before assignment".
            curr_fs_cell = None
            prev_fs_cell = None
            try:
                curr_fs_cell = fs.loc[curr_year].iloc[0].replace(",", "")
                prev_fs_cell = fs.loc[curr_year - 1].iloc[0].replace(",", "")
            except Exception:
                # Missing year or nan cell: leave the cell(s) as None.
                pass
            if curr_fs_cell is None or prev_fs_cell is None:
                # fs data is unavailable for this day - skip it.
                continue
            self._strat(row, curr_year)
# Driver: backtest every KOSPI ticker and record return/Sharpe per code.
# NOTE(review): load_data, get_data, dt and results are not defined in the
# visible code - presumably helpers/globals elsewhere; `results` must be a
# pre-populated mapping of code -> dict or this raises KeyError. TODO confirm.
for code in KOSPI_stock_code:
    FS = load_data("FS_"+code)            # financial statements for ticker
    indi = load_data("indicator_"+code)   # daily indicator sheet for ticker
    today = dt.today()
    strategy = Strategy()
    strategy.set_data(indi, FS)
    strategy.run()
    buy = strategy.buy_signal
    sell = strategy.sell_signal
    unit = strategy.unit
    # Unsold shares, marked to today's open price.
    remain_stock = buy['unit'].sum() - sell['unit'].sum()
    remain = int(get_data(str(code)+".KS", today).iloc[0]['open'])*int(remain_stock)
    total_buy = int((buy['open'].sum()))*unit
    total_sell = int(sell['open'].sum())*unit
    profit = int(remain) + int(total_sell) - int(total_buy)
    if total_buy:
        return_rate = profit / total_buy
        trade = strategy.trade
        total_return_per_day = trade['stock']+trade['cash']
        # NOTE(review): subtracting a rate from a currency amount - the units
        # of residual look inconsistent; confirm the intended Sharpe formula.
        residual = total_return_per_day - return_rate
        sample_var = residual**2 / (trade.shape[0]-1)
        sample_dev = np.sqrt(sample_var)
        Rf=0.01  # risk-free rate
        sharp = (return_rate - Rf) / (sample_dev)
        results[code]['return'] = return_rate
        results[code]['sharp'] = sharp
    else:
        print("No buy due to strict condition")
I have tried to make backtest code for investing into Korean stocks by using financial sheet and stock price sheet and indicator sheet.
And my code return error like the below.
UnboundLocalError Traceback (most recent call last)
<ipython-input-13-caf2b218f860> in <module>()
10 strategy = Strategy()
11 strategy.set_data(indi, FS)
---> 12 strategy.run()
13
14 buy = strategy.buy_signal
<ipython-input-12-2d41db386a22> in run(self)
84 curr_fs_cell = None
85
---> 86 if (curr_fs_cell == None) | (prev_fs_cell == None):
87 #print("fs data is empty")
88 continue
UnboundLocalError: local variable 'prev_fs_cell' referenced before assignment
Actually there is no global variable named prev_fs_cell; it exists only inside that class. Why does this error occur?

PyQt table update crashes easily

I am using PyQt4.
I created a QTableWidget to show running messages.
When my program runs, it crashes within ten minutes.
I tried disabling my table-update function, and then it no longer crashes.
Here is my code — please help me.
class table_work(QThread):
    """Background worker that pushes queued row updates into QTableWidgets.

    NOTE(review): update_table_thread() calls this_table.setItem(...) from
    this worker thread. Qt widgets are not thread-safe and may only be
    touched from the GUI thread - this is the likely cause of the random
    crashes; the updates should be delivered to the main thread via a
    signal/slot connection instead.
    """
    TableDataSignal = pyqtSignal()
    def __init__(self,main_self):
        # QThread.__init__(self)
        super(table_work, self).__init__(main_self)
        self.main_self = main_self
        self.table_update_list = list()   # queue of pending row updates
    #pyqtSlot(dict)
    def update_table_thread_o(self,work):
        # Older single-item variant: writes one row dict into its table.
        try:
            row_pos = work['row_position']
            data = work['data']
            table_key_sort = work['key_sort']   # column order for the row
            this_table = work['table']
            k = 0
            for table_key in table_key_sort:
                this_table.setItem(row_pos, k, QTableWidgetItem(unicode(data[table_key])))
                k += 1
            del work
        except:
            # NOTE(review): bare except silently swallows every error.
            pass
    def update_table_thread(self):
        main_self = self.main_self
        table_work_list = self.table_update_list  # NOTE(review): unused alias
        while 1:
            # Busy loop: rewrites every queued row twice per second; items
            # are never removed from the queue, so work grows unbounded.
            for work in self.table_update_list:
                row_pos = work['row_position']
                data = work['data']
                table_key_sort = work['key_sort']
                this_table = work['table']
                k = 0
                for table_key in table_key_sort:
                    # NOTE(review): direct widget mutation from a QThread.
                    this_table.setItem(row_pos, k, QTableWidgetItem(unicode(data[table_key])))
                    k += 1
            time.sleep(0.5)
    def run(self):
        # QThread entry point: loops forever.
        self.update_table_thread()
This is the method that posts the table-update message:
def update_table(self,address,change_obj=None,tabe_name='auto_card'):
    """Create or update the table row keyed by *address* and emit the row
    data for the GUI to render.

    Parameters:
        address: unique key identifying the row.
        change_obj: optional {'key': ..., 'val': ...} single-field update.
        tabe_name: target table, 'auto_card' (default) or 'auto_bot'.
    """
    sample_dict = dict()       # default contents for a newly created row
    table_key_sort = list()    # column order for the target table
    now_table_sort = 0
    if tabe_name == "auto_bot":
        this_table = self.auto_bot_procc_table
        table_data_list = self.auto_bot_procc_table_list
        now_table_sort = self.auto_bot_now_table_sort
        sample_dict['address'] = address
        sample_dict['money'] = 0
        sample_dict['run_time'] = 0
        sample_dict['item_cd'] = u"60分鐘後"
        sample_dict['stat'] = "Ready..."
        sample_dict['sort'] = now_table_sort
        table_key_sort.append('address')
        table_key_sort.append('money')
        table_key_sort.append('run_time')
        table_key_sort.append('item_cd')
        table_key_sort.append('stat')
    if tabe_name == "auto_card":
        this_table = self.process_table
        table_data_list = self.now_procc_table_list
        now_table_sort = self.now_table_sort
        sample_dict['address'] = address
        sample_dict['done_num'] = 0
        sample_dict['pre_item'] = ""
        sample_dict['procc'] = "Ready"
        sample_dict['mission_procc'] = u"待命.."
        sample_dict['mission_num'] = 0
        sample_dict['mission_line'] = 0
        sample_dict['update_time'] = db.get_time()
        sample_dict['sort'] = now_table_sort
        sample_dict['option'] = ""
        table_key_sort.append('address')
        table_key_sort.append('done_num')
        table_key_sort.append('pre_item')
        table_key_sort.append('mission_procc')
        table_key_sort.append('procc')
        table_key_sort.append('mission_num')
        table_key_sort.append('mission_line')
        table_key_sort.append('update_time')
    if address not in table_data_list:
        # First sighting of this address: insert a fresh row.
        this_table.insertRow(sample_dict['sort'])
        table_data_list[address] = sample_dict
        # NOTE(review): auto_bot_now_table_sort is used here even when
        # tabe_name == 'auto_card' - looks like a copy/paste bug; confirm.
        sample_dict['sort'] = self.auto_bot_now_table_sort
        self.auto_bot_now_table_sort += 1
    acc_data = table_data_list[address]
    if change_obj != None:
        # Apply a single-field update and stamp the modification time.
        key = change_obj['key']
        val = change_obj['val']
        if key in acc_data:
            acc_data[key] = val
        acc_data['update_time'] = db.get_time()
    rowPosition = acc_data['sort']
    temp = dict()
    temp['row_position'] = rowPosition
    temp['data'] = acc_data
    temp['key_sort'] = table_key_sort
    temp['table'] = this_table
    # NOTE(review): TableDataSignal is declared as pyqtSignal() with no
    # argument type (see table_work); emitting a dict through a no-argument
    # signal raises TypeError - it should be declared pyqtSignal(dict).
    # Confirm which class owns the signal this method uses.
    self.TableDataSignal.emit(temp)
    del temp
Some time later, I found the answer.
I was a PyQt newbie; after gaining experience across various projects,
I now understand: if you are not on the main thread, never change the UI directly — always use a signal and emit.
Even if your code happens to work without it, always use signal/emit; otherwise there will be a series of disasters.
Do something like this:
# Illustrative answer code only - the '.....' placeholders make it
# non-runnable as written.
class sample(QtCore.QThread):
    # Typed signal: carries the row data safely to the GUI thread.
    table_data_change = QtCore.pyqtSignal(dict)
    def __init__(self,main_win):
        # NOTE(review): a real QThread subclass must also call
        # super().__init__() here before using Qt machinery.
        self.main = main_win
        # Deliver emitted dicts to the main window's slot (runs on GUI thread).
        self.table_data_change.connect(self.main.change_fn)
    def test(self):
        data = dict()
        data['btn'] = .....   # placeholder: widget/row payload
        data['val'] = .....   # placeholder: value to display
        self.table_data_change.emit(data)
Save your time !

Categories

Resources