Parallelized tasks are not distributed across available CPUs - Python

As shown in the code posted below in section DecoupleGridCellsProfilerLoopsPool, run() is called as many times as there are items in self.__listOfLoopDecouplers, and it works as it is supposed to — I mean the parallelization is working duly.
as shown in the same section, DecoupleGridCellsProfilerLoopsPool.pool.map returns results and i populate some lists,lets discuss the list names self.__iterablesOfZeroCoverageCell it contains number of objects of type gridCellInnerLoopsIteratorsForZeroCoverageModel.
After that, i created the pool ZeroCoverageCellsProcessingPool with the code as posted below as well.
The problem I am facing is that the parallelized code in ZeroCoverageCellsProcessingPool is very slow, and the visualization of the CPU tasks shows that there are no processes working in parallel, as shown in the video contained in the URL posted below.
I was suspicious about pickling issues related to parallelizing the code in ZeroCoverageCellsProcessingPool, so I removed the entire body of run() in ZeroCoverageCellsProcessingPool. However, it showed no change in the behaviour of the parallelized code.
The URL posted below also shows how the parallelized method of ZeroCoverageCellsProcessingPool behaves.
given the code posted below, please let me know why the parallelization does not work for code in ZeroCoverageCellsProcessingPool
output url:please click the link
output url
DecoupleGridCellsProfilerLoopsPool
def postTask(self):
    """Classify every decoupled grid cell in a worker pool, then hand the
    zero-coverage cells to ZeroCoverageCellsProcessingPool.

    Each ``run()`` result is a 3-tuple of which exactly one slot is
    populated: (non-zero-coverage model, zero-coverage model, no-data model).

    Bug fixed: the original called ``pool.join()`` inside the ``with`` block
    without a preceding ``pool.close()`` — ``Pool.join()`` raises
    ``ValueError`` unless the pool was closed or terminated first.  The
    ``with`` statement terminates the pool on exit anyway; the explicit
    close()/join() pair below just waits for the workers deterministically.
    """
    self.__postTaskStartTime = time.time()
    with Pool(processes=int(config['MULTIPROCESSING']['proceses_count'])) as DecoupleGridCellsProfilerLoopsPool.pool:
        self.__chunkSize = PoolUtils.getChunkSize(lst=self.__listOfLoopDecouplers, cpuCount=int(config['MULTIPROCESSING']['cpu_count']))
        logger.info(f"DecoupleGridCellsProfilerLoopsPool.self.__chunkSize(task per processor):{self.__chunkSize}")
        for res in DecoupleGridCellsProfilerLoopsPool.pool.map(self.run, self.__listOfLoopDecouplers, chunksize=self.__chunkSize):
            # Exactly one of the three result slots must be set per cell.
            if res[0] is not None and res[1] is None and res[2] is None:
                self.__iterablesOfNoneZeroCoverageCell.append(res[0])
            elif res[1] is not None and res[0] is None and res[2] is None:
                self.__iterablesOfZeroCoverageCell.append(res[1])
            elif res[2] is not None and res[0] is None and res[1] is None:
                self.__iterablesOfNoDataCells.append(res[2])
            else:
                raise Exception(f"unexpected result tuple from run(): exactly one slot must be non-None, got {res}")
        DecoupleGridCellsProfilerLoopsPool.pool.close()  # required before join()
        DecoupleGridCellsProfilerLoopsPool.pool.join()
    # Every work item must have been classified into exactly one bucket.
    assert len(self.__iterablesOfNoneZeroCoverageCell) + len(self.__iterablesOfZeroCoverageCell) + len(self.__iterablesOfNoDataCells) == len(self.__listOfLoopDecouplers)
    zeroCoverageCellsProcessingPool = ZeroCoverageCellsProcessingPool(self.__devModeForWSAWANTIVer2, self.__iterablesOfZeroCoverageCell)
    zeroCoverageCellsProcessingPool.postTask()
def run(self,param:LoopDecoupler):
# Worker task: classify one grid cell by scanning every pixel of its window
# and counting pixels above threshold / below threshold / no-data, then
# build exactly one of three model objects describing the cell.
# Returns a 3-tuple (noneZeroCoverageModel, zeroCoverageModel, noDataModel)
# of which exactly one entry is expected to be non-None.
# NOTE(review): indentation was lost in this paste; the loop/branch nesting
# below is inferred, not visible — confirm against the real source.
row = param.getRowValue()
col = param.getColValue()
elevationsTIFFWindowedSegmentContents = param.getElevationsTIFFWindowedSegment()
verticalStep = param.getVericalStep()
horizontalStep = param.getHorizontalStep()
mainTIFFImageDatasetContents = param.getMainTIFFImageDatasetContents()
NDVIsTIFFWindowedSegmentContentsInEPSG25832 = param.getNDVIsTIFFWindowedSegmentContentsInEPSG25832()
URLOrFilePathForElevationsTIFFDatasetInEPSG25832 = param.getURLOrFilePathForElevationsTIFFDatasetInEPSG25832()
threshold = param.getThreshold()
# rowsCnt/colsCnt index into the windowed elevation segment, reset per window.
rowsCnt = 0
colsCnt = 0
pixelsValuesSatisfyThresholdInTIFFImageDatasetCnt = 0
# The two "do not satisfy"/"no data" counters start at the full window pixel
# count and are decremented, so they end as (window size - matching pixels).
pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt = int(config['window']['width']) * int(config['window']['height'])
pixelsWithNoDataValueInTIFFImageDatasetCnt = int(config['window']['width']) * int(config['window']['height'])
_pixelsValuesSatisfyThresholdInNoneZeroCoverageCell = []
_pixelsValuesDoNotSatisfyThresholdInZeroCoverageCell = []
_pixelsValuesInNoDataCell = []
gridCellInnerLoopsIteratorsForNoneZeroCoverageModel = None
gridCellInnerLoopsIteratorsForZeroCoverageModel = None
gridCellInnerLoopsIteratorsForNoDataCellsModel = None
for x in range(row,row + verticalStep):
if rowsCnt == verticalStep:
rowsCnt = 0
for y in range(col,col + horizontalStep):
if colsCnt == horizontalStep:
colsCnt = 0
pixelValue = mainTIFFImageDatasetContents[0][x][y]
# windowIOUtils.writeContentsToFile(windowIOUtils.getPathToOutputDir()+"/"+config['window']['file_name']+".{0}".format(config['window']['file_extension']), "pixelValue:{0}\n".format(pixelValue))
# Three mutually exclusive pixel classes: >= threshold, (no_data, threshold),
# and <= no_data sentinel; anything else is a hard error.
if pixelValue >= float(threshold):
pixelsValuesSatisfyThresholdInTIFFImageDatasetCnt+=1
_pixelsValuesSatisfyThresholdInNoneZeroCoverageCell.append(elevationsTIFFWindowedSegmentContents[0][rowsCnt][colsCnt])
elif ((pixelValue < float(threshold)) and (pixelValue > float(config['TIFF']['no_data_value']))):
pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt-=1
_pixelsValuesDoNotSatisfyThresholdInZeroCoverageCell.append(elevationsTIFFWindowedSegmentContents[0][rowsCnt][colsCnt])
elif (pixelValue <= float(config['TIFF']['no_data_value'])):
pixelsWithNoDataValueInTIFFImageDatasetCnt-=1
_pixelsValuesInNoDataCell.append(elevationsTIFFWindowedSegmentContents[0][rowsCnt][colsCnt])
else:
raise Exception ("WTF.Exception: unhandled condition for pixel value: {0}".format(pixelValue))
# _pixelCoordinatesInWindow.append([x,y])
colsCnt+=1
rowsCnt+=1
'''Grid-cell classfication'''
# Build exactly one model object depending on which class dominated the cell.
# NOTE(review): every model carries the full mainTIFFImageDatasetContents
# array; these objects are later pickled to another Pool, which is a likely
# serialization bottleneck — consider passing paths/coordinates instead.
if (pixelsValuesSatisfyThresholdInTIFFImageDatasetCnt > 0):
gridCellInnerLoopsIteratorsForNoneZeroCoverageModel = GridCellInnerLoopsIteratorsForNoneZeroCoverageModel()
gridCellInnerLoopsIteratorsForNoneZeroCoverageModel.setRowValue(row)
gridCellInnerLoopsIteratorsForNoneZeroCoverageModel.setColValue(col)
gridCellInnerLoopsIteratorsForNoneZeroCoverageModel.setVericalStep(verticalStep)
gridCellInnerLoopsIteratorsForNoneZeroCoverageModel.setHorizontalStep(horizontalStep)
gridCellInnerLoopsIteratorsForNoneZeroCoverageModel.setMainTIFFImageDatasetContents(mainTIFFImageDatasetContents)
gridCellInnerLoopsIteratorsForNoneZeroCoverageModel.setNDVIsTIFFWindowedSegmentContentsInEPSG25832(NDVIsTIFFWindowedSegmentContentsInEPSG25832)
gridCellInnerLoopsIteratorsForNoneZeroCoverageModel.setURLOrFilePathForElevationsTIFFDatasetInEPSG25832(URLOrFilePathForElevationsTIFFDatasetInEPSG25832)
gridCellInnerLoopsIteratorsForNoneZeroCoverageModel.setPixelsValuesSatisfyThresholdInTIFFImageDatasetCnt(pixelsValuesSatisfyThresholdInTIFFImageDatasetCnt)
gridCellInnerLoopsIteratorsForNoneZeroCoverageModel.setPixelsValuesSatisfyThresholdInNoneZeroCoverageCell(_pixelsValuesSatisfyThresholdInNoneZeroCoverageCell)
elif (pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt < (int(config['window']['width']) * int(config['window']['height'])) and pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt >= 0):
gridCellInnerLoopsIteratorsForZeroCoverageModel = GridCellInnerLoopsIteratorsForZeroCoverageModel()
gridCellInnerLoopsIteratorsForZeroCoverageModel.setRowValue(row)
gridCellInnerLoopsIteratorsForZeroCoverageModel.setColValue(col)
gridCellInnerLoopsIteratorsForZeroCoverageModel.setVericalStep(verticalStep)
gridCellInnerLoopsIteratorsForZeroCoverageModel.setHorizontalStep(horizontalStep)
gridCellInnerLoopsIteratorsForZeroCoverageModel.setMainTIFFImageDatasetContents(mainTIFFImageDatasetContents)
gridCellInnerLoopsIteratorsForZeroCoverageModel.setNDVIsTIFFWindowedSegmentContentsInEPSG25832(NDVIsTIFFWindowedSegmentContentsInEPSG25832)
gridCellInnerLoopsIteratorsForZeroCoverageModel.setURLOrFilePathForElevationsTIFFDatasetInEPSG25832(URLOrFilePathForElevationsTIFFDatasetInEPSG25832)
gridCellInnerLoopsIteratorsForZeroCoverageModel.setPixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt(pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt)
gridCellInnerLoopsIteratorsForZeroCoverageModel.setPixelsWithNoDataValueInTIFFImageDatasetCnt(pixelsWithNoDataValueInTIFFImageDatasetCnt)
gridCellInnerLoopsIteratorsForZeroCoverageModel.setPixelsValuesDoNotSatisfyThresholdInZeroCoverageCell(_pixelsValuesDoNotSatisfyThresholdInZeroCoverageCell)
elif (pixelsWithNoDataValueInTIFFImageDatasetCnt == 0):
gridCellInnerLoopsIteratorsForNoDataCellsModel = GridCellInnerLoopsIteratorsForNoDataCellsModel()
gridCellInnerLoopsIteratorsForNoDataCellsModel.setRowValue(row)
gridCellInnerLoopsIteratorsForNoDataCellsModel.setColValue(col)
gridCellInnerLoopsIteratorsForNoDataCellsModel.setVericalStep(verticalStep)
gridCellInnerLoopsIteratorsForNoDataCellsModel.setHorizontalStep(horizontalStep)
gridCellInnerLoopsIteratorsForNoDataCellsModel.setMainTIFFImageDatasetContents(mainTIFFImageDatasetContents)
gridCellInnerLoopsIteratorsForNoDataCellsModel.setNDVIsTIFFWindowedSegmentContentsInEPSG25832(NDVIsTIFFWindowedSegmentContentsInEPSG25832)
gridCellInnerLoopsIteratorsForNoDataCellsModel.setURLOrFilePathForElevationsTIFFDatasetInEPSG25832(URLOrFilePathForElevationsTIFFDatasetInEPSG25832)
gridCellInnerLoopsIteratorsForNoDataCellsModel.setPixelsWithNoDataValueInTIFFImageDatasetCnt(pixelsWithNoDataValueInTIFFImageDatasetCnt)
gridCellInnerLoopsIteratorsForNoDataCellsModel.setPixelsValuesInNoDataCell(_pixelsValuesInNoDataCell)
# NOTE(review): as pasted, this if/else appears to follow the whole
# elif-chain above, which would raise for any cell that is not
# zero-coverage; presumably it is nested inside the zero-coverage branch —
# confirm against the original (properly indented) source.
if gridCellInnerLoopsIteratorsForZeroCoverageModel is not None:
gridCellInnerLoopsIteratorsForZeroCoverageModel.setPixelsWithNoDataValueInTIFFImageDatasetCnt(pixelsWithNoDataValueInTIFFImageDatasetCnt)
else:
raise Exception (f"WTF.")
return gridCellInnerLoopsIteratorsForNoneZeroCoverageModel,gridCellInnerLoopsIteratorsForZeroCoverageModel,gridCellInnerLoopsIteratorsForNoDataCellsModel
ZeroCoverageCellsProcessingPool:
def postTask(self):
    """Fan the zero-coverage cells out to a worker pool and collect the
    17-field result tuple of each cell into per-field lists.

    Fixes relative to the original:
      * ``resAllCellsForAreaOfCoverageForZeroCoverageCell`` was initialised
        twice; the duplicate assignment was removed.
      * The pool was shut down with ``close()`` + ``terminate()`` + ``join()``
        inside the ``with`` block; ``terminate()`` immediately after
        ``close()`` is redundant (and the ``with`` statement terminates the
        pool again on exit), so only ``close()``/``join()`` remain.

    NOTE(review): the reported lack of parallel CPU usage in this pool is
    most plausibly caused by the size of each work item — every
    GridCellInnerLoopsIteratorsForZeroCoverageModel carries the full TIFF
    dataset contents, so the parent process spends its time pickling
    arguments and unpickling results while workers sit mostly idle.
    Passing file paths / window coordinates instead of raw arrays should
    restore real parallelism — TODO confirm with a profiler.
    """
    self.__postTaskStartTime = time.time()
    # Per-field accumulators; res[0]..res[16] below map positionally onto these.
    resAllCellsForGridCellsClassifications = []
    resAllCellsForNDVITIFFDetailsForZeroCoverageCell = []
    resAllCellsForAreaOfCoverageForZeroCoverageCell = []
    resAllCellsForInterceptionForZeroCoverageCell = []
    resAllCellsForFourCornersOfWindowInEPSG25832ZeroCoverageCell = []
    resAllCellsForOutFromEPSG25832ToEPSG4326ForZeroCoverageCells = []
    resAllCellsForFourCornersOfWindowsAsGeoJSONInEPSG4326ForZeroCoverageCell = []
    resAllCellsForCalculatedCenterPointInEPSG25832ForZeroCoverageCell = []
    resAllCellsForCenterPointsOfWindowInImageCoordinatesSystemForZeroCoverageCell = []
    resAllCellsForPixelValuesOfCenterPointsForZeroCoverageCell = []
    resAllCellsForCenterPointOfKeyWindowAsGeoJSONInEPSG4326ForZeroCoverageCell = []
    resAllCellsForCenterPointInEPSG4326ForZeroCoveringCell = []
    resAllCellsForAverageHeightsForZeroCoverageCell = []
    resAllCellsForPixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCell = []
    resAllCellsForPixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt = []
    noneKeyWindowCnt = 0
    # Concatenated "x\ty\n" center-point strings for all cells.
    centerPointsAsStringForZeroCoverageCell = ""
    with Pool(processes=int(config['MULTIPROCESSING']['proceses_count'])) as ZeroCoverageCellsProcessingPool.pool:
        self.__chunkSize = PoolUtils.getChunkSize(lst=self.__iterables, cpuCount=int(config['MULTIPROCESSING']['cpu_count']))
        logger.info(f"ZeroCoverageCellsProcessingPool.self.__chunkSize(task per processor):{self.__chunkSize}")
        for res in ZeroCoverageCellsProcessingPool.pool.map(func=self.run, iterable=self.__iterables, chunksize=self.__chunkSize):
            resAllCellsForGridCellsClassifications.append(res[0])
            resAllCellsForNDVITIFFDetailsForZeroCoverageCell.append(res[1])
            resAllCellsForAreaOfCoverageForZeroCoverageCell.append(res[2])
            resAllCellsForInterceptionForZeroCoverageCell.append(res[3])
            resAllCellsForFourCornersOfWindowInEPSG25832ZeroCoverageCell.append(res[4])
            resAllCellsForOutFromEPSG25832ToEPSG4326ForZeroCoverageCells.append(res[5])
            resAllCellsForFourCornersOfWindowsAsGeoJSONInEPSG4326ForZeroCoverageCell.append(res[6])
            resAllCellsForCalculatedCenterPointInEPSG25832ForZeroCoverageCell.append(res[7])
            resAllCellsForCenterPointsOfWindowInImageCoordinatesSystemForZeroCoverageCell.append(res[8])
            resAllCellsForPixelValuesOfCenterPointsForZeroCoverageCell.append(res[9])
            resAllCellsForCenterPointInEPSG4326ForZeroCoveringCell.append(res[10])
            resAllCellsForCenterPointOfKeyWindowAsGeoJSONInEPSG4326ForZeroCoverageCell.append(res[11])
            resAllCellsForAverageHeightsForZeroCoverageCell.append(res[12])
            resAllCellsForPixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCell.append(res[13])
            resAllCellsForPixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt.append(res[14])
            noneKeyWindowCnt += res[15]
            if res[16] is not None:
                centerPointsAsStringForZeroCoverageCell += str(res[16])
        # Every submitted cell contributes exactly 1 to noneKeyWindowCnt.
        assert noneKeyWindowCnt == len(self.__iterables)
        ZeroCoverageCellsProcessingPool.pool.close()
        ZeroCoverageCellsProcessingPool.pool.join()
    return
def run(self,params:GridCellInnerLoopsIteratorsForZeroCoverageModel):
# Worker task for one zero-coverage cell: compute NDVI, coverage area,
# interception, corner/center coordinates in several CRSs, average heights,
# and return them as a 17-element tuple consumed positionally by postTask().
# NOTE(review): indentation was lost in this paste; everything below is
# presumably inside the `if params is not None:` guard — confirm.
# NOTE(review): params carries full TIFF arrays; pickling them per task is
# the likely reason the pool shows no parallel CPU usage.
if params is not None:
logger.info(f"Processing zero coverage cell #(row{params.getRowValue()},col:{params.getColValue()})")
row = params.getRowValue()
col = params.getColValue()
mainTIFFImageDatasetContents = params.getMainTIFFImageDatasetContents()
NDVIsTIFFWindowedSegmentContentsInEPSG25832 = params.getNDVIsTIFFWindowedSegmentContentsInEPSG25832()
URLOrFilePathForElevationsTIFFDatasetInEPSG25832 = params.getURLOrFilePathForElevationsTIFFDatasetInEPSG25832()
# NOTE(review): this dataset is opened but never closed — leaks one file
# handle per task; consider `with rasterio.open(...) as ds:`.
datasetElevationsTIFFInEPSG25832 = rasterio.open(URLOrFilePathForElevationsTIFFDatasetInEPSG25832,'r')
_pixelsValuesDoNotSatisfyThresholdInZeroCoverageCell = params.getPixelsValuesDoNotSatisfyThresholdInZeroCoverageCell()
pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt = params.getPixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt()
countOfNoDataCells = params.getPixelsWithNoDataValueInTIFFImageDatasetCnt()
outFromEPSG25832ToEPSG4326ForZeroCoverageCells = []
fourCornersOfWindowsAsGeoJSONInEPSG4326ForZeroCoverageCell = []
# NOTE(review): the value computed here is overwritten unconditionally a
# few lines below — this call is dead work and can be removed.
ndviTIFFDetailsForZeroCoverageCell = NDVITIFFDetails(None,None,None).getNDVIValuePer10mX10m()
"""area of coverage per grid-cell"""
areaOfCoverageForZeroCoverageCell = None
""""interception"""
interceptionForZeroCoverageCell = None
CntOfNDVIsWithNanValueInZeroCoverageCell = 0
fourCornersOfWindowInEPSG25832ZeroCoverageCell = None
# NOTE(review): these two lists were already initialised above — duplicates.
outFromEPSG25832ToEPSG4326ForZeroCoverageCells = []
fourCornersOfWindowsAsGeoJSONInEPSG4326ForZeroCoverageCell = []
calculatedCenterPointInEPSG25832ForZeroCoverageCell = None
centerPointsOfWindowInImageCoordinatesSystemForZeroCoverageCell = None
pixelValuesOfCenterPointsOfZeroCoverageCell = None
centerPointInEPSG4326ForZeroCoveringCell = None
centerPointOfKeyWindowAsGeoJSONInEPSG4326ForZeroCoverageCell = None
centerPointsAsStringForZeroCoverageCell = None
"""average heights"""
averageHeightsForZeroCoverageCell = None
gridCellClassifiedAs = GridCellClassifier.ZERO_COVERAGE_CELL.value
cntOfNoneKeyWindow = 1
ndviTIFFDetailsForZeroCoverageCell = NDVITIFFDetails(ulX=row//int(config['ndvi']['resolution_height']),ulY=col//int(config['ndvi']['resolution_width']),dataset=NDVIsTIFFWindowedSegmentContentsInEPSG25832).getNDVIValuePer10mX10m()
"""area of coverage per grid-cell"""
areaOfCoverageForZeroCoverageCell = round(AreaOfCoverageDetails(pixelsCount=(int(config['window']['width']) * int(config['window']['height'])) - (pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt + countOfNoDataCells)).getPercentageOfAreaOfCoverage(),2)
""""interception"""
if math.isnan(ndviTIFFDetailsForZeroCoverageCell):
# ndviTIFFDetailsForZeroCoverageCell = 0
CntOfNDVIsWithNanValueInZeroCoverageCell = 1
interceptionForZeroCoverageCell = config['sentinel_values']['interception']
else:
Indvi = INDVI()
Ic = Indvi.calcInterception(ndviTIFFDetailsForZeroCoverageCell)
# NOTE(review): the trailing `,"""..."""` on the next three lines makes
# Pc/Pnc/Inc 2-tuples of (value, doc-string) rather than scalars — that is
# why the `[0]` indexing is needed below. Intentional? Confirm.
Pc=areaOfCoverageForZeroCoverageCell,"""percentage of coverage"""
Pnc=float((int(config['window']['width'])*int(config['window']['height'])) - areaOfCoverageForZeroCoverageCell),"""percentage of non-coverage"""
Inc=float(config['interception']['noneCoverage']),"""interception of none-coverage"""
I=(float(Pc[0])*(Ic))+float((Pnc[0]*Inc[0]))
interceptionForZeroCoverageCell = round(I,2)
# NOTE(review): `I != float('nan')` is always True (NaN compares unequal
# to everything, including itself); use math.isnan(I) if NaN is meant.
if I != 10 and I != float('nan'):
logger.error(f"ndviTIFFDetailsForZeroCoverageCell:{ndviTIFFDetailsForZeroCoverageCell}")
logger.error(f"I:{I}")
fourCornersOfWindowInEPSG25832ZeroCoverageCell = RasterIOPackageUtils.convertFourCornersOfWindowFromImageCoordinatesToCRSByCoordinatesOfCentersOfPixelsMethodFor(row,col,int(config['window']['height']),int(config['window']['width']),datasetElevationsTIFFInEPSG25832)
for i in range(0,len(fourCornersOfWindowInEPSG25832ZeroCoverageCell)):
# fourCornersOfKeyWindowInEPSG4326.append(RasterIOPackageUtils.convertCoordsToDestEPSGForDataset(fourCornersOfWindowInEPSG25832[i],datasetElevationsTIFFInEPSG25832,destEPSG=4326))
outFromEPSG25832ToEPSG4326ForZeroCoverageCells.append(OSGEOUtils.fromEPSG25832ToEPSG4326(fourCornersOfWindowInEPSG25832ZeroCoverageCell[i])) # resultant coords order is in form of lat,lon and it must be in lon,lat.thus, out[1]-lat out[0]-lon
"""fourCornersOfWindowsAsGeoJSONInEPSG4326"""
fourCornersOfWindowInEPSG4326 = []
for i in range(0,len(outFromEPSG25832ToEPSG4326ForZeroCoverageCells)):
fourCornersOfWindowInEPSG4326.append(([outFromEPSG25832ToEPSG4326ForZeroCoverageCells[i][1]],[outFromEPSG25832ToEPSG4326ForZeroCoverageCells[i][0]]))
fourCornersOfWindowsAsGeoJSONInEPSG4326ForZeroCoverageCell.append(jsonUtils.buildFeatureCollectionAsGeoJSONForFourCornersOfKeyWindow(fourCornersOfWindowInEPSG4326[0],fourCornersOfWindowInEPSG4326[1],fourCornersOfWindowInEPSG4326[2],fourCornersOfWindowInEPSG4326[3],areaOfCoverageForZeroCoverageCell))
# debugIOUtils.writeContentsToFile(debugIOUtils.getPathToOutputDir()+"/"+"NDVIsPer10mX10mForKeyWindow"+config['window']['file_name']+".{0}".format(config['window']['file_extension']),"{0}\n".format(NDVIsPer10mX10mForKeyWindow))
"""
building geojson object for a point "center-point" to visualize it.
"""
calculatedCenterPointInEPSG25832ForZeroCoverageCell = MiscUtils.calculateCenterPointsGivenLLOfGridCell(fourCornersOfWindowInEPSG25832ZeroCoverageCell[1])#lower-left corner
centerPointsOfWindowInImageCoordinatesSystemForZeroCoverageCell = RasterIOPackageUtils.convertFromCRSToImageCoordinatesSystemFor(calculatedCenterPointInEPSG25832ForZeroCoverageCell[0],calculatedCenterPointInEPSG25832ForZeroCoverageCell[1],datasetElevationsTIFFInEPSG25832)
pixelValuesOfCenterPointsOfZeroCoverageCell = mainTIFFImageDatasetContents[0][centerPointsOfWindowInImageCoordinatesSystemForZeroCoverageCell[0]][centerPointsOfWindowInImageCoordinatesSystemForZeroCoverageCell[1]]
centerPointInEPSG4326ForZeroCoveringCell = RasterIOPackageUtils.convertCoordsToDestEPSGForDataset(calculatedCenterPointInEPSG25832ForZeroCoverageCell,datasetElevationsTIFFInEPSG25832,destEPSG=4326)
centerPointOfKeyWindowAsGeoJSONInEPSG4326ForZeroCoverageCell = jsonUtils.buildGeoJSONForPointFor(centerPointInEPSG4326ForZeroCoveringCell)
"""average heights"""
averageHeightsForZeroCoverageCell = round(MiscUtils.getAverageFor(_pixelsValuesDoNotSatisfyThresholdInZeroCoverageCell),2)
assert len(_pixelsValuesDoNotSatisfyThresholdInZeroCoverageCell) > 0 and (len(_pixelsValuesDoNotSatisfyThresholdInZeroCoverageCell) <= (int(config['window']['width']) * int(config['window']['height'])) )
"""the following code block is for assertion only"""
if self.__devModeForWSAWANTIVer2 == config['DEVELOPMENT_MODE']['debug']:
assert pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt >= 0 and (pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt < (int(config['window']['width']) * int(config['window']['height'])) )
assert (pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt+countOfNoDataCells) == (int(config['window']['width']) * int(config['window']['height']))
print(f"profiling for gridCellClassifiedAs:{gridCellClassifiedAs}....>pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt:{pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt}")
print(f"profiling for gridCellClassifiedAs:{gridCellClassifiedAs}....>countOfNoDataCells:{countOfNoDataCells}")
# NOTE(review): this debug-only reassignment mutates the value that is
# returned below — the returned count differs between debug and non-debug
# runs. Confirm that is intended.
pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt = (int(config['window']['width']) * int(config['window']['height'])) - (pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt + countOfNoDataCells)
assert pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt == 0, (f"WTF.")
print(f"profiling for gridCellClassifiedAs:{gridCellClassifiedAs}....>computed pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt:{pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt}")
print(f"\n")
# Serialize the center point as "x<TAB>y<NEWLINE>" for bulk output.
centerPointAsTextInWKTInEPSG3857 = CoordinatesUtils.buildWKTPointFormatForSinglePointFor(calculatedCenterPointInEPSG25832ForZeroCoverageCell[0],calculatedCenterPointInEPSG25832ForZeroCoverageCell[1])
s = centerPointAsTextInWKTInEPSG3857.replace("POINT","")
s = s.replace("(","")
s = s.replace(")","")
s = s.strip()
s = s.split(" ")
centerPointsAsStringForZeroCoverageCell = s[0] + "\t" + s[1] + "\n"
centerPointsAsStringForZeroCoverageCell = centerPointsAsStringForZeroCoverageCell.replace('\'',"")
return gridCellClassifiedAs,ndviTIFFDetailsForZeroCoverageCell,areaOfCoverageForZeroCoverageCell,interceptionForZeroCoverageCell,fourCornersOfWindowInEPSG25832ZeroCoverageCell,outFromEPSG25832ToEPSG4326ForZeroCoverageCells,fourCornersOfWindowsAsGeoJSONInEPSG4326ForZeroCoverageCell,calculatedCenterPointInEPSG25832ForZeroCoverageCell,centerPointsOfWindowInImageCoordinatesSystemForZeroCoverageCell,pixelValuesOfCenterPointsOfZeroCoverageCell,centerPointInEPSG4326ForZeroCoveringCell,centerPointOfKeyWindowAsGeoJSONInEPSG4326ForZeroCoverageCell,averageHeightsForZeroCoverageCell,np.array(_pixelsValuesDoNotSatisfyThresholdInZeroCoverageCell).tolist(),pixelsValuesDoNotSatisfyThresholdInTIFFImageDatasetCnt,cntOfNoneKeyWindow,centerPointsAsStringForZeroCoverageCell,CntOfNDVIsWithNanValueInZeroCoverageCell

Related

How to optimize the finding of divergences between 2 signals

I am trying to create an indicator that will find all the divergences between 2 signals.
The output of the function so far looks like this
But the problem is that it is painfully slow when I try to use it with long signals. Could any of you help me make it faster, if possible?
My code:
def find_divergence(price: pd.Series, indicator: pd.Series, width_divergence: int, order: int):
    """Find regular ("Bullish") and hidden ("Berish") divergences between a
    price series and an indicator series.

    Returns a DataFrame indexed like *price* with four columns per
    (width_divergence, order) pair: the divergence index pairs and boolean
    flags marking where each divergence type was detected.

    Bug fixed: the nested helper was annotated with ``np.numarray`` — NumPy
    has no ``numarray`` attribute (the old numarray compatibility layer was
    removed long ago), so evaluating the annotation raised AttributeError.
    The annotation is now ``np.ndarray``.

    NOTE(review): building every prefix of *price* and running argrelextrema
    on each prefix is O(n^2) in both time and memory, and ThreadPoolExecutor
    gives no speed-up for this CPU-bound NumPy work — the reported slowness
    most likely comes from here. Verify with a profiler before restructuring.
    """
    div = pd.DataFrame(index=range(price.size), columns=[
        f"Bullish_{width_divergence}_{order}",
        f"Berish_{width_divergence}_{order}"
    ])
    div[f'Bullish_idx_{width_divergence}_{order}'] = False
    div[f'Berish_idx_{width_divergence}_{order}'] = False

    def calc_argrelextrema(price_: np.ndarray):
        # Indices of local minima (plateaus included via less_equal).
        return argrelextrema(price_, np.less_equal, order=order)[0]

    # Every prefix of the price series, so lows are detected "as of" each bar.
    price_ranges = []
    for i in range(len(price)):
        price_ranges.append(price.values[0:i + 1])
    f = []
    with ThreadPoolExecutor(max_workers=16) as exe:
        for i in price_ranges:
            f.append(exe.submit(calc_argrelextrema, i))
    prices_lows = SortedSet()
    for r in concurrent.futures.as_completed(f):
        data = r.result()
        # Walk newest-to-oldest and stop at the first already-seen low.
        for d in reversed(data):
            if d not in prices_lows:
                prices_lows.add(d)
            else:
                break
    price_lows_idx = pd.Series(prices_lows)
    for idx_1 in range(price_lows_idx.size):
        min_price = price[price_lows_idx[idx_1]]
        min_indicator = indicator[price_lows_idx[idx_1]]
        # Compare each low to the lows within the divergence window after it.
        for idx_2 in range(idx_1 + 1, idx_1 + width_divergence):
            if idx_2 >= price_lows_idx.size:
                break
            if price[price_lows_idx[idx_2]] < min_price:
                min_price = price[price_lows_idx[idx_2]]
            if indicator[price_lows_idx[idx_2]] < min_indicator:
                min_indicator = indicator[price_lows_idx[idx_2]]
            consistency_price_rd = min_price == price[price_lows_idx[idx_2]]
            consistency_indicator_rd = min_indicator == indicator[price_lows_idx[idx_1]]
            consistency_price_hd = min_price == price[price_lows_idx[idx_1]]
            consistency_indicator_hd = min_indicator == indicator[price_lows_idx[idx_2]]
            diff_price = price[price_lows_idx[idx_1]] - price[price_lows_idx[idx_2]]  # should be neg
            diff_indicator = indicator[price_lows_idx[idx_1]] - indicator[price_lows_idx[idx_2]]  # should be pos
            is_regular_divergence = diff_price > 0 and diff_indicator < 0
            is_hidden_divergence = diff_price < 0 and diff_indicator > 0
            if is_regular_divergence and consistency_price_rd and consistency_indicator_rd:
                div.at[price_lows_idx[idx_2], f'Bullish_{width_divergence}_{order}'] = (price_lows_idx[idx_1], price_lows_idx[idx_2])
                div.at[price_lows_idx[idx_2], f'Bullish_idx_{width_divergence}_{order}'] = True
            elif is_hidden_divergence and consistency_price_hd and consistency_indicator_hd:
                div.at[price_lows_idx[idx_2], f'Berish_{width_divergence}_{order}'] = (price_lows_idx[idx_1], price_lows_idx[idx_2])
                div.at[price_lows_idx[idx_2], f'Berish_idx_{width_divergence}_{order}'] = True
    return div

Python returns "local variable referenced before assignment" error

I edit this post for your comments. Thank you :-)
The prev_fs_cell is the variable whose value can be nan or str. (ex. nan <-> "1,244,234" )
If prev_fs_cell is nan, I want to skip processing self._strat(self, curr_year), but it raises an error...
## GLOBAL & API ###
# Backtest window boundaries (YYYY.MM.DD strings; consumed by the data loaders).
STOCK_START="2015.01.01"
FS_START="2014.01.01"
END="2021.09.01"
# Short/long moving-average windows (in days).
SHORT=10
LONG=60
CURR_YEAR=2021
# OpenDART (Korean corporate-disclosure API) client; key comes from dart_config.
API_key=dart_config.API_key
DART=OpenDartReader(API_key)
# Financial-statement account names (Korean): current assets, non-current
# assets, current liabilities, non-current liabilities, total assets, total
# liabilities, revenue, operating profit, net income.
account_nm_list=["유동자산","비유동자산","유동부채","비유동부채","자산총계","부채총계","매출액","영업이익","당기순이익"]
# All KOSPI ticker codes, fetched live at import time.
KOSPI_stock_code=stock.get_market_ticker_list(market="KOSPI")
class Strategy():
    """Simple long-only backtest strategy driven by a daily indicator sheet
    and a yearly financial sheet.

    Bug fixed (the asked-about UnboundLocalError): in ``run()``,
    ``prev_fs_cell`` was only assigned inside the inner ``try``; when the
    *outer* lookup raised first, ``prev_fs_cell`` was never bound, and since
    ``|`` does not short-circuit, ``(curr_fs_cell == None) | (prev_fs_cell
    == None)`` evaluated both operands and raised.  Both variables are now
    initialised to ``None`` before the lookups and tested with
    short-circuiting ``or`` / ``is None``.
    """

    def __init__(self):
        # Trade logs (indexed by date string) plus account state.
        self.buy_signal = pd.DataFrame(columns=['open', 'unit'])
        self.sell_signal = pd.DataFrame(columns=['open', 'unit'])
        self.trade = pd.DataFrame(columns=['stock', 'cash'])
        self.position = 0
        self.unit = 1
        self.cash = 100000000  # 1억

    def set_data(self, indicator_data, finance_data):
        """Attach the daily indicator sheet (first column becomes a datetime
        index) and the yearly financial sheet (first column becomes the
        year index)."""
        self.indicator_data = indicator_data
        self.indicator_data.rename(columns={self.indicator_data.columns[0]: 'date'}, inplace=True)
        self.indicator_data = self.indicator_data.set_index('date')
        self.indicator_data.index = pd.to_datetime(self.indicator_data.index, format="%Y-%m-%d")
        self.fs_data = finance_data
        self.fs_data.rename(columns={self.fs_data.columns[0]: 'year'}, inplace=True)
        self.fs_data = self.fs_data.set_index('year')
        self.min_year = int(self.fs_data.index.min())  # str type

    def _buy(self, row):
        """Buy ``self.unit`` shares at today's open if cash allows; log the trade."""
        if (row['open'] * self.unit) <= self.cash:
            new_buy_row = pd.Series([row['open'], self.unit], index=self.buy_signal.columns, name=str(row.name))
            # NOTE(review): DataFrame.append was removed in pandas 2.0 — use pd.concat when upgrading.
            self.buy_signal = self.buy_signal.append(new_buy_row)
            self.position += self.unit
            stock_amt = self.position * row['open']
            self.cash -= row['open'] * self.unit
            new_trade_row = pd.Series([stock_amt, self.cash], index=self.trade.columns, name=str(row.name))
            self.trade = self.trade.append(new_trade_row)

    def _sell(self, row):
        """Sell roughly a quarter of the position (+1 share) at today's open; log the trade."""
        new_sell_row = pd.Series([row['open'], int(self.position / 4) + 1], index=self.sell_signal.columns, name=str(row.name))
        self.sell_signal = self.sell_signal.append(new_sell_row)
        self.position -= int(self.position / 4) + 1
        stock_amt = self.position * row['open']
        self.cash += row['open'] * self.unit
        new_trade_row = pd.Series([stock_amt, self.cash], index=self.trade.columns, name=str(row.name))
        self.trade = self.trade.append(new_trade_row)

    def _strat(self, row, curr_year):
        """Apply the buy/sell rule for one day: buy on low RSI plus revenue
        growth and a healthy current-asset ratio; sell on a dead cross with
        stagnant net income."""
        fs = self.fs_data
        prev_year = curr_year - 1
        # Financial-sheet cells are comma-grouped strings, e.g. "1,244,234".
        curr_rev = int(fs.loc[curr_year, '매출액'].replace(",", ""))
        prev_rev = int(fs.loc[prev_year, '매출액'].replace(",", ""))
        rev_growth = (curr_rev - prev_rev) / prev_rev
        curr_ni = int(fs.loc[curr_year, '당기순이익'].replace(",", ""))
        prev_ni = int(fs.loc[prev_year, '당기순이익'].replace(",", ""))
        ni_growth = (curr_ni - prev_ni) / prev_ni
        curr_asset = int(fs.loc[curr_year, '유동자산'].replace(",", ""))
        noncurr_asset = int(fs.loc[prev_year, "비유동자산"].replace(",", ""))
        curr_asset_rat = curr_asset / noncurr_asset
        if (row.rsi < 0.65) & (rev_growth > 0.005) & (1.3 < curr_asset_rat):  # & (curr_asset_rat < 2.3):
            self._buy(row)
        elif (row.Golden == False):
            if ni_growth <= 0.001:
                if self.position:
                    self._sell(row)

    def run(self):
        """Walk every indicator date; skip days whose current- or
        previous-year financial data is missing, otherwise apply the
        strategy."""
        dates = self.indicator_data.index
        fs = self.fs_data
        for date in dates:
            curr_year = date.year
            row = self.indicator_data.loc[date]
            # Initialise BOTH sentinels up front so neither can be unbound
            # when an exception skips the assignments below.
            curr_fs_cell = None
            prev_fs_cell = None
            try:
                curr_fs_cell = fs.loc[curr_year].iloc[0].replace(",", "")
                try:
                    prev_fs_cell = fs.loc[curr_year - 1].iloc[0].replace(",", "")
                except Exception:
                    prev_fs_cell = None
            except Exception:
                curr_fs_cell = None
            # `or` short-circuits, unlike the original bitwise `|`.
            if curr_fs_cell is None or prev_fs_cell is None:
                # fs data is empty for this year — skip the day.
                continue
            self._strat(row, curr_year)
# Backtest every KOSPI ticker: run the strategy, then compute profit,
# return rate and a Sharpe-like ratio from the trade log.
# NOTE(review): `results` (used near the bottom) is not defined in the code
# visible here — presumably a module-level dict created elsewhere; confirm.
for code in KOSPI_stock_code:
FS = load_data("FS_"+code)
indi = load_data("indicator_"+code)
today = dt.today()
strategy = Strategy()
strategy.set_data(indi, FS)
strategy.run()
buy = strategy.buy_signal
sell = strategy.sell_signal
unit = strategy.unit
# Shares still held = total bought - total sold; valued at today's open.
remain_stock = buy['unit'].sum() - sell['unit'].sum()
remain = int(get_data(str(code)+".KS", today).iloc[0]['open'])*int(remain_stock)
total_buy = int((buy['open'].sum()))*unit
total_sell = int(sell['open'].sum())*unit
profit = int(remain) + int(total_sell) - int(total_buy)
if total_buy:
return_rate = profit / total_buy
trade = strategy.trade
total_return_per_day = trade['stock']+trade['cash']
residual = total_return_per_day - return_rate
# Sample variance/deviation of daily residuals (n-1 denominator).
sample_var = residual**2 / (trade.shape[0]-1)
sample_dev = np.sqrt(sample_var)
Rf=0.01
sharp = (return_rate - Rf) / (sample_dev)
results[code]['return'] = return_rate
results[code]['sharp'] = sharp
else:
print("No buy due to strict condition")
I have tried to make backtest code for investing into Korean stocks by using financial sheet and stock price sheet and indicator sheet.
And my code return error like the below.
UnboundLocalError Traceback (most recent call last)
<ipython-input-13-caf2b218f860> in <module>()
10 strategy = Strategy()
11 strategy.set_data(indi, FS)
---> 12 strategy.run()
13
14 buy = strategy.buy_signal
<ipython-input-12-2d41db386a22> in run(self)
84 curr_fs_cell = None
85
---> 86 if (curr_fs_cell == None) | (prev_fs_cell == None):
87 #print("fs data is empty")
88 continue
UnboundLocalError: local variable 'prev_fs_cell' referenced before assignment
Actually there is no global variable named prev_fs_cell; it exists only in that class. Why does this error occur?

Confidence score of answer extracted using ELMo BiDAF model and AllenNLP

I'm working on a Deep Learning project where I use a bidirectional attention flow model (allennlp pretrained model)to make a question answering system.It uses squad dataset.The bidaf model extracts the answer span from paragraph.Is there any way to determine the confidence score(accuracy)or any other metrics of the answer extracted by the model?
I have used the subcommand evaluate from the allennlp package but it determines only score of the model after testing.I was hoping there is a much easier way to solve the issue using other such command.
Attaching the code and the terminal output below.
from rake_nltk import Rake
from string import punctuation
from nltk.corpus import stopwords
from allennlp.predictors.predictor import Predictor
import spacy
import wikipedia
import re
import requests
from requests_html import HTMLSession
from bs4 import BeautifulSoup
import traceback
from nltk.stem import SnowballStemmer
from nltk.util import ngrams
from math import log10
from flask import Flask, request, jsonify, render_template
from gevent.pywsgi import WSGIServer
import time
import multiprocessing as mp
from gtts import gTTS
import os
# Module-level setup for the question-answering service: NLP pipelines,
# pretrained AllenNLP models and shared state.
NLP = spacy.load('en_core_web_md')
stop = stopwords.words('english')
# Characters stripped/escaped when cleaning scraped text.
symbol = r"""!#$%^&*();:\n\t\\\"!\{\}\[\]<>-\?"""
stemmer = SnowballStemmer('english')
wikipedia.set_rate_limiting(True)
session = HTMLSession()
# Number of search results to use per query.
results = 5
# Prefer a locally cached model archive; fall back to downloading.
# NOTE(review): bare `except:` also swallows KeyboardInterrupt/SystemExit —
# `except Exception:` (or catching the specific load error) would be safer.
try:
predictor = Predictor.from_path("bidaf-model-2017.09.15-charpad.tar.gz")
except:
predictor = Predictor.from_path("https://storage.googleapis.com/allennlp-public-models/bidaf-elmo-model-2018.11.30-charpad.tar.gz")
try:
srl = Predictor.from_path('srl-model-2018.05.25.tar.gz')
except:
srl = Predictor.from_path('https://s3-us-west-2.amazonaws.com/allennlp/models/bert-base-srl-2019.06.17.tar.gz')
key = Rake(min_length=1, stopwords=stop, punctuations=punctuation, max_length=6)
wh_words = "who|what|how|where|when|why|which|whom|whose|explain".split('|')
stop.extend(wh_words)
# NOTE(review): `session` is re-created here, shadowing the one above.
session = HTMLSession()
output = mp.Queue()
def termFrequency(term, doc):
    """Return (raw frequency of *term* in the stemmed *doc*, stemmed doc text,
    doc length in tokens).

    The raw count feeds the BM25 scorer in ``sorting``; length normalisation is
    done there, so no per-document normalisation happens here.

    NOTE(review): ``str.count`` matches substrings, so 'art' also counts inside
    'artist' — presumably acceptable for stemmed terms, TODO confirm.
    """
    # Strip bracket characters, lowercase, and stem every token.
    # Raw string avoids the invalid-escape SyntaxWarning of the original.
    tokens = re.sub(r'[\[\]\{\}\(\)]', '', doc.lower()).split()
    tokens = [stemmer.stem(t) for t in tokens]
    dl = len(tokens)  # document length in tokens (used for BM25 normalisation)
    stemmed_doc = ' '.join(tokens)
    normalized_tf = stemmed_doc.count(term)
    return normalized_tf, stemmed_doc, dl
def inverseDocumentFrequency(term, allDocs):
    """IDF of *term* over *allDocs*, boosted 1.5x for trigrams whose outer
    words are both content (non-stop) words. Returns 0 when the term never
    occurs in any document."""
    doc_hits = sum(1 for doc in allDocs if term in doc)
    if doc_hits > 0:
        idf_val = log10((len(allDocs) + 1) / doc_hits)
        words = term.split()
        if len(words) == 3:
            # Boost informative trigrams: both edge words must be content words.
            outer_content = [w for w in (words[0], words[2]) if w not in stop]
            if len(outer_content) == 2:
                return idf_val * 1.5
        return idf_val
    return 0
def sent_formation(question, answer):
    """Turn a (question, answer-span) pair into a full declarative sentence.

    Uses spaCy POS tags to restore casing and the AllenNLP SRL model to find
    the verb and ARG1 subject of the question, then templates
    "<subject> <verb><conj> <answer>.".
    Raises (uncaught here) if SRL finds no verbs or tags are missing —
    callers wrap this in try/except.
    """
    # POS-tag the question twice: as-is and title-cased (title-casing helps
    # spaCy tag proper nouns that the user typed in lowercase).
    tags_doc = NLP(question)
    tags_doc_cased = NLP(question.title())
    tags_dict_cased = {i.lower_:i.pos_ for i in tags_doc_cased}
    tags_dict = {i.lower_:i.pos_ for i in tags_doc}
    # Re-case the question: title-case nouns/proper nouns, lowercase the rest.
    question_cased = []
    for i in question[:-1].split():
        if tags_dict[i] == 'PROPN' or tags_dict[i] == 'NOUN':
            question_cased.append(i.title())
        else:
            question_cased.append(i.lower())
    question_cased.append('?')
    question_cased = ' '.join(question_cased)
    #del tags_dict,tags_doc, tags_doc_cased
    # Semantic-role labelling of the re-cased question.
    pre = srl.predict(question_cased)
    verbs = []
    arg1 = []
    # Collect each verb and the (start index, continuation count) of its ARG1
    # span; fall back to ARG2 when ARG1 starts with a wh-word.
    for i in pre['verbs']:
        verbs.append(i['verb'])
        if 'B-ARG1' in i['tags']:
            arg1.append((i['tags'].index('B-ARG1'), i['tags'].count('I-ARG1'))\
                if not pre['words'][i['tags'].index('B-ARG1')].lower() in wh_words else \
                (i['tags'].index('B-ARG2'), i['tags'].count('I-ARG2')))
    arg1 = arg1[0] if arg1 else []
    if not arg1:
        # No ARG1 found: take everything after the first verb as the subject,
        # dropping a trailing punctuation token if present.
        verb_idx = pre['verbs'][0]['tags'].index('B-V')
        verb = pre['words'][verb_idx] if pre['words'][verb_idx] != answer.split()[0].lower() else ''
        subj_uncased = pre['words'][verb_idx+1:] if pre['words'][-1] not in symbol else \
            pre['words'][verb_idx+1:-1]
    else:
        verb = ' '.join(verbs)
        subj_uncased = pre['words'][arg1[0]:arg1[0]+arg1[1]+1]
    # 'when' questions get a temporal preposition before the answer.
    conj = ''
    if question.split()[0].lower() == 'when':
        conj = ' on' if len(answer.split()) > 1 else ' in'
    # Re-case the subject words using the POS dictionaries built above.
    subj = []
    for n, i in enumerate(subj_uncased):
        if tags_dict_cased[i.lower()] == 'PROPN' and tags_dict[i.lower()] != 'VERB' or n == 0:
            subj.append(i.title())
        else:
            subj.append(i.lower())
    subj[0] = subj[0].title()
    print(subj)
    print(pre)
    subj = ' '.join(subj)
    # Strip a trailing period from the answer to avoid double punctuation.
    sent = "{} {}{} {}.".format(subj, verb, conj, answer if answer[-1] != '.' else answer[:-1])
    return sent
class extractAnswer:
    """Question-answering pipeline: searches Wikipedia, ranks candidate
    passages with BM25, and extracts an answer span with the BiDAF model."""
    def __init__(self):
        # Wikipedia exceptions that trigger the opensearch-API fallback.
        self.wiki_error = (wikipedia.exceptions.DisambiguationError,
                           wikipedia.exceptions.HTTPTimeoutError,
                           wikipedia.exceptions.WikipediaException)
        self.article_title = None  # last resolved article title
        # symbol = """!#$%^&*();:\n\t\\\"!\{\}\[\]<>-\?"""
    def extractAnswer_model(self, passage, question, s=0.4, e=0.3, wiki=False):
        """Run BiDAF on (passage, question) and gate the answer by confidence.

        s/e are minimum thresholds for the best span-start/span-end
        probabilities. With wiki=True the thresholds are loosened adaptively
        and the return value is (answer_str, confidence); otherwise the
        return is the formed sentence, or (0, 0) when the gate fails.
        """
        if type(passage) == list:
            passage = " ".join(passage)
        if not question[-1] == '?':
            question = question+'?'
        pre = predictor.predict(passage=passage, question=question)
        if wiki:
            # Loosen the start threshold when the *end* probability is high
            # and vice versa (NOTE(review): s/e look swapped here — confirm).
            if max(pre['span_end_probs']) > 0.5:
                s = 0.12
            elif max(pre['span_end_probs']) > 0.4:
                s = 0.13
            elif max(pre['span_end_probs']) > 0.35:
                s = 0.14
            if max(pre['span_start_probs']) > 0.5:
                e = 0.12
            elif max(pre['span_start_probs']) > 0.4:
                e = 0.14
            elif max(pre['span_start_probs']) > 0.3:
                e = 0.15
        if max(pre['span_start_probs']) > s and max(pre['span_end_probs']) > e:
            # Sanity check: at least half of the question keywords must also
            # occur in the passage keywords.
            key.extract_keywords_from_text(question)
            # NOTE(review): this iterates the *characters* of the joined
            # phrase string, stemming each character — looks unintended,
            # but both sides are built the same way so the overlap test is
            # still self-consistent. TODO confirm.
            ques_key = [stemmer.stem(i) for i in ' '.join(key.get_ranked_phrases())]
            key.extract_keywords_from_text(passage)
            pass_key = [stemmer.stem(i) for i in ' '.join(key.get_ranked_phrases())]
            l = len(ques_key)
            c = 0
            for i in ques_key:
                if i in pass_key:
                    c += 1
            if c >= l/2:
                print(max(pre['span_start_probs']),
                      max(pre['span_end_probs']))
                if wiki:
                    # Confidence = sum of the two peak probabilities.
                    return pre['best_span_str'], max(pre['span_start_probs']) + max(pre['span_end_probs'])
                try:
                    ans = sent_formation(question, pre['best_span_str'])
                except:
                    ans = pre['best_span_str']
                    print(traceback.format_exc())
                return ans
            print(ques_key, c, l)
            print(max(pre['span_start_probs']), max(pre['span_end_probs']))
            return 0, 0
        else:
            print(max(pre['span_start_probs']), max(pre['span_end_probs']), pre['best_span_str'])
            return 0, 0
    def wiki_search_api(self, query):
        """Search Wikipedia for *query*; return a list of article titles.

        Falls back to the raw MediaWiki opensearch endpoint on known
        wikipedia-library errors, and returns 0 on any other failure.
        """
        article_list = []
        try:
            article_list.extend(wikipedia.search(query, results=results))
            print(article_list)
            return article_list
        except self.wiki_error:
            # Library failed — query the opensearch API directly.
            params = {'search': query, 'profile': 'engine_autoselect',
                      'format': 'json', 'limit': results}
            article_list.extend(requests.get('https://en.wikipedia.org/w/api.php?action=opensearch',
                                             params=params).json()[1])
            return article_list
        except:
            # NOTE(review): bare except also swallows network errors silently.
            print('Wikipedia search error!')
            print(traceback.format_exc())
            return 0
    def wiki_passage_api(self, article_title, article_list, output):
        """Fetch the summary of *article_title* and put (title, passage) on
        *output* (a multiprocessing.Queue) — runs in a worker process.

        On a DisambiguationError, resolves up to two candidate pages via the
        opensearch/extracts APIs and puts (title, {candidate: passage, ...})
        instead. Any other failure puts (title, '').
        """
        # Disambiguation_title = {}
        try:
            passage = wikipedia.summary(article_title)
            output.put((article_title, self.passage_pre(passage)))
        except wikipedia.exceptions.DisambiguationError as e:
            print(e.options[0], e.options)
            Disambiguation_pass = {}
            # Try at most the first two disambiguation candidates.
            for p in range(2 if len(e.options) > 1 else len(e.options)):
                params = {'search':e.options[p], 'profile':'engine_autoselect', 'format':'json'}
                article_url = requests.get('https://en.wikipedia.org/w/api.php?action=opensearch',
                                           params=params).json()
                if not article_url[3]:
                    continue
                article_url = article_url[3][0]
                # Resolve the candidate's canonical title from its page <title>.
                r = session.get(article_url)
                soup = BeautifulSoup(r.html.raw_html)
                print(soup.title.string)
                article_title_dis = soup.title.string.rsplit('-')[0].strip()
                if article_title_dis in article_list:
                    print('continue')  # already fetched by another worker
                    continue
                try:
                    url = "https://en.wikipedia.org/w/api.php?format=json&action=query&prop=extracts&exintro&explaintext&redirects=1&titles={}".format(article_title_dis)
                    passage = requests.get(url).json()['query']['pages']
                    for i in passage.keys():
                        if 'extract' in passage[i]:
                            Disambiguation_pass[article_title_dis] = self.passage_pre(passage[i]['extract'])
                except wikipedia.exceptions.HTTPTimeoutError:
                    passage = wikipedia.summary(article_title_dis)
                    Disambiguation_pass[article_title_dis] = self.passage_pre(passage)
                except:
                    Disambiguation_pass[article_title_dis] = ''
                    continue
            output.put((article_title, Disambiguation_pass))
        except:
            # Any other failure: report an empty passage for this title.
            output.put((article_title, ''))
            print(traceback.format_exc())
    def sorting(self, article, question, topic):
        """Fetch passages for every title in *article* (in parallel processes),
        score them against the question keywords with BM25, and return the two
        best: (passage1, passage2, (title1, title2) or 0, tfidf1, tfidf2).
        Returns 0 when no passages were fetched, or five zeros when all
        scores are zero.
        """
        # One worker process per article title; results come back on the
        # module-level Queue `output`.
        processes = [mp.Process(target=self.wiki_passage_api, args=(article[x], article, output))\
                     for x in range(len(article))]
        for p in processes:
            p.start()
        for p in processes:
            p.join(timeout=3)  # NOTE(review): a worker may still be running after timeout
        results_p = [output.get() for p in processes]
        # Flatten: plain-string passages directly, disambiguation dicts item-wise.
        article_list = []
        passage_list = []
        for i, j in results_p:
            if type(j) != dict and j:
                article_list.append(i)
                passage_list.append(j)
            elif type(j) == dict and j:
                for k, l in j.items():
                    if l:
                        article_list.append(k)
                        passage_list.append(l)
        normalize_passage_list = []
        start = time.time()
        # Build the keyword pool: noun chunks + RAKE phrases + topic.
        keywords = " ".join(self.noun+self.ques_key+[topic.lower()])
        keywords = re.sub('[{0}]'.format(symbol), ' ', keywords).split()
        question = question+' '+topic
        ques_tokens = [stemmer.stem(i.lower()) for i in question.split() \
                       if i.lower() not in wh_words]
        print(ques_tokens)
        # n-grams of the stemmed question tokens, filtered by stopword content.
        keywords_bigram = [' '.join(i) for i in list(ngrams(ques_tokens, 2)) \
                           if i[0] not in stop and i[1] not in stop]
        if len(ques_tokens) > 3:
            keywords_trigram = [' '.join(i) for i in list(ngrams(ques_tokens, 3)) \
                                if (i[0] in stop) + (i[2] in stop) + (i[1] in stop) < 3]
        else:
            keywords_trigram = []
        if len(ques_tokens) > 5:
            keywords_4gram = [' '.join(i) for i in list(ngrams(ques_tokens, 4)) \
                              if (i[0] in stop) + (i[2] in stop) +(i[1] in stop)+(i[3] in stop) < 4]
        else:
            keywords_4gram = []
        keywords_unigram = list(set([stemmer.stem(i.lower()) for i in keywords \
                                     if i.lower() not in stop]))
        keywords = keywords_unigram+list(set(keywords_bigram))+keywords_trigram+keywords_4gram
        tf = []
        if not passage_list:
            return 0
        pass_len = []
        #n_u_t=[]
        #key_dict = {i: keywords.count(i) for i in keywords}
        print('Extraction complete')
        #remove_pass={}
        #for n,i in enumerate(passage_list):
            #if len(i)<200 or not i:
                #remove_pass[article_list[n]]=i
                #print(n, article_list[n])
        #passage_list=[i for i in passage_list if i not in remove_pass.values()]
        #article_list=[i for i in article_list if i not in remove_pass.keys()]
        # Drop duplicate passages (and their titles), keeping the last copy.
        passage_list_copy = passage_list.copy()
        article_list_copy = article_list.copy()
        for i in range(len(passage_list_copy)):
            if passage_list.count(passage_list_copy[i]) > 1:
                passage_list.remove(passage_list_copy[i])
                article_list.remove(article_list_copy[i])
                print('Copy:', article_list_copy[i])
        del passage_list_copy
        del article_list_copy
        # Per-passage term frequencies; 'key_match' counts keywords present.
        for n, i in enumerate(passage_list):
            temp_tf = {}
            c = 0
            for j in keywords:
                temp_tf[j], temp_pass, temp_len = termFrequency(j, i + ' ' + article_list[n])
                if temp_tf[j]:
                    c += 1
            normalize_passage_list.append(temp_pass)
            pass_len.append(temp_len)
            temp_tf['key_match'] = c
            tf.append(temp_tf)
        print(pass_len)
        print(keywords)
        idf = {}
        for i in keywords:
            idf[i] = inverseDocumentFrequency(i, normalize_passage_list)
        #print(tf, idf)
        tfidf = []
        #b=0.333 #for PLN
        b, k = 0.75, 1.2 #for BM25
        avg_pass_len = sum(pass_len)/len(pass_len)
        #pivot=sum(n_u_t)/len(n_u_t)
        # BM25 score per passage, scaled by the fraction of keywords matched.
        for n, i in enumerate(tf):
            tf_idf = 0
            #avg_tf=sum(i.values())/len(i)
            key_match_ratio = i['key_match']/len(keywords)
            for j in keywords:
                #tf_idf+=idf[j]*((log(1+log(1+i[j])))/(1-b+(b*pass_len[n]/avg_pass_len))) #PLN
                tf_idf += idf[j]*(((k+1)*i[j])/(i[j]+k*(1-b+(b*pass_len[n]/avg_pass_len)))) #BM25
            tfidf.append(tf_idf*key_match_ratio)
        # Normalise scores to percentages (empty when all scores are zero).
        tfidf = [i/sum(tfidf)*100 for i in tfidf if any(tfidf)]
        if not tfidf:
            return 0, 0, 0, 0, 0
        print(tfidf)
        print(article_list, len(passage_list))
        if len(passage_list) > 1:
            # Pick the two highest-scoring passages.
            sorted_tfidf = sorted(tfidf, reverse=1)
            idx1 = tfidf.index(sorted_tfidf[0])
            passage1 = passage_list[idx1]
            #article_title=
            tfidf1 = sorted_tfidf[0]
            idx2 = tfidf.index(sorted_tfidf[1])
            passage2 = passage_list[idx2]
            article_title = (article_list[idx1], article_list[idx2])
            tfidf2 = sorted_tfidf[1]
        else:
            article_title = 0
            tfidf2 = 0
            if passage_list:
                passage1 = passage_list[0]
                tfidf1 = tfidf[0]
                passage2 = 0
            else:
                passage1 = 0
                passage2 = 0
                tfidf1, tfidf2 = 0, 0
        end = time.time()
        print('TFIDF time:', end-start)
        return passage1, passage2, article_title, tfidf1, tfidf2
def passage_pre(self, passage):
#passage=re.findall("[\da-zA-z\.\,\'\-\/\–\(\)]*", passage)
passage = re.sub('\n', ' ', passage)
passage = re.sub('\[[^\]]+\]', '', passage)
passage = re.sub('pronunciation', '', passage)
passage = re.sub('\\\\.+\\\\', '', passage)
passage = re.sub('{.+}', '', passage)
passage = re.sub(' +', ' ', passage)
return passage
    def wiki(self, question, topic=''):
        """Answer *question* from Wikipedia: search for articles, rank the
        fetched passages, extract answers from the top two, and pick the one
        with the better confidence.

        Returns (answer, article_title) on success, (0, 0) when no article is
        found, 0 for an empty/keyword-less question.
        NOTE(review): when passage extraction succeeds but ans1 is falsy the
        function falls through and implicitly returns None — confirm callers
        tolerate that.
        """
        if not question:
            return 0
        question = re.sub(' +', ' ', question)
        question = question.title()
        # Keywords and noun chunks drive the Wikipedia search.
        key.extract_keywords_from_text(question)
        self.ques_key = key.get_ranked_phrases()
        doc = NLP(question)
        self.noun = [str(i).lower() for i in doc.noun_chunks if str(i).lower() not in wh_words]
        print(self.ques_key, self.noun)
        question = re.sub('[{0}]'.format(symbol), ' ', question)
        if not self.noun + self.ques_key:
            return 0
        article_list = None
        question = question.lower()
        if self.noun:
            # For short two-chunk questions, re-extract the contiguous span of
            # the question between the first and last noun token.
            if len(self.noun) == 2 and len(" ".join(self.noun).split()) < 6:
                #question1=question
                self.noun = " ".join(self.noun).split()
                if self.noun[0] in stop:
                    self.noun.pop(0)
                self.noun = question[question.index(self.noun[0]):question.index(self.noun[-1]) \
                                     +len(self.noun[-1])+1].split()
                #del question1
                print(self.noun)
            article_list = self.wiki_search_api(' '.join(self.noun))
        # Fallbacks: top RAKE phrase, then all phrases joined.
        if self.ques_key and not article_list:
            article_list = self.wiki_search_api(self.ques_key[0])
            if not article_list:
                article_list = self.wiki_search_api(' '.join(self.ques_key))
        if not article_list:
            print('Article not found on wikipedia.')
            return 0, 0
        article_list = list(set(article_list))
        passage1, passage2, article_title, tfidf1, tfidf2 = self.sorting(article_list,
                                                                        question, topic)
        if passage1:
            ans1, conf1 = self.extractAnswer_model(passage1, question, s=0.20, e=0.20, wiki=True)
        else:
            ans1, conf1 = 0, 0
        if ans1:
            conf2 = 0
            # Very long answers are re-fed as the passage for a second pass.
            if len(ans1) > 600:
                print(ans1)
                print('Repeat')
                ans1, conf1 = self.extractAnswer_model(ans1, question, s=0.20, e=0.20, wiki=True)
            # The decision threshold between the two candidates depends on how
            # far apart their BM25 scores are.
            threshhold = 0.3 if not ((tfidf1- tfidf2) <= 10) else 0.2
            if round(tfidf1- tfidf2) < 5:
                threshhold = 0
            if (tfidf1- tfidf2) > 20:
                threshhold = 0.35
            if (tfidf1- tfidf2) > 50:
                threshhold = 1
            # Only bother with the runner-up passage when scores are close or
            # the first answer is not confident.
            if (passage2 and conf1 < 1.5) or (tfidf1 - tfidf2) < 10:
                ans2, conf2 = self.extractAnswer_model(passage2, question, s=0.20, e=0.20,
                                                       wiki=True) if passage2 else (0, 0)
            title = 0
            if round(conf1, 2) > round(conf2, 2) - threshhold:
                print('ans1')
                ans = ans1
                title = article_title[0] if article_title else 0
            else:
                print('ans2')
                title = article_title[1] if article_title else 0
                ans = ans2
            if not question[-1] == '?':
                question = question+'?'
            # Best-effort: turn the raw span into a full sentence.
            try:
                ans = sent_formation(question, ans)
            except:
                print(traceback.format_exc())
            print(ans, '\n', '\n', article_title)
            return ans, title
# Singleton pipeline instance and the Flask application object.
extractor = extractAnswer()
app = Flask(__name__)
#app.route("/", methods=["POST", "get"])
#app.route("/ans")
def ans():
    """Flask view: answer ?question= from ?passage= (if given) or Wikipedia,
    synthesise the answer to welcome.mp3, and render the result page.

    NOTE(review): the route decorators above appear to have lost their '@' —
    as written this view is never registered; confirm against the original.
    """
    start = time.time()
    question = request.args.get('question')
    topic = request.args.get('topic')
    passage = request.args.get('passage')
    if not question:
        return render_template('p.html')  # no question yet: show the entry form
    if not topic:
        topic = ''
    if passage:
        # Caller supplied a passage: extract directly, skip the Wikipedia search.
        answer = extractor.extractAnswer_model(passage, question)
    else:
        answer, title = extractor.wiki(question, topic)
    end = time.time()
    if answer:
        # Text-to-speech of the answer, saved for the audio_play view.
        mytext = str(answer)
        language = 'en'
        myobj = gTTS(text=mytext, lang=language, slow=False)
        myobj.save("welcome.mp3")
        # prevName = 'welcome.mp3'
        #newName = 'static/welcome.mp3'
        #os.rename(prevName,newName)
        return render_template('pro.html', answer=answer)
    else:
        return jsonify(Status='E', Answer=answer, Time=end-start)
#app.route("/audio_del/", methods=["POST", "get"])
def audio_del():
    """Serve the entry page again, discarding the previous audio answer."""
    return render_template('p.html')
#app.route("/audio_play/", methods=["POST", "get"])
def audio_play():
    """Play the synthesised answer on the *server* host, then render white.html.

    The shell command is a fixed string (no user input), so there is no
    injection risk here, but it blocks the request until playback finishes.
    """
    os.system("mpg321 welcome.mp3")
    return render_template('white.html')
if __name__ == "__main__":
    # Serve the Flask app on all interfaces via gevent's production WSGI server.
    port = 7091
    server = WSGIServer(('0.0.0.0', port), app)
    print('Running on', port, '...')
    server.serve_forever()
![Output in the terminal for a question I've asked](https://i.stack.imgur.com/6pyv5.jpg)
I came across a possible solution to this after looking deeply into the output returned by the model. Although it is probably not something you can rely on with complete accuracy, it seemed to do the job in my case:
Note that the text answer which is "best_span_str" is always a subarray of the passage. It spans the range which is stored in "best_span".
i.e., "best_span" contains the start and end index of the answer.
Now, the output data contains a property named "span_end_probs".
"span_end_probs" contains a list of values that correspond to all the words present in the text input.
If you look closely for various inputs, the value is always maximum at one of the indexes within the starting and ending range that "best_span" contains. This value seemed to be very similar to the confidence levels that we need. Let's call this value score. All you need to do now is to try some inputs and find a suitable method to use this score as a metric.
e.g.: if you need a threshold value for some application, you can try a number of test inputs and find a value that is most accurate. In my case, this was around 0.35.
i.e. if score is lesser than 0.35, it prints answer not found and if greater than or equal 0.35, prints string in "best_span_str".
Here's my code snippet:
from allennlp.predictors.predictor import Predictor

# Placeholders: substitute a real passage and question before running.
passage = '--INPUT PASSAGE--'
question = '--INPUT QUESTION--'

# Pre-trained BiDAF+ELMo reading-comprehension model.
predictor = Predictor.from_path("https://storage.googleapis.com/allennlp-public-models/bidaf-elmo.2021-02-11.tar.gz")
output = predictor.predict(passage=passage, question=question)

# Empirically, the peak span-end probability tracks answer confidence;
# 0.35 was the most accurate cut-off in my tests.
score = max(output["span_end_probs"])
if score < 0.35:
    print('Answer not found')
else:
    print(output["best_span_str"])
You can readily see the example input and output here.

Let the GUI continue working after crashing without having to reset device

My current GUI shows target tracking sent from a TI processor to the PC via UART. Sometimes the GUI crashes due to UART performance, and every time I want to reset the GUI I also have to reset my TI device. So I want to modify the Python code so that I only need to re-open the GUI and enter the COM port, without having to reset the device whenever the GUI crashes.
Currently, each time GUI crashes, even though I re-opened GUI but didn't reset device (power off & on), I can not continue tracking.
I put python code for GUI below.In this GUI python program, we have 2 blanks & button to enter UART COM port, 2 buttons to select & send configuration to device. Then it can begin tracking.
I'm considering modifying the sendCfg(self) method, but I am still not sure how to make it work for my purpose because of my lack of experience with Python and GUI design. Please help me with sample code for this task.
class Window(QDialog):
    """Main mmWave people-counting GUI: COM-port connection, configuration
    selection/sending, and live 2D/3D target plotting."""
    def __init__(self, parent=None, size=[]):
        # NOTE(review): mutable default `size=[]` — harmless here since size
        # is never mutated in this chunk, but worth fixing upstream.
        super(Window, self).__init__(parent)
        #when running Gree Demo
        self.Gree = 1
        # set window toolbar options, and title. #deb_gp
        self.setWindowFlags(
            Qt.Window |
            Qt.CustomizeWindowHint |
            Qt.WindowTitleHint |
            Qt.WindowMinimizeButtonHint |
            Qt.WindowMaximizeButtonHint |
            Qt.WindowCloseButtonHint
        )
        self.setWindowTitle("mmWave People Counting")
        print('Python is ', struct.calcsize("P")*8, ' bit')
        print('Python version: ', sys.version_info)
        # NOTE(review): self.comBox etc. are created by the set*Layout
        # helpers, which must run before this point — the calls appear to be
        # missing from this excerpt; confirm against the full source.
        gridlay = QGridLayout()
        gridlay.addWidget(self.comBox, 0,0,1,1)
        gridlay.addWidget(self.statBox, 1,0,1,1)
        gridlay.addWidget(self.configBox,2,0,1,1)
        gridlay.addWidget(self.plotControlBox,3,0,1,1)
        gridlay.addWidget(self.boxTab,4,0,1,1)
        gridlay.addWidget(self.spBox,5,0,1,1)
        gridlay.addWidget(self.graphTabs,0,1,6,1)
        gridlay.addWidget(self.gw, 0, 2, 6, 1)
        #gridlay.addWidget(self.demoData, 0,3,1,2)
        #gridlay.addWidget(self.hPlot,1,3,4,2)
        gridlay.setColumnStretch(0,1)
        gridlay.setColumnStretch(1,3)
        self.setLayout(gridlay)
    #
    # left side pane layout
    #
    def setConnectionLayout(self):
        """Build the 'Connect to Com Ports' group: UART/DATA port inputs,
        demo-type selector, and the Connect button."""
        self.comBox = QGroupBox('Connect to Com Ports')
        self.uartCom = QLineEdit('') #deb_gp
        self.dataCom = QLineEdit('') #deb_gp
        self.uartLabel = QLabel('UART COM:')
        self.dataLabel = QLabel('DATA COM:')
        self.connectStatus = QLabel('Not Connected')
        self.connectButton = QPushButton('Connect')
        self.connectButton.clicked.connect(self.connectCom)
        self.configLabel = QLabel('Config Type:')
        self.configType = QComboBox()
        self.configType.addItems(["3D People Counting", "SDK Out of Box Demo", "Long Range People Detection", "Indoor False Detection Mitigation", "(Legacy) 2D People Counting", "(Legacy): Overhead People Counting"])
        self.comLayout = QGridLayout()
        self.comLayout.addWidget(self.uartLabel,0,0)
        self.comLayout.addWidget(self.uartCom,0,1)
        self.comLayout.addWidget(self.dataLabel,1,0)
        self.comLayout.addWidget(self.dataCom,1,1)
        self.comLayout.addWidget(self.configLabel,2,0)
        self.comLayout.addWidget(self.configType,2,1)
        self.comLayout.addWidget(self.connectButton,3,0)
        self.comLayout.addWidget(self.connectStatus,3,1)
        self.comBox.setLayout(self.comLayout)
    def setConfigLayout(self):
        """Build the 'Configuration' group: select/send buttons plus the
        derived radar-parameter table filled in by parseCfg."""
        self.configBox = QGroupBox('Configuration')
        self.selectConfig = QPushButton('Select Configuration')
        self.sendConfig = QPushButton('Send Configuration')
        self.selectConfig.clicked.connect(self.selectCfg)
        self.sendConfig.clicked.connect(self.sendCfg)
        self.configTable = QTableWidget(5,2)
        #set parameter names
        self.configTable.setItem(0,0,QTableWidgetItem('Radar Parameter'))
        self.configTable.setItem(0,1,QTableWidgetItem('Value'))
        self.configTable.setItem(1,0,QTableWidgetItem('Max Range'))
        self.configTable.setItem(2,0,QTableWidgetItem('Range Resolution'))
        self.configTable.setItem(3,0,QTableWidgetItem('Max Velocity'))
        self.configTable.setItem(4,0,QTableWidgetItem('Velcoity Resolution'))
        self.configLayout = QVBoxLayout()
        self.configLayout.addWidget(self.selectConfig)
        self.configLayout.addWidget(self.sendConfig)
        self.configLayout.addWidget(self.configTable)
        #self.configLayout.addStretch(1)
        self.configBox.setLayout(self.configLayout)
    def updateGraph(self, parsedData):
        """Consume one parsed UART frame and refresh the 2D/3D plot.

        parsedData layout (positional): point cloud, targets, point->target
        indexes, point count, target count, frame number, fail flag,
        classifier output. Frames arriving while the previous plot is still
        drawing (graphFin == 0) are dropped.
        """
        updateStart = int(round(time.time()*1000))
        self.useFilter = 0
        classifierOutput = []
        pointCloud = parsedData[0]
        targets = parsedData[1]
        indexes = parsedData[2]
        numPoints = parsedData[3]
        numTargets = parsedData[4]
        self.frameNum = parsedData[5]
        fail = parsedData[6]
        classifierOutput = parsedData[7]
        fallDetEn = 0
        indicesIn = []
        if (fail != 1):
            #left side
            pointstr = 'Points: '+str(numPoints)
            targetstr = 'Targets: '+str(numTargets)
            self.numPointsDisplay.setText(pointstr)
            self.numTargetsDisplay.setText(targetstr)
            #right side fall detection
            peopleStr = 'Number of Detected People: '+str(numTargets)
            if (numTargets == 0):
                fdestr = 'Fall Detection Disabled - No People Detected'
            elif (numTargets == 1):
                fdestr = 'Fall Detection Enabled'
                fallDetEn = 1
            elif (numTargets > 1):
                fdestr = 'Fall Detected Disabled - Too Many People'
            #self.numDetPeople.setText(peopleStr)
            #self.fallDetEnabled.setText(fdestr)
        # A truncated target record (< 13 fields) is treated as no targets.
        if (len(targets) < 13):
            targets = []
            classifierOutput = []
        if (fail):
            return
        #remove static points
        if (self.configType.currentText() == '3D People Counting'):
            if (not self.staticclutter.isChecked()):
                # Static points have doppler == 0 in row 3; truncate at the
                # first one (points are assumed sorted dynamic-first).
                statics = np.where(pointCloud[3,:] == 0)
                try:
                    firstZ = statics[0][0]
                    numPoints = firstZ
                    pointCloud = pointCloud[:,:firstZ]
                    indexes = indexes[:,:self.previousFirstZ]
                    self.previousFirstZ = firstZ
                except:
                    firstZ = -1  # no static points this frame
        #point cloud persistence
        fNum = self.frameNum%10  # ring buffer of the last 10 frames
        if (numPoints):
            self.previousCloud[:5,:numPoints,fNum] = pointCloud[:5,:numPoints]
            self.previousCloud[5,:len(indexes),fNum] = indexes
        self.previousPointCount[fNum]=numPoints
        #plotting 3D - get correct point cloud (persistent points and synchronize the frame)
        if (self.configType.currentText() == 'SDK3xPeopleCount'):
            pointIn = pointCloud
        else:
            # Concatenate the previous N frames' points into one array;
            # index 255 marks "no target" for un-associated points.
            totalPoints = 0
            persistentFrames = int(self.persistentFramesInput.currentText())
            #allocate new array for all the points
            for i in range(1,persistentFrames+1):
                totalPoints += self.previousPointCount[fNum-i]
            pointIn = np.zeros((5,int(totalPoints)))
            indicesIn = np.ones((1, int(totalPoints)))*255
            totalPoints = 0
            #fill array
            for i in range(1,persistentFrames+1):
                prevCount = int(self.previousPointCount[fNum-i])
                pointIn[:,totalPoints:totalPoints+prevCount] = self.previousCloud[:5,:prevCount,fNum-i]
                if (numTargets > 0):
                    indicesIn[0,totalPoints:totalPoints+prevCount] = self.previousCloud[5,:prevCount,fNum-i]
                totalPoints+=prevCount
        if (self.graphFin):
            # Previous draw finished: hand this frame to a plotting thread.
            self.plotstart = int(round(time.time()*1000))
            self.graphFin = 0
            if (self.threeD):
                try:
                    indicesIn = indicesIn[0,:]
                except:
                    indicesIn = []
                self.get_thread = updateQTTargetThread3D(pointIn, targets, indicesIn, self.scatter, self.pcplot, numTargets, self.ellipsoids, self.coordStr, classifierOutput, self.zRange, self.gw, self.plotByIndex.isChecked(), self.plotTracks.isChecked(), self.bbox,self.boundaryBoxes[0]['checkEnable'].isChecked())
                self.get_thread.done.connect(self.graphDone)
                self.get_thread.start(priority=QThread.HighestPriority-1)
            else:
                npc = pointIn[0:2,:]  # x/y only for the legacy 2D plot
                print (np.shape(npc))
                self.legacyThread = update2DQTGraphThread(npc, targets, numTargets, indexes, numPoints, self.trailData, self.activeTrail, self.trails, self.scatter2D, self.gatingScatter)
                self.legacyThread.done.connect(self.graphDone)
                self.legacyThread.start(priority=QThread.HighestPriority-1)
        else:
            return  # still drawing the previous frame: drop this one
        #pointIn = self.previousCloud[:,:int(self.previousPointCount[fNum-1]),fNum-1]
        #state tracking
        if (numTargets > 0):
            self.lastFrameHadTargets = True
        else:
            self.lastFrameHadTargets = False
        if (numTargets):
            self.lastTID = targets[0,:]
        else:
            self.lastTID = []
    def graphDone(self):
        """Slot run when a plotting thread finishes: update the running
        average plot time, re-arm graphFin, and refresh the status labels."""
        plotend = int(round(time.time()*1000))
        plotime = plotend - self.plotstart
        try:
            # Incremental running average over all frames so far.
            if (self.frameNum > 1):
                self.averagePlot = (plotime*1/self.frameNum) + (self.averagePlot*(self.frameNum-1)/(self.frameNum))
            else:
                self.averagePlot = plotime
        except:
            # First call before averagePlot exists — seed it.
            self.averagePlot = plotime
        self.graphFin = 1  # allow updateGraph to accept the next frame
        pltstr = 'Average Plot time: '+str(plotime)[:5] + ' ms'
        fnstr = 'Frame: '+str(self.frameNum)
        self.frameNumDisplay.setText(fnstr)
        self.plotTimeDisplay.setText(pltstr)
def resetFallText(self):
self.fallAlert.setText('Standing')
self.fallPic.setPixmap(self.standingPicture)
self.fallResetTimerOn = 0
def updateFallThresh(self):
try:
newThresh = float(self.fallThreshInput.text())
self.fallThresh = newThresh
self.fallThreshMarker.setPos(self.fallThresh)
except:
print('No numberical threshold')
    def connectCom(self):
        """Create the UART parser for the selected demo, wire up the parse
        thread/timer, and open the two COM ports typed by the user.

        NOTE(review): a fresh parser and thread are built on every click, so
        re-connecting after a crash depends on the device still streaming —
        this is the method the asker wants to make re-entrant.
        """
        #get parser
        self.parser = uartParserSDK(type=self.configType.currentText())
        self.parser.frameTime = self.frameTime
        print('Parser type: ',self.configType.currentText())
        #init threads and timers
        self.uart_thread = parseUartThread(self.parser)
        if (self.configType.currentText() != 'Replay'):
            self.uart_thread.fin.connect(self.parseData)
        # NOTE(review): indentation lost in this excerpt — the updateGraph
        # connect may belong inside the if above; confirm against the original.
        self.uart_thread.fin.connect(self.updateGraph)
        self.parseTimer = QTimer()
        self.parseTimer.setSingleShot(False)  # fires repeatedly once started
        self.parseTimer.timeout.connect(self.parseData)
        try:
            uart = "COM"+ self.uartCom.text() #deb_gp
            data = "COM"+ self.dataCom.text() #deb_gp
            #TODO: find the serial ports automatically.
            self.parser.connectComPorts(uart, data)
            self.connectStatus.setText('Connected') #deb_gp
            self.connectButton.setText('Disconnect') #deb_gp
            #TODO: create the disconnect button action
        except:
            # Bare except: any serial failure just shows the status label.
            self.connectStatus.setText('Unable to Connect')
        if (self.configType.currentText() == "Replay"):
            self.connectStatus.setText('Replay')
        if (self.configType.currentText() == "Long Range People Detection"):
            self.frameTime = 400
    #
    # Select and parse the configuration file
    # TODO select the cfgfile automatically based on the profile.
    def selectCfg(self):
        """Open a file dialog and parse the chosen chirp config file."""
        try:
            self.parseCfg(self.selectFile())
        except:
            # Dialog cancelled or file unreadable/unparsable.
            print('No cfg file selected!')
    def selectFile(self):
        """Show an open-file dialog filtered to *.cfg and return the chosen
        path ('' when the dialog is cancelled)."""
        fd = QFileDialog()
        filt = "cfg(*.cfg)"
        filename = fd.getOpenFileName(directory='./../chirp_configs',filter=filt) #deb_gp - added folder name
        return filename[0]
def parseCfg(self, fname):
cfg_file = open(fname, 'r')
self.cfg = cfg_file.readlines()
counter = 0
chirpCount = 0
for line in self.cfg:
args = line.split()
if (len(args) > 0):
if (args[0] == 'cfarCfg'):
zy = 4
#self.cfarConfig = {args[10], args[11], '1'}
elif (args[0] == 'AllocationParam'):
zy=3
#self.allocConfig = tuple(args[1:6])
elif (args[0] == 'GatingParam'):
zy=2
#self.gatingConfig = tuple(args[1:4])
elif (args[0] == 'SceneryParam' or args[0] == 'boundaryBox'):
self.boundaryLine = counter
self.profile['leftX'] = float(args[1])
self.profile['rightX'] = float(args[2])
self.profile['nearY'] = float(args[3])
self.profile['farY'] = float(args[4])
self.setBoundaryTextVals(self.profile)
self.boundaryBoxes[0]['checkEnable'].setChecked(True)
elif (args[0] == 'staticBoundaryBox'):
self.staticLine = counter
elif (args[0] == 'profileCfg'):
self.profile['startFreq'] = float(args[2])
self.profile['idle'] = float(args[3])
self.profile['adcStart'] = float(args[4])
self.profile['rampEnd'] = float(args[5])
self.profile['slope'] = float(args[8])
self.profile['samples'] = float(args[10])
self.profile['sampleRate'] = float(args[11])
print(self.profile)
elif (args[0] == 'frameCfg'):
self.profile['numLoops'] = float(args[3])
self.profile['numTx'] = float(args[2])+1
elif (args[0] == 'chirpCfg'):
chirpCount += 1
elif (args[0] == 'sensorPosition'):
self.profile['sensorHeight'] = float(args[1])
self.profile['az_tilt'] = float(args[2])
self.profile['elev_tilt'] = float(args[3])
counter += 1
maxRange = self.profile['sampleRate']*1e3*0.9*3e8/(2*self.profile['slope']*1e12)
#update boundary box
self.drawBoundaryGrid(maxRange)
#update chirp table values
bw = self.profile['samples']/(self.profile['sampleRate']*1e3)*self.profile['slope']*1e12
rangeRes = 3e8/(2*bw)
Tc = (self.profile['idle']*1e-6 + self.profile['rampEnd']*1e-6)*chirpCount
lda = 3e8/(self.profile['startFreq']*1e9)
maxVelocity = lda/(4*Tc)
velocityRes = lda/(2*Tc*self.profile['numLoops']*self.profile['numTx'])
self.configTable.setItem(1,1,QTableWidgetItem(str(maxRange)[:5]))
self.configTable.setItem(2,1,QTableWidgetItem(str(rangeRes)[:5]))
self.configTable.setItem(3,1,QTableWidgetItem(str(maxVelocity)[:5]))
self.configTable.setItem(4,1,QTableWidgetItem(str(velocityRes)[:5]))
#update sensor position
self.az_tilt.setText(str(self.profile['az_tilt']))
self.elev_tilt.setText(str(self.profile['elev_tilt']))
self.s_height.setText(str(self.profile['sensorHeight']))
    def sendCfg(self):
        """Send the cached config lines to the device and start the parse
        timer that drives frame reads.

        NOTE(review): the bare except also hides serial-write failures behind
        the 'No cfg file selected!' message; this is the method the asker
        wants to extend so the GUI can reconnect without resetting the device.
        """
        try:
            if (self.configType.currentText() != "Replay"):
                self.parser.sendCfg(self.cfg)  # raises AttributeError if no cfg parsed yet
            self.configSent = 1
            self.parseTimer.start(self.frameTime)  # begin periodic parseData calls
        except:
            print ('No cfg file selected!')
    # Needed ?? deb_gp
    # def setParser(self, uParser):
    # self.parser = uParser
    def parseData(self):
        """Timer slot: kick the UART parse thread for the next frame."""
        self.uart_thread.start(priority=QThread.HighestPriority)
def whoVisible(self):
if (self.threeD):
self.threeD = 0
else:
self.threeD = 1
print('3d: ', self.threeD)
if __name__ == '__main__':
    # Build the QApplication, size the window to the primary screen, and run
    # the Qt event loop. The fbs ApplicationContext path (compileGui) is kept
    # for packaged builds; the duplicated setup of the original is factored.
    if (compileGui):
        appctxt = ApplicationContext()
    app = QApplication(sys.argv)
    size = app.primaryScreen().size()
    main = Window(size=size)
    main.show()
    if (compileGui):
        sys.exit(appctxt.app.exec_())
    else:
        sys.exit(app.exec_())

PyQT table update crash easyly

I am using PyQt4.
I created a QTableWidget to show running status messages.
When my program runs, it crashes within ten minutes.
If I disable my table-update function, it does not crash any more.
Here is my code — please help me.
class table_work(QThread):
    """Background thread that pushes queued row updates into QTableWidgets.

    NOTE(review): widgets are touched from this worker thread (see
    update_table_thread) — Qt requires GUI access from the main thread only,
    which is the likely cause of the reported crashes.
    """
    TableDataSignal = pyqtSignal()  # NOTE(review): declared with no payload, but emitted with a dict elsewhere
    def __init__(self,main_self):
        # QThread.__init__(self)
        super(table_work, self).__init__(main_self)
        self.main_self = main_self            # owning window
        self.table_update_list = list()       # queue of pending row updates
    #pyqtSlot(dict)
    def update_table_thread_o(self,work):
        """Write one queued row update into its table.

        *work* keys: 'row_position', 'data' (dict of cell values),
        'key_sort' (column order), 'table' (target QTableWidget).
        NOTE(review): the bare except silently swallows every error, and
        setItem is only thread-safe when this runs in the GUI thread.
        """
        try:
            row_pos = work['row_position']
            data = work['data']
            table_key_sort = work['key_sort']
            this_table = work['table']
            k = 0
            # One cell per key, in the given column order.
            for table_key in table_key_sort:
                this_table.setItem(row_pos, k, QTableWidgetItem(unicode(data[table_key])))
                k += 1
            del work
        except:
            pass
    def update_table_thread(self):
        """Poll the update queue forever, rewriting every queued row twice a
        second.

        NOTE(review): this loop never removes processed entries and calls
        setItem from a non-GUI thread — Qt forbids that; forwarding the data
        to the main thread via a signal is the documented fix.
        """
        main_self = self.main_self
        table_work_list = self.table_update_list
        while 1:
            for work in self.table_update_list:
                row_pos = work['row_position']
                data = work['data']
                table_key_sort = work['key_sort']
                this_table = work['table']
                k = 0
                for table_key in table_key_sort:
                    this_table.setItem(row_pos, k, QTableWidgetItem(unicode(data[table_key])))
                    k += 1
            time.sleep(0.5)
    def run(self):
        """QThread entry point: run the polling loop (never returns)."""
        self.update_table_thread()
This is the code that builds and queues the table-update message:
    def update_table(self,address,change_obj=None,tabe_name='auto_card'):
        """Insert or update the row keyed by *address* in one of two tables
        ('auto_bot' or 'auto_card'), then emit the row payload for the GUI.

        change_obj, when given, is {'key': ..., 'val': ...} and patches one
        field of the cached row before emitting.
        NOTE(review): TableDataSignal is declared as pyqtSignal() with no
        arguments but emitted here with a dict — that mismatch raises at
        runtime; the signal should be pyqtSignal(dict).
        """
        sample_dict = dict()       # default row contents for a new address
        table_key_sort = list()    # column order for this table
        now_table_sort = 0
        if tabe_name == "auto_bot":
            this_table = self.auto_bot_procc_table
            table_data_list = self.auto_bot_procc_table_list
            now_table_sort = self.auto_bot_now_table_sort
            sample_dict['address'] = address
            sample_dict['money'] = 0
            sample_dict['run_time'] = 0
            sample_dict['item_cd'] = u"60分鐘後"
            sample_dict['stat'] = "Ready..."
            sample_dict['sort'] = now_table_sort
            table_key_sort.append('address')
            table_key_sort.append('money')
            table_key_sort.append('run_time')
            table_key_sort.append('item_cd')
            table_key_sort.append('stat')
        if tabe_name == "auto_card":
            this_table = self.process_table
            table_data_list = self.now_procc_table_list
            now_table_sort = self.now_table_sort
            sample_dict['address'] = address
            sample_dict['done_num'] = 0
            sample_dict['pre_item'] = ""
            sample_dict['procc'] = "Ready"
            sample_dict['mission_procc'] = u"待命.."
            sample_dict['mission_num'] = 0
            sample_dict['mission_line'] = 0
            sample_dict['update_time'] = db.get_time()
            sample_dict['sort'] = now_table_sort
            sample_dict['option'] = ""
            table_key_sort.append('address')
            table_key_sort.append('done_num')
            table_key_sort.append('pre_item')
            table_key_sort.append('mission_procc')
            table_key_sort.append('procc')
            table_key_sort.append('mission_num')
            table_key_sort.append('mission_line')
            table_key_sort.append('update_time')
        if address not in table_data_list:
            # First sighting of this address: add a table row and cache it.
            this_table.insertRow(sample_dict['sort'])
            table_data_list[address] = sample_dict
            # NOTE(review): always uses the auto_bot counter, even for the
            # auto_card table — looks like a copy/paste bug; confirm.
            sample_dict['sort'] = self.auto_bot_now_table_sort
            self.auto_bot_now_table_sort += 1
        acc_data = table_data_list[address]
        if change_obj != None:
            key = change_obj['key']
            val = change_obj['val']
            if key in acc_data:
                acc_data[key] = val
                acc_data['update_time'] = db.get_time()
        rowPosition = acc_data['sort']
        # Package the row for the table-update consumer.
        temp = dict()
        temp['row_position'] = rowPosition
        temp['data'] = acc_data
        temp['key_sort'] = table_key_sort
        temp['table'] = this_table
        self.TableDataSignal.emit(temp)
        del temp
Some time later I found an answer.
I was a PyQt newbie then; after gaining experience across various projects,
I now understand that if you don't use the main thread to change the UI, you must always use a signal/emit.
Even if your code appears to work, always use signal/emit — otherwise a series of disasters will follow.
You can do it like this:
class sample(QtCore.QThread):
    """Minimal example: a worker thread that asks the GUI (main) thread to
    update the table by emitting a signal instead of touching widgets
    directly."""
    # Carries the table-update payload across the thread boundary.
    table_data_change = QtCore.pyqtSignal(dict)

    def __init__(self, main_win):
        # Fix: the original never initialised QThread itself.
        super(sample, self).__init__()
        self.main = main_win
        # change_fn runs in the GUI thread, where widget access is safe.
        self.table_data_change.connect(self.main.change_fn)

    def test(self):
        data = dict()
        data['btn'] = ...   # placeholder — put the real widget/key here
        data['val'] = ...   # placeholder — put the real value here
        self.table_data_change.emit(data)
Save your time!

Categories

Resources