Traceback (most recent call last):
File "D:/desk/bert-master333/bert-master/run_classifier.py", line 1024, in <module>
tf.app.run()
File "D:\anaconda\envs\tensorflow\lib\site-packages\tensorflow\python\platform\app.py", line 40, in run
_run(main=main, argv=argv, flags_parser=_parse_flags_tolerate_undef)
File "D:\anaconda\envs\tensorflow\lib\site-packages\absl\app.py", line 303, in run
_run_main(main, args)
File "D:\anaconda\envs\tensorflow\lib\site-packages\absl\app.py", line 251, in _run_main
sys.exit(main(argv))
File "D:/desk/bert-master333/bert-master/run_classifier.py", line 885, in main
train_examples = processor.get_train_examples(FLAGS.data_dir)
File "D:/desk/bert-master333/bert-master/run_classifier.py", line 380, in get_train_examples
self._read_tsv(os.path.join(data_dir, "train.csv")), "train")
File "D:/desk/bert-master333/bert-master/run_classifier.py", line 410, in _create_examples
label = tokenization.convert_to_unicode(line[1])
IndexError: list index out of range
class limengnanProcessor(DataProcessor):
    def get_train_examples(self, data_dir):
        return self._create_examples(
            self._read_tsv(os.path.join(data_dir, "train.csv")), "train")

    def get_dev_examples(self, data_dir):
        return self._create_examples(
            self._read_tsv(os.path.join(data_dir, "dev.csv")), "dev")

    def get_test_examples(self, data_dir):
        return self._create_examples(
            self._read_tsv(os.path.join(data_dir, "test.csv")), "test")

    def get_labels(self):
        return ['0', '1']

    @classmethod
    def _read_tsv(cls, input_file, quotechar=None):
        # Note: rows are split on "&" rather than on tabs.
        with tf.gfile.Open(input_file, "r") as f:
            reader = csv.reader(f, delimiter="&", quotechar=quotechar)
            lines = []
            for line in reader:
                lines.append(line)
            return lines

    def _create_examples(self, lines, set_type):
        examples = []
        for (i, line) in enumerate(lines):
            guid = "%s-%s" % (set_type, i)
            if set_type == "test":
                text_a = tokenization.convert_to_unicode(line[0])
                label = "0"
            else:
                text_a = tokenization.convert_to_unicode(line[0])
                label = tokenization.convert_to_unicode(line[1])
            if label not in ['0', '1']:
                continue
            examples.append(
                InputExample(guid=guid, text_a=text_a, text_b=None, label=label))
        return examples
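A quick way to reproduce the IndexError outside of BERT: _read_tsv above splits each row on the "&" delimiter, so any row of train.csv that contains no "&" comes back as a single-element list, and line[1] in _create_examples then fails exactly as in the traceback. A minimal sketch (the row content is hypothetical):

import csv
import io

# Hypothetical train.csv row that contains no "&" delimiter.
row = next(csv.reader(io.StringIO("some text without a label\n"), delimiter="&"))
print(row)     # ['some text without a label'] -> only one field
print(row[1])  # IndexError: list index out of range, matching the traceback

If the file is genuinely "&"-separated, it is worth checking for empty lines or rows where the separator is missing before indexing line[1].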
import dask.bag as db


class Converter():
    def __init__(self, input, output):
        """Converter constructor"""
        self.input = input
        self.output = output

    @staticmethod
    def large_file_reader(file_path: str):
        """
        File reader
        """
        temp_data = db.read_avro(file_path)
        data = temp_data.to_dataframe()
        # just to check it can be read properly
        print(data.head(6))
        return data

    @staticmethod
    def large_file_writer(data, file_path: str) -> bool:
        """
        File writer
        """
        data.compute().to_csv(file_path, index=False)

    def large_file_processor(self):
        "Read then write"
        input_file_path = self.input
        output_file_path = self.output
        data = Converter.large_file_reader(input_file_path)
        Converter.large_file_writer(data=data, file_path=output_file_path)


if __name__ == "__main__":
    c = Converter("/Users/csv_to_avro_new.avro", "/Users/test_avro_new.csv")
    c.large_file_processor()
Traceback (most recent call last):
File "/Users/PycharmProjects/ms--py/new.py", line 41, in <module>
c.large_file_processor()
File "/Users/PycharmProjects/ms--py/new.py", line 36, in large_file_processor
Converter.large_file_writer(data=data, file_path=output_file_path)
File "/Users/PycharmProjects/ms--py/new.py", line 28, in large_file_writer
data.compute().to_csv(file_path, index=False)
File "/Users/PycharmProjects/data-ingest/lib/python3.10/site-packages/dask/base.py", line 315, in compute
(result,) = compute(self, traverse=False, **kwargs)
File "/Users/PycharmProjects/data-ingest/lib/python3.10/site-packages/dask/base.py", line 600, in compute
results = schedule(dsk, keys, **kwargs)
File "/Users/PycharmProjects/data-ingest/lib/python3.10/site-packages/dask/threaded.py", line 89, in get
results = get_async(
File "/Users/PycharmProjects/data-ingest/lib/python3.10/site-packages/dask/local.py", line 511, in get_async
raise_exception(exc, tb)
File "/Users/PycharmProjects/data-ingest/lib/python3.10/site-packages/dask/local.py", line 319, in reraise
raise exc
File "/Users/PycharmProjects/data-ingest/lib/python3.10/site-packages/dask/local.py", line 224, in execute_task
result = _execute_task(task, data)
File "/Users/PycharmProjects/data-ingest/lib/python3.10/site-packages/dask/core.py", line 119, in _execute_task
return func(*(_execute_task(a, cache) for a in args))
File "/Users/PycharmProjects/data-ingest/lib/python3.10/site-packages/dask/core.py", line 119, in <genexpr>
return func(*(_execute_task(a, cache) for a in args))
File "/Users/adavsandeep/PycharmProjects/data-ingest/lib/python3.10/site-packages/dask/core.py", line 119, in _execute_task
return func(*(_execute_task(a, cache) for a in args))
File "/Users/PycharmProjects/data-ingest/lib/python3.10/site-packages/dask/bag/avro.py", line 150, in read_chunk
chunk = read_block(f, off, l, head["sync"])
File "/Users/PycharmProjects/data-ingest/lib/python3.10/site-packages/fsspec/utils.py", line 244, in read_block
found_start_delim = seek_delimiter(f, delimiter, 2**16)
File "/Users/PycharmProjects/data-ingest/lib/python3.10/site-packages/fsspec/utils.py", line 187, in seek_delimiter
current = file.read(blocksize)
File "/Users/PycharmProjects/data-ingest/lib/python3.10/site-packages/fsspec/implementations/local.py", line 337, in read
return self.f.read(*args, **kwargs)
ValueError: read of closed file
Process finished with exit code 1
Added the error traceback, please check it once. Thanks.
Error
ValueError: read of closed file
I tried converting to some other formats in the same way, e.g. CSV to JSON, and that was working, but I am not able to convert Avro to CSV, Avro to JSON, or Avro to Parquet.
My file size is more than 2 GB; that's the reason I am using dask.
Thanks in advance.
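Independent of the ValueError itself, note that data.compute() materializes the whole Avro file as a single in-memory pandas DataFrame before writing, which works against the reason for using dask on a 2 GB+ file. A minimal sketch of keeping the write inside dask instead (same paths as in the question; this is an assumption about the intent, not a confirmed fix for the closed-file error):

import dask.bag as db

# Read the Avro file lazily and convert the bag to a dask DataFrame.
ddf = db.read_avro("/Users/csv_to_avro_new.avro").to_dataframe()

# Let dask write the CSV partition by partition; single_file=True merges the
# partitions into one output file without building the full pandas DataFrame first.
ddf.to_csv("/Users/test_avro_new.csv", single_file=True, index=False)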
I am facing this error, which is related to the data loader dictionary for the MSVD dataset, while running the training file for video captioning from https://github.com/nasib-ullah/video-captioning-models-in-Pytorch
//////////////////////////////////////////////////////
Code of the __getitem__ function:
def __getitem__(self, idx):
    anno = random.choice(self.annotation_dict[self.v_name_list[idx]])
    anno_index = []
    for word in anno.split(' '):
        try:
            anno_index.append(self.voc.word2index[word])
        except:
            pass
    if self.opt_truncate_caption:
        if len(anno_index) > self.max_caption_length:
            anno_index = anno_index[:self.max_caption_length]
    anno_index = anno_index + [self.voc.cfg.EOS_token]
    appearance_tensor = torch.tensor(self.appearance_feature_dict[self.v_name_list[idx]]).float()
    if self.motion_feature_dict is None:
        motion_tensor = torch.zeros_like(appearance_tensor)
    else:
        motion_tensor = torch.tensor(self.motion_feature_dict[self.v_name_list[idx]]).float()
    if self.object_feature_dict is None:
        object_tensor = torch.zeros_like(appearance_tensor)
    else:
        object_tensor = torch.tensor(self.object_feature_dict[self.v_name_list[idx]]).float()
    return appearance_tensor, anno_index, self.v_name_list[idx], motion_tensor, object_tensor
Traceback (most recent call last):
File "/home/adel/Downloads/video-captioning-models-in-Pytorch-main/untitled0.py", line 84, in <module>
loss_train,ac_loss = model.train_epoch(train_loader,utils)
File "/home/adel/Downloads/video-captioning-models-in-Pytorch-main/models/MARN/model.py", line 321, in train_epoch
for data in dataloader:
File "/home/adel/anaconda3/envs/nasib/lib/python3.8/site-packages/torch/utils/data/dataloader.py", line 435, in __next__
data = self._next_data()
File "/home/adel/anaconda3/envs/nasib/lib/python3.8/site-packages/torch/utils/data/dataloader.py", line 1085, in _next_data
return self._process_data(data)
File "/home/adel/anaconda3/envs/nasib/lib/python3.8/site-packages/torch/utils/data/dataloader.py", line 1111, in _process_data
data.reraise()
File "/home/adel/anaconda3/envs/nasib/lib/python3.8/site-packages/torch/_utils.py", line 428, in reraise
raise self.exc_type(msg)
KeyError: Caught KeyError in DataLoader worker process 0.
Original Traceback (most recent call last):
File "/home/adel/anaconda3/envs/nasib/lib/python3.8/site-packages/torch/utils/data/_utils/worker.py", line 198, in _worker_loop
data = fetcher.fetch(index)
File "/home/adel/anaconda3/envs/nasib/lib/python3.8/site-packages/torch/utils/data/_utils/fetch.py", line 44, in fetch
data = [self.dataset[idx] for idx in possibly_batched_index]
File "/home/adel/anaconda3/envs/nasib/lib/python3.8/site-packages/torch/utils/data/_utils/fetch.py", line 44, in <listcomp>
data = [self.dataset[idx] for idx in possibly_batched_index]
File "/home/adel/Downloads/video-captioning-models-in-Pytorch-main/data.py", line 88, in __getitem__
appearance_tensor = torch.tensor(self.appearance_feature_dict[self.v_name_list[idx]]).float()
KeyError: 'vid1'
////////////////////////////////////////////////////////
Keys of the dictionary:
Keys: <KeysViewHDF5 ['vid1', 'vid10', 'vid100', 'vid1000', 'vid1001', 'vid1002', 'vid1003', 'vid1004', 'vid1005', 'vid1006', ...
<HDF5 dataset "vid1": shape (28, 1536), type "<f4">
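Since the HDF5 listing above does show a 'vid1' key, one thing worth checking is what self.appearance_feature_dict actually is inside the DataLoader worker (for example, an h5py File that is no longer open versus a plain dict of arrays). A small, hedged check along these lines (the feature file path is hypothetical; use whatever path the repo's config points to):

import h5py

# Hypothetical path to the appearance-feature file used to build appearance_feature_dict.
with h5py.File("MSVD_appearance_features.hdf5", "r") as f:
    print("vid1" in f)         # membership test against the HDF5 keys
    print(list(f.keys())[:5])  # e.g. ['vid1', 'vid10', 'vid100', ...]

# Inside __getitem__ it can also help to fail with more context than a bare KeyError:
# key = self.v_name_list[idx]
# if key not in self.appearance_feature_dict:
#     raise KeyError(f"{key} not found among {len(self.appearance_feature_dict)} loaded features")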
I'm trying to pass a variable as a keyword argument like this:
@receiver(post_save, sender=BudgetTransaction)
def create_forecasted(sender, instance, created, **kwargs):
    if instance.transaction_type == "EC":
        return True
    if created:
        today = instance.next_date
        last_day = date(today.year, 12, 31)
        this_month = today.month
        weeks_left = int((last_day - today).days / 7)
        fortnights_left = int(weeks_left / 2)
        months_left = 12 - this_month
        if instance.frequency == 'weeks':
            create_range = weeks_left
        elif instance.frequency == 'fort':
            create_range = fortnights_left
        else:
            create_range = months_left
        loop = 1
        for i in range(create_range):
            if instance.frequency == 'fort':
                true_frequency = 'weeks'
                true_loop = loop * 2
            elif instance.frequency == '4week':
                true_frequency = 'weeks'
                true_loop = loop * 4
            else:
                true_frequency = instance.frequency
                true_loop = loop
            ### PASSING VARIABLE HERE ###
            next_month = today + relativedelta(**{true_frequency: true_loop})
            date_string = next_month.strftime("%Y-%m-%d")
            BudgetTransaction.objects.bulk_create([BudgetTransaction(
                owner=instance.owner,
                transaction_amount=instance.transaction_amount,
                transaction_name=instance.transaction_name,
                transaction_type=instance.transaction_type,
                next_date=date_string,
                frequency=instance.frequency,
            )])
            loop += 1
This is for a Django site and it doesn't error when I run it through the site. However, when I run a test I get this error:
Error
Traceback (most recent call last):
File "C:\Users\Mitchell\PycharmProjects\BudgieFinance_TW\bf_app\tests\test_models.py", line 26, in test_transaction_has_owner
transaction = TransactionFactory(owner=user)
File "C:\Users\Mitchell\PycharmProjects\BudgieFinance_TW\venv\lib\site-packages\factory\base.py", line 40, in __call__
return cls.create(**kwargs)
File "C:\Users\Mitchell\PycharmProjects\BudgieFinance_TW\venv\lib\site-packages\factory\base.py", line 528, in create
return cls._generate(enums.CREATE_STRATEGY, kwargs)
File "C:\Users\Mitchell\PycharmProjects\BudgieFinance_TW\venv\lib\site-packages\factory\django.py", line 117, in _generate
return super()._generate(strategy, params)
File "C:\Users\Mitchell\PycharmProjects\BudgieFinance_TW\venv\lib\site-packages\factory\base.py", line 465, in _generate
return step.build()
File "C:\Users\Mitchell\PycharmProjects\BudgieFinance_TW\venv\lib\site-packages\factory\builder.py", line 262, in build
instance = self.factory_meta.instantiate(
File "C:\Users\Mitchell\PycharmProjects\BudgieFinance_TW\venv\lib\site-packages\factory\base.py", line 317, in instantiate
return self.factory._create(model, *args, **kwargs)
File "C:\Users\Mitchell\PycharmProjects\BudgieFinance_TW\venv\lib\site-packages\factory\django.py", line 166, in _create
return manager.create(*args, **kwargs)
File "C:\Users\Mitchell\PycharmProjects\BudgieFinance_TW\venv\lib\site-packages\django\db\models\manager.py", line 85, in manager_method
return getattr(self.get_queryset(), name)(*args, **kwargs)
File "C:\Users\Mitchell\PycharmProjects\BudgieFinance_TW\venv\lib\site-packages\django\db\models\query.py", line 514, in create
obj.save(force_insert=True, using=self.db)
File "C:\Users\Mitchell\PycharmProjects\BudgieFinance_TW\venv\lib\site-packages\django\db\models\base.py", line 806, in save
self.save_base(
File "C:\Users\Mitchell\PycharmProjects\BudgieFinance_TW\venv\lib\site-packages\django\db\models\base.py", line 872, in save_base
post_save.send(
File "C:\Users\Mitchell\PycharmProjects\BudgieFinance_TW\venv\lib\site-packages\django\dispatch\dispatcher.py", line 176, in send
return [
File "C:\Users\Mitchell\PycharmProjects\BudgieFinance_TW\venv\lib\site-packages\django\dispatch\dispatcher.py", line 177, in <listcomp>
(receiver, receiver(signal=self, sender=sender, **named))
File "C:\Users\Mitchell\PycharmProjects\BudgieFinance_TW\bf_app\models.py", line 82, in create_forecasted
next_month = next_date + relativedelta(**{true_frequency: true_loop})
TypeError: keywords must be strings
I'm not entirely sure how to solve this, as I've never passed a keyword as a variable before, so I'm not overly familiar with the process. This is happening in a post_save receiver.
Thanks
The issue here was that occasionally I was passing None as the variable, which was causing the error. In the test I added a proper string parameter and all was fine!
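For reference, the TypeError in the traceback comes from Python's ** unpacking itself: a dict with a non-string key cannot be expanded into keyword arguments, so a None frequency fails before relativedelta is even called. A small sketch with a hypothetical guard (the actual fix above was simply passing a proper string in the test):

from datetime import date
from dateutil.relativedelta import relativedelta

today = date.today()
true_frequency = None   # what was occasionally being passed
true_loop = 2

# ** unpacking of a dict with a non-string key raises the error from the traceback
# before relativedelta itself ever runs:
#   today + relativedelta(**{true_frequency: true_loop})  ->  TypeError: keywords must be strings

# Hypothetical guard: only build the kwargs when the frequency is a real string.
if isinstance(true_frequency, str):
    next_month = today + relativedelta(**{true_frequency: true_loop})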
When I try to load the model (the input, not the meta-model), it raises a MemoryError about 30 seconds after executing.
Expected: List of tree: [{'type':'func', 'callee':'print', 'args':[['Hello']]}]
Actual: MemoryError
Output
Traceback (most recent call last):
File "C:/Users/kenxs/PycharmProjects/program/program/parser.py", line 103, in <module>
main()
File "C:/Users/kenxs/PycharmProjects/program/program/parser.py", line 97, in main
program.do_it(True, True, True)
File "C:/Users/kenxs/PycharmProjects/program/program/parser.py", line 80, in do_it
if cont and intp: cont, err = self.interpret()
File "C:/Users/kenxs/PycharmProjects/program/program/parser.py", line 67, in interpret
self.model = self.mm.model_from_file(os.path.abspath('program.program'))
File "C:\Program Files (x86)\Python38-32\lib\site-packages\textx\metamodel.py", line 574, in model_from_file
return self.internal_model_from_file(file_name, encoding, debug)
File "C:\Program Files (x86)\Python38-32\lib\site-packages\textx\metamodel.py", line 613, in internal_model_from_file
model = self._parser_blueprint.clone().get_model_from_str(
File "C:\Program Files (x86)\Python38-32\lib\site-packages\textx\model.py", line 262, in get_model_from_str
self.parse(model_str, file_name=file_name)
File "C:\Program Files (x86)\Python38-32\lib\site-packages\arpeggio\__init__.py", line 1493, in parse
self.parse_tree = self._parse()
File "C:\Program Files (x86)\Python38-32\lib\site-packages\textx\model.py", line 221, in _parse
return self.parser_model.parse(self)
File "C:\Program Files (x86)\Python38-32\lib\site-packages\arpeggio\__init__.py", line 286, in parse
result = self._parse(parser)
File "C:\Program Files (x86)\Python38-32\lib\site-packages\arpeggio\__init__.py", line 365, in _parse
result = e.parse(parser)
File "C:\Program Files (x86)\Python38-32\lib\site-packages\arpeggio\__init__.py", line 286, in parse
result = self._parse(parser)
File "C:\Program Files (x86)\Python38-32\lib\site-packages\arpeggio\__init__.py", line 365, in _parse
result = e.parse(parser)
File "C:\Program Files (x86)\Python38-32\lib\site-packages\arpeggio\__init__.py", line 286, in parse
result = self._parse(parser)
File "C:\Program Files (x86)\Python38-32\lib\site-packages\arpeggio\__init__.py", line 481, in _parse
result = p(parser)
File "C:\Program Files (x86)\Python38-32\lib\site-packages\arpeggio\__init__.py", line 286, in parse
result = self._parse(parser)
File "C:\Program Files (x86)\Python38-32\lib\site-packages\arpeggio\__init__.py", line 404, in _parse
result = e.parse(parser)
File "C:\Program Files (x86)\Python38-32\lib\site-packages\arpeggio\__init__.py", line 286, in parse
result = self._parse(parser)
File "C:\Program Files (x86)\Python38-32\lib\site-packages\arpeggio\__init__.py", line 365, in _parse
result = e.parse(parser)
File "C:\Program Files (x86)\Python38-32\lib\site-packages\arpeggio\__init__.py", line 286, in parse
result = self._parse(parser)
File "C:\Program Files (x86)\Python38-32\lib\site-packages\arpeggio\__init__.py", line 365, in _parse
result = e.parse(parser)
File "C:\Program Files (x86)\Python38-32\lib\site-packages\arpeggio\__init__.py", line 286, in parse
result = self._parse(parser)
File "C:\Program Files (x86)\Python38-32\lib\site-packages\arpeggio\__init__.py", line 484, in _parse
append(result)
MemoryError
Grammar
Program:
commands*=Command
;
Command:
Statement | Function | Definition
;
Statement:
callee=ID '(' checker=Checker ')' '=' effect=Collection Ending
;
Checker:
a=Object sign=CheckerSign b=Object
;
CheckerSign:
'==' | '!='
;
Collection:
'[' objs*=PseudoObject ']'
;
PseudoObject:
Object Ending
;
Function:
callee=ID '(' args=Arguments ')' Ending
;
Arguments:
arg*=Argument
;
Argument:
NamedArgument | UnnamedArgument
;
NamedArgument:
a=Object '=' b=Object
;
UnnamedArgument:
a=Object
;
Definition:
a=Object '=' b=Object
;
Object:
a*=ObjectChild
;
ObjectChild:
ObjectChildChild ( '.' | '' )
;
ObjectChildChild:
String | ID | INT | STRICTFLOAT | BOOL | Collection | Function
;
String:
'"' ID '"'
;
Comment:
/#.*/ Ending
;
Ending:
''* Newline
;
Newline:
( '\n' | ';' )
;
Program
import os
from textx import *
from textx.export import *


class Parser(object):
    def __init__(self, meta_model_path='grammar.tx', model_str='print("Hello")'):
        self.tree = []
        self.meta_model_path = os.path.abspath(meta_model_path)
        self.model_str = model_str
        self.mm = None
        self.model = None

    def __str__(self):
        return str(self.tree)

    def _interpret_function(self, c):
        result = {}
        result['type'] = 'func'
        result['callee'] = c.callee
        result['args'] = []
        for arg in c.args.arg:
            if arg.__class__.__name__ == 'UnnamedArgument':
                result['args'].append([arg.a.a])
            elif arg.__class__.__name__ == 'NamedArgument':
                result['args'].append([arg.a.a, arg.b.a])
        return result

    def _interpret_definition(self, c):
        result = {}
        result['type'] = 'defi'
        result['a'] = c.a.a
        result['b'] = c.b.a
        return result

    def _interpret_statement(self, c):
        result = {}
        result['type'] = 'stat'
        result['callee'] = c.callee
        result['checker_a'] = c.checker.a
        result['checker_b'] = c.checker.b
        result['checker_sign'] = c.checker.sign
        result['effect'] = c.effect.objs
        return result

    def _interpret(self, model):
        for c in model.commands:
            if c.__class__.__name__ == 'Statement':
                self.tree.append(self._interpret_statement(c))
            elif c.__class__.__name__ == 'Function':
                self.tree.append(self._interpret_function(c))
            elif c.__class__.__name__ == 'Definition':
                self.tree.append(self._interpret_definition(c))

    def export_meta_model(self):
        metamodel_export(self.mm, os.path.abspath('grammar.dot'))
        return [True, None]

    def export_model(self):
        model_export(self.model, os.path.abspath('program.dot'))
        return [True, None]

    def interpret(self):
        print(-1)
        self.mm = metamodel_from_file(self.meta_model_path, debug=False)
        print(0)
        try:
            self.model = self.mm.model_from_str(self.model_str)
            # self.model = self.mm.model_from_file(os.path.abspath('program.prg'))
        except TextXSyntaxError as err:
            print('Syntax Error # {}:{}'.format(err.line, err.col))
            print('{}'.format(err.message))
            return [False, err]
        print(1)
        self._interpret(self.model)
        print(2)
        return [True, None]

    def do_it(self, exp_mm=False, exp_m=False, intp=True):  # My naming skills :)
        cont = True
        err = None
        if cont and intp: cont, err = self.interpret()
        if cont and exp_mm: cont, err = self.export_meta_model()
        if cont and exp_m: cont, err = self.export_model()


def main(debug=False):
    print('Program')
    program = Parser()
    print('Inp Done')
    program.do_it(True, True, True)
    print('Done')
    print(program)


if __name__ == "__main__":
    main()
Rule Ending has a zero-or-more repetition over an empty string match (''*), which is essentially an infinite loop building a parse tree node with an infinite number of empty-match terminals. Eventually the parse tree eats up all the memory and you get a MemoryError.
In general, repetitions ('*', '+') over a parsing expression that could potentially be an empty match can lead to an infinite loop.
I suggest that you register an issue in the issue tracker for this, as it should be fairly easy to at least detect it at runtime without too much overhead.
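As a simplified illustration of why this runs away (plain Python, not the actual textX/Arpeggio internals): a PEG-style zero-or-more repetition only stops when the repeated expression fails or stops consuming input, so an expression that always succeeds on zero characters keeps appending nodes forever.

# Simplified sketch of a PEG "zero or more" loop; not Arpeggio's implementation,
# just the shape of the failure mode described above.
def zero_or_more(match, text, pos):
    results = []
    while True:
        node, new_pos = match(text, pos)
        if node is None:          # the inner expression failed -> repetition ends
            return results, pos
        results.append(node)      # with an empty match this list grows forever,
        pos = new_pos             # because pos never advances
        # a safe implementation would also break when new_pos == pos

def always_empty(text, pos):
    # models the grammar's '': always succeeds, consumes nothing
    return "", pos

# zero_or_more(always_empty, 'print("Hello")', 0)  # never returns; memory keeps growing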
I have added a small debugging aid to my server. It logs a stack trace obtained from traceback.format_stack().
It contains a few incomplete lines like this:
File "/home/...../base/loop.py", line 361, in run
self.outputs.fd_list, (), sleep)
which is not that helpful.
The source lines 360 and 361:
rlist, wlist, unused = select.select(self.inputs.fd_list,
self.outputs.fd_list, (), sleep)
If only one line can be part of the stack trace, I would say line 360 with the function name (here select.select) is the right one, because the stack is created by calling functions.
Anyway, I would prefer the whole (logical) line to be printed, or at least some context (e.g. 2 lines before). Is that possible? I mean with just an adequate effort, of course.
I tried to add a line continuation character \, but without success.
EPILOGUE:
Based on Jean-François Fabre's answer and his code I'm going to use this function:
def print_trace():
    for fname, lnum, func, line in traceback.extract_stack()[:-1]:
        print('File "{}", line {}, in {}'.format(fname, lnum, func))
        try:
            with open(fname) as f:
                rl = f.readlines()
        except OSError:
            if line is not None:
                print(" " + line + " <===")
            continue
        first = max(0, lnum-3)
        # read 2 lines before and 2 lines after
        for i, line in enumerate(rl[first:lnum+2]):
            line = line.rstrip()
            if i + first + 1 == lnum:
                print(" " + line + " <===")
            elif line:
                print(" " + line)
"just with adequate effort" this can be done. But it's hack-like
check this example:
import traceback, re, os, sys

r = re.compile(r'File\s"(.*)",\sline\s(\d+)')

def print_trace():
    # discard the 2 deepest entries since they're a call to print_trace()
    lines = [str.split(x, "\n")[0] for x in traceback.format_stack()][:-2]
    for l in lines:
        m = r.search(l)
        if m is not None:
            sys.stdout.write(l + "\n")
            file = m.group(1)
            line = int(m.group(2)) - 1
            if os.path.exists(file):
                with open(file, "r") as f:
                    rl = f.readlines()
                tblines = rl[max(line-2, 0):min(line+3, len(rl))]
                # read 2 lines before and 2 lines after
                for i, tl in enumerate(tblines):
                    tl = tl.rstrip()
                    if i == 2:
                        sys.stdout.write(" " + tl + " <====\n")
                    elif tl:
                        sys.stdout.write(" " + tl + "\n")

def foo():
    print_trace()

foo()
output:
File "C:\Users\dartypc\AppData\Roaming\PyScripter\remserver.py", line 63, in <module>
if __name__ == "__main__":
main() <====
File "C:\Users\dartypc\AppData\Roaming\PyScripter\remserver.py", line 60, in main
t = SimpleServer(ModSlaveService, port = port, auto_register = False)
t.start() <====
if __name__ == "__main__":
File "C:\Program Files\PyScripter\Lib\rpyc.zip\rpyc\utils\server.py", line 227, in start
File "C:\Program Files\PyScripter\Lib\rpyc.zip\rpyc\utils\server.py", line 139, in accept
File "C:\Users\dartypc\AppData\Roaming\PyScripter\remserver.py", line 14, in _accept_method
class SimpleServer(Server):
def _accept_method(self, sock):
self._serve_client(sock, None) <====
class ModSlaveService(SlaveService):
File "C:\Program Files\PyScripter\Lib\rpyc.zip\rpyc\utils\server.py", line 191, in _serve_client
File "C:\Program Files\PyScripter\Lib\rpyc.zip\rpyc\core\protocol.py", line 391, in serve_all
File "C:\Program Files\PyScripter\Lib\rpyc.zip\rpyc\core\protocol.py", line 382, in serve
File "C:\Program Files\PyScripter\Lib\rpyc.zip\rpyc\core\protocol.py", line 350, in _dispatch
File "C:\Program Files\PyScripter\Lib\rpyc.zip\rpyc\core\protocol.py", line 298, in _dispatch_request
File "C:\Program Files\PyScripter\Lib\rpyc.zip\rpyc\core\protocol.py", line 528, in _handle_call
File "<string>", line 420, in run_nodebug
File "C:\DATA\jff\data\python\stackoverflow\traceback_test.py", line 31, in <module>
print_trace()
foo() <====
EDIT: VPfB suggested using extract_stack, which is a little less "hacky": no need to parse a string, just get the quadruplet with the traceback info (the text message needs to be rebuilt, but that's better).
import traceback, os, sys

def print_trace():
    # discard the 2 deepest entries since they're a call to print_trace()
    for file, line, w1, w2 in traceback.extract_stack()[:-2]:
        sys.stdout.write(' File "{}", line {}, in {}\n'.format(file, line, w1))
        if os.path.exists(file):
            line -= 1
            with open(file, "r") as f:
                rl = f.readlines()
            tblines = rl[max(line-2, 0):min(line+3, len(rl))]
            # read 2 lines before and 2 lines after
            for i, tl in enumerate(tblines):
                tl = tl.rstrip()
                if i == 2:
                    sys.stdout.write(" " + tl + " <====\n")
                elif tl:
                    sys.stdout.write(" " + tl + "\n")

def foo():
    print_trace()

foo()
The traceback.format_exception_only function formats only one line, except in the case of SyntaxError, so…
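For completeness, a tiny illustration of that behavior using only the standard traceback module:

import traceback

try:
    1 / 0
except ZeroDivisionError as e:
    print(traceback.format_exception_only(type(e), e))
    # ['ZeroDivisionError: division by zero'] -- a single line

try:
    compile("def broken(:", "<string>", "exec")
except SyntaxError as e:
    print(traceback.format_exception_only(type(e), e))
    # several lines: the file/line location, the offending source line,
    # a caret marker, and the final "SyntaxError: ..." message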