ponyORM: trouble with query - python

I have a query with dynamic conditions, i.e.
select(lambda obj: obj.A == 'a' and obj.B == 'b' and ...)
So I wrote this code for it:
def search(self, **kwargs):
    q = unicode('lambda obj:', 'utf-8')
    for field, value in kwargs.iteritems():
        value = unicode(value, 'utf-8')
        field = unicode(field, 'utf-8')
        q += u" obj.%s == '%s' and" % (field, value)
    q = q[0:q.rfind('and')]
    res = select(q.encode('utf-8'))[:]
But I get this error when executing the function:
tasks.search(title='Задача 1',url='test.com')
res = select(q.encode('utf-8'))[:]
File "<string>", line 2, in select
File ".../local/lib/python2.7/site-packages/pony/utils.py", line 96, in cut_traceback
return func(*args, **kwargs)
File ".../local/lib/python2.7/site-packages/pony/orm/core.py", line 3844, in select
if not isinstance(tree, ast.GenExpr): throw(TypeError)
File "...local/lib/python2.7/site-packages/pony/utils.py", line 123, in throw
raise exc
TypeError

The TypeError is raised because select() expects a generator expression (the traceback shows the check isinstance(tree, ast.GenExpr)), not the text of a lambda built as a string. While it is possible to use strings to apply conditions to a query, it can be unsafe because of the risk of SQL injection. The better way to apply conditions to a query is the filter() method. You can take the latest version of Pony ORM from the https://github.com/ponyorm/pony repository and try the couple of examples provided below.
First we define entities and create a couple of objects:
from decimal import Decimal
from pony.orm import *

db = Database('sqlite', ':memory:')

class Product(db.Entity):
    name = Required(unicode)
    description = Required(unicode)
    price = Required(Decimal)
    quantity = Required(int, default=0)

db.generate_mapping(create_tables=True)

with db_session:
    Product(name='iPad', description='Air, 16GB', price=Decimal('478.99'), quantity=10)
    Product(name='iPad', description='Mini, 16GB', price=Decimal('284.95'), quantity=15)
    Product(name='iPad', description='16GB', price=Decimal('299.00'), quantity=10)
Now we'll apply filters passing them as keyword arguments:
def find_by_kwargs(**kwargs):
    q = select(p for p in Product)
    q = q.filter(**kwargs)
    return list(q)

with db_session:
    products = find_by_kwargs(name='iPad', quantity=10)
    for p in products:
        print p.name, p.description, p.price, p.quantity
Another option is to use lambdas in order to specify the conditions:
def find_by_params(name=None, min_price=None, max_price=None):
    q = select(p for p in Product)
    if name is not None:
        q = q.filter(lambda p: p.name.startswith(name))
    if min_price is not None:
        q = q.filter(lambda p: p.price >= min_price)
    if max_price is not None:
        q = q.filter(lambda p: p.price <= max_price)
    return list(q)

with db_session:
    products = find_by_params(name='iPad', max_price=400)
    for p in products:
        print p.name, p.description, p.price, p.quantity
As you can see, filters can be applied dynamically. You can find more information about using filters at this link: http://doc.ponyorm.com/queries.html#Query.filter

If you still want to filter using strings, you have to apply a new filter for each key/value pair.
Something like this:
def search(self, **kwargs):
    q = select(m for m in Product)
    for field, value in kwargs.iteritems():
        value = unicode(value, 'utf-8')
        field = unicode(field, 'utf-8')
        flt = u"m.{0} == '{1}'".format(field, value)
        q = q.filter(flt)
    # return q  # return the Query, which can be modified further (e.g. paging, ordering)
    return q[:]  # or return the found products
HTH, Tom

Related

PYTHON SQLITE selecting multiple where conditions that may or may not exist

Working on some code that uses PySimpleGUI as the UI and SQLite for the data. I'm using SQLite's execute function to select data based on input from the user in the UI through variables. For example, if the user wants to search by part name, they type all or part of the name into the box and hit the search button, which runs my "part_search" method and filters the results on part name only. Or the user enters information in multiple boxes, and the search filters on whichever boxes have information.
This is runnable code, provided you add a file base1.db in the same folder as the script itself.
import PySimpleGUI as sg
import os.path
import sqlite3

# sql var
c = None
conn = None
setup = None
# list var
parts = []

def sql():
    global setup
    conn_sql()
    c.execute("""CREATE TABLE IF NOT EXISTS parts (part_name TEXT, part_number TEXT, part_series TEXT,
                 part_size INTEGER, job_type TEXT)""")
    conn.commit()
    if conn:
        conn.close()

def conn_sql():
    global c
    global conn
    # SQL connection var
    if os.path.isfile('./base1.db'):
        conn = sqlite3.connect('base1.db')
        c = conn.cursor()

def main_gui_parts():
    global parts
    layout = [[sg.Text('Part Name: '), sg.Input(size=(20, 1), key='-PName-'), sg.Text('Part Series:'),
               sg.Input(size=(10, 1), key='-PSeries-')],
              [sg.Text('Part Number:'), sg.Input(size=(20, 1), key='-PNumber-'), sg.Text('Part Size:'),
               sg.Input(size=(10, 1), key='-PSize-')],
              [sg.Checkbox('Fit', key='-PFit-'), sg.Checkbox('Weld', key='-PWeld-'),
               sg.Checkbox('Assemble', key='-PAssemble-'),
               sg.Button('Search', key='-PSearch-')],
              [sg.Listbox(parts, size=(58, 10), key='-PParts-')], [sg.Button('Back', key='-PBack-')]]
    window = sg.Window('parts list', layout, grab_anywhere=True)
    sql()
    while True:
        event, values = window.read()
        if event == 'Close' or event == sg.WIN_CLOSED:
            break
        # PART WINDOW
        part_name = values['-PName-']
        part_series = values['-PSeries-']
        part_number = values['-PNumber-']
        part_size = values['-PSize-']
        fit = values['-PFit-']
        weld = values['-PWeld-']
        assemble = values['-PAssemble-']
        if event == '-PSearch-':
            print('search parts')
            part_search(part_name, part_series, part_number, part_size, fit, weld, assemble)
        if event == '-PBack-':
            break
    window.close()

def part_search(part_name, part_series, part_number, part_size, fit, weld, assemble):
    global parts
    conn_sql()
    filter_original = """SELECT * FROM parts WHERE """
    filter = filter_original
    if part_name:
        print('part name: ' + part_name)
        if filter == filter_original:
            filter += """part_name LIKE ? """
        else:
            filter += """AND part_name LIKE ? """
    if part_series:
        print('part series: ' + part_series)
        if filter == filter_original:
            filter += """part_series=(?) """
        else:
            filter += """AND part_series=(?) """
    if part_number:
        print('part number: ' + part_number)
        if filter == filter_original:
            filter += """part_number LIKE ? """  ### DONT USE LIKE???
        else:
            filter += """AND part_number LIKE ? """  ### DONT USE LIKE???
    if part_size:
        print('part size: ' + part_size)
        if filter == filter_original:
            filter += """part_size=(?) """
        else:
            filter += """AND part_size=(?) """
    if fit:
        print('job type: ' + str(fit))
        if filter == filter_original:
            filter += """job_type = fit """
        else:
            filter += """AND job_type = fit """
    if weld:
        print('job type: ' + str(weld))
        if filter == filter_original:
            filter += """job_type = weld """
        else:
            filter += """AND job_type = weld """
    if assemble:
        print('job type: ' + str(assemble))
        if filter == filter_original:
            filter += """job_type = assemble"""
        else:
            filter += """AND job_type = assemble"""
    print(filter)
    # if filter != filter_original:
    #     c.execute(filter, ())
    # else:
    #     c.execute("""SELECT * FROM parts""")

main_gui_parts()
THE PROBLEM: The commented-out code at the bottom of the part_search method is where I'm having trouble. I don't use all of the variables all the time; I only filter with the variables the user provided, which means the tuple should only contain the variables that were actually input by the user.
If all the variables were used, it would look like this: c.execute(filter, (part_name, part_series, part_number, part_size, fit, weld, assemble)). But more often than not only some of those variables will have been used, and it may need to look like this instead: c.execute(filter, (part_name, part_series, weld)). Somehow I need the variables here to be removable (for lack of a better word).
I've been learning a lot about SQLite but I could be seeing tunnel vision and can't think of a different way to go about this.
Probably the easiest way to deal with this is to put all the filter conditions and values into lists, and then only add a WHERE clause if the length of the filters list is non-zero. For example:
query = """SELECT * FROM parts"""
filters = []
values = []
if part_name:
filters.append("""part_name LIKE ?""")
values.append(part_name)
...
if len(filters):
query += ' WHERE ' + ' AND '.join(filters)
c.execute(query, tuple(values))
Note: should your filters ever include OR conditions, you need to parenthesise them when building the query to ensure correct operation i.e.
query += ' WHERE (' + ') AND ('.join(filters) + ')'
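Applied to the part_search method from the question, a sketch of that approach could look like the following. It reuses the question's conn_sql() and global cursor c; the handling of the fit/weld/assemble checkboxes as literal 'fit'/'weld'/'assemble' values in job_type, and the % wildcards around the LIKE values, are assumptions rather than part of the original code.
def part_search(part_name, part_series, part_number, part_size, fit, weld, assemble):
    conn_sql()
    filters = []
    values = []
    if part_name:
        filters.append("part_name LIKE ?")
        values.append('%' + part_name + '%')  # substring match on the name
    if part_series:
        filters.append("part_series = ?")
        values.append(part_series)
    if part_number:
        filters.append("part_number LIKE ?")
        values.append('%' + part_number + '%')
    if part_size:
        filters.append("part_size = ?")
        values.append(part_size)
    # Assumption: each checkbox corresponds to a literal value stored in job_type.
    for job_type, checked in (('fit', fit), ('weld', weld), ('assemble', assemble)):
        if checked:
            filters.append("job_type = ?")
            values.append(job_type)
    query = "SELECT * FROM parts"
    if filters:
        query += " WHERE " + " AND ".join(filters)
    c.execute(query, tuple(values))
    return c.fetchall()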

python peewee dynamically or + and clauses

I'd like to AND together two lists of multiple OR clauses on the same table.
The problem with the following code is that the query result is empty. If I filter on just 'indices' or just 'brokers', the result is fine.
...
query = query.join(StockGroupTicker, on=(Ticker.id == StockGroupTicker.ticker))

# indices
if "indices" in filter:
    where_indices = []
    for f in filter["indices"]:
        where_indices.append(StockGroupTicker.stock_index == int(f))
    if len(where_indices):
        query = query.where(peewee.reduce(peewee.operator.or_, where_indices))

# broker
if "brokers" in filter:
    where_broker = []
    for f in filter["brokers"]:
        where_broker.append(StockGroupTicker.stock_index == int(f))
    if len(where_broker):
        query = query.where(peewee.reduce(peewee.operator.or_, where_broker))

return query.distinct()
SQL query (update)
-- index and broker
SELECT
DISTINCT `t1`.`id`,
`t1`.`symbol`,
`t1`.`type`,
`t1`.`name`,
`t1`.`sector`,
`t1`.`region`,
`t1`.`primary_exchange`,
`t1`.`currency`,
`t1`.`score`,
`t1`.`last_price`,
`t1`.`last_price_date`,
`t1`.`last_price_check`,
`t1`.`last_stock_split`,
`t1`.`next_earning`,
`t1`.`last_earnings_update`,
`t1`.`disused`,
`t1`.`source`,
`t1`.`source_intraday`,
`t1`.`created`,
`t1`.`modified`,
`t2`.`invest_score` AS `invest_score`
FROM
`ticker` AS `t1`
INNER JOIN `tickerstats` AS `t2` ON
(`t1`.`id` = `t2`.`ticker_id`)
INNER JOIN `stockgroupticker` AS `t3` ON
(`t1`.`id` = `t3`.`ticker_id`)
WHERE
(((((`t1`.`disused` IS NULL)
OR (`t1`.`disused` = 0))
AND (`t2`.`volume_mean_5` > 10000.0))
AND (`t3`.`stock_index_id` = 1))
AND (`t3`.`stock_index_id` = 10)
)
Thanks to @coleifer, the peewee solution is quite simple. I had to use an alias.
if "indices" in filter and filter["indices"]:
query = query.join(
StockGroupTicker, peewee.JOIN.INNER, on=(Ticker.id == StockGroupTicker.ticker)
)
where_indices = []
for f in filter["indices"]:
where_indices.append(StockGroupTicker.stock_index == int(f))
if len(where_indices):
query = query.where(peewee.reduce(peewee.operator.or_, where_indices))
if "brokers" in filter and filter["brokers"]:
BrokerGroupTicker = StockGroupTicker.alias()
query = query.join(
BrokerGroupTicker, peewee.JOIN.INNER, on=(Ticker.id == BrokerGroupTicker.ticker)
)
where_broker = []
for f in filter["brokers"]:
where_broker.append(BrokerGroupTicker.stock_index == int(f))
if len(where_broker):
query = query.where(peewee.reduce(peewee.operator.or_, where_broker))
return query.distinct()
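As a side note, the reduce/or_ pattern used above is the standard idiom for OR-ing a list of peewee expressions. A minimal sketch written with the standard-library functools.reduce and operator.or_ directly, reusing the question's model name:
import operator
from functools import reduce

def filter_by_indices(query, index_ids):
    # Build one OR expression from the id list, then AND it onto the query.
    conditions = [StockGroupTicker.stock_index == int(i) for i in index_ids]
    if conditions:
        query = query.where(reduce(operator.or_, conditions))
    return query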

Combine ~Q and F in Django?

Query
Balance.objects.filter(~Q(fax_date=F('paused_date')))
returns an empty queryset even though I do have objects that fit the condition "fax_date field not equal to paused_date". Is it possible to use ~Q and F together like that?
I ran a test like this:
deals = Deal.objects.all()
balance_pre = Balance.objects.filter(~Q(fax_date=F('paused_date')), fax_date__isnull=False, reserved=False)
agr_nums = list(deals.filter(agr_name__isnull=False).values_list('agr_name', flat=True).distinct())

agrs_with_fax = 0
for agr_num in agr_nums:
    try:
        balance_agr = Balance.objects.get(number__icontains=agr_num)
        if balance_agr.fax_date is not None and balance_agr.fax_date != balance_agr.paused_date and not balance_agr.reserved:
            agrs_with_fax += 1
    except Balance.DoesNotExist:
        pass

agrs_with_fax2 = 0
for agr_num in agr_nums:
    try:
        balance_pre.get(number__icontains=agr_num)
        agrs_with_fax2 += 1
    except Balance.DoesNotExist:
        pass

r = [agrs_with_fax, agrs_with_fax2, balance_agr.fax_date, balance_agr.paused_date, balance_agr.reserved]
The r returned is
[55, 0, datetime.date(2018, 7, 11), None, False]
I don't see my error; both loops should return the same result.
I created a Balance model in a fresh project just to test this. print(qs.query) will show you the generated query (not in all cases, but it does here). I also used exclude, as @daniel-roseman suggested, to prove that the two queries are equivalent. I hope this helps.
>>> from django.db.models import F, Q
>>> qs = Balance.objects.filter(~Q(fax_date=F('paused_date')))
>>> print(qs.query)
SELECT "so_balance"."id", "so_balance"."fax_date", "so_balance"."paused_date"
FROM "so_balance" WHERE NOT ("so_balance"."fax_date" =
("so_balance"."paused_date"))
>>> qs = Balance.objects.exclude(fax_date=F('paused_date'))
>>> print(qs.query)
SELECT "so_balance"."id", "so_balance"."fax_date", "so_balance"."paused_date"
FROM "so_balance" WHERE NOT ("so_balance"."fax_date" =
("so_balance"."paused_date"))

Build a dynamic update query in psycopg2

I have to construct a dynamic update query for PostgreSQL.
It's dynamic because beforehand I have to determine which columns to update.
Given a sample table:
create table foo (id int, a int, b int, c int)
Then I will programmatically construct the "set" clause:
_set = {}
_set['a'] = 10
_set['c'] = None
After that I have to build the update query. And here I'm stuck.
I have to construct this SQL UPDATE command:
update foo set a = 10, c = NULL where id = 1
How can I do this with a psycopg2 parametrized command (i.e. looping through the dict if it is not empty and building the set clause)?
UPDATE
While I was sleeping I found the solution myself. It is dynamic, exactly how I wanted it to be :-)
create table foo (id integer, a integer, b integer, c varchar)

updates = {}
updates['a'] = 10
updates['b'] = None
updates['c'] = 'blah blah blah'

sql = "update foo set %s where id = %s" % (', '.join("%s = %%s" % u for u in updates.keys()), 10)
params = updates.values()
print cur.mogrify(sql, params)
cur.execute(sql, params)
And the result is what and how I needed it (especially the nullable and quotable columns):
"update foo set a = 10, c = 'blah blah blah', b = NULL where id = 10"
There is actually a slightly cleaner way to do it, using the alternative column-list syntax:
sql_template = "UPDATE foo SET ({}) = %s WHERE id = {}"
sql = sql_template.format(', '.join(updates.keys()), 10)
params = (tuple(updates.values()),)
print cur.mogrify(sql, params)
cur.execute(sql, params)
Using psycopg2.sql – SQL string composition module
The module contains objects and functions useful to generate SQL dynamically, in a convenient and safe way.
from psycopg2 import connect, sql

conn = connect("dbname=test user=postgres")

upd = {'name': 'Peter', 'age': 35, 'city': 'London'}
ref_id = 12

sql_query = sql.SQL("UPDATE people SET {data} WHERE id = {id}").format(
    data=sql.SQL(', ').join(
        sql.Composed([sql.Identifier(k), sql.SQL(" = "), sql.Placeholder(k)]) for k in upd.keys()
    ),
    id=sql.Placeholder('id')
)

upd.update(id=ref_id)
with conn:
    with conn.cursor() as cur:
        cur.execute(sql_query, upd)
conn.close()
Running print(sql_query.as_string(conn)) before closing connection will reveal this output:
UPDATE people SET "name" = %(name)s, "age" = %(age)s, "city" = %(city)s WHERE id = %(id)s
No need for dynamic SQL. Supposing a is not nullable and b is nullable.
If you want to update both a and b:
_set = dict(
    id = 1,
    a = 10,
    b = 20, b_update = 1
)
update = """
    update foo
    set
        a = coalesce(%(a)s, a), -- a is not nullable
        b = (array[b, %(b)s])[%(b_update)s + 1] -- b is nullable
    where id = %(id)s
"""
print cur.mogrify(update, _set)
cur.execute(update, _set)
Output:
update foo
set
    a = coalesce(10, a), -- a is not nullable
    b = (array[b, 20])[1 + 1] -- b is nullable
where id = 1
If you want to update none:
_set = dict(
    id = 1,
    a = None,
    b = 20, b_update = 0
)
Output:
update foo
set
    a = coalesce(NULL, a), -- a is not nullable
    b = (array[b, 20])[0 + 1] -- b is nullable
where id = 1
An option that avoids Python string formatting, using psycopg2's AsIs function for the column names (although that doesn't prevent SQL injection via the column names). The dict is named data:
from psycopg2.extensions import AsIs

update_statement = 'UPDATE foo SET (%s) = %s WHERE id_column = %s'
columns = data.keys()
values = [data[column] for column in columns]
query = cur.mogrify(update_statement, (AsIs(','.join(columns)), tuple(values), id_value))
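If you also want the column names quoted safely, the same tuple-assignment form can be built with the psycopg2.sql module instead of AsIs. A sketch, assuming the same data dict, id_value and cursor cur as above:
from psycopg2 import sql

columns = list(data.keys())
update_statement = sql.SQL("UPDATE foo SET ({cols}) = %s WHERE id_column = %s").format(
    cols=sql.SQL(', ').join(sql.Identifier(c) for c in columns)
)
# The tuple of values is adapted to a ROW(...) expression by psycopg2.
cur.execute(update_statement, (tuple(data[c] for c in columns), id_value))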
Here's my solution that I have within a generic DatabaseHandler class that provides a lot of flexibility when using pd.DataFrame as your source.
def update_data(
    self,
    table: str,
    df: pd.DataFrame,
    indexes: Optional[list] = None,
    column_map: Optional[dict] = None,
    commit: Optional[bool] = False,
) -> int:
    """Update data in the media database

    Args:
        table (str): the "tablename" or "namespace.tablename"
        df (pandas.DataFrame): dataframe containing the data to update
        indexes (list): the list of columns in the table that will be in the WHERE clause of the update statement.
            If not provided, will use df indexes.
        column_map (dict): dictionary mapping the columns in df to the columns in the table
            columns in the column_map that are also in keys will not be updated
            Key = df column.
            Value = table column.
        commit (bool): if True, the transaction will be committed (default=False)

    Notes:
        If using a column_map, only the columns in the data_map will be updated or used as indexes.
        Order does not matter. If not using a column_map, all columns in df must exist in table.

    Returns:
        int : rows updated
    """
    try:
        if not indexes:
            # Use the dataframe index instead
            indexes = []
            for c in df.index.names:
                if not c:
                    raise Exception(
                        f"Dataframe contains indexes without names. Unable to determine update where clause."
                    )
                indexes.append(c)
        update_strings = []
        tdf = df.reset_index()
        if column_map:
            target_columns = [c for c in column_map.keys() if c not in indexes]
        else:
            column_map = {c: c for c in tdf.columns}
            target_columns = [c for c in df.columns if c not in indexes]
        for i, r in tdf.iterrows():
            upd_params = ", ".join(
                [f"{column_map[c]} = %s" for c in target_columns]
            )
            upd_list = [r[c] if pd.notna(r[c]) else None for c in target_columns]
            upd_str = self._cur.mogrify(upd_params, upd_list).decode("utf-8")
            idx_params = " AND ".join([f"{column_map[c]} = %s" for c in indexes])
            idx_list = [r[c] if pd.notna(r[c]) else None for c in indexes]
            idx_str = self._cur.mogrify(idx_params, idx_list).decode("utf-8")
            update_strings.append(f"UPDATE {table} SET {upd_str} WHERE {idx_str};")
        full_update_string = "\n".join(update_strings)
        print(full_update_string)  # Debugging
        self._cur.execute(full_update_string)
        rowcount = self._cur.rowcount
        if commit:
            self.commit()
        return rowcount
    except Exception as e:
        self.rollback()
        raise e
Example usages:
>>> df = pd.DataFrame([
        {'a': 1, 'b': 'asdf', 'c': datetime.datetime.now()},
        {'a': 2, 'b': 'jklm', 'c': datetime.datetime.now()}
    ])
>>> cls.update_data('my_table', df, indexes = ['a'])
UPDATE my_table SET b = 'asdf', c = '2023-01-17T22:13:37.095245'::timestamp WHERE a = 1;
UPDATE my_table SET b = 'jklm', c = '2023-01-17T22:13:37.095250'::timestamp WHERE a = 2;
>>> cls.update_data('my_table', df, indexes = ['a','b'])
UPDATE my_table SET c = '2023-01-17T22:13:37.095245'::timestamp WHERE a = 1 AND b = 'asdf';
UPDATE my_table SET c = '2023-01-17T22:13:37.095250'::timestamp WHERE a = 2 AND b = 'jklm';
>>> cls.update_data('my_table', df.set_index('a'), column_map={'a':'db_a','b':'db_b','c':'db_c'} )
UPDATE my_table SET db_b = 'asdf', db_c = '2023-01-17T22:13:37.095245'::timestamp WHERE db_a = 1;
UPDATE my_table SET db_b = 'jklm', db_c = '2023-01-17T22:13:37.095250'::timestamp WHERE db_a = 2;
Note however that this is not safe from SQL injection due to the way it generates the where clause.

What Should Be In My Return?

I am using Python to parse an XML response from a SOAP web service. The Customer element returns about 40 values, as you can see below. Is there a way to type just one thing into my return statement and get all of the values returned? I tried for customer in doc.findall('.//Customer').itervalues() and that did not work, as I believe that call is for dictionaries. Same result and reasoning for .iteritems.
doc = ET.fromstring(response_xml)
for customer in doc.findall('.//Customer'):
    customer_number = customer.findtext('CustomerNumber')
    customer_first_name = customer.findtext('FirstName')
    customer_last_name = customer.findtext('LastName')
    customer_middle_name = customer.findtext('MiddleName')
    customer_salutation = customer.findtext('Salutation')
    customer_gender = customer.findtext('Gender')
    customer_language = customer.findtext('Language')
    customer_address1 = customer.findtext('Address1')
    customer_address2 = customer.findtext('Address2')
    customer_address3 = customer.findtext('Address3')
    customer_city = customer.findtext('City')
    customer_county = customer.findtext('County')
    customer_state_code = customer.findtext('StateCode')
    customer_zip_code = customer.findtext('ZipCode')
    customer_phone_number = customer.findtext('PhoneNumber')
    customer_business_phone = customer.findtext('BusinessPhone')
    customer_business_ext = customer.findtext('BusinessExt')
    customer_fax_number = customer.findtext('FaxNumber')
    customer_birth_date = customer.findtext('BirthDate')
    customer_drivers_license = customer.findtext('DriversLicense')
    customer_contact = customer.findtext('Contact')
    customer_preferred_contact = customer.findtext('PreferredContact')
    customer_mail_code = customer.findtext('MailCode')
    customer_tax_exempt_Number = customer.findtext('TaxExmptNumber')
    customer_assigned_salesperson = customer.findtext('AssignedSalesperson')
    customer_type = customer.findtext('CustomerType')
    customer_preferred_phone = customer.findtext('PreferredPhone')
    customer_cell_phone = customer.findtext('CellPhone')
    customer_page_phone = customer.findtext('PagePhone')
    customer_other_phone = customer.findtext('OtherPhone')
    customer_other_phone_desc = customer.findtext('OtherPhoneDesc')
    customer_email1 = customer.findtext('Email1')
    customer_email2 = customer.findtext('Email2')
    customer_optional_field = customer.findtext('OptionalField')
    customer_allow_contact_postal = customer.findtext('AllowContactByPostal')
    customer_allow_contact_phone = customer.findtext('AllowContactByPhone')
    customer_allow_contact_email = customer.findtext('AllowContactByEmail')
    customer_business_phone_ext = customer.findtext('BusinessPhoneExtension')
    customer_internatinol_bus_phone = customer.findtext('InternationalBusinessPhone')
    customer_international_cell = customer.findtext('InternationalCellPhone')
    customer_external_x_reference_key = customer.findtext('ExternalCrossReferenceKey')
    customer_international_fax = customer.findtext('InternationalFaxNumber')
    customer_international_other_phone = customer.findtext('InternationalOtherPhone')
    customer_international_home_phone = customer.findtext('InternationalHomePhone')
    customer_preferred_name = customer.findtext('CustomerPreferredName')
    customer_international_pager = customer.findtext('InternationalPagerPhone')
    customer_preferred_lang = customer.findtext('PreferredLanguage')
    customer_last_change_date = customer.findtext('LastChangeDate')
    customer_vehicles = customer.findtext('Vehicles')
    customer_ccid = customer.findtext('CCID')
    customer_cccd = customer.findtext('CCCD')
webservice.close()
return
I would write that as a generator function yielding dicts where the key matches the findtext argument, e.g.:
fields = ['CustomerNumber', 'FirstName', 'LastName',
          # ...
          ]
for customer in doc.findall('.//Customer'):
    yield dict((f, customer.findtext(f)) for f in fields)
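For instance, wrapped in a function (the parse_customers name and the fields argument are illustrative, not part of the original code), it could be used like this:
def parse_customers(response_xml, fields):
    doc = ET.fromstring(response_xml)
    for customer in doc.findall('.//Customer'):
        # One dict per <Customer> element, keyed by the element names queried.
        yield dict((f, customer.findtext(f)) for f in fields)

customers = list(parse_customers(response_xml, ['CustomerNumber', 'FirstName', 'LastName']))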
You either want to return a list of dicts:
customers = []
for customer in doc.findall('.//Customer'):
    customer_dict = {}
    customer_dict['number'] = customer.findtext('CustomerNumber')
    customer_dict['first_name'] = customer.findtext('FirstName')
    customer_dict['last_name'] = customer.findtext('LastName')
    # ad nauseam
    customers.append(customer_dict)
webservice.close()
return customers
Or you make a Customer class that handles this, and you return a list of customer instances.
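A minimal sketch of that class-based option (the Customer class and the shortened field list are illustrative, not part of the original answer):
class Customer(object):
    FIELDS = ('CustomerNumber', 'FirstName', 'LastName')  # extend with the other element names

    def __init__(self, element):
        # Copy the text of each listed child element onto the instance.
        for field in self.FIELDS:
            setattr(self, field, element.findtext(field))

customers = [Customer(el) for el in doc.findall('.//Customer')]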
I would use a dictionary of dictionaries:
doc = ET.fromstring(response_xml)
customers = {}
for customer in doc.findall('.//Customer'):
    cust_dict = {}
    cust_dict['customer_number'] = customer.findtext('CustomerNumber')
    cust_dict['customer_first_name'] = customer.findtext('FirstName')
    cust_dict['customer_last_name'] = customer.findtext('LastName')
    # snip snip...
    customers[cust_dict['customer_number']] = cust_dict  # or whatever property you want to use to identify each customer; I'm assuming customer_number is some sort of id number
webservice.close()
return customers
That is, if you don't have a class you can use to create a Customer object.
