Sawtooth Transaction error: "Tried to set unauthorized address" - python

I am trying to write my own custom transaction processor. I am writing it for a simple Account class:
class Account:
    def __init__(self, name, ac_number, balance):
        self.name = name
        self.ac_number = ac_number
        self.balance = balance
My TP works fine for a single account. Now I want to extend it to multiple accounts. To get a different state address for each account I have changed the _get_account_address function. I am following #danintel's cookiejar and XO Python projects, and I used the XO code to derive the address:
AC_NAMESPACE = hashlib.sha512('account'.encode("utf-8")).hexdigest()[0:6]

def _make_account_address(name):
    return AC_NAMESPACE + \
        hashlib.sha512(name.encode('utf-8')).hexdigest()[:64]
_get_account_address works fine, but _make_account_address causes this error in the CLI:
Tried to set unauthorized address
My state code is:
import logging
import hashlib

from sawtooth_sdk.processor.exceptions import InternalError

LOGGER = logging.getLogger(__name__)

FAMILY_NAME = "account"
# TF prefix is the first 6 hex characters of SHA-512("account")
AC_NAMESPACE = hashlib.sha512('account'.encode("utf-8")).hexdigest()[0:6]

def _make_account_address(name):
    return AC_NAMESPACE + \
        hashlib.sha512(name.encode('utf-8')).hexdigest()[:64]

def _hash(data):
    '''Compute the SHA-512 hash and return the result as hex characters.'''
    return hashlib.sha512(data).hexdigest()

def _get_account_address(from_key):
    '''
    Return the address of an account object in the account TF.
    The address is the first 6 hex characters of SHA-512(TF name),
    plus the first 64 hex characters of SHA-512(public key).
    '''
    return _hash(FAMILY_NAME.encode('utf-8'))[0:6] + \
        _hash(from_key.encode('utf-8'))[0:64]

class Account:
    def __init__(self, name, ac_number, balance):
        self.name = name
        self.ac_number = ac_number
        self.balance = balance

class AccountState:
    def __init__(self, context):
        self._context = context

    def make_account(self, account_obj, from_key):
        '''Create an account entry and store it in state.'''
        account_address = _make_account_address(account_obj.name)  # not working
        # account_address = _get_account_address(from_key)  # working fine
        LOGGER.info('Got the key %s and the account address %s.',
                    from_key, account_address)
        state_str = ",".join([str(account_obj.name), str(account_obj.ac_number), str(account_obj.balance)])
        state_data = state_str.encode('utf-8')
        addresses = self._context.set_state({account_address: state_data})
        if len(addresses) < 1:
            raise InternalError("State Error")

This has probably been answered already, but I don't have enough reputation to add a comment.
The error you see, "Tried to set unauthorized address", means the client did not include these addresses in the TransactionHeader's "outputs" field.
It is possible for the client to give a prefix instead of a complete address in the "outputs" field, but use this feature cautiously because it impacts parallel transaction scheduling.
Please refer to https://sawtooth.hyperledger.org/docs/core/nightly/master/architecture/transactions_and_batches.html#dependencies-and-input-output-addresses for a detailed explanation of the different fields when composing a TransactionHeader.
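For illustration only, here is a rough sketch of a client-side TransactionHeader that declares the per-account address in inputs and outputs, assuming the client computes the address the same way as the TP (account_name, public_key and payload_bytes are placeholders, not taken from the question):
import os
from hashlib import sha512
from sawtooth_sdk.protobuf.transaction_pb2 import TransactionHeader

# Compute the same address the TP will compute, so the TP is allowed to write it.
account_address = _make_account_address(account_name)

header = TransactionHeader(
    family_name='account',
    family_version='1.0',
    inputs=[account_address],    # addresses the TP may read
    outputs=[account_address],   # addresses the TP may write via set_state
    signer_public_key=public_key,
    batcher_public_key=public_key,
    dependencies=[],
    nonce=os.urandom(16).hex(),
    payload_sha512=sha512(payload_bytes).hexdigest(),
).SerializeToString()
Note that the address declared here must be derived exactly as in the transaction processor; if the client still hashes the public key while the TP hashes the account name, the two addresses will not match and the write will be rejected.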

It means the transaction processor tried to set (put) a value at an address not listed in the transaction's outputs. This occurs when a client submits a transaction with an inaccurate list of inputs/outputs.
Make sure the Sawtooth address is the correct length: an address is 70 hex characters, which represent a 35 byte address (including the 6 hex character, or 3 byte, transaction family prefix).
Also, you can set the outputs list to empty; that allows all addresses to be written, at the expense of security and efficiency. It is better to set the inputs and outputs to the state addresses you are changing: that allows transactions to be run in parallel (if you run sawtooth-validator --scheduler parallel -vv) and is safer and more secure, as the transaction processor cannot write to state addresses outside the list.

I had this issue as well. I realized that I had different prefixes in my addresses. Make sure they match!

Related

Slack API not showing all private channels a token belongs to

I need to get a list of all the public and private Slack channels a particular user token ("xoxp...") belongs to. The problem is that the API currently returns only some of the private channels, not all of them. It used to return all of them, but now some that the user could previously see are missing. This started happening sometime after March (the last time I queried the API).
I tried:
Creating a new private channel and adding the user to it to see if the API picks it up => it does
Removing the user from a channel the API call doesn't return and re-adding them => issue remains
Reinstalling the app to the workspace => issue remains
The workspace only has about 100 channels, including deprecated ones, so I know I'm not hitting the limit.
Here is my code (in Python):
def _getChannels(self, _next_cursor=""):
    """ Needs scope channels:read
    Archived channels are included by default.
    INCLUDES private channels the calling user (person whose token is being used) has access to
    """
    kwargs = {"limit": 1000,
              "types": "public_channel,private_channel"}
    if _next_cursor:
        kwargs["cursor"] = _next_cursor
    results_full = self._callApi("conversations.list", "_getChannels()", kwargs)
    results = results_full["channels"]
    next_cursor = results_full["response_metadata"]["next_cursor"]
    if next_cursor:  # Isn't empty
        results = results + self._getChannels(next_cursor)
    return results

def _callApi(self, call_type, calling_function, kwargs={}):
    """ calling_function is a string for error message reporting """
    # New API can't handle booleans or extra params
    pass_error_through = kwargs.get(self.PASS_ERROR_THROUGH, False)
    if self.PASS_ERROR_THROUGH in kwargs:
        kwargs.pop(self.PASS_ERROR_THROUGH)
    for key in kwargs:
        if type(kwargs[key]) == bool:
            kwargs[key] = str(kwargs[key]).lower()
    # New api raises exceptions instead of returning error, so need to catch
    try:
        result = self._slack_client.api_call(call_type, params=kwargs)
    except Exception as E:
        result = str(E)  # You used to be able to just call result["error"]
    if "error" in result:
        if "ratelimited" in result:
            print("\nRatelimited. Waiting one min before retrying.", flush=True)
            sleep(60)
            return self._callApi(call_type, calling_function, kwargs)
        elif not pass_error_through:
            error_message = ("ERROR: Call to " + calling_function +
                             " failed due to " + result + ". " +
                             "\n\nkwargs: " + str(kwargs) + "\n\n--End Message--")
            #if "needed" in result:
            #    error_message += "It needs: " + result["needed"]
            print()  # To provide spacing before the traceback starts
            raise ValueError(error_message)
    return result
Did you try scope groups:read for private channels?
https://api.slack.com/methods/conversations.list
Documentation says:
Information about required scopes
This Conversations API method's required scopes depend on the type of channel-like object you're working with. To use the method, you'll need at least one of the channels:, groups:, im: or mpim: scopes corresponding to the conversation type you're working with.
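To illustrate the scope point, here is a minimal sketch using the slack_sdk WebClient (assumed here; the token string is a placeholder) with a user token that carries both channels:read and groups:read, paging through both channel types:
from slack_sdk import WebClient

client = WebClient(token="xoxp-...")  # user token with channels:read and groups:read scopes

channels = []
kwargs = {"types": "public_channel,private_channel", "limit": 1000}
while True:
    resp = client.conversations_list(**kwargs)            # one page of conversations
    channels.extend(resp["channels"])
    cursor = resp.get("response_metadata", {}).get("next_cursor", "")
    if not cursor:                                         # empty cursor means last page
        break
    kwargs["cursor"] = cursor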

Pythonic/efficient way to check organisation of public IP addresses

I have a DataFrame common_ips containing IP addresses.
I need to achieve two basic tasks:
Identify private and public IPs.
Check organisation for public IPs.
Here is what I am doing:
import json
import re
import urllib.request

baseurl = 'http://ipinfo.io/'  # no HTTPS supported (at least: not without a plan)

def isIPpublic(ipaddress):
    return not isIPprivate(ipaddress)

def isIPprivate(ipaddress):
    if ipaddress.startswith("::ffff:"):
        ipaddress = ipaddress.replace("::ffff:", "")
    # IPv4 regexp from https://stackoverflow.com/questions/30674845/
    if re.search(r"^(?:10|127|172\.(?:1[6-9]|2[0-9]|3[01])|192\.168)\..*", ipaddress):
        # Yes, a match, so a localhost or RFC 1918 IPv4 address
        return True
    if ipaddress == "::1":
        # Yes, IPv6 localhost
        return True
    return False

def getipInfo(ipaddress):
    url = '%s%s/json' % (baseurl, ipaddress)
    try:
        urlresult = urllib.request.urlopen(url)
        jsonresult = urlresult.read()  # get the JSON
        parsedjson = json.loads(jsonresult)  # put parsed JSON into dictionary
        return parsedjson
    except:
        return None

def checkIP(ipaddress):
    if isIPpublic(ipaddress):
        if bool(getipInfo(ipaddress)):
            if 'bogon' in getipInfo(ipaddress).keys():
                return 'Private IP'
            elif bool(getipInfo(ipaddress).get('org')):
                return getipInfo(ipaddress)['org']
            else:
                return 'No organization data'
        else:
            return 'No data available'
    else:
        return 'Private IP'
And applying it to my common_ips DataFrame with
common_ips['Info'] = common_ips.IP.apply(checkIP)
But it's taking longer than I expected. And for some IPs, it's giving incorrect Info.
For instance, one IP's Info should have been AS19902 Department of Administrative Services, which I cross-checked by querying ipinfo.io for that address directly.
What am I missing here? And how can I achieve these tasks in a more Pythonic way?
A blanket except: is basically always a bug. You are returning None instead of handling any anomalous or error response from the server, and of course the rest of your code has no way to recover.
As a first debugging step, simply take out the try/except handling. Maybe then you can find a way to put back a somewhat more detailed error handler for some cases which you know how to recover from.
def getipInfo(ipaddress):
    url = '%s%s/json' % (baseurl, ipaddress)
    urlresult = urllib.request.urlopen(url)
    jsonresult = urlresult.read()  # get the JSON
    parsedjson = json.loads(jsonresult)  # put parsed JSON into dictionary
    return parsedjson
Perhaps the calling code in checkIP should have a try/except instead, and e.g. retry after sleeping for a bit if the server indicates that you are going too fast.
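For example (a sketch only, not the poster's code), the caller could catch the specific HTTP error and back off, assuming the free tier signals rate limiting with a status such as 429:
import time
import urllib.error

def getipInfo_with_retry(ipaddress, attempts=3):
    for attempt in range(attempts):
        try:
            return getipInfo(ipaddress)
        except urllib.error.HTTPError as e:
            # 429 is the usual "too many requests" status; other errors are re-raised
            if e.code == 429 and attempt < attempts - 1:
                time.sleep(2 ** attempt)
            else:
                raise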
(In the absence of an authorization token, it looks like you are using the free version of this service, which probably comes with no guarantees anyway. Also, maybe look at using their recommended library -- I haven't looked at it in detail, but I would imagine it at the very least knows better how to behave in the case of a server-side error. It's almost certainly also more Pythonic, at least in the sense that you should not reinvent things which already exist.)
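On the "do not reinvent" point: the private/public check in particular is already covered by the standard library's ipaddress module, which is arguably the more Pythonic route the question asks about. A minimal sketch (the ::ffff: handling mirrors the original code):
import ipaddress

def is_ip_private(ip_string):
    ip_string = ip_string.replace("::ffff:", "")  # unwrap IPv4-mapped IPv6, as the original code does
    try:
        ip = ipaddress.ip_address(ip_string)
    except ValueError:
        return True  # decide how unparseable strings should be treated for your data
    return ip.is_private or ip.is_loopback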

How do I connect dbus and policykit to my function in python?

I am making a Python application that has a method needing root privileges. From https://www.freedesktop.org/software/polkit/docs/0.105/polkit-apps.html I found Example 2, "Accessing the Authority via D-Bus", which is the Python version of the code below. I executed it and thought I'd be able to get root privileges after entering my password, but I'm still getting "permission denied" in my app. This is the code I'm trying to connect to my function:
import dbus
bus = dbus.SystemBus()
proxy = bus.get_object('org.freedesktop.PolicyKit1', '/org/freedesktop/PolicyKit1/Authority')
authority = dbus.Interface(proxy, dbus_interface='org.freedesktop.PolicyKit1.Authority')
system_bus_name = bus.get_unique_name()
subject = ('system-bus-name', {'name' : system_bus_name})
action_id = 'org.freedesktop.policykit.exec'
details = {}
flags = 1 # AllowUserInteraction flag
cancellation_id = '' # No cancellation id
result = authority.CheckAuthorization(subject, action_id, details, flags, cancellation_id)
print(result)
In the Python code you quoted, does result indicate success or failure? If it fails, you need to narrow down the error by first finding out what the return values of bus, proxy, authority and system_bus_name are. If it succeeds, you need to check how you are using the result.
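For reference, the CheckAuthorization reply on the polkit D-Bus API is a (is_authorized, is_challenge, details) structure, so a quick way to see what came back is something like this sketch:
is_authorized, is_challenge, details = result
if is_authorized:
    print("Authorized - the privileged action may proceed")
elif is_challenge:
    print("Authentication required - an authentication agent should prompt the user")
else:
    print("Not authorized")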

why use the self (reference?) in a "variable"?

I started studying and writing Twisted network programs and I came across the following code:
def handle_REGISTER(self, name):
    if name in self.factory.users:
        self.sendLine("Name taken, please choose another.")
        return
    self.sendLine("Welcome, %s!" % (name,))
    self.broadcastMessage("%s has joined the channel." % (name,))
    self.name = name
    self.factory.users[name] = self
    self.state = "CHAT"

def handle_CHAT(self, message):
    message = "<%s> %s" % (self.name, message)
    self.broadcastMessage(message)

def broadcastMessage(self, message):
    for name, protocol in self.factory.users.iteritems():
        if protocol != self:
            protocol.sendLine(message)
What is the benefit of self.x[y] = self?
self.factory.users is a shared mapping; each and every instance of this class can access it. It is a central registry of connection instances, if you will. The connection itself is made responsible for registering itself.
By storing references to all the per-user instances in self.factory.users you can then send messages to all users, in the broadcastMessage method:
for name, protocol in self.factory.users.iteritems():
    if protocol != self:
        protocol.sendLine(message)
This loops over all registered instances, and calls sendLine() on each and every other connection.
The code uses the self-reference in two ways:
To determine if a name in the chatroom is already taken
To send everyone else a message (i.e. to avoid sending a copy of the message back to the user who wrote it)
To achieve #2, the code iterates over all items in the dict self.factory.users. The keys are the users in the chatroom; the values are the protocol instances for those users.
When protocol != self, the code has found an instance which doesn't belong to the current user.
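To make the sharing explicit, here is a minimal plain-Python sketch (not Twisted) of the same registry pattern, with made-up class names:
class ChatFactory:
    def __init__(self):
        self.users = {}  # one dict shared by every protocol this factory creates

class ChatProtocol:
    def __init__(self, factory):
        self.factory = factory  # every instance points at the same factory

    def register(self, name):
        self.factory.users[name] = self  # the connection registers itself

factory = ChatFactory()
alice, bob = ChatProtocol(factory), ChatProtocol(factory)
alice.register("alice")
bob.register("bob")
print(list(factory.users))  # ['alice', 'bob'] - visible from either instance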

Python-ldap search: Size Limit Exceeded

I'm using the python-ldap library to connect to our LDAP server and run queries. The issue I'm running into is that despite setting a size limit on the search, I keep getting SIZELIMIT_EXCEEDED errors on any query that would return too many results. I know that the query itself is working because I will get a result if the query returns a small subset of users. Even if I set the size limit to something absurd, like 1, I'll still get a SIZELIMIT_EXCEEDED on those bigger queries. I've pasted a generic version of my query below. Any ideas as to what I'm doing wrong here?
result = self.ldap.search_ext_s(self.base, self.scope, '(personFirstMiddle=<value>*)', sizelimit=5)
When the LDAP client requests a size-limit, that is called a 'client-requested' size limit. A client-requested size limit cannot override the size-limit set by the server. The server may set a size-limit for the server as a whole, for a particular authorization identity, or for other reasons - whichever the case, the client may not override the server size limit. The search request may have to be issued in multiple parts using the simple paged results control or the virtual list view control.
Here's a Python 3 implementation that I came up with after heavily editing what I found here and in the official documentation. At the time of writing, it works with the pip package python-ldap version 3.2.0.
import ldap
from ldap.controls import SimplePagedResultsControl

def get_list_of_ldap_users():
    hostname = "google.com"
    username = "username_here"
    password = "password_here"
    base = "dc=google,dc=com"
    print(f"Connecting to the LDAP server at '{hostname}'...")
    connect = ldap.initialize(f"ldap://{hostname}")
    connect.set_option(ldap.OPT_REFERRALS, 0)
    connect.simple_bind_s(username, password)
    search_flt = "(personFirstMiddle=<value>*)"  # get all users with a specific middle name
    page_size = 500  # how many users to fetch per page; depends on the server maximum setting (default is 1000)
    searchreq_attrlist = ["cn", "sn", "name", "userPrincipalName"]  # change these to the attributes you care about
    req_ctrl = SimplePagedResultsControl(criticality=True, size=page_size, cookie='')
    msgid = connect.search_ext(base=base, scope=ldap.SCOPE_SUBTREE, filterstr=search_flt, attrlist=searchreq_attrlist, serverctrls=[req_ctrl])
    total_results = []
    pages = 0
    while True:  # loop over all of the pages using the same cookie, otherwise the search will fail
        pages += 1
        rtype, rdata, rmsgid, serverctrls = connect.result3(msgid)
        for user in rdata:
            total_results.append(user)
        pctrls = [c for c in serverctrls if c.controlType == SimplePagedResultsControl.controlType]
        if pctrls:
            if pctrls[0].cookie:  # Copy cookie from response control to request control
                req_ctrl.cookie = pctrls[0].cookie
                msgid = connect.search_ext(base=base, scope=ldap.SCOPE_SUBTREE, filterstr=search_flt, attrlist=searchreq_attrlist, serverctrls=[req_ctrl])
            else:
                break
        else:
            break
    return total_results
This will return a list of all users, but you can edit it as required to return what you want without hitting the SIZELIMIT_EXCEEDED issue :)
