I've got something really weird going on with Tornado:
from tornado.web import Application
from tornado.testing import AsyncHTTPTestCase
import requests
import tornado
from tornado.ioloop import IOLoop


class MyTest(AsyncHTTPTestCase):
    def get_app(self):
        return Application()

    @tornado.testing.gen_test(timeout=30)
    async def test_pass(self):
        response = await IOLoop.current().run_in_executor(
            None, requests.get, "http://127.0.0.1:" + str(self.get_http_port()) + "/foo")
        print(response)

    @tornado.testing.gen_test(timeout=30)
    async def test_fail(self):
        response = await IOLoop.current().run_in_executor(
            None, requests.get, "http://192.168.2.1:" + str(self.get_http_port()) + "/foo")
        print(response)
The first test passes while the second one fails with
_____________________________________________________________ MyTest.test_fail _____________________________________________________________
self = <urllib3.connection.HTTPConnection object at 0x7fea40367e50>
def _new_conn(self):
"""Establish a socket connection and set nodelay settings on it.
:return: New socket connection.
"""
extra_kw = {}
if self.source_address:
extra_kw["source_address"] = self.source_address
if self.socket_options:
extra_kw["socket_options"] = self.socket_options
try:
conn = connection.create_connection(
> (self._dns_host, self.port), self.timeout, **extra_kw
)
../.virtualenvs/creative-preview/lib/python3.7/site-packages/urllib3/connection.py:170:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
address = ('192.168.2.1', 41809), timeout = None, source_address = None, socket_options = [(6, 1, 1)]
def create_connection(
address,
timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
source_address=None,
socket_options=None,
):
"""Connect to *address* and return the socket object.
Convenience function. Connect to *address* (a 2-tuple ``(host,
port)``) and return the socket object. Passing the optional
*timeout* parameter will set the timeout on the socket instance
before attempting to connect. If no *timeout* is supplied, the
global default timeout setting returned by :func:`socket.getdefaulttimeout`
is used. If *source_address* is set it must be a tuple of (host, port)
for the socket to bind as a source address before making the connection.
An host of '' or port 0 tells the OS to use the default.
"""
host, port = address
if host.startswith("["):
host = host.strip("[]")
err = None
# Using the value from allowed_gai_family() in the context of getaddrinfo lets
# us select whether to work with IPv4 DNS records, IPv6 records, or both.
# The original create_connection function always returns all records.
family = allowed_gai_family()
try:
host.encode("idna")
except UnicodeError:
return six.raise_from(
LocationParseError(u"'%s', label empty or too long" % host), None
)
for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
af, socktype, proto, canonname, sa = res
sock = None
try:
sock = socket.socket(af, socktype, proto)
# If provided, set socket level options before connecting.
_set_socket_options(sock, socket_options)
if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
sock.settimeout(timeout)
if source_address:
sock.bind(source_address)
sock.connect(sa)
return sock
except socket.error as e:
err = e
if sock is not None:
sock.close()
sock = None
if err is not None:
> raise err
../.virtualenvs/creative-preview/lib/python3.7/site-packages/urllib3/util/connection.py:96:
...
During handling of the above exception, another exception occurred:
self = <my_test.MyTest testMethod=test_fail>
@tornado.testing.gen_test(timeout=30)
async def test_fail(self):
> response = await IOLoop.current().run_in_executor(None, requests.get,"http://192.168.2.1:" + str(self.get_http_port()) + "/foo")
my_test.py:18:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/lib/python3.7/concurrent/futures/thread.py:57: in run
result = self.fn(*self.args, **self.kwargs)
../.virtualenvs/creative-preview/lib/python3.7/site-packages/requests/api.py:76: in get
return request('get', url, params=params, **kwargs)
../.virtualenvs/creative-preview/lib/python3.7/site-packages/requests/api.py:61: in request
return session.request(method=method, url=url, **kwargs)
../.virtualenvs/creative-preview/lib/python3.7/site-packages/requests/sessions.py:542: in request
resp = self.send(prep, **send_kwargs)
../.virtualenvs/creative-preview/lib/python3.7/site-packages/requests/sessions.py:655: in send
r = adapter.send(request, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <requests.adapters.HTTPAdapter object at 0x7fea4034dbd0>, request = <PreparedRequest [GET]>, stream = False
timeout = Timeout(connect=None, read=None, total=None), verify = True, cert = None, proxies = OrderedDict()
def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
"""Sends PreparedRequest object. Returns Response object.
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
:param stream: (optional) Whether to stream the request content.
:param timeout: (optional) How long to wait for the server to send
data before giving up, as a float, or a :ref:`(connect timeout,
read timeout) <timeouts>` tuple.
:type timeout: float or tuple or urllib3 Timeout object
:param verify: (optional) Either a boolean, in which case it controls whether
we verify the server's TLS certificate, or a string, in which case it
must be a path to a CA bundle to use
:param cert: (optional) Any user-provided SSL certificate to be trusted.
:param proxies: (optional) The proxies dictionary to apply to the request.
:rtype: requests.Response
"""
try:
conn = self.get_connection(request.url, proxies)
except LocationValueError as e:
raise InvalidURL(e, request=request)
self.cert_verify(conn, request.url, verify, cert)
url = self.request_url(request, proxies)
self.add_headers(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies)
chunked = not (request.body is None or 'Content-Length' in request.headers)
if isinstance(timeout, tuple):
try:
connect, read = timeout
timeout = TimeoutSauce(connect=connect, read=read)
except ValueError as e:
# this may raise a string formatting error.
err = ("Invalid timeout {}. Pass a (connect, read) "
"timeout tuple, or a single float to set "
"both timeouts to the same value".format(timeout))
raise ValueError(err)
elif isinstance(timeout, TimeoutSauce):
pass
else:
timeout = TimeoutSauce(connect=timeout, read=timeout)
try:
if not chunked:
resp = conn.urlopen(
method=request.method,
url=url,
body=request.body,
headers=request.headers,
redirect=False,
assert_same_host=False,
preload_content=False,
decode_content=False,
retries=self.max_retries,
timeout=timeout
)
# Send the request.
else:
if hasattr(conn, 'proxy_pool'):
conn = conn.proxy_pool
low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)
try:
low_conn.putrequest(request.method,
url,
skip_accept_encoding=True)
for header, value in request.headers.items():
low_conn.putheader(header, value)
low_conn.endheaders()
for i in request.body:
low_conn.send(hex(len(i))[2:].encode('utf-8'))
low_conn.send(b'\r\n')
low_conn.send(i)
low_conn.send(b'\r\n')
low_conn.send(b'0\r\n\r\n')
# Receive the response from the server
try:
# For Python 2.7, use buffering of HTTP responses
r = low_conn.getresponse(buffering=True)
except TypeError:
# For compatibility with Python 3.3+
r = low_conn.getresponse()
resp = HTTPResponse.from_httplib(
r,
pool=conn,
connection=low_conn,
preload_content=False,
decode_content=False
)
except:
# If we hit any problems here, clean up the connection.
# Then, reraise so that we can handle the actual exception.
low_conn.close()
raise
except (ProtocolError, socket.error) as err:
raise ConnectionError(err, request=request)
except MaxRetryError as e:
if isinstance(e.reason, ConnectTimeoutError):
# TODO: Remove this in 3.0.0: see #2811
if not isinstance(e.reason, NewConnectionError):
raise ConnectTimeout(e, request=request)
if isinstance(e.reason, ResponseError):
raise RetryError(e, request=request)
if isinstance(e.reason, _ProxyError):
raise ProxyError(e, request=request)
if isinstance(e.reason, _SSLError):
# This branch is for urllib3 v1.22 and later.
raise SSLError(e, request=request)
> raise ConnectionError(e, request=request)
E requests.exceptions.ConnectionError: HTTPConnectionPool(host='192.168.2.1', port=41809): Max retries exceeded with url: /foo (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7fea40367e50>: Failed to establish a new connection: [Errno 111] Connection refused'))
../.virtualenvs/creative-preview/lib/python3.7/site-packages/requests/adapters.py:516: ConnectionError
==================================================== 1 failed, 1 passed in 0.22 seconds ==================================================
192.168.2.1 is localhost as well (through the docker0 interface). The weird thing is that if I run a simple HTTP server with python -m http.server, then curl 127.0.0.1:8000 and curl 192.168.2.1:8000 both work, so it doesn't seem to be a Docker-related issue. So I really don't know what's going on.
Can you try curl -v 192.168.2.1:8000 (verbose) and check the output?
Maybe you're being redirected to localhost there, and that redirect doesn't happen in your code.
Which host is your app listening on: localhost or 0.0.0.0?
If you want to be able to connect on addresses other than 127.0.0.1, you need to make sure the server is listening on 0.0.0.0 (or on that specific interface).
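For what it's worth, the server that AsyncHTTPTestCase starts is bound to 127.0.0.1 only (tornado.testing.bind_unused_port binds to localhost), which would explain exactly this behaviour even though a standalone python -m http.server (which listens on all interfaces by default) answers on both addresses. If the test genuinely needs to be reachable on the docker0 address, here is a minimal sketch of one way to do it, assuming the 192.168.2.1 interface from the question: add a second listening socket in setUp.

import tornado.netutil
from tornado.testing import AsyncHTTPTestCase
from tornado.web import Application


class MyTest(AsyncHTTPTestCase):
    def get_app(self):
        return Application()

    def setUp(self):
        super().setUp()
        # The base class only binds 127.0.0.1 on a random port; add another
        # socket on the docker0 address (assumed to be 192.168.2.1 here) with
        # the same port so http://192.168.2.1:<port>/ is reachable as well.
        extra_sockets = tornado.netutil.bind_sockets(
            self.get_http_port(), address="192.168.2.1")
        self.http_server.add_sockets(extra_sockets)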
I am trying to run my automation test case (written in Python) on a Selenium Grid (created with the docker-compose.yml file below). When I run docker-compose up -d, the Grid comes up successfully and works as expected. However, it keeps throwing a TimeoutException when I run the test.py file (I am using a corporate laptop connected via a VPN issued by my organization). If I run test.py without the DesiredCapabilities, i.e. not through the Grid, it works absolutely fine. I am using a Selenium Grid/hub for the first time on my system (Windows 10).
I have tried my level best to search for the answer on Google and Stack Overflow, yet here I am! Could you please help me figure out where I am making a mistake?
test.py
import time
import unittest
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as ec
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities


class TestLogin(unittest.TestCase):
    def setUp(self):
        base_url = "http://newtours.demoaut.com/mercurywelcome.php"
        host = "http://localhost:4444/wd/hub"
        caps = DesiredCapabilities.CHROME.copy()
        self.driver = webdriver.Remote(command_executor=host, desired_capabilities=caps)
        self.driver.maximize_window()
        self.driver.get(base_url)

    def test_login(self):
        wait = WebDriverWait(self.driver, 10)
        user_name = wait.until(ec.presence_of_element_located((By.NAME, "userName")))
        user_name.send_keys('ralphsin')
        print("Entered UserId")
        time.sleep(2)
        password = wait.until(ec.presence_of_element_located((By.NAME, "password")))
        password.send_keys('abc123')
        print("Entered Password")
        time.sleep(2)
        log_in = wait.until(ec.presence_of_element_located((By.XPATH, "//input[@alt='Sign-In']")))
        log_in.click()
        print("Clicked on Sign-In button!")
        time.sleep(2)

    def tearDown(self):
        self.driver.quit()
docker-compose.yml
version: '3'
services:
  hub:
    image: selenium/hub:3.141.59
    ports:
      - 4444:4444
  chrome:
    image: selenium/node-chrome:3.141.59
    depends_on:
      - hub
    environment:
      - HUB_HOST=hub
      - HUB_PORT=4444
  firefox:
    image: selenium/node-firefox:3.141.59
    depends_on:
      - hub
    environment:
      - HUB_HOST=hub
      - HUB_PORT=4444
Error Message
C:\Users\rsingh99\Desktop\WebDevelopment_and_Automation\Selenium_Projects\test_selenium>pytest -v test.py
=========================================================================== test session starts ===========================================================================
platform win32 -- Python 3.6.1, pytest-5.0.1, py-1.8.0, pluggy-0.12.0 -- c:\users\rsingh99\appdata\local\programs\python\python36\python.exe
cachedir: .pytest_cache
rootdir: C:\Users\rsingh99\Desktop\WebDevelopment_and_Automation\Selenium_Projects\test_selenium
plugins: ordering-0.6
collected 1 item
test.py::TestLogin::test_login FAILED [100%]
================================================================================ FAILURES =================================================================================
__________________________________________________________________________ TestLogin.test_login ___________________________________________________________________________
self = <test.TestLogin testMethod=test_login>
def test_login(self):
wait = WebDriverWait(self.driver, 10)
> user_name = wait.until(ec.presence_of_element_located((By.NAME, "userName")))
test.py:22:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <selenium.webdriver.support.wait.WebDriverWait (session="54f19a6e911494f8fd102e2f51cb4104")>
method = <selenium.webdriver.support.expected_conditions.presence_of_element_located object at 0x000002925F4AE4E0>, message = ''
def until(self, method, message=''):
"""Calls the method provided with the driver as an argument until the \
return value is not False."""
screen = None
stacktrace = None
end_time = time.time() + self._timeout
while True:
try:
value = method(self._driver)
if value:
return value
except self._ignored_exceptions as exc:
screen = getattr(exc, 'screen', None)
stacktrace = getattr(exc, 'stacktrace', None)
time.sleep(self._poll)
if time.time() > end_time:
break
> raise TimeoutException(message, screen, stacktrace)
E selenium.common.exceptions.TimeoutException: Message:
..\..\..\..\appdata\local\programs\python\python36\lib\site-packages\selenium\webdriver\support\wait.py:80: TimeoutException
======================================================================== 1 failed in 11.75 seconds ========================================================================
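Not a definite answer, but two hedged things worth checking. First, confirm the hub is actually reachable and ready on the URL the test uses; a quick sketch using the status endpoint served by the Selenium 3 server:

import requests

# Quick sanity check: the Selenium 3 hub serves a status document at
# /wd/hub/status; if this fails or reports not ready, the problem is the
# Grid itself rather than the waits in the test.
resp = requests.get("http://localhost:4444/wd/hub/status", timeout=5)
print(resp.status_code)
print(resp.json())

Second, since a session is clearly created (the trace shows a session id) and only the wait for userName times out, it is worth ruling out whether newtours.demoaut.com is reachable from inside the Chrome node container at all: a corporate VPN on the Windows host is not necessarily visible to the Docker containers, so the node may simply never load the page.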
What I'm Doing
I'm learning aiohttp by building a REST API, which I'm testing with pytest (and its asyncio and aiohttp plugins).
For my first test (I'm going with TDD from the outset) I have the following code:
@pytest.mark.asyncio
async def test_handle_user_create(
    aiohttp_client, init_test_app, create_test_user_table
):
    payload = {
        "email": "tintin@gmail.com",
        "username": "Tintin",
        "password": "y0u != n00b1e",
    }
    client = await aiohttp_client(init_test_app)
    resp = await client.post("/users/", json=payload)
    ...
aiohttp_client is the client fixture from pytest-aiohttp
init_test_app is a fixture which essentially mirrors the app I'm going to build
create_test_user_table is my fixture for creating a table for users in the test database
What's Going Wrong With It
My first test is throwing the following runtime error at the last line in the code block above:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
env36\lib\site-packages\aiohttp\test_utils.py:295: in request
method, self.make_url(path), **kwargs
env36\lib\site-packages\aiohttp\client.py:417: in _request
with timer:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <aiohttp.helpers.TimerContext object at 0x0000015DE23A3B38>
def __enter__(self) -> BaseTimerContext:
task = current_task(loop=self._loop)
if task is None:
> raise RuntimeError(
'Timeout context manager should be used '
'inside a task'
)
E RuntimeError: Timeout context manager should
be used inside a task
env36\lib\site-packages\aiohttp\helpers.py:568: RuntimeError
From the error message, I gather that the client is trying to use an async timeout context manager, but this fails because I'm not calling it inside a task.
I don't know whether my deduction is correct.
Also, I'm not yet comfortable enough with asyncio to know how to resolve it.
I will be grateful if someone shows me the way out.
More Info
Here's the source code of my test file:
import asyncio
import sqlite3
from pathlib import Path

import pytest
from aiohttp import web

from app import router


@pytest.fixture(name="event_loop", scope="session")
def fixture_event_loop():
    """
    Mock session scoped event loop.

    The default event loop is function scoped and won't work with
    otherwise-scoped fixtures, hence the need for this override.
    """
    loop = asyncio.get_event_loop()
    yield loop
    loop.close()


@pytest.fixture(name="test_db_path", scope="session")
async def fixture_test_db_path():
    return Path(__file__).absolute().parent.joinpath("test_db.sqlite")


@pytest.fixture(name="init_test_db_conn", scope="session")
async def fixture_init_test_db_conn(test_db_path):
    """
    Mock initiator of the test database connection.
    """
    async def _init_test_db_conn(test_app):
        with sqlite3.connect(str(test_db_path)) as conn:
            test_app["DB_CONN"] = conn
            yield

    return _init_test_db_conn


@pytest.fixture(name="init_test_app", scope="session")
async def fixture_init_test_app(init_test_db_conn):
    """
    Mock app for testing.

    Substitute the test db for the development db for testing and
    undo the substitution after all tests have been run.
    """
    app = web.Application()
    app.add_routes(router)
    app.cleanup_ctx.append(init_test_db_conn)
    return app


@pytest.fixture(name="create_test_user_table")
def fixture_create_test_user_table(test_db_path):
    """
    Mock user table for tests. Scoped at function level.

    Drop the table at the end of each test.
    """
    conn = sqlite3.connect(str(test_db_path))
    conn.execute(
        """CREATE TABLE test_users (
            id INTEGER PRIMARY KEY,
            email TEXT NOT NULL UNIQUE,
            username TEXT NOT NULL UNIQUE,
            pwd_hash TEXT NOT NULL,
            active INTEGER,
            joined TEXT NOT NULL);
        """
    )
    yield
    conn.execute("""DROP TABLE test_users;""")


@pytest.mark.asyncio
async def test_handle_user_create(
    aiohttp_client, init_test_app, create_test_user_table
):
    payload = {
        "email": "tintin@gmail.com",
        "username": "Tintin",
        "password": "y0u != n00b1e",
    }
    client = await aiohttp_client(init_test_app)
    resp = await client.post("/users/", json=payload)
    assert resp.status == 200

    resp_json = await resp.json()
    assert resp_json["email"] == payload["email"]
    assert resp_json["username"] == payload["username"]
    assert resp_json["pwd_hash"] != payload["password"]
    assert resp_json["active"] == 0

    await client.close()
And here's a full trace of the runtime error (alongside some deprecation warnings, which I'd also appreciate help with :) )
$ pytest
============================= test session starts =============================
platform win32 -- Python 3.6.8, pytest-5.3.5, py-1.8.1, pluggy-0.13.1
rootdir: C:\Users\Mfonism\Codeville\AIOHttp\curious_me
plugins: aiohttp-0.3.0, asyncio-0.10.0
collected 1 item
test_app.py F [100%]
================================== FAILURES ===================================
_______________________ test_handle_user_create[pyloop] _______________________
aiohttp_client = <function aiohttp_client.<locals>.go at 0x0000015DE239AD08>
init_test_app = <Application 0x15de23a0d30>, create_test_user_table = None
@pytest.mark.asyncio
async def test_handle_user_create(
aiohttp_client, init_test_app, create_test_user_table
):
payload = {
"email": "tintin#gmail.com",
"username": "Tintin",
"password": "y0u != n00b1e",
}
client = await aiohttp_client(init_test_app)
> resp = await client.post("/users/", json=payload)
test_app.py:89:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
env36\lib\site-packages\aiohttp\test_utils.py:295: in request
method, self.make_url(path), **kwargs
env36\lib\site-packages\aiohttp\client.py:417: in _request
with timer:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <aiohttp.helpers.TimerContext object at 0x0000015DE23A3B38>
def __enter__(self) -> BaseTimerContext:
task = current_task(loop=self._loop)
if task is None:
> raise RuntimeError('Timeout context manager should be used '
'inside a task')
E RuntimeError: Timeout context manager should be used inside a task
env36\lib\site-packages\aiohttp\helpers.py:568: RuntimeError
============================== warnings summary ===============================
test_app.py::test_handle_user_create[pyloop]
c:\users\mfonism\codeville\aiohttp\curious_me\env36\lib\site-packages\aiohttp\cookiejar.py:55: DeprecationWarning: The object should be created from async function
super().__init__(loop=loop)
test_app.py::test_handle_user_create[pyloop]
c:\users\mfonism\codeville\aiohttp\curious_me\env36\lib\site-packages\aiohttp\test_utils.py:247: DeprecationWarning: The object should be created from async function
**kwargs)
test_app.py::test_handle_user_create[pyloop]
c:\users\mfonism\codeville\aiohttp\curious_me\env36\lib\site-packages\aiohttp\connector.py:730: DeprecationWarning: The object should be created from async function
loop=loop)
test_app.py::test_handle_user_create[pyloop]
c:\users\mfonism\codeville\aiohttp\curious_me\env36\lib\site-packages\aiohttp\connector.py:735: DeprecationWarning: The object should be created from async function
resolver = DefaultResolver(loop=self._loop)
-- Docs: https://docs.pytest.org/en/latest/warnings.html
====== 1 failed, 4 warnings in 0.78s ======
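One thing that commonly produces exactly this RuntimeError is mixing pytest-asyncio and pytest-aiohttp: the @pytest.mark.asyncio marker runs the coroutine on pytest-asyncio's loop, while the aiohttp_client fixture builds its client and server on the loop pytest-aiohttp manages, so the request can end up outside a task on the loop aiohttp expects. Here is a minimal sketch of the plugin-consistent pattern, with a hypothetical /ping handler standing in for the real routes and no asyncio marker or custom event_loop fixture:

import pytest
from aiohttp import web


async def handle_ping(request):
    # Hypothetical handler standing in for the real /users/ routes.
    return web.json_response({"ok": True})


@pytest.fixture
def app():
    app = web.Application()
    app.router.add_get("/ping", handle_ping)
    return app


# No @pytest.mark.asyncio: pytest-aiohttp collects async test functions itself
# and runs them on the same loop the aiohttp_client fixture uses.
async def test_ping(aiohttp_client, app):
    client = await aiohttp_client(app)
    resp = await client.get("/ping")
    assert resp.status == 200
    assert await resp.json() == {"ok": True}

With that working, the session-scoped fixtures can be reintroduced one at a time to see which of them moves object creation onto a different loop.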
I'm using Httpretty to simulate a web API.
If I use httpretty.enable(allow_net_connect=False) (when allow_net_connect is False, any connection to an unregistered URI raises httpretty.errors.UnmockedError), I get an error because pymysql can't connect, via sockets, to a local DB.
However, when I use httpretty.enable(allow_net_connect=True), I get a lost-connection error instead:
../../.virtualenvs/3.6espiga/lib/python3.6/site-packages/pymysql/__init__.py:90: in Connect
return Connection(*args, **kwargs)
../../.virtualenvs/3.6espiga/lib/python3.6/site-packages/pymysql/connections.py:699: in __init__
self.connect()
../../.virtualenvs/3.6espiga/lib/python3.6/site-packages/pymysql/connections.py:935: in connect
self._get_server_information()
../../.virtualenvs/3.6espiga/lib/python3.6/site-packages/pymysql/connections.py:1249: in _get_server_information
packet = self._read_packet()
../../.virtualenvs/3.6espiga/lib/python3.6/site-packages/pymysql/connections.py:991: in _read_packet
packet_header = self._read_bytes(4)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <pymysql.connections.Connection object at 0x7fefe1ad40f0>, num_bytes = 4
def _read_bytes(self, num_bytes):
self._sock.settimeout(self._read_timeout)
while True:
try:
data = self._rfile.read(num_bytes)
break
except (IOError, OSError) as e:
if e.errno == errno.EINTR:
continue
self._force_close()
raise err.OperationalError(
CR.CR_SERVER_LOST,
"Lost connection to MySQL server during query (%s)" % (e,))
if len(data) < num_bytes:
self._force_close()
raise err.OperationalError(
> CR.CR_SERVER_LOST, "Lost connection to MySQL server during query")
E pymysql.err.OperationalError: (2013, 'Lost connection to MySQL server during query')
../../.virtualenvs/3.6espiga/lib/python3.6/site-packages/pymysql/connections.py:1037: OperationalError
How can I have Httpretty mock the web API while leaving the DB connection to run as usual?
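One hedged workaround, given that httpretty works by monkey-patching the socket module (which is what breaks the MySQL handshake while it is enabled): open the real database connection before calling httpretty.enable(), so the pymysql socket is created unpatched, and only then register the HTTP mocks. A sketch, with a hypothetical mocked endpoint and hypothetical DB credentials:

import httpretty
import pymysql

# Open the real DB connection first, while the socket module is untouched.
conn = pymysql.connect(host="127.0.0.1", user="test", password="test", db="test")

httpretty.enable(allow_net_connect=True)
httpretty.register_uri(
    httpretty.GET,
    "https://api.example.com/v1/items",   # hypothetical mocked endpoint
    body='{"items": []}',
    content_type="application/json",
)

# ... run the code under test, using `conn` for DB access ...

httpretty.disable()
httpretty.reset()

If the code under test reconnects to MySQL while httpretty is enabled, this won't be enough; a mocking layer that patches at the HTTP-client level instead of the socket level (for example the responses library for requests) avoids the problem entirely.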
I have written some unittests for my Python Google App Engine app. Below is a distillation of the problematic code.
import unittest

from google.appengine.ext import ndb


class TestCase(unittest.TestCase):
    def setUp(self):
        from google.appengine.ext import testbed
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_urlfetch_stub()
        self.testbed.init_blobstore_stub()
        ndb.get_context().clear_cache()

    def tearDown(self):
        self.testbed.deactivate()

    def testing(self):
        from google.cloud import storage
        client = storage.Client()
I am getting an encoding LookupError while opening a file (my gcloud application credentials) using the io library.
Here is the relevant stack trace and offending io code (from pytest):
self = <test_1.TestCase testMethod=testing>
def testing(self):
from google.cloud import storage
> client = storage.Client()
/Users/alex/projects/don/don_server/mobile/tests/test_1.py:66:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/Users/alex/projects/don/don_server/lib/google/cloud/storage/client.py:59: in __init__
_http=_http)
/Users/alex/projects/don/don_server/lib/google/cloud/client.py:223: in __init__
_ClientProjectMixin.__init__(self, project=project)
/Users/alex/projects/don/don_server/lib/google/cloud/client.py:177: in __init__
project = self._determine_default(project)
/Users/alex/projects/don/don_server/lib/google/cloud/client.py:190: in _determine_default
return _determine_default_project(project)
/Users/alex/projects/don/don_server/lib/google/cloud/_helpers.py:181: in _determine_default_project
_, project = google.auth.default()
/Users/alex/projects/don/don_server/lib/google/auth/_default.py:277: in default
credentials, project_id = checker()
/Users/alex/projects/don/don_server/lib/google/auth/_default.py:117: in _get_gcloud_sdk_credentials
credentials_filename)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
filename = '/Users/name/.config/gcloud/application_default_credentials.json'
def _load_credentials_from_file(filename):
"""Loads credentials from a file.
The credentials file must be a service account key or stored authorized
user credentials.
Args:
filename (str): The full path to the credentials file.
Returns:
Tuple[google.auth.credentials.Credentials, Optional[str]]: Loaded
credentials and the project ID. Authorized user credentials do not
have the project ID information.
Raises:
google.auth.exceptions.DefaultCredentialsError: if the file is in the
wrong format.
"""
> with io.open(filename, 'r') as file_obj:
E LookupError: unknown encoding:
I don't get this error when I run this code on my GAE local development server. Furthermore, when I open the credentials file from a shell (I checked the file attribute on the io module and it's the same), no error is raised.
For some reason the correct locale environment variables are not set on a Mac when running the unit tests through PyCharm. Adding the following code (from this answer) solved it for me:
import os
os.environ['LC_ALL'] = 'en_US.UTF-8'
os.environ['LANG'] = 'en_US.UTF-8'
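If you prefer not to touch the tests themselves, here is a sketch of the same workaround placed in a conftest.py (a hypothetical file for this project), so the variables are set before anything imports google.cloud, however PyCharm launches the run:

# conftest.py
import os

# Make sure a locale is defined before google.auth opens the credentials
# file; PyCharm's test runner on macOS may not pass LC_ALL/LANG through.
os.environ.setdefault("LC_ALL", "en_US.UTF-8")
os.environ.setdefault("LANG", "en_US.UTF-8")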