This is my code:
now = datetime.datetime.now().replace(microsecond=0)
curs.execute("SELECT name, msgDate, FROM test where msgDate=%s",(now))
I got this traceback:
File "C:\Python\lib\site-packages\mysql\connector\cursor.py", line 220, in _process_params
res = list(map(to_mysql,res))
TypeError: 'datetime.datetime' object is not iterable
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\projectse\Email\src\a.py", line 65, in <module>
curs.execute("SELECT name, msgDate FROM messages where msgDate=%s",(now))
File "C:\Python\lib\site-packages\mysql\connector\cursor.py", line 300, in execute
stmt = operation % self._process_params(params)
File "C:\Python\lib\site-packages\mysql\connector\cursor.py", line 225, in _process_params
"Failed processing format-parameters; %s" % e)
mysql.connector.errors.ProgrammingError: -1: Failed processing format-parameters;
'datetime.datetime' object is not iterable
Any tips?
I think you should replace:
curs.execute("SELECT name, msgDate, FROM test where msgDate=%s",(now))
with:
curs.execute("SELECT name, msgDate FROM test WHERE msgDate=%s", (now,))
The parameters argument must be a sequence: (now) is just a parenthesized value, while (now,) is a one-element tuple. There is also a stray comma before FROM that would break the SQL itself.
and use this for your time variable:
import time
now=time.strftime('%Y-%m-%d %H:%M:%S')
Try seeing what now = str(datetime.datetime.now().replace(microsecond=0)) looks like in the Python interpreter, if you're curious why that's a problem.
now = datetime.datetime(2012, 2, 23, 0, 0)
now.strftime('%m/%d/%Y')
curs.execute("SELECT name, msgDate FROM test WHERE msgDate=%(now)s", dict(now=str(now)))
Please try converting the date to a string before executing; a sketch follows below.
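For illustration, a minimal sketch of that string-conversion approach (the test table, msgDate column, and connection details are assumptions carried over from the question):
import datetime
import mysql.connector

conn = mysql.connector.connect(user='user', password='secret', database='db')  # placeholder credentials
curs = conn.cursor()

# Format the datetime as the 'YYYY-MM-DD HH:MM:SS' string MySQL expects
now = datetime.datetime.now().replace(microsecond=0)
curs.execute("SELECT name, msgDate FROM test WHERE msgDate = %s",
             (now.strftime('%Y-%m-%d %H:%M:%S'),))
print(curs.fetchall())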
I want to check whether my table loaded correctly or not. If it did not load correctly, the number of records will be zero. I am using SQLCheckOperator to do this task.
This is the code:
from airflow.operators.sql import SQLCheckOperator
from datetime import date, timedelta

CURRENT_DATE = str(date.today() - timedelta(2))
TABLE_NAME = "foo"

search_monolith_post_sanity = SQLCheckOperator(
    task_id="search_monolith_post_sanity",
    sql=f"SELECT COUNT(*) FROM `{TABLE_NAME}` WHERE feed_date = DATE_SUB('{CURRENT_DATE}', INTERVAL 1 DAY)",
    bigquery_conn_id='bigquery_default',
    use_legacy_sql=False,
    dag=dag
)
I got the below error:
Executing SQL check: SELECT COUNT(*) FROM `foo` WHERE feed_date = DATE_SUB('2021-01-31', INTERVAL 1 DAY)
[2021-02-02 07:16:43,664] {taskinstance.py:1153} ERROR - 'NoneType' object has no attribute 'upper'
Traceback (most recent call last):
  File "/usr/local/lib/airflow/airflow/models/taskinstance.py", line 986, in _run_raw_task
    result = task_copy.execute(context=context)
  File "/usr/local/lib/airflow/airflow/operators/sql.py", line 95, in execute
    records = self.get_db_hook().get_first(self.sql)
  File "/usr/local/lib/airflow/airflow/operators/sql.py", line 116, in get_db_hook
    return BaseHook.get_hook(conn_id=self.conn_id)
  File "/usr/local/lib/airflow/airflow/hooks/base_hook.py", line 94, in get_hook
    connection = cls.get_connection(conn_id)
  File "/usr/local/lib/airflow/airflow/hooks/base_hook.py", line 87, in get_connection
    conn = random.choice(list(cls.get_connections(conn_id)))
  File "/usr/local/lib/airflow/airflow/hooks/base_hook.py", line 83, in get_connections
    return secrets.get_connections(conn_id)
  File "/usr/local/lib/airflow/airflow/secrets/__init__.py", line 55, in get_connections
    conn_list = secrets_backend.get_connections(conn_id=conn_id)
  File "/usr/local/lib/airflow/airflow/secrets/base_secrets.py", line 64, in get_connections
    conn_uri = self.get_conn_uri(conn_id=conn_id)
  File "/usr/local/lib/airflow/airflow/secrets/environment_variables.py", line 39, in get_conn_uri
    environment_uri = os.environ.get(CONN_ENV_PREFIX + conn_id.upper())
AttributeError: 'NoneType' object has no attribute 'upper'
I have tried using BigQueryCheckOperator and CheckOperator instead of SQLCheckOperator, but ran into errors there as well. If I replace BigQueryCheckOperator with BigQueryOperator, the code works fine and I get zero as the output.
I am new to Airflow. Any help is much appreciated. Thanks!
Look at the line just before the error message in the stack trace:
environment_uri = os.environ.get(CONN_ENV_PREFIX + conn_id.upper())
AttributeError: 'NoneType' object has no attribute 'upper'
In this case, the NoneType object that upper() is being called on is conn_id.
If you're using Airflow 1.10.15, the documentation for this operator has a rather important note buried at the bottom:
Note that this is an abstract class and get_db_hook needs to be defined. Whereas a get_db_hook is hook that gets a single record from an external source.
Also note that the definition of the function appears to expect a conn_id parameter, which your SQLCheckOperator never receives; a sketch follows below.
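To make that concrete, a hedged sketch of the change this implies, assuming the Airflow 1.10.x SQLCheckOperator, which takes a generic conn_id argument (bigquery_conn_id and use_legacy_sql are not parameters it knows, so they are dropped here):
# Sketch, not a verified fix: pass the connection through the generic
# conn_id argument so self.conn_id is no longer None.
search_monolith_post_sanity = SQLCheckOperator(
    task_id="search_monolith_post_sanity",
    sql=f"SELECT COUNT(*) FROM `{TABLE_NAME}` WHERE feed_date = DATE_SUB('{CURRENT_DATE}', INTERVAL 1 DAY)",
    conn_id='bigquery_default',
    dag=dag,
)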
Assumption: You are using Airflow >= 2.0.0
Use the following code; notice the usage of BigQueryCheckOperator and that I used gcp_conn_id instead of bigquery_conn_id.
from airflow.providers.google.cloud.operators.bigquery import BigQueryCheckOperator
from datetime import date, timedelta

CURRENT_DATE = str(date.today() - timedelta(2))
TABLE_NAME = "foo"

search_monolith_post_sanity = BigQueryCheckOperator(
    task_id="search_monolith_post_sanity",
    sql=f"SELECT COUNT(*) FROM `{TABLE_NAME}` WHERE feed_date = DATE_SUB('{CURRENT_DATE}', INTERVAL 1 DAY)",
    gcp_conn_id='bigquery_default',
    use_legacy_sql=False,
    dag=dag
)
I am trying to add multiple records to a database using mysql-connector-python. I was able to add a single record initially, but for multiple records this error has been persistent.
class DataBase:
    def __init__(self):
        try:
            self.connection = mysql.connector.connect(host='xx.xxx.xx.xx',
                                                      database='XXX',
                                                      user='XXXX',
                                                      password='xxxx')
        except Error as error:
            print("Failed to connect: {}".format(error))

    def store_into_table(self, df):
        mySql_insert_query = """INSERT INTO data ('data_id', 'a', 'b', 'c') VALUES ('%S','%S', '%S', '%S') """
        records_to_insert = df
        cursor = self.connection.cursor()
        cursor.executemany(mySql_insert_query, records_to_insert)
        self.connection.commit()
        print(cursor.rowcount, " Records inserted successfully into data table")
The variable that I am passing to an instance of DataBase is:
data = [('101', 'name_1', '3', 'sample'), ('102', 'name_2', '5', 'sample_1')]
Exception being thrown:
Traceback (most recent call last):
  File "/Users/dev/anaconda3/envs/sql/lib/python3.7/site-packages/mysql/connector/cursor_cext.py", line 317, in _batch_insert
    "Not all parameters were used in the SQL statement")
ProgrammingError: Not all parameters were used in the SQL statement

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "<ipython-input-123-44fdf194cd10>", line 1, in <module>
    db.store_into_table(data)
  File "/Users/dev/Detector/database.py", line 30, in store_into_table
    cursor.executemany(mySql_insert_query, records_to_insert)
  File "/Users/dev/anaconda3/envs/sql/lib/python3.7/site-packages/mysql/connector/cursor_cext.py", line 350, in executemany
    stmt = self._batch_insert(operation, seq_params)
  File "/Users/dev/anaconda3/envs/sql/lib/python3.7/site-packages/mysql/connector/cursor_cext.py", line 329, in _batch_insert
    "Failed executing the operation; %s" % err)
InterfaceError: Failed executing the operation; Not all parameters were used in the SQL statement
Any clues as to why this isn't working?
Try this: column names must not be wrapped in single quotes, and the placeholders are plain, lowercase %s without quotes:
mySql_insert_query = """INSERT INTO data (data_id, a, b, c) VALUES (%s, %s, %s, %s) """
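For context, a quick end-to-end sketch of store_into_table's body with that fix applied (the connection comes from the question's DataBase class; the data is the sample from the question):
mySql_insert_query = """INSERT INTO data (data_id, a, b, c) VALUES (%s, %s, %s, %s)"""
records_to_insert = [('101', 'name_1', '3', 'sample'), ('102', 'name_2', '5', 'sample_1')]

cursor = self.connection.cursor()
# executemany substitutes each tuple into the four %s placeholders in turn
cursor.executemany(mySql_insert_query, records_to_insert)
self.connection.commit()
print(cursor.rowcount, "records inserted successfully into data table")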
Anyone come across this before?
import boto
conn = boto.dynamodb.connect_to_region('eu-west-1', aws_access_key_id=aws_key, aws_secret_access_key=aws_secret)
table = conn.get_table('TweetSample')
print table.scan(limit=1)
error:
Traceback (most recent call last):
  File "test.py", line 9, in <module>
    print table.scan(limit=1)
  File "table.py", line 518, in scan
    return self.layer2.scan(self, *args, **kw)
TypeError: scan() got an unexpected keyword argument 'limit'
[Finished in 0.4s with exit code 1]
I don't even know...
According to the documentation, the scan method of boto.dynamodb.table.Table (which is returned by boto.dynamodb.layer2.Layer2.get_table) does not accept limit, but max_results.
The result is a generator, so if you want to print it you should iterate over it:
import boto.dynamodb

conn = boto.dynamodb.connect_to_region(
    'eu-west-1',
    aws_access_key_id=aws_key,
    aws_secret_access_key=aws_secret)
table = conn.get_table('TweetSample')
for row in table.scan(max_results=1):
    print row
or convert it to a sequence:
print list(table.scan(max_results=1))
I am using Python to query a MySQL table and getting one datetime in string format, which is stored in row[3]. I need to convert this string timestamp to epoch seconds.
import MySQLdb
import os
import sys
import datetime

try:
    db = MySQLdb.connect("localhost", "root", "", "test")
except MySQLdb.Error, e:
    print "Error %d: %s" % (e.args[0], e.args[1])
    sys.exit(1)

cursor = db.cursor()
cursor.execute("SELECT * from main_tbl WHERE login_user_name='kumar'")
data = cursor.fetchall()
for row in data:
    print row[3]  # prints 2014-09-26 12:24:23
    date = datetime.datetime.strptime(row[3], "%Y-%m-%d %H:%M:%S")
    print date
On execution it throws this error:
2014-09-26 12:24:23
Traceback (most recent call last):
  File "test1.py", line 22, in <module>
    date = datetime.datetime.strptime(row[3], "%Y-%m-%d %H:%M:%S")
TypeError: must be string, not datetime.datetime
What am I doing wrong?
I have also tried the following:
epoch_start = time.mktime(time.strptime(row[3], "%Y-%m-%d %H:%M:%S"));
But I get this error:
Traceback (most recent call last):
  File "test1.py", line 29, in <module>
    epoch_start = time.mktime(time.strptime(row[3], "%Y-%m-%d %H:%M:%S"));
  File "C:\Python27\lib\_strptime.py", line 467, in _strptime_time
    return _strptime(data_string, format)[0]
  File "C:\Python27\lib\_strptime.py", line 322, in _strptime
    found = format_regex.match(data_string)
TypeError: expected string or buffer
The value in row[3] is already a datetime.datetime object, as the traceback clearly points out, so there is no need to create the date variable with strptime. You can use row[3] directly as a datetime.datetime object.
Just try printing:
print type(row[3])
That should give the type as datetime.datetime.
row[3] is already a datetime.datetime object.
From your question it sounds like you want to convert it to epoch seconds, so do something like:
import time
epochTime = time.mktime(row[3].timetuple())
print epochTime
Then check whether the converted epoch value is correct:
print time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(epochTime))
print row[3]
Verify that the last two statements produce the same output.
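One caveat, as an aside beyond the original answer: time.mktime interprets the timetuple as local time. If the column actually stores UTC timestamps (an assumption about your data), calendar.timegm is the drop-in alternative:
import calendar

# Only if row[3] holds a UTC timestamp; otherwise keep time.mktime
epochTime = calendar.timegm(row[3].timetuple())
print epochTime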
I'm currently working on a basic query which inserts data depending on the input parameters, and I'm unable to perform it.
cur.execute("INSERT INTO foo (bar1, bar2) values (?, ?)", (foo1, foo2))
I have this error message:
Exception in Tkinter callback
Traceback (most recent call last):
  File "/usr/lib/python3.2/tkinter/__init__.py", line 1426, in __call__
    return self.func(*args)
  File "test.py", line 9, in register
    cur.execute("INSERT INTO foo (bar1, bar2) values (?,?)", (foo1, foo2))
  File "/usr/local/lib/python3.2/dist-packages/pymysql/cursors.py", line 108, in execute
    query = query % escaped_args
TypeError: unsupported operand type(s) for %: 'bytes' and 'tuple'
foo1 and foo2 are both strings. I tried with %s; same error.
It seems like a bug in cursors.py. As suggested here and here, you should replace this line in cursors.py:
query = query % conn.escape(args)
With this:
query = query.decode(charset) % conn.escape(args)
If that doesn't work, try this one instead:
query = query.decode(charset) % escaped_args
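As a hedged aside beyond the linked fix: PyMySQL substitutes %s placeholders (format/pyformat paramstyle), not ?, so once the decoding issue is patched the call should look like this:
# %s placeholders, unquoted; PyMySQL handles the escaping
cur.execute("INSERT INTO foo (bar1, bar2) VALUES (%s, %s)", (foo1, foo2))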