How do I add a password to PostgreSQL in peewee - Python

I want to connect to a PostgreSQL server with peewee, but I keep getting an error that I need a password and I have no clue how to add it: current transaction is aborted, commands ignored until end of transaction block.
I have read every bit of the docs and still have no clue how. If I can't get it working I'll just go to SQLite.
from twython import Twython, TwythonError
import json
from peewee import *

"""Setting up variables"""
db = PostgresqlDatabase("Tweets", user='postgres')

Cred_Filename = 'Keys.json'
jf = open(Cred_Filename)
creds = json.load(jf)
jf.close()

twitter = Twython(creds['consumer_key'],
                  creds['consumer_secret'],
                  creds['access_token'],
                  creds['access_token_secret'])
"""End of variables"""

"""PostgreSql Setup"""
class BaseModel(Model):
    """A base model that will use our Postgresql database"""
    class Meta:
        database = db

class Tweet(BaseModel):
    tweet = CharField()

try:
    Tweet.create_table()
except Exception as e:
    pass
"""End of PostgreSql Setup"""

"""Pulling Tweets and displaying them"""
def PullTweet():
    """Take user input and pull and print 10 newest tweets"""
    user_input = input('Please Enter A Username: ')
    try:
        user_timeline = twitter.get_user_timeline(screen_name=user_input)
    except TwythonError as e:
        print(e)
    print(user_input)
    for tweets in user_timeline:
        print('[*]' + tweets['text'])
        try:
            tweet = Tweet(tweet=tweets['text'])
            tweet.save()
        except Exception as e:
            print(e)

PullTweet()

PostgresqlDatabase just passes any additional keyword arguments through to psycopg2.connect, whose signature looks like this:
conn = psycopg2.connect(dbname="test", user="postgres", password="secret")
So you just need to add a password kwarg to your call:
db = PostgresqlDatabase("Tweets",user='postgres', password='***')
Note, though, that it's generally not safe to store your database password in plain text within your program.
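If you'd rather not hard-code it, one option is to keep the password next to the Twitter keys you already load from Keys.json, or read it from an environment variable. A minimal sketch, assuming you add a db_password entry to Keys.json (that key name and the PGPASSWORD fallback are my own choices, not something peewee requires):

import json
import os

from peewee import PostgresqlDatabase

with open('Keys.json') as jf:
    creds = json.load(jf)

# 'db_password' is an assumed key added to Keys.json for this example;
# fall back to the conventional PGPASSWORD environment variable if it's missing
db_password = creds.get('db_password') or os.environ.get('PGPASSWORD')

db = PostgresqlDatabase('Tweets', user='postgres', password=db_password)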


Why does opening the same database with SQLAlchemy give me two different sessions, and how can I fix it?

I am writing some tests with pytest; I want to test creating a user (and the email) via the POST method.
After some debugging, I know the issue is that I open two databases in memory, but they are the same database, SessionLocal().
So how can I fix this? I tried db.flush(), but it doesn't work.
This is the POST method code:
@router.post("/", response_model=schemas.User)
def create_user(
    *,
    db: Session = Depends(deps.get_db),  # get_db is SessionLocal()
    user_in: schemas.UserCreate,
    current_user: models.User = Depends(deps.get_current_active_superuser),
) -> Any:
    """
    Create new user.
    """
    user = crud.user.get_by_email(db, email=user_in.email)
    if user:
        raise HTTPException(
            status_code=400,
            detail="The user with this username already exists in the system.",
        )
    user = crud.user.create(db, obj_in=user_in)
    print("====post====")
    print(db.query(models.User).count())
    print(db)
    if settings.EMAILS_ENABLED and user_in.email:
        send_new_account_email(
            email_to=user_in.email, username=user_in.email, password=user_in.password
        )
    return user
and the test code is:
def test_create_user_new_email(
    client: TestClient, superuser_token_headers: dict, db: Session  # db is SessionLocal()
) -> None:
    username = random_email()
    password = random_lower_string()
    data = {"email": username, "password": password}
    r = client.post(
        f"{settings.API_V1_STR}/users/", headers=superuser_token_headers, json=data,
    )
    assert 200 <= r.status_code < 300
    created_user = r.json()
    print("====test====")
    print(db.query(User).count())
    print(db)
    user = crud.user.get_by_email(db, email=username)
    assert user
    assert user.email == created_user["email"]
and the test result is
> assert user
E assert None
====post====
320
<sqlalchemy.orm.session.Session object at 0x7f0a9f660910>
====test====
319
<sqlalchemy.orm.session.Session object at 0x7f0aa09c4d60>
Your code does not provide enough information to pinpoint the problem; the key issues are probably in what is hidden behind your comments.
It also seems like you are confusing SQLAlchemy sessions and databases. If you are not familiar with these concepts, I highly recommend having a look at the SQLAlchemy documentation.
Looking at your code structure, it seems like you are using FastAPI.
If you want to test SQLAlchemy with pytest, I recommend using pytest fixtures with SQL transactions.
Here is my suggestion on how to implement such a test. I'll assume that you want to run the test against your actual database rather than create a new database especially for the tests. This implementation is heavily based on this GitHub gist (the author made a "feel free to use" statement, so I suppose he is OK with me copying his code here):
# test.py
import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import Session
from fastapi.testclient import TestClient

from myapp.models import BaseModel
from myapp.main import app  # import your fastapi app
from myapp.database import get_db  # import the dependency

client = TestClient(app)


# scope="session" means the engine will last for the whole test session
@pytest.fixture(scope="session")
def engine():
    return create_engine("postgresql://localhost/test_database")


# at the end of the test session, drop the created metadata using a fixture with yield
@pytest.fixture(scope="session")
def tables(engine):
    BaseModel.metadata.create_all(engine)
    yield
    BaseModel.metadata.drop_all(engine)


# here scope="function" (the default), so each time a test finishes the database is cleaned
@pytest.fixture
def dbsession(engine, tables):
    """Returns an sqlalchemy session, and after the test tears down everything properly."""
    connection = engine.connect()
    # begin the nested transaction
    transaction = connection.begin()
    # use the connection with the already started transaction
    session = Session(bind=connection)

    yield session

    session.close()
    # roll back the broader transaction
    transaction.rollback()
    # put back the connection to the connection pool
    connection.close()
## end of the gist.github code


@pytest.fixture
def db_fastapi(dbsession):
    def override_get_db():
        db = dbsession
        try:
            yield db
        finally:
            db.close()

    client.app.dependency_overrides[get_db] = override_get_db
    yield dbsession


# Now you can run your test
def test_create_user_new_email(db_fastapi):
    username = random_email()
    # ...
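For reference, the get_db dependency that dependency_overrides replaces is usually the standard FastAPI session-per-request generator. A sketch of what myapp/database.py might contain (the connection URL and module layout here are assumptions based on the imports above, not your actual code):

# myapp/database.py
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

# placeholder URL; point it at your real database
engine = create_engine("postgresql://localhost/my_database")
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

def get_db():
    # yield one session per request and always close it afterwards
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()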

Python OOP programming issue when using MySQL connect

I am trying to write a backend that will be used for an iOS app. I know this is technically the wrong way to do it, but it won't be deployed.
My issue is that I get the error
self.cursor(query)
TypeError: 'CMySQLCursor' object is not callable
This happens when I run the following from main.py:
import database
db = database.database()
staff = db.getData("SELECT * FROM timesheets.staff")
Finally, this is my database.py code:
import mysql.connector


class database:
    conn = ""
    cursor = ""

    def __init__(self):
        self.conn = mysql.connector.connect(user='james',
                                            password='timeismoney',
                                            host='hallfamily.mycrestron.com',
                                            database='timesheets',
                                            port='6033')
        self.cursor = self.conn.cursor()
        print("Done")

    def getData(self, query):
        # Checking if the user has applied a string
        if isinstance(query, str):
            self.cursor(query)
        else:
            return "You have provided a request that cant be processed"
        # Fetching all the results
        result = self.cursor.fetchall()
        # Returning back to the user
        return result

    def postData(self):
        print("Coming soon")

    def close(self):
        self.conn.close()
Instead of:
self.cursor(query)
Try this:
self.cursor.execute(query)
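execute() runs the statement on the existing cursor, after which fetchall() works as intended. For completeness, here is a sketch of getData with that fix applied, plus an optional params argument (my addition, not in the original class) so values can be bound by the driver instead of being formatted into the SQL string:

def getData(self, query, params=None):
    # Only accept a plain SQL string
    if not isinstance(query, str):
        return "You have provided a request that cant be processed"
    # execute() runs the SQL; params (a tuple) are bound via %s placeholders
    self.cursor.execute(query, params or ())
    # Fetch and return all the results
    return self.cursor.fetchall()

Used like: staff = db.getData("SELECT * FROM timesheets.staff WHERE id = %s", (42,))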

Boto3 AWS creating user: try/except not allowing the for loop to continue

I have this Python script that goes through all my accounts and creates a user in each, but I get an error when the user already exists ('botocore.errorfactory.EntityAlreadyExistsException'), or it only creates the user in each account until it hits an error, then the for loop ends and never creates the user in the remaining accounts in the array. I have tried the botocore exceptions and moving the exception pass to other lines in the code; when I do that I get no output. Also, this creates a second set of keys. I am a bit of a newbie with Python...
import boto3
import pprint
import sys
from botocore.exceptions import ClientError, ParamValidationError

pp = pprint.PrettyPrinter(indent=4)
username = 'musicman'
Password = 'Welcome'

accounts = ["account1", "account2", "account3", "account4", "account5", "account6"]

try:
    for account in accounts:
        boto3.setup_default_session(profile_name=account)
        #brooksprod = boto3.session.Session(profile_name='account1')
        iam = boto3.client('iam')

        # create a user
        iam.create_user(UserName=username)
except Exception:
    pass

response = iam.add_user_to_group(
    GroupName='Admin',
    UserName=username
)

response = iam.create_login_profile(
    UserName=username,
    Password=Password,
    PasswordResetRequired=True
)
#pp.pprint(response)

response = iam.get_user(UserName=username)
pp.pprint(response)

response = iam.create_access_key(
    UserName=username
)
#print(response.get('AccessKey','SecretAccessKey'))
pp.pprint(response)
Do the try/except inside the for loop:
for account in accounts:
    try:
        boto3.setup_default_session(profile_name=account)
        #brooksprod = boto3.session.Session(profile_name='account1')
        iam = boto3.client('iam')

        # create a user
        iam.create_user(UserName=username)
    except Exception:
        pass
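If you only want to skip the "user already exists" case while still surfacing other failures, you can catch the specific error that boto3 exposes on the client instead of a bare Exception. A sketch of that variation (group and login-profile setup omitted):

for account in accounts:
    boto3.setup_default_session(profile_name=account)
    iam = boto3.client('iam')
    try:
        iam.create_user(UserName=username)
    except iam.exceptions.EntityAlreadyExistsException:
        # the user already exists in this account; carry on with the next account
        print("{} already exists in {}, skipping create".format(username, account))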

Telegram bot: respond to a specific command from a Python list

I am making a Telegram bot that can access a database to reply to users' queries. The bot needs to respond to requests for specific data in the database. I was able to solve the case where users request all the data, but I am stuck on individual data. I am using telegram.ext from the telegram package in Python. Here is what I have done so far.
from telegram.ext import Updater, CommandHandler, MessageHandler, Filters
import MySQLdb

currr = []  # global list var ~don't bash me for using global in python please, I'm a newbie


# request for all data in database
def request2(bot, update):
    db = MySQLdb.connect(host="local", user="root", passwd="pwd", db="mydb")
    cur = db.cursor()
    cur.execute("select ID from table")
    ID = cur.fetchall()
    cur.execute("SELECT ID, temp FROM table2 order by indexs desc")
    each_rows = cur.fetchall()
    for IDs in ID:
        for each_row in each_rows:
            if str(each_row[0])[0:4] == str(ID)[2:6]:
                update.message.reply_text('reply all related data here')
                break


# request for single data
def individualreq(bot, update):
    db = pymysql.connect(host="localhost", user="root", passwd="pwd", db="mydb")
    update.message.reply_text('reply individual data to users here')


def main():
    updater = Updater("TOKEN")
    dp = updater.dispatcher
    global currr

    # get all IDs from the database
    db = MySQLdb.connect(host="localhost", user="root", passwd="pwd", db="mydb")
    cur = db.cursor()
    cur.execute("select ID from table")
    curr_ID = cur.fetchall()
    # example ID = 'F01', 'F02', 'F03'
    for curr_IDs in curr_ID:
        currr.append(curr_IDs[0])

    # request all data
    dp.add_handler(CommandHandler("all", request2))
    # request individual data
    dp.add_handler(CommandHandler(currr, individualreq))  # list of commands in currr[]


if __name__ == '__main__':
    main()
I am looking for a way to pass the current command, which is also the ID in the database that the user requested from the currr[] list, to the individualreq(bot, update) function, so that only the data for the requested ID is replied. Users will select from a list of IDs in Telegram, and the command handler should pass the selected ID to the function. I have not found a way to pass the ID to the function. Could someone help me solve this please? Thanks.
I found a solution to my question based on the answer provided by Oluwafemi Sule. CommandHandler can pass the arguments of the command to the function if you add pass_args=True to the CommandHandler.
dp.add_handler(CommandHandler(currr, individualreq, pass_args=True))
To print out the args in the function, the function needs to receive them:
def individualreq(bot, update, args):
    # id stores the args value
    id = update.message.text
    print(id[1:])  # [1:] is to get rid of the / in id
You can outright make individualreq a closure.
CommandHandler takes a command or a list of commands to listen for, plus a number of other options.
There is a pass_user_data option that allows user data to be passed to the callback.
dp.add_handler(CommandHandler(currr, individualreq, pass_user_data=True))
The signature of the individualreq callback is updated to take the user_data:
def individualreq(bot, update, user_data=None):
    # user_data is a dict
    print(user_data)
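Either way, once the handler knows which command was sent, it can query just that record. A rough sketch of individualreq doing that, with the table and column names assumed from the question (the prefix LIKE mirrors the prefix comparison used in request2):

def individualreq(bot, update):
    # the command the user sent, e.g. "/F01" -> "F01" (strip the slash and any @botname suffix)
    requested_id = update.message.text.lstrip('/').split('@')[0]

    db = MySQLdb.connect(host="localhost", user="root", passwd="pwd", db="mydb")
    cur = db.cursor()
    # parameterized query so the ID is escaped by the driver
    cur.execute("SELECT ID, temp FROM table2 WHERE ID LIKE %s ORDER BY indexs DESC",
                (requested_id + '%',))
    rows = cur.fetchall()
    db.close()

    if rows:
        update.message.reply_text('\n'.join(str(row) for row in rows))
    else:
        update.message.reply_text('No data found for ' + requested_id)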

Trying to use Tweepy/Twitters Streaming API and psycopg2 to populate a PostgreSQL database. Very close, one line off

I've been working on trying to populate a table in a PostgreSQL database using Tweepy and Twitter's Streaming API. I'm extremely close; I believe I'm just one line away from getting it. I've looked at many examples, including:
http://andrewbrobinson.com/2011/07/15/using-tweepy-to-access-the-twitter-stream/
http://blog.creapptives.com/post/14062057061/the-key-value-store-everyone-ignored-postgresql
Python tweepy writing to sqlite3 db
tweepy stream to sqlite database - invalid synatx
Using tweepy to access Twitter's Streaming API
etc, etc
I'm at the point where I can stream tweets quite easily using Tweepy, so I know my consumer key, consumer secret, access key, and access secret are correct. I also have Postgres set up and am successfully connecting to the database I created. I tested hard-coded values into the table in my database using psycopg2 from a .py file, and that is also working. I am getting tweets streamed in based on keywords I select, and am successfully connected to a table in the database. Now I just need the tweets to stream into the table in my Postgres database. Like I said, I am so close and any help would be greatly appreciated.
This stripped down script inserts data into my desired table:
import psycopg2

try:
    conn = psycopg2.connect("dbname=teststreamtweets user=postgres password=x host=localhost")
    print "connected"
except:
    print "unable to connect"

namedict = (
    {"first_name": "Joshua", "last_name": "Drake"},
    {"first_name": "Steven", "last_name": "Foo"},
    {"first_name": "David", "last_name": "Bar"}
)

cur = conn.cursor()
cur.executemany("""INSERT INTO testdata(first_name, last_name) VALUES (%(first_name)s, %(last_name)s)""", namedict);

conn.commit()
Below is the script I have been editing for a while now trying to get it to work:
import psycopg2
import time
import json
from getpass import getpass
import tweepy

consumer_key = 'x'
consumer_secret = 'x'
access_key = 'x'
access_secret = 'x'

connection = psycopg2.connect("dbname=teststreamtweets user=postgres password=x host=localhost")
cursor = connection.cursor()

# always use this step to begin clean
def reset_cursor():
    cursor = connection.cursor()


class StreamWatcherListener(tweepy.StreamListener):

    def on_data(self, data):
        try:
            print 'before cursor' + data
            connection = psycopg2.connect("dbname=teststreamtweets user=postgres password=x host=localhost")
            cur = connection.cursor()
            print 'status is: ' + str(connection.status)
            #cur.execute("INSERT INTO tweet_list VALUES (%s)" % (data.text))
            cur.executemany("""INSERT INTO tweets(tweet) VALUES (%(text)s)""", data);
            connection.commit()
            print '---------'
            print type(data)
            #print data
        except Exception as e:
            connection.rollback()
            reset_cursor()
            print "not saving"
            return

        if cursor.lastrowid == None:
            print "Unable to save"

    def on_error(self, status_code):
        print 'Error code = %s' % status_code
        return True

    def on_timeout(self):
        print 'timed out.....'


print 'welcome'
auth1 = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth1.set_access_token(access_key, access_secret)
api = tweepy.API(auth1)

l = StreamWatcherListener()
print 'about to stream'
stream = tweepy.Stream(auth=auth1, listener=l)
setTerms = ['microsoft']
#stream.sample()
stream.filter(track=setTerms)
Sorry if the code is a bit messy, but I have been trying many options. Like I said, any suggestions, links to helpful examples, etc. would be greatly appreciated, as I've tried everything I can think of and am now resorting to a long walk. Thanks a ton.
Well, I'm not sure why you are using classes for this, and then why you don't have __init__ defined in your class. Seems complicated.
Here is a basic version of the functions I use to do this stuff. I've only ever used sqlite for it, but the syntax looks basically the same. Maybe you can get something from this.
def retrieve_tweets(numtweets=10, *args):
    """
    This function optionally takes one or more arguments as keywords to filter tweets.
    It iterates through tweets from the stream that meet the given criteria and sends them
    to the database population function on a per-instance basis, so as to avoid disaster
    if the stream is disconnected.

    Both SampleStream and FilterStream methods access Twitter's stream of status elements.
    """
    filters = []
    for key in args:
        filters.append(str(key))

    if len(filters) == 0:
        stream = tweetstream.SampleStream(username, password)
    else:
        stream = tweetstream.FilterStream(username, password, track=filters)

    try:
        count = 0
        while count < numtweets:
            for tweet in stream:
                # a check is needed on text as some "tweets" are actually just API operations
                # the language selection doesn't really work but it's better than nothing(?)
                if tweet.get('text') and tweet['user']['lang'] == 'en':
                    if tweet['retweet_count'] == 0:
                        # bundle up the features I want and send them to the db population function
                        bundle = (tweet['id'], tweet['user']['screen_name'], tweet['retweet_count'], tweet['text'])
                        db_initpop(bundle)
                        break
                    else:
                        # a RT has a different structure. This bundles the original tweet. Getting the
                        # retweets comes later, after the stream is de-accessed.
                        bundle = (tweet['retweeted_status']['id'], tweet['retweeted_status']['user']['screen_name'],
                                  tweet['retweet_count'], tweet['retweeted_status']['text'])
                        db_initpop(bundle)
                        break
            count += 1
    except tweetstream.ConnectionError, e:
        print 'Disconnected from Twitter at ' + time.strftime("%d %b %Y %H:%M:%S", time.localtime()) \
              + '. Reason: ', e.reason


def db_initpop(bundle):
    """
    This function places basic tweet features in the database. Note the placeholder values:
    these can act as a check to verify that no further expansion was available for that method.
    """
    # unpack the bundle
    tweet_id, user_sn, retweet_count, tweet_text = bundle
    curs.execute("""INSERT INTO tblTweets VALUES (null,?,?,?,?,?,?)""",
                 (tweet_id, user_sn, retweet_count, tweet_text, 'cleaned text', 'cleaned retweet text'))
    conn.commit()
    print 'Database populated with tweet ' + str(tweet_id) + ' at ' + time.strftime("%d %b %Y %H:%M:%S", time.localtime())
Good luck!
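As a postscript on your on_data: the data argument arrives from Tweepy as a raw JSON string, so executemany(..., data) ends up iterating over its characters. A minimal sketch of the fix, reusing the module-level connection from your script and assuming the tweets table has a single text column named tweet as in your INSERT, is to decode the JSON and insert one row per callback with a parameterized execute:

import json

def on_data(self, data):
    status = json.loads(data)            # data is a JSON string, not a dict
    text = status.get('text')
    if text:                             # skip keep-alives, deletes and other non-status messages
        cur = connection.cursor()
        cur.execute("INSERT INTO tweets (tweet) VALUES (%s)", (text,))
        connection.commit()
    return True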
