I tried implementing MySQL connection pooling, but there is an issue: I am parsing MySQL connection credentials from an environment variable, which is basically a dictionary containing credentials for multiple databases.
dict={'db1': {'username':**** ,'password':**** ,'database':*****,'host':****,'uri': mysql url with username ,password,database},
'db2': {'username': ****, 'hostname': *****,host: ******, 'password': ***,'uri': mysql url with username ,password,database}}
# Registry of per-database connection parameters, keyed by database alias.
global conn
conn = {}

def connect():
    """Open one pooled MySQL connection per database listed in the
    'dict' environment variable (a JSON object mapping db alias ->
    credential dict containing a 'uri').

    Fixes vs. the original:
      * 'multile_databases' typo raised a NameError.
      * The connection parameters were the string literals
        'url.username', 'url.password', ... instead of the parsed
        attribute values.
      * Every iteration used the same pool_name ("mypool"); MySQL
        Connector/Python keeps one pool per name, so only a single
        pool (for the last database processed) was ever created.
        Using the db alias as the pool name gives each database its
        own pool.
    """
    cred = json.loads(os.environ['dict'])
    for db in cred:
        uri = cred[db]['uri']
        url = urlparse.urlparse(uri)
        conn[db] = {'user': url.username,
                    'password': url.password,
                    'host': url.hostname,
                    'database': url.path[1:]}
        # pool_name must be unique per database; a shared name makes
        # mysql.connector return the already-created pool instead of
        # opening a new one.
        cnx = mysql.connector.connect(pool_name=db,
                                      pool_size=3,
                                      **conn[db])
connect()
The issue here is that only one connection gets opened, and it is for database 'db2'.
When I execute the same code without pooling, both connections open fine:
# Non-pooled variant: both connections open fine here because each
# mysql.connector.connect() call creates an independent connection
# (no shared pool_name is involved).
for db in cred:
    uri = cred[db]['uri']  # fixed: was 'multile_databases' (NameError)
    url = urlparse.urlparse(uri)
    conn[db] = mysql.connector.connect(user=url.username,
                                       password=url.password,
                                       host=url.hostname,
                                       database=url.path[1:])
Question: when implementing pooling, why don't both connections get opened?
Figured it out
def conn1():
pool_size = 3
ser=json.loads(os.environ['dict'])
for db in ser:
instance_1=ser[db]['uri']
url=urlparse.urlparse(instance_1)
pool[db]=PooledDB(mysql.connector, pool_size, user=url.username, password=url.password, host=url.hostname, database=url.path[1:])
cnx=[None,]*pool_size
for i in xrange(0,pool_size):
cnx[i]=pool[db].connection()
cursor=cnx[i].cursor()
cursor.execute('SELECT CONNECTION_ID()')
print "Cnx %d has ID %d" % (i+1,cursor.fetchone()[0])
The technical post webpages of this site follow the CC BY-SA 4.0 protocol. If you need to reprint, please indicate the site URL or the original address.Any question please contact:yoyou2525@163.com.