MySQL Performance Analysis Toolkit

#!/usr/local/python3/bin/python3
#coding:utf-8
####################################
# Purpose: diagnose MySQL performance problems
# Author : 蒋乐兴
# Date   : --
# create user moniter@'127.0.0.1' identified by 'moniter@2048';
#
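# Note (an assumption, not part of the original header): on 5.7 the script reads
# from performance_schema, so the moniter account may also need a grant such as
#   grant select on performance_schema.* to moniter@'127.0.0.1';
# adjust this to whatever your security policy requires.
#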
####################################

import mysql.connector as connector
import argparse
import psutil
import json
import sys
import os

show_global_status_56="select variable_name,variable_value from information_schema.global_status where variable_name= %s"
show_global_variables_56="select variable_name,variable_value from information_schema.global_variables where variable_name= %s"
show_global_status_57="select variable_name,variable_value from performance_schema.global_status where variable_name= %s"
show_global_variables_57="select variable_name,variable_value from performance_schema.global_variables where variable_name= %s"

class AnalyseBase(object):
    def __init__(self,cursor,args):
        self.cursor=cursor
        self.args=args
        self.result={}

    def Analyse(self):
        "run the analysis"
        pass

    def Print(self):
        print(json.dumps(self.result,sort_keys=True,indent=4,ensure_ascii=False))

class AnalyseInnodb(AnalyseBase):
    def innodb_log_waits(self):
        "status:innodb_log_waits is the number of times InnoDB had to wait for the redo log to be flushed; if it is not 0, innodb_log_buffer_size is too small"
        self.cursor.execute(self.args.show_global_status,('innodb_log_waits',))
        name,value=self.cursor.fetchone()
        comment=None
        if int(value)==0:
            comment='OK'
        else:
            comment='innodb_log_waits > 0, innodb_log_buffer_size should be increased accordingly'
        self.result['innodb_log_waits']={'name':'innodb_log_waits','value':value,'comment':comment}

    def innodb_flush_log_at_trx_commit(self):
("variables:innodb_flush_log_at_trx_commit 0:事务提交时并不把redo log 写入日志文件,而是等待主线程每秒的刷新。"
"1:commit 时同步的方式刷新redo log 到日志文件"
"2:commit 时异步的方式刷新redo log 到日志文件")
self.cursor.execute(args.show_global_variables,('innodb_flush_log_at_trx_commit',))
name,value=self.cursor.fetchone()
comment=None
if int(value)==:
comment='正常、由于每个事务完成后都要同步的刷新日志,所以性能不是最好'
else:
comment='注意、有安全隐患;0:事务提交时并不把redo log 写入日志文件,而是等待主线程每秒的刷新;2:commit 时异步的方式刷新redo log 到日志文件。'
self.result['innodb_flush_log_at_trx_commit']={'name':'innodb_flush_log_at_trx_commit','value':value,'comment':comment} def innodb_buffer_pool_size(self):
        self.cursor.execute(self.args.show_global_variables,('innodb_buffer_pool_size',))
        name,value=self.cursor.fetchone()
        memory_object=psutil.virtual_memory()
        total_memory=memory_object.total
        rate=float(value)/float(total_memory)
        comment=None
        if rate <=0.75:
            comment="Warning, innodb_buffer_pool_size is too small; innodb_buffer_pool_size:{0}{1} / total_memory:{2}{3} = {4}%"
        elif rate<=0.85:
            comment="OK, innodb_buffer_pool_size looks reasonable; innodb_buffer_pool_size:{0}{1} / total_memory:{2}{3} = {4}%"
        else:
            comment="Warning, innodb_buffer_pool_size is too large; innodb_buffer_pool_size:{0}{1} / total_memory:{2}{3} = {4}%"
        sign=self.args.memoryunit['sign']
        unit=int(self.args.memoryunit['unit'])
        value=int(value)
        comment=comment.format(value/unit,sign,total_memory/unit,sign,rate*100)
        self.result['innodb_buffer_pool_size']={'name':'innodb_buffer_pool_size','value':"{0}{1}".format(value/unit,sign),'comment':comment}

    def innodb_file_per_table(self):
"variables:innodb_file_per_table 不做成单独表空间的话管理不方便"
self.cursor.execute(args.show_global_variables,('innodb_file_per_table',))
name,value=self.cursor.fetchone()
comment=None
if comment=='ON':
comment='正常'
else:
comment='注意、建议开启innodb_file_per_table,以方式管理innodb表空间文件'
self.result['innodb_file_per_table']={'name':'innodb_file_per_table','value':value,'comment':comment} def innodb_io_capacity(self):
"1:在合并插入缓冲时,合并插入缓冲数量为innodb_io_capacity的5%; 2:在从缓冲区刷新脏页时,刷新脏页的数量为innodb_io_capacity页。"
self.cursor.execute(args.show_global_variables,('innodb_io_capacity',))
name,value=self.cursor.fetchone()
comment=("注意、无法确认最优值,请核对磁盘IO能力。在合并插入缓冲时,合并插入缓冲数量为innodb_io_capacity的5%;"
"在从缓冲区刷新脏页时,刷新脏页的数量为innodb_io_capacity页。")
self.result['innodb_io_capacity']={'name':'innodb_io_capacity','value':value,'comment':comment} def innodb_max_dirty_pages_pct(self):
"innodb 在每秒刷新缓冲池时会去判断这个值,如果大于innodb_max_dirty_pages_pct,才会去刷新100个脏页"
self.cursor.execute(args.show_global_variables,('innodb_max_dirty_pages_pct',))
name,value=self.cursor.fetchone()
comment=None
if int(value) <=:
comment=("注意、innodb_max_dirty_pages_pct 过小;这会增加磁盘的IO负载,请适当增加,推荐值75~80")
elif int(value) <=:
comment='正常'
else:
comment='注意、innodb_max_dirty_pages_pct 过大;脏面数量过大,这会影响服务宕机后,重启的用时'
self.result['innodb_max_dirty_pages_pct']={'name':'innodb_max_dirty_pages_pct','value':value,'comment':comment} def Analyse(self):
        self.innodb_log_waits()
        self.innodb_file_per_table()
        self.innodb_flush_log_at_trx_commit()
        self.innodb_io_capacity()
        self.innodb_max_dirty_pages_pct()
        self.innodb_buffer_pool_size()

if __name__=="__main__":
    parser=argparse.ArgumentParser()
    parser.add_argument('--host',default='127.0.0.1',help='ip address of mysql server.....')
    parser.add_argument('--port',default=3306,type=int,help='port number of mysql server....')
    parser.add_argument('--user',default='moniter',help='mysql user name................')
    parser.add_argument('--password',default='moniter@2048',help='password of mysql user.........')
    parser.add_argument('--mysqlversion',default='5.6',choices=['5.6','5.7'],help='version of mysql server........')
    parser.add_argument('--memoryunit',default='MB',choices=['G','GB','M','MB','K','KB'])
    parser.add_argument('target',nargs='?',default='innodb',choices=['innodb','binlog','all'],help='the part of mysql that you want to tune')
    args=parser.parse_args()
    #hide the differences between MySQL server versions
    if args.mysqlversion=='5.6':
        args.show_global_status=show_global_status_56
        args.show_global_variables=show_global_variables_56
    elif args.mysqlversion=='5.7':
        args.show_global_status=show_global_status_57
        args.show_global_variables=show_global_variables_57
    #work out the memory unit used for reporting
    unit=1024*1024
    if args.memoryunit in ('G','GB'):
        unit=1024**3
    elif args.memoryunit in ('M','MB'):
        unit=1024*1024
    elif args.memoryunit in ('K','KB'):
        unit=1024
    args.memoryunit={'sign':args.memoryunit,'unit':unit}
    cnx=None
    cursor=None
    connection_config={
        'host':args.host,
        'port':args.port,
        'user':args.user,
        'password':args.password
    }
    try:
        cnx=connector.connect(**connection_config)
        cursor=cnx.cursor()
        analyst=AnalyseInnodb(cursor,args)
        analyst.Analyse()
        analyst.Print()
    except Exception as err:
        print(err)
    finally:
        if cursor != None:
            cursor.close()
        if cnx != None:
            cnx.close()
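
Usage sketch (the file name mysql_analyse.py is only an assumption, save the script under whatever name you like), checking a local 5.7 instance and reporting memory sizes in GB:

    python3 mysql_analyse.py --host 127.0.0.1 --port 3306 --user moniter --password 'moniter@2048' --mysqlversion 5.7 --memoryunit GB innodb

The output is a JSON document keyed by parameter name, with a value and a comment for each of the InnoDB settings checked above.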