
pysilhouette.git: Commit

Main repository


Commit metadata

Revision: ffa1d14723109168280a26e10532cd95d1789c61 (tree)
Date: 2010-04-09 11:18:24
Author: Kei Funagayama <kei.topaz@gmai...>
Committer: Kei Funagayama

Log message

Tuning Logging

  • add logger key - sqlalchemy.engine
  • add logger key - sqlalchemy.pool
  • add logger key - sqlalchemy.orm
  • add logger key - pysilhouette.asynscheduler
  • add logger key - pysilhouette.asynperformer
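
The logger keys listed above only take effect when code requests loggers whose names match the configured qualname values. Below is a minimal, illustrative sketch of how such a fileConfig-based setup is typically consumed; the path /etc/pysilhouette/log.conf is an assumption for this example, not something introduced by the commit.

    # Illustrative sketch only; the config path below is assumed, not part of this commit.
    import logging
    import logging.config

    # Load the INI-style configuration corresponding to doc/log.conf.example.
    logging.config.fileConfig('/etc/pysilhouette/log.conf')

    # getLogger() names must match a configured qualname, e.g. the newly
    # added pysilhouette.asynscheduler / pysilhouette.asynperformer keys.
    logging.getLogger('pysilhouette.asynscheduler').warning('routed to application.log')

    # SQLAlchemy emits records under these names itself; with the new
    # [logger_sqlalchemy.pool] section (propagate=0) they go to sql.log only.
    logging.getLogger('sqlalchemy.pool').warning('routed to sql.log')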

Change summary

Diff

--- a/doc/log.conf.example
+++ b/doc/log.conf.example
@@ -1,8 +1,8 @@
 [loggers]
-keys=root,pysilhouette,pysilhouette_trace,sqlalchemy.engine
+keys=root,pysilhouette,pysilhouette_trace,sqlalchemy.engine,sqlalchemy.pool,sqlalchemy.orm,pysilhouette.asynscheduler,pysilhouette.asynperformer
 
 [handlers]
-keys=default,pysilhouette,pysilhouette_trace,sqlalchemy.engine
+keys=default,pysilhouette,pysilhouette_trace,sqlalchemy.engine,sqlalchemy.pool,sqlalchemy.orm,pysilhouette.asynscheduler,pysilhouette.asynperformer
 
 [formatters]
 keys=default,common
@@ -36,24 +36,69 @@ class=handlers.RotatingFileHandler
 formatter=common
 args=('/var/log/pysilhouette/sql.log', 'a', (5 *1024 *1024), 5)
 
+[handler_sqlalchemy.pool]
+class=handlers.RotatingFileHandler
+formatter=common
+args=('/var/log/pysilhouette/sql.log', 'a', (5 *1024 *1024), 5)
+
+[handler_sqlalchemy.orm]
+class=handlers.RotatingFileHandler
+formatter=common
+args=('/var/log/pysilhouette/sql.log', 'a', (5 *1024 *1024), 5)
+
+[handler_pysilhouette.asynscheduler]
+class=handlers.RotatingFileHandler
+formatter=common
+args=('/var/log/pysilhouette/application.log', 'a', (5 *1024 *1024), 5)
+
+[handler_pysilhouette.asynperformer]
+class=handlers.RotatingFileHandler
+formatter=common
+args=('/var/log/pysilhouette/application.log', 'a', (5 *1024 *1024), 5)
+
 [logger_root]
 level=ERROR
 handlers=default
 
 [logger_pysilhouette]
-level=DEBUG
+level=INFO
 handlers=pysilhouette
 propagate=0
 qualname=pysilhouette
 
 [logger_pysilhouette_trace]
-level=DEBUG
+level=INFO
 handlers=pysilhouette_trace
 propagate=0
 qualname=pysilhouette_trace
 
 [logger_sqlalchemy.engine]
-level=DEBUG
+level=WARNING
 handlers=sqlalchemy.engine
 propagate=0
 qualname=sqlalchemy.engine
+
+[logger_sqlalchemy.pool]
+level=WARNING
+handlers=sqlalchemy.pool
+propagate=0
+qualname=sqlalchemy.pool
+
+[logger_sqlalchemy.orm]
+level=INFO
+handlers=sqlalchemy.orm
+propagate=0
+qualname=sqlalchemy.orm
+
+[logger_pysilhouette.asynscheduler]
+level=WARNING
+handlers=pysilhouette.asynscheduler
+propagate=0
+qualname=pysilhouette.asynscheduler
+
+[logger_pysilhouette.asynperformer]
+level=WARNING
+handlers=pysilhouette.asynperformer
+propagate=0
+qualname=pysilhouette.asynperformer
+
--- a/pysilhouette/asynperformer.py
+++ b/pysilhouette/asynperformer.py
@@ -71,19 +71,9 @@ class AsynPerformer(ER):
             finally:
                 fp.close()
 
-            self.logger.info('Received code from the FIFO file. - code=%s' % code)
+            #self.logger.info('Received code from the FIFO file. - code=%s' % code)
             session = self.db.get_session()
 
-            # TODO:dummy data
-            #dummy_set_job(self.cf,
-            #              int(self.cf['asynperformer.thread.pool.size']),
-            #              'echo "aaaaaa"',
-            #              'echo "bbbbb"',
-            #              'echo "cccc"',
-            #              #'serial',
-            #              'parallel',
-            #              self.db)
-
             # Pending JobGroup search
             if self.cf['asynperformer.thread.pool.size'] <= tq.now_alive():
                 continue
@@ -93,30 +83,27 @@ class AsynPerformer(ER):
                                              int(self.cf['asynperformer.thread.pool.size']) - tq.now_alive())
 
             session.close()
-
-            self.logger.info('Queued the Job Group from the database. - Number of JobGroup=%d' \
-                             % len(m_jgs))
-
-            self.logger.debug('filo code=%s, cf asynperformer.mkfifo.start.code=%s' % (code, self.cf["asynperformer.mkfifo.start.code"]))
+            #self.logger.info('Queued the Job Group from the database. - Number of JobGroup=%d' % len(m_jgs))
+            #self.logger.debug('filo code=%s, cf asynperformer.mkfifo.start.code=%s' % (code, self.cf["asynperformer.mkfifo.start.code"]))
+            self.logger.info('Activity Information. - [fifo_code=%s, type=serial, jobgroup_num=%d]' % (code, len(m_jgs)))
             if code == self.cf["asynperformer.mkfifo.start.code"]:
                 if 0 < len(m_jgs):
                     for m_jg in m_jgs:
                         try:
-                            # thread worker!! start
-                            tq.put(ThreadWorker(self.cf, self.db, m_jg.id))
+                            tq.put(ThreadWorker(self.cf, self.db, m_jg.id)) # thread worker!! start
                         except Exception, e:
-                            self.logger.info('Failed to perform the job group. Exceptions are not expected. - jobgroup_id=%d : %s'
+                            self.logger.debug('Failed to perform the job group. Exceptions are not expected. - jobgroup_id=%d : %s'
                                               % (m_jg.id, str(e.args)))
                             print >>sys.stderr, traceback.format_exc()
                             t_logger = logging.getLogger('pysilhouette_traceback')
                             t_logger.error(traceback.format_exc())
                 else:
-                    self.logger.info('No Job Group.')
+                    self.logger.debug('No Job Group.')
             elif code == self.cf["asynperformer.mkfifo.stop.code"]:
-                self.logger.info('Received stop code from the FIFO file. - code=%s' % code)
+                self.logger.warning('Received stop code from the FIFO file. - code=%s' % code)
                 break
             else:
-                self.logger.info('Received illegal code from the FIFO file. - code=%s' % code)
+                self.logger.warning('Received illegal code from the FIFO file. - code=%s' % code)
 
 def sigterm_handler(signum, frame):
     logger = logging.getLogger('pysilhouette.asynperformer')
--- a/pysilhouette/asynscheduler.py
+++ b/pysilhouette/asynscheduler.py
@@ -78,9 +78,8 @@ class AsynScheduler(ER):
 
 def sigterm_handler(signum, frame):
     logger = logging.getLogger('pysilhouette.asynscheduler.signal')
-    logger.info('Stop the asynschedulerd with signal- pid=%s, signal=%s' % (os.getpid(), signum))
+    logger.warning('Stop the asynschedulerd with signal- pid=%s, signal=%s' % (os.getpid(), signum))
 
-
 def main():
     (opts, args) = getopts()
     if chkopts(opts) is True:
@@ -116,7 +115,7 @@ def main():
     finally:
         if opts.daemon is True and os.path.isfile(opts.pidfile):
             os.unlink(opts.pidfile)
-            logger.info('Process file has been deleted.. - pidfile=%s' % opts.pidfile)
+            logger.warning('Process file has been deleted.. - pidfile=%s' % opts.pidfile)
 
     return PROCERROR
 
--- a/pysilhouette/daemon.py
+++ b/pysilhouette/daemon.py
@@ -158,6 +158,7 @@ def observer(opts, cf):
 
     try:
         while True:
+            simple_log = []
             # Performer
             if not pf.poll() is None:
                 logger.debug('return code=%d' % pf.returncode)
@@ -168,7 +169,8 @@ def observer(opts, cf):
                     logger.info('performer : [start] - pid=%s, count=%s/%s'
                                 % (pf.pid, count, cf['observer.restart.count']))
                 else:
-                    logger.info('performer [running] - pid=%s, count=%s/%s'
+                    simple_log.append('performer (running) - count=%s/%s' % (count, cf['observer.restart.count']))
+                    logger.debug('performer [running] - pid=%s, count=%s/%s'
                                 % (pf.pid, count, cf['observer.restart.count']))
 
             # Scheduler
@@ -181,7 +183,8 @@ def observer(opts, cf):
                     logger.info('scheduler : [start] - pid=%s, count=%s/%s'
                                 % (sd.pid, count, cf['observer.restart.count']))
                 else:
-                    logger.info('scheduler [running] - pid=%s, count=%s/%s'
+                    simple_log.append('scheduler (running) - count=%s/%s' % (count, cf['observer.restart.count']))
+                    logger.debug('scheduler [running] - pid=%s, count=%s/%s'
                                 % (sd.pid, count, cf['observer.restart.count']))
 
             # AsynPerformer
@@ -194,7 +197,8 @@ def observer(opts, cf):
                     logger.info('asynperformer : [start] - pid=%s, count=%s/%s'
                                 % (asynpf.pid, count, cf['observer.restart.count']))
                 else:
-                    logger.info('asynperformer [running] - pid=%s, count=%s/%s'
+                    simple_log.append('asynperformer (running) - count=%s/%s' % (count, cf['observer.restart.count']))
+                    logger.debug('asynperformer [running] - pid=%s, count=%s/%s'
                                 % (asynpf.pid, count, cf['observer.restart.count']))
 
             # AsynScheduler
@@ -207,9 +211,12 @@ def observer(opts, cf):
                     logger.info('asynscheduler : [start] - pid=%s, count=%s/%s'
                                 % (asynsd.pid, count, cf['observer.restart.count']))
                 else:
-                    logger.info('asynscheduler [running] - pid=%s, count=%s/%s'
+                    simple_log.append('asynscheduler (running) - count=%s/%s' % ( count, cf['observer.restart.count']))
+                    logger.debug('asynscheduler [running] - pid=%s, count=%s/%s'
                                 % (asynsd.pid, count, cf['observer.restart.count']))
 
+            logger.info(str(simple_log)[1:-1])
+
             # status output
             status(count, status_count, default_count, False)
 
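
The daemon.py hunks above demote the four per-iteration "[running]" INFO lines to DEBUG and emit a single consolidated INFO record built from simple_log instead. A rough sketch of that consolidation pattern, using simplified, hypothetical names rather than the actual observer code:

    # Simplified sketch of the log-consolidation pattern; names are hypothetical.
    import logging

    logger = logging.getLogger('pysilhouette.daemon')

    def report_children(children, restart_limit):
        simple_log = []
        for name, count in children:  # e.g. [('performer', 0), ('scheduler', 0)]
            # Per-child detail stays at DEBUG...
            logger.debug('%s [running] - count=%s/%s', name, count, restart_limit)
            # ...while one short entry per child is collected for the summary.
            simple_log.append('%s (running) - count=%s/%s' % (name, count, restart_limit))
        # One INFO record per observer loop instead of four.
        logger.info(str(simple_log)[1:-1])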
--- a/pysilhouette/db/__init__.py
+++ b/pysilhouette/db/__init__.py
@@ -53,52 +53,23 @@ def create_database(cf):
         db = Database(cf['database.url'],
                       encoding="utf-8",
                       convert_unicode=True,
-                      #assert_unicode='warn', # DEBUG
-                      #echo = opts.verbose,
-                      #echo_pool = opts.verbose,
-                      echo=True, # TODO
-                      echo_pool=True # TODO
                       )
     else:
         if int(cf['database.pool.status']) == 1:
             db = Database(cf['database.url'],
                           encoding="utf-8",
                           convert_unicode=True,
-                          #assert_unicode='warn', # DEBUG
                           poolclass=QueuePool,
                           pool_size=int(cf['database.pool.size']),
                           max_overflow=int(cf['database.pool.max.overflow']),
-                          #echo = opts.verbose,
-                          #echo_pool = opts.verbose,
-                          echo=True, # TODO
-                          echo_pool=True # TODO
                           )
         else:
             db = Database(cf['database.url'],
                           encoding="utf-8",
                           convert_unicode=True,
-                          #assert_unicode='warn', # DEBUG
                           poolclass=SingletonThreadPool,
                           pool_size=int(cf['database.pool.size']),
-                          #echo = opts.verbose,
-                          #echo_pool = opts.verbose,
-                          echo=True, # TODO
-                          echo_pool=True # TODO
                           )
-    """
-    db = Database(cf['database.url'],
-                  encoding="utf-8",
-                  convert_unicode=True,
-                  #assert_unicode='warn', # DEBUG
-                  poolclass=SingletonThreadPool,
-                  pool_size=int(cf['database.pool.size']),
-                  #echo = opts.verbose,
-                  #echo_pool = opts.verbose,
-                  echo=True, # TODO
-                  echo_pool=True, # TODO
-                  #strategy='threadlocal'
-                  )
-    """
     if db is None:
         raise SilhouetteDBException('Initializing a database error - "Database" failed to create.')
     reload_mappers(db.get_metadata())
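
The db/__init__.py hunk removes the hard-coded echo=True / echo_pool=True arguments; in SQLAlchemy these flags are shortcuts that turn up the sqlalchemy.engine and sqlalchemy.pool loggers, so with the new log.conf sections SQL logging is controlled from the logging configuration alone. A minimal sketch of that idea, shown with a plain create_engine call for illustration (pysilhouette wraps this in its own Database class):

    # Sketch: SQL logging via the logging module instead of echo flags.
    # create_engine() is used directly here for illustration only.
    import logging
    from sqlalchemy import create_engine

    logging.basicConfig()  # some handler must exist; pysilhouette gets this from log.conf

    engine = create_engine('sqlite:///:memory:')  # no echo / echo_pool arguments

    # Equivalent visibility comes from the logger levels, which the
    # committed log.conf.example now manages declaratively.
    logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)   # SQL statements
    logging.getLogger('sqlalchemy.pool').setLevel(logging.DEBUG)    # connection checkin/checkout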
--- a/pysilhouette/performer.py
+++ b/pysilhouette/performer.py
@@ -61,14 +61,12 @@ class Performer(ER):
             finally:
                 fp.close()
 
-            self.logger.info('Received code from the FIFO file. - code=%s' % code)
+            #self.logger.info('Received code from the FIFO file. - code=%s' % code)
             session = self.db.get_session()
             m_jgs = jobgroup_findbytype_status(session, JOBGROUP_TYPE['SERIAL'])
             session.close()
-
-            self.logger.info('Queued the Job Group from the database. - Number of JobGroup=%d' \
-                             % len(m_jgs))
-
+            #self.logger.info('Queued the Job Group from the database. - Number of JobGroup=%d' % len(m_jgs))
+            self.logger.info('Activity Information. - [fifo_code=%s, type=serial, jobgroup_num=%d]' % (code, len(m_jgs)))
             if code == self.cf["performer.mkfifo.start.code"]:
                 if 0 < len(m_jgs):
                     for m_jg in m_jgs:
@@ -93,12 +91,13 @@ class Performer(ER):
                             t_logger.error(traceback.format_exc())
 
                 else:
-                    self.logger.info('No Job Group.')
+                    #self.logger.info('No Job Group.')
+                    pass
             elif code == self.cf["performer.mkfifo.stop.code"]:
-                self.logger.info('Received stop code from the FIFO file. - code=%s' % code)
+                self.logger.warning('Received stop code from the FIFO file. - code=%s' % code)
                 return PROCSUCCESS
             else:
-                self.logger.info('Received illegal code from the FIFO file. - code=%s' % code)
+                self.logger.warning('Received illegal code from the FIFO file. - code=%s' % code)
 
 # --
 def sigterm_handler(signum, frame):
--- a/pysilhouette/scheduler.py
+++ b/pysilhouette/scheduler.py
@@ -62,8 +62,8 @@ class Scheduler(ER):
         fp = open(self.cf["%s.mkfifo.path" % entity], 'w')
         try:
             fp.write(self.cf['%s.mkfifo.start.code' % entity])
-            self.logger.info('Start code was written. - file=%s : code=%s'
-                             % (self.cf["%s.mkfifo.path" % entity], self.cf['%s.mkfifo.start.code' % entity]))
+            #self.logger.info('Start code was written. - file=%s : code=%s' % (self.cf["%s.mkfifo.path" % entity], self.cf['%s.mkfifo.start.code' % entity]))
+            self.logger.info('Activity Information. - [fifo code=%s]' % (self.cf['%s.mkfifo.start.code' % entity]))
         finally:
             fp.close()
 
@@ -78,7 +78,7 @@ class Scheduler(ER):
 
 def sigterm_handler(signum, frame):
     logger = logging.getLogger('pysilhouette.scheduler.signal')
-    logger.info('Stop the schedulerd with signal- pid=%s, signal=%s' % (os.getpid(), signum))
+    logger.warning('Stop the schedulerd with signal- pid=%s, signal=%s' % (os.getpid(), signum))
 
 
 def main():
@@ -116,7 +116,7 @@ def main():
     finally:
         if opts.daemon is True and os.path.isfile(opts.pidfile):
             os.unlink(opts.pidfile)
-            logger.info('Process file has been deleted.. - pidfile=%s' % opts.pidfile)
+            logger.warning('Process file has been deleted.. - pidfile=%s' % opts.pidfile)
 
     return PROCERROR
 
--- a/pysilhouette/worker.py
+++ b/pysilhouette/worker.py
@@ -189,7 +189,7 @@ class SimpleWorker(Worker):
         self._cf = cf
         self._db = db
         self._jobgroup_id = jobgroup_id
-        self.logger = logging.getLogger('pysilhouette.performer.simpleworker')
+        self.logger = logging.getLogger('pysilhouette.worker.simpleworker')
 
     def _action(self, session, m_jobs):
         ret = True
@@ -361,7 +361,7 @@ class ThreadWorker(threading.Thread, SimpleWorker):
         self._db = db
 
     def run(self):
-        self.logger = logging.getLogger('pysilhouette.performer.threadworker')
+        self.logger = logging.getLogger('pysilhouette.worker.threadworker')
         try:
             self.process()
         except Exception, e: