Python inotify to monitor for IN_CLOSE_WRITE and IN_MOVED_TO events
I am monitoring a directory for new files that are either moved into it or created there.
Once a new file is detected, I call another python script to process it.
#!/usr/bin/python
import os
import signal
import sys
import logging
import inotify.adapters
import subprocess

_DEFAULT_LOG_FORMAT = ''
_LOGGER = logging.getLogger(__name__)

def _configure_logging():
    _LOGGER.setLevel(logging.DEBUG)
    ch = logging.StreamHandler()
    formatter = logging.Formatter(_DEFAULT_LOG_FORMAT)
    ch.setFormatter(formatter)
    _LOGGER.addHandler(ch)

def exit_gracefully(signum, frame):
    # Restore the original SIGINT handler and exit on Ctrl-C.
    signal.signal(signal.SIGINT, original_sigint)
    sys.exit(1)

signal.signal(signal.SIGINT, exit_gracefully)

def main():
    i = inotify.adapters.Inotify()
    i.add_watch(b'/home/sort/tmp')
    try:
        for event in i.event_gen():
            if event is not None:
                if 'IN_MOVED_TO' in event[1] or 'IN_CLOSE_WRITE' in event[1]:
                    (header, type_names, watch_path, filename) = event
                    _LOGGER.info("%s "  #"WD=(%d) MASK=(%d) COOKIE=(%d) LEN=(%d) MASK->NAMES=%s "
                                 #"WATCH-PATH=[%s]"
                                 "FILENAME=%s" + "/" + "%s",
                                 type_names,  #header.wd, header.mask, header.cookie, header.len, type_names,
                                 watch_path.decode('utf-8'), filename.decode('utf-8'))
                    # Full path of the new file, passed to organize.py.
                    fnp = str(event[2] + "/" + event[3])
                    print(fnp)
                    proc = subprocess.Popen([orgpath, fnp], stderr=subprocess.STDOUT, bufsize=1)
                    #proc.communicate()
    finally:
        i.remove_watch(b'/home/sort/tmp')

if __name__ == '__main__':
    _configure_logging()
    orgdir = os.path.dirname(os.path.realpath(sys.argv[0]))
    orgpath = os.path.join(orgdir, "organize.py")
    original_sigint = signal.getsignal(signal.SIGINT)
    signal.signal(signal.SIGINT, exit_gracefully)
    print("Watching /home/sort/tmp for new files")
    main()
The end goal is to process only one file at a time, because I call an API to scrape metadata, and calling the API too many times in a short period could get my API key banned or temporarily blocked.
Right now, when I copy more than one file into the watched directory, the script calls organize.py for every file at the same time.
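For reference, the simultaneous runs come from subprocess.Popen returning as soon as the child is started, without waiting for organize.py to finish (the communicate() call is commented out in the script above). A minimal sketch of serializing the work, assuming orgpath and fnp are set exactly as in the script, is to block on each child before the event loop moves on:

                    # Sketch: start organize.py and wait for it to exit before
                    # handling the next inotify event, so only one file is
                    # processed at a time.
                    proc = subprocess.Popen([orgpath, fnp], stderr=subprocess.STDOUT)
                    proc.communicate()  # blocks until organize.py has finished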
Try putting a for loop around the code that runs the python file:

for files in directory:
    ...code that runs the python file
If it still runs too fast, you can add a timer to throttle the API calls:

import time

for files in directory:
    ...code that runs the python file
    time.sleep(5)
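As a rough sketch of how that could look inside the inotify loop from the question (assuming the same inotify.adapters version, where watch paths are passed and returned as bytes, and an arbitrary 5-second delay):

import os
import subprocess
import time

import inotify.adapters

def process_serially(watch_dir, orgpath, delay=5):
    # Sketch: handle one new file at a time and pause between runs
    # so the metadata API is not hit too quickly.
    i = inotify.adapters.Inotify()
    i.add_watch(watch_dir)
    try:
        for event in i.event_gen():
            if event is None:
                continue
            (header, type_names, watch_path, filename) = event
            if 'IN_MOVED_TO' in type_names or 'IN_CLOSE_WRITE' in type_names:
                fnp = os.path.join(watch_path.decode('utf-8'),
                                   filename.decode('utf-8'))
                # call() blocks until organize.py exits, so files are
                # processed strictly one after another.
                subprocess.call([orgpath, fnp], stderr=subprocess.STDOUT)
                time.sleep(delay)  # throttle successive API calls
    finally:
        i.remove_watch(watch_dir)

# Example usage (hypothetical paths matching the question):
# process_serially(b'/home/sort/tmp', '/path/to/organize.py')

Events that arrive while a file is being processed or during the sleep simply queue up in the kernel's inotify buffer and are handled one by one afterwards.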