--Edited with MikeyB's solution--
Thanks to Mikey for pointing out a simple solution. I think sometimes I put too much thought into a solution when a simple, low-overhead one solves the problem.
I added a small function that loops through the directories I want to monitor and sets a variable to True or False.
def file_check(working_pdf):
    if len(gb1(working_pdf, '*.pdf')) == 0:
        pdf_available = False
    if len(gb1(working_pdf, '*.pdf')) > 0:
        pdf_available = True
    return pdf_available
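The same check could also be written more compactly with pathlib; this is just a sketch and doesn't depend on whatever gb1 wraps:
from pathlib import Path

def file_check(working_pdf):
    # True as soon as one PDF is found; any() stops at the first match
    # instead of building the whole list.
    return any(Path(working_pdf).glob('*.pdf'))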
Then I call it in the PySimpleGUI event loop:
if files_available is True:
    for client in client_running_list:
        working_pdf, ext = folder_Func(client)
        pdf_available = file_check(working_pdf)
        if pdf_available is True:
            analyzer_queue.put(client)
            for x in range(10):
                t = Thread(target=analyzer)
                t.daemon = True
                t.start()
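One thing I may still change: the snippet above starts ten new daemon threads every time files are found. A sketch of starting a fixed pool of workers once, before the event loop, instead (analyzer and analyzer_queue as above):
# Start the worker pool once; the threads block on analyzer_queue.get()
# until clients are queued from the event loop.
for _ in range(10):
    t = Thread(target=analyzer, daemon=True)
    t.start()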
--Original post--
I have a program that looks at directories defined through a function; if there are files, it parses them and then moves the data into a database. If files are in the directory when the program starts, it runs as expected, but when new files are added the function does not run. It seems the infinite loop never goes back through the directory.
I have a UI that runs a "while True:" loop through PySimpleGUI, so I have to split the function off into a thread. I'm using a queue, and I'm trying to work out where I need the "while True:" loop so that it keeps looking for new files in the folder.
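In other words, I'm after the standard producer/consumer layout, roughly like this simplified stdlib-only sketch (process() is just a placeholder for the real parse/database work):
import queue
import threading

work_queue = queue.Queue()

def process(client):
    # Placeholder for the real parse/database work.
    print('processing', client)

def worker():
    # Consumer: block until a client is queued, handle it, repeat.
    while True:
        client = work_queue.get()
        process(client)
        work_queue.task_done()

# A small fixed pool of daemon workers.
for _ in range(4):
    threading.Thread(target=worker, daemon=True).start()

# Producer side, called from inside the GUI's "while True:" event loop:
# work_queue.put(client)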
Here is part of the code:
def analyzer():
    while True:
        client = analyzer_queue.get()
        working_pdf, archive_path_datetime = folder_Func(client)
        while True:
            if len(gb1(working_pdf, '*.pdf')) == 0:
                break
            else:
                print(f'Found files in ', client, ' folder. Starting Parse.')
                ##########################################################
                # Start Parse of PDF's
                # Calls pdf parse function.
                # Arguments are Client Number, and PDF to parse.
                # Returns DF of items to insert into SQL Database
                ##########################################################
                ch(working_pdf)
                for pdf in gb1(working_pdf, "*.pdf"):
                    items_found_df = pdf_parse(client, pdf)
                    ##########################################################
                    # Connect to SQL Server and insert items
                    # Calls database connection function.
                    ##########################################################
                    azureDBengine = sqlalchemyConn()
                    items_found_df.to_sql("MainData_Capture", azureDBengine, if_exists='append', method='multi', index=False)
                    ##########################################################
                    # Move file to Archive
                    ##########################################################
                    if not ospath.exists(archive_path_datetime):
                        osmakedirs(archive_path_datetime)
                        print("Created Archive Folder.")
                    file_move(working_pdf, archive_path_datetime, pdf)
                print('All Files Processed.')
        analyzer_queue.task_done()
while True:
    event_dashboard, values_dashboard = dashboard_form.Read(timeout=1000)
    if dashboard_form is None or event_dashboard == 'Exit':
        dashboard_form.Close()
        break
    for client in active_client_list:
        client_start_button_action(client, event_dashboard, dashboard_form)
        client_stop_button_action(client, event_dashboard, dashboard_form)
    if event_dashboard == 'Start Analyze':
        dashboard_form.FindElement(f'Start Analyze').Update(disabled=True)
        dashboard_form.FindElement(f'Stop Analyze').Update(disabled=False)
        print('Analyzer Started')
        for client in client_running_list:
            analyzer_queue.put(client)
        for x in range(10):
            t = Thread(target=analyzer)
            t.daemon = True
            t.start()
    if event_dashboard == 'Stop Analyze':
        dashboard_form.FindElement(f'Stop Analyze').Update(disabled=True)
        dashboard_form.FindElement(f'Start Analyze').Update(disabled=False)
        print('Analyzer Stopped')
        analyzer_queue.empty()  # note: Queue.empty() only returns True/False, it does not clear pending items