
I'm having problems with this setup. In short: once a user presses submit on the form, the data is handed off to Redis and an RQ worker for processing.

The error from rqworker is:

23:56:44 RQ worker u'rq:worker:HAFun.12371' started, version 0.5.6
23:56:44 
23:56:44 *** Listening on default...
23:56:57 default: min_content.process_feed.process_checks(u'http://www.feedurl.com/url.xml', u'PM', u'alphanumeric', u'domain@domain.com') (9e736730-e97f-4ee5-b48d-448d5493dd6c)
23:56:57 ImportError: No module named min_content.process_feed
Traceback (most recent call last):
  File "/var/www/min_content/min_content/venv/local/lib/python2.7/site-packages/rq/worker.py", line 568, in perform_job
    rv = job.perform()
  File "/var/www/min_content/min_content/venv/local/lib/python2.7/site-packages/rq/job.py", line 495, in perform
    self._result = self.func(*self.args, **self.kwargs)
  File "/var/www/min_content/min_content/venv/local/lib/python2.7/site-packages/rq/job.py", line 206, in func
    return import_attribute(self.func_name)
  File "/var/www/min_content/min_content/venv/local/lib/python2.7/site-packages/rq/utils.py", line 150, in import_attribute
    module = importlib.import_module(module_name)
  File "/usr/lib/python2.7/importlib/__init__.py", line 37, in import_module
    __import__(name)
ImportError: No module named min_content.process_feed
Traceback (most recent call last):
  File "/var/www/min_content/min_content/venv/local/lib/python2.7/site-packages/rq/worker.py", line 568, in perform_job
    rv = job.perform()
  File "/var/www/min_content/min_content/venv/local/lib/python2.7/site-packages/rq/job.py", line 495, in perform
    self._result = self.func(*self.args, **self.kwargs)
  File "/var/www/min_content/min_content/venv/local/lib/python2.7/site-packages/rq/job.py", line 206, in func
    return import_attribute(self.func_name)
  File "/var/www/min_content/min_content/venv/local/lib/python2.7/site-packages/rq/utils.py", line 150, in import_attribute
    module = importlib.import_module(module_name)
  File "/usr/lib/python2.7/importlib/__init__.py", line 37, in import_module
    __import__(name)
ImportError: No module named min_content.process_feed
23:56:57 Moving job to u'failed' queue

I've tried starting rqworker in several ways:

rqworker --url redis://localhost:6379
rqworker 
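
Since the traceback is an ImportError inside the worker, I wondered whether it matters that rqworker isn't started from the package directory. A rough sketch of a custom worker script based on the RQ docs (the sys.path entry is just my guess at what should match the WSGI file):

#!/usr/bin/python
# start_worker.py -- rough sketch of a custom RQ worker (based on the RQ docs).
# Assumes /var/www/min_content is the directory containing the min_content
# package, the same path used in min_content.wsgi.
import sys
sys.path.insert(0, "/var/www/min_content")

from redis import StrictRedis
from rq import Connection, Queue, Worker

if __name__ == '__main__':
    with Connection(StrictRedis('localhost', 6379, 0)):
        # listen on the same 'default' queue that views.py enqueues to
        Worker([Queue('default')]).work()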

views.py

from min_content import app
from flask import render_template
from .forms import SubmissionForm
from flask import request
from .process_feed import process_checks #this is the function that does the checks
import redis  # used below as redis.StrictRedis
from rq import Queue



def process():
    feedUrl = request.form['feedUrl']
    source = request.form['pmsc']
    ourAssignedId = request.form['assignedId']
    email_address = request.form['email_address']

    conn = redis.StrictRedis('localhost', 6379, 0)
    q = Queue(connection=conn)

    result = q.enqueue(process_checks, feedUrl,source,ourAssignedId, email_address)
    return 'It\'s running and we\'ll send you an email when its done<br /><br /><a href="/">Do another one</a>'
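
I also wondered whether it matters that I pass the function object itself, since RQ also accepts the dotted path as a string and the worker has to import it either way. A minimal sketch of that variant (it still assumes the worker can import min_content, so it probably doesn't fix the underlying path problem):

# Sketch: enqueue by dotted path instead of passing the function object.
# The worker still has to be able to import min_content.process_feed.
result = q.enqueue('min_content.process_feed.process_checks',
                   feedUrl, source, ourAssignedId, email_address)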

process_feed has a process_checks function that works as expected.

I know the function itself works, because calling it directly with the line below, instead of going through RQ, works fine.

do_it = process_checks(feedUrl,source,ourAssignedId)

The strange thing is that all of this worked fine until I closed the SSH connection to my VPS.

Running ps -aux returns this, which shows that Redis is running:

root     11894  0.1  0.4  38096  2348 ?        Ssl  Oct25   0:01 /usr/local/bin/redis-server *:6379 

Restarting Redis doesn't help, and neither does restarting apache2:

sudo service redis_6379 start
sudo service redis_6379 stop
sudo service apache2 restart

I followed this guide exactly and, like I said, everything worked until I killed the SSH connection to my VPS.

In case it makes a difference, I'm running inside a virtualenv, which I activate in my WSGI file.

min_content.wsgi

#!/usr/bin/python
activate_this = '/var/www/min_content/min_content/venv/bin/activate_this.py'
execfile(activate_this, dict(__file__=activate_this))
import sys
import logging
logging.basicConfig(stream=sys.stderr)
sys.path.insert(0,"/var/www/min_content")

from min_content import app as application
application.secret_key = 'blah blah blah'

I've confirmed that the Redis server is running by adding this to the script:

r = redis.StrictRedis('localhost', 6379, 0)
r.set(name='teststring', value='this is a test')
test_string = r.get(name='teststring')
print test_string
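
A quicker variant of the same check (just a sketch, using the same connection settings) would be to ping the server:

# Sketch: ping() returns True when Redis answers, raises ConnectionError otherwise
conn = redis.StrictRedis('localhost', 6379, 0)
print conn.ping()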

Running redis-cli gives me the 127.0.0.1:6379> prompt.

process_feed.py

import requests
import xml.etree.ElementTree as ET
import csv

def process_checks(feedUrl,source,ourAssignedId):
    feed_url = feedUrl
    source = source
    ourAssignedId = ourAssignedId

    all_the_data = []   

    #grab xml from URL
    try:
        r = requests.get(feed_url)
    except Exception as e:
        print "Failed to grab from " + feed_url
        return "Failed to grab from " + feed_url


    root = ET.fromstring(r.text)

    for advertiser in root.iter('advertiser'):
        assignedId = advertiser.find('assignedId').text
        if assignedId==ourAssignedId:
            #only process for PMs using our assignedId
            for listings in advertiser.iter('listingContentIndexEntry'):

                listingUrl = listings.find('listingUrl').text
                print "Processing " + listingUrl

                #now grab from URL
                listing_request = requests.get(listingUrl)

                #parse XML from URL
                #listing_root = ET.xpath(listing_request.text)

                if not ET.fromstring(listing_request.text.encode('utf8')):
                    print "Failed to load XML for" + listingUrl
                    continue
                else:
                    listing_root = ET.fromstring(listing_request.text.encode('utf8'))


                #'Stayz Property ID','External Reference','User Account External Reference','Provider','Address Line1','Active','Headline','Listing URL'
                stayzPropertyId = '' #the property manager enters this into the spreadsheet

                if not listing_root.find('.//externalId').text:
                    print 'No external Id in ' + listingUrl
                    listingExternalId = 'None'

                else:
                    listingExternalId = listing_root.find('externalId').text
                    listingExternalId =  '"' + listingExternalId + '"'


                userAccountExternalReference = assignedId
                print userAccountExternalReference
                provider = source
                addressLine1 = listing_root.find('.//addressLine1').text
                active = listing_root.find('active').text

                if not listing_root.find('.//headline/texts/text/textValue').text:
                    print 'No headline in ' + listingExternalId
                    headline = 'None'
                else:
                    headline = listing_root.find('.//headline/texts/text/textValue').text
                    headline = headline.encode('utf-8')

                if not listing_root.find('.//description/texts/text/textValue').text:
                    print 'No description in ' + listingExternalId
                    description = 'None'
                else:
                    description = listing_root.find('.//description/texts/text/textValue').text


                #now check the min content
                #headline length
                headline_length = len(headline)
                headline_length_check = 'FAIL'
                if headline_length<20:
                    headline_length_check = 'FAIL'
                else:
                    headline_length_check = 'TRUE'

                #description length
                description_length_check = 'FAIL'
                description_length = len(description)
                if description_length<400:
                    description_length_check = 'FAIL'
                else:
                    description_length_check = 'TRUE'



                #number of images
                num_images = 0
                num_images_check = 'FAIL'
                for images in listing_root.iter('image'):
                    num_images = num_images+1
                    if num_images <6:
                        num_images_check = 'FAIL'
                    else:
                        num_images_check = 'TRUE'

                #atleast one rate
                num_rates = 0
                num_rates_check = 'FAIL'
                for rates in listing_root.iter('rate'):
                    num_rates = num_rates+1
                    if num_rates < 1:
                        num_rates_check = 'FAIL'
                    else:
                        num_rates_check = 'TRUE'

                #atleast one bedroom


                #atleast one bathroom

                #a longitude and latitude





                #now add to our list of lists
                data = {'stayzPropertyId':'','listingExternalId':listingExternalId,'userAccountExternalReference':userAccountExternalReference,'provider':provider,'addressLine1':addressLine1,'active':active,'headline':headline,'listingUrl':listingUrl,'Headline > 20 characters?':headline_length_check,'Description > 400 characters?':description_length_check,'Number of Images > 6?':num_images_check,'At least one rate?':num_rates_check}
                #data_dict = ['',listingExternalId,userAccountExternalReference,provider,addressLine1,active,headline,listingUrl]

                all_the_data.append(data)







    files_location = './files/' + source + '__' + ourAssignedId + '_export.csv'
    with open(files_location,'w') as csvFile:
    #with open('./files/' + source + '_export.csv','a') as csvFile:
        fieldnames = ['stayzPropertyId','listingExternalId','userAccountExternalReference','provider','addressLine1','active','headline','listingUrl','Headline > 20 characters?','Description > 400 characters?','Number of Images > 6?','At least one rate?']
        writer = csv.DictWriter(csvFile,fieldnames=fieldnames)
        writer.writeheader()
        for row in all_the_data:
            try:
                writer.writerow(row)
            except:
                print "Failed to write row " + str(row)
                continue


    #send email via Mailgun
    return requests.post(
        "https://api.mailgun.net/v3/sandboxablahblablbah1.mailgun.org/messages",
        auth=("api", "key-blahblahblah"),
        #files=("attachment", open(files_location)),
        data={"from": "Mailgun Sandbox <postmaster@.mailgun.org>",
              "to": "Me <me@me.com>",
              "subject": "Feed Processed for " + ourAssignedId,
              "text": "Done",
              "html":"<b>Process the file</b>"})
