
Generally speaking, I'm new to Ubuntu/Python/Bash/Gnome, so I still suspect I may be doing something wrong, but it has been 3 days now without success...

Here is what the script is supposed to do:
* [✓] Download 1 random image from wallbase.cc
* [✓] Save it to the same directory the script is run from
* [x] Set it as the wallpaper

There are two attempts at setting the wallpaper, using two different commands, and neither works from within the script. There is a print statement (2nd line from the bottom) that spits out the correct terminal command: I can copy and paste the printed output and it runs fine, it just doesn't work when executed from the script.

#!/usr/bin/env python
import urllib2
import os
from gi.repository import Gio

response = urllib2.urlopen("http://wallbase.cc/random/12/eqeq/1366x768/0.000/100/32")
page_source = response.read()
thlink_pos = page_source.find("ico-X")
address_start = (page_source.find("href=\"", thlink_pos) + 6)
address_end = page_source.find("\"", address_start + 1)

response = urllib2.urlopen(page_source[address_start:address_end])
page_source = response.read()

bigwall_pos = page_source.find("bigwall")
address_start = (page_source.find("src=\"", bigwall_pos) + 5)
address_end = page_source.find("\"", address_start + 1)

address = page_source[address_start:address_end]

slash_pos = address.rfind("/") + 1

pic_name = address[slash_pos:]

bashCommand = "wget " + page_source[address_start:address_end]
os.system(bashCommand)

print "Does my new image exist?", os.path.exists(os.getcwd() + "/" + pic_name)

#attempt 1
settings = Gio.Settings.new("org.gnome.desktop.background")
settings.set_string("picture-uri", "file://" + os.getcwd() + "/" + pic_name)
settings.apply()

#attempt 2
bashCommand = "gsettings set org.gnome.desktop.background picture-uri file://" + os.getcwd() + "/" + pic_name
print bashCommand
os.system(bashCommand)
settings.apply()

3 Answers


You have changed the setting successfully, but it never gets applied. Try calling:

settings.apply()

after setting the "picture-uri" string.
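
For reference, a minimal sketch of that suggestion (the filename below is hypothetical; the rest follows the question's code):

import os
from gi.repository import Gio

pic_path = os.path.join(os.getcwd(), "wallpaper.jpg")  # hypothetical filename

settings = Gio.Settings.new("org.gnome.desktop.background")
settings.set_string("picture-uri", "file://" + pic_path)
settings.apply()     # flushes pending changes if the Settings object is in delayed mode
Gio.Settings.sync()  # optionally block until the change is written to the backend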

Answered 2012-08-30T14:01:32.043

It works for me (Ubuntu 12.04).

I've modified your script (unrelated to your error):

#!/usr/bin/python
"""Set desktop background using random images from http://wallbase.cc

It uses `gi.repository.Gio.Settings` to set the background.
"""
import functools
import itertools
import logging
import os
import posixpath
import random
import re
import sys
import time
import urllib
import urllib2
import urlparse
from collections import namedtuple

from bs4 import BeautifulSoup  # $ sudo apt-get install python-bs4
from gi.repository.Gio import Settings  # pylint: disable=F0401,E0611

DEFAULT_IMAGE_DIR = os.path.expanduser('~/Pictures/backgrounds')
HTMLPAGE_SIZE_MAX = 1 << 20  # bytes
TIMEOUT_MIN = 300  # seconds
TIMEOUT_DELTA = 30  # jitter

# "Anime/Manga", "Wallpapers/General", "High Resolution Images"
CATEGORY_W, CATEGORY_WG, CATEGORY_HR = range(1, 4)
PURITY_SFW, PURITY_SKETCHY, PURITY_NSFW, PURITY_DEFAULT = 4, 2, 1, 0
DAY_IN_SECONDS = 86400


UrlRetreiveResult = namedtuple('UrlRetreiveResult', "path headers")


def set_background(image_path, check_exist=True):
    """Change desktop background to image pointed by `image_path`.

    """
    if check_exist:  # make sure we can read it (at this time)
        with open(image_path, 'rb') as f:
            f.read(1)

    # prepare uri
    path = os.path.abspath(image_path)
    if isinstance(path, unicode):  # quote() doesn't like unicode
        path = path.encode('utf-8')
    uri = 'file://' + urllib.quote(path)

    # change background
    bg_setting = Settings.new('org.gnome.desktop.background')
    bg_setting.set_string('picture-uri', uri)
    bg_setting.apply()


def url2filename(url):
    """Return basename corresponding to url.

    >>> url2filename('http://example.com/path/to/file?opt=1')
    'file'
    """
    urlpath = urlparse.urlsplit(url).path  # pylint: disable=E1103
    basename = posixpath.basename(urllib.unquote(urlpath))
    if os.path.basename(basename) != basename:
        raise ValueError  # refuse 'dir%5Cbasename.ext' on Windows
    return basename


def download(url, dirpath, extensions=True, filename=None):
    """Download url to dirpath.

    Use basename of the url path as a filename.
    Create destination directory if necessary.

    Use `extensions` to require the file to have an extension or any
    of in a given sequence of extensions.

    Return (path, headers) on success.
    Don't retrieve url if path exists (headers are None in this case).
    """
    if not os.path.isdir(dirpath):
        os.makedirs(dirpath)
        logging.info('created directory %s', dirpath)

    # get filename from the url
    filename = url2filename(url) if filename is None else filename
    if os.path.basename(filename) != filename:
        logging.critical('filename must not have path separator in it "%s"',
                         filename)
        return

    if extensions:
        # require the file to have an extension
        root, ext = os.path.splitext(filename)
        if root and len(ext) > 1:
            # require the extension to be in the list
            try:
                it = iter(extensions)
            except TypeError:
                pass
            else:
                if ext not in it:
                    logging.warn(("file extension is not in the list"
                                  " url=%s"
                                  " extensions=%s"),
                                 url, extensions)
                    return
        else:
            logging.warn("file has no extension url=%s", url)
            return

    # download file
    path = os.path.join(dirpath, filename)
    logging.info("%s\n%s", url, path)
    if os.path.exists(path):  # don't retrieve if path exists
        logging.info('path exists')
        return UrlRetreiveResult(path, None)
    try:
        return UrlRetreiveResult(*urllib.urlretrieve(url, path,
                                                     _print_download_status))
    except IOError:
        logging.warn('failed to download {url} -> {path}'.format(
            url=url, path=path))


def _print_download_status(block_count, block_size, total_size):
    logging.debug('%10s bytes of %s', block_count * block_size, total_size)


def min_time_between_calls(min_delay):
    """Enforce minimum time delay between calls."""
    def decorator(func):
        lastcall = [None]  # emulate nonlocal keyword

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            if lastcall[0] is not None:
                delay = time.time() - lastcall[0]
                if delay < min_delay:
                    _sleep(min_delay - delay)
            lastcall[0] = time.time()
            return func(*args, **kwargs)
        return wrapper
    return decorator


@min_time_between_calls(5)
def _makesoup(url):
    try:
        logging.info(vars(url) if isinstance(url, urllib2.Request) else url)
        page = urllib2.urlopen(url)
        soup = BeautifulSoup(page.read(HTMLPAGE_SIZE_MAX))
        return soup
    except (IOError, OSError) as e:
        logging.warn('failed to return soup for %s, error: %s',
                     getattr(url, 'get_full_url', lambda: url)(), e)


class WallbaseImages:
    """Given parameters it provides image urls to download."""

    def __init__(self,
                 categories=None,  # default; sequence of CATEGORY_*
                 resolution_exactly=True,  # False means 'at least'
                 resolution=None,  # all; (width, height)
                 aspect_ratios=None,  # all; sequence eg, [(5,4),(16,9)]
                 purity=PURITY_DEFAULT,  # combine with |
                 thumbs_per_page=None,  # default; an integer
                 ):
        """See usage below."""
        self.categories = categories
        self.resolution_exactly = resolution_exactly
        self.resolution = resolution
        self.aspect_ratios = aspect_ratios
        self.purity = purity
        self.thumbs_per_page = thumbs_per_page

    def _as_request(self):
        """Create a urllib2.Request() using given parameters."""
        # make url
        if self.categories is not None:
            categories = "".join(str(n) for n in (2, 1, 3)
                                 if n in self.categories)
        else:  # default
            categories = "0"

        if self.resolution_exactly:
            at_least_or_exactly_resolution = "eqeq"
        else:
            at_least_or_exactly_resolution = "gteq"

        if self.resolution is not None:
            resolution = "{width:d}x{height:d}".format(
                width=self.resolution[0], height=self.resolution[1])
        else:
            resolution = "0x0"

        if self.aspect_ratios is not None:
            aspect_ratios = "+".join("%.2f" % (w / float(h),)
                                     for w, h in self.aspect_ratios)
        else:  # default
            aspect_ratios = "0"

        purity = "{0:03b}".format(self.purity)
        thumbs = 20 if self.thumbs_per_page is None else self.thumbs_per_page
        url = ("http://wallbase.cc/random/"
               "{categories}/"
               "{at_least_or_exactly_resolution}/{resolution}/"
               "{aspect_ratios}/"
               "{purity}/{thumbs:d}").format(**locals())
        logging.info(url)
        # make post data
        data = urllib.urlencode(dict(query='', board=categories, nsfw=purity,
                                     res=resolution,
                                     res_opt=at_least_or_exactly_resolution,
                                     aspect=aspect_ratios,
                                     thpp=thumbs))
        req = urllib2.Request(url, data)
        return req

    def __iter__(self):
        """Yield background image urls."""
        # find links to bigwall pages
        # css-like: #thumbs div[class="thumb"] \
        #      a[class~="thlink" and href^="http://"]
        soup = _makesoup(self._as_request())
        if not soup:
            logging.warn("can't retrieve the main page")
            return
        thumbs_soup = soup.find(id="thumbs")
        for thumb in thumbs_soup.find_all('div', {'class': "thumb"}):
            bigwall_a = thumb.find('a', {'class': "thlink",
                                         'href': re.compile(r"^http://")})
            if bigwall_a is None:
                logging.warn("can't find thlink link")
                continue  # try the next thumb

            # find image url on the bigwall page
            # css-like: #bigwall > img[alt and src^="http://"]
            bigwall_soup = _makesoup(bigwall_a['href'])
            if bigwall_soup is not None:
                bigwall = bigwall_soup.find(id='bigwall')
                if bigwall is not None:
                    img = bigwall.find('img',
                                       src=re.compile(r"(?i)^http://.*\.jpg$"),
                                       alt=True)
                    if img is not None:
                        url = img['src']
                        filename = url2filename(url)
                        if filename.lower().endswith('.jpg'):
                            yield url, filename  # successfully found image url
                        else:
                            logging.warn('suspicious url "%s"', url)
                        continue
            logging.warn("can't parse bigwall page")


def main():
    level = logging.INFO
    if '-d' in sys.argv:
        sys.argv.remove('-d')
        level = logging.DEBUG
    # configure logging
    logging.basicConfig(format='%(levelname)s: %(asctime)s %(message)s',
                        level=level, datefmt='%Y-%m-%d %H:%M:%S %Z')

    if len(sys.argv) > 1:
        backgrounds_dir = sys.argv[1]
    else:
        backgrounds_dir = DEFAULT_IMAGE_DIR

    # infinite loop: Press Ctrl+C to interrupt it
    #NOTE: here's some arbitrary logic: modify for you needs e.g., break
    # after the first image found
    timeout = TIMEOUT_MIN  # seconds
    for i in itertools.cycle(xrange(timeout, DAY_IN_SECONDS)):
        found = False
        try:
            for url, filename in WallbaseImages(
                    categories=[CATEGORY_WG, CATEGORY_HR, CATEGORY_W],
                    purity=PURITY_SFW,
                    thumbs_per_page=60):
                res = download(url, backgrounds_dir, extensions=('.jpg',),
                               filename=filename)
                if res and res.path:
                    found = True
                    set_background(res.path)
                # don't hammer the site
                timeout = max(TIMEOUT_MIN, i % DAY_IN_SECONDS)
                _sleep(random.randint(timeout, timeout + TIMEOUT_DELTA))
        except Exception:  # pylint: disable=W0703
            logging.exception('unexpected error')
            _sleep(timeout)
        else:
            if not found:
                logging.error('failed to retrieve any images')
                _sleep(timeout)
        timeout = (timeout * 2) % DAY_IN_SECONDS


def _sleep(timeout):
    """Add logging to time.sleep() call."""
    logging.debug('sleep for %s seconds', timeout)
    time.sleep(timeout)


if __name__ == '__main__':
    main()
Answered 2012-08-30T23:21:01.813

I tried to implement a Python script that uses the PIL library to write text onto an image, and then uses the Gio class to update the Gnome background "picture-uri" to point at that image. The script would ping-pong between two images, always modifying the one not in use, and then attempt to "switch" by updating the setting. The point was to avoid any flicker, since modifying the current background directly makes it disappear temporarily. When I called the script directly from a shell I rarely saw any problem, but from a cronjob it simply would not update on the pong. I used both sync and apply, and waited several minutes before trying to switch images. No luck. I also tried running the cron job as the user (su -c "cmd" user), but that didn't work either.

I finally gave up on the ping-pong approach when I noticed that Gnome detects any change to the background file and updates itself. So I dropped the ping-pong method in favour of a temporary file, which I simply copy over the current background with the shutil library. Works like a charm.
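
A rough sketch of that final approach, assuming Pillow/PIL is available (the paths and the text are hypothetical):

import shutil
from PIL import Image, ImageDraw

source_img = "/home/me/Pictures/clean_wallpaper.png"  # pristine copy of the wallpaper (hypothetical)
current_bg = "/home/me/Pictures/background.png"       # the file picture-uri already points at
tmp_img = "/tmp/wallpaper_with_text.png"

# Render the text onto a copy of the clean wallpaper and save it to a temp file.
img = Image.open(source_img)
draw = ImageDraw.Draw(img)
draw.text((20, 20), "some status text", fill="white")
img.save(tmp_img)

# Overwrite the current background in place; Gnome notices the file change
# and redraws the wallpaper without the gsettings key ever changing.
shutil.copy(tmp_img, current_bg)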

Answered 2012-09-03T22:41:14.527