
I'm generally quite new to Ubuntu/Python/Bash/Gnome, so I still have the feeling I may be doing something wrong, but three days have gone by without success...

Here is what the script is supposed to do:
* [✓] download one random image from wallbase.cc
* [✓] save it to the same directory the script is run from
* [x] set it as the wallpaper

There are two attempts to set the wallpaper, using different commands, and neither of them works from inside the script. There is a print statement (second line from the bottom) that spits out the correct terminal command: if I copy and paste its output into a terminal it works fine, but when the script runs it, nothing happens.

#!/usr/bin/env python
import urllib2
import os
from gi.repository import Gio

# fetch the random-wallpaper listing page and pull out the first thumbnail link
response = urllib2.urlopen("http://wallbase.cc/random/12/eqeq/1366x768/0.000/100/32")
page_source = response.read()
thlink_pos = page_source.find("ico-X")
address_start = (page_source.find("href=\"", thlink_pos) + 6)
address_end = page_source.find("\"", address_start + 1)

# follow the thumbnail link to the wallpaper page
response = urllib2.urlopen(page_source[address_start:address_end])
page_source = response.read()

# locate the full-size image ("bigwall") URL on that page
bigwall_pos = page_source.find("bigwall")
address_start = (page_source.find("src=\"", bigwall_pos) + 5)
address_end = page_source.find("\"", address_start + 1)

address = page_source[address_start:address_end]

# file name = everything after the last "/" in the image URL
slash_pos = address.rfind("/") + 1
pic_name = address[slash_pos:]

# download the image into the current working directory
bashCommand = "wget " + page_source[address_start:address_end]
os.system(bashCommand)

print "Does my new image exists?", os.path.exists(os.getcwd() + "/" + pic_name)

#attempt 1
settings = Gio.Settings.new("org.gnome.desktop.background")
settings.set_string("picture-uri", "file://" + os.getcwd() + "/" + pic_name)
settings.apply()

#attempt 2
bashCommand = "gsettings set org.gnome.desktop.background picture-uri file://" + os.getcwd() + "/" + pic_name
print bashCommand
os.system(bashCommand)
settings.apply()
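
As a side note on attempt 2: os.system builds the command as one shell string, so anything unusual in the path goes through unquoted. Below is a minimal sketch of the same call done with subprocess, which passes the arguments as a list (pic_name here is a placeholder for whatever the scraper produced); it only illustrates the shell-based route and is not necessarily the fix:

import os
import subprocess

pic_name = "example.jpg"  # placeholder for the downloaded file name
uri = "file://" + os.path.join(os.getcwd(), pic_name)

# same gsettings call as attempt 2, but with arguments passed as a list
subprocess.call(["gsettings", "set", "org.gnome.desktop.background",
                 "picture-uri", uri])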

3 Answers


The setting is changed correctly, but it hasn't been applied yet. Try calling:

settings.apply()

after setting the "picture-uri" string.
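
Put together, the change suggested here would look roughly like this minimal sketch (the image path is a placeholder; the original script builds it from os.getcwd() and pic_name):

import os
from gi.repository import Gio

# placeholder path for the downloaded wallpaper
uri = "file://" + os.path.abspath("wallpaper.jpg")

settings = Gio.Settings.new("org.gnome.desktop.background")
settings.set_string("picture-uri", uri)
settings.apply()  # apply after setting the string, as suggested above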

answered 2012-08-30T14:01:32.043

It works for me (Ubuntu 12.04).

I fixed up your script (this is unrelated to your error):

#!/usr/bin/python
"""Set desktop background using random images from http://wallbase.cc

It uses `gi.repository.Gio.Settings` to set the background.
"""
import functools
import itertools
import logging
import os
import posixpath
import random
import re
import sys
import time
import urllib
import urllib2
import urlparse
from collections import namedtuple

from bs4 import BeautifulSoup  # $ sudo apt-get install python-bs4
from gi.repository.Gio import Settings  # pylint: disable=F0401,E0611

DEFAULT_IMAGE_DIR = os.path.expanduser('~/Pictures/backgrounds')
HTMLPAGE_SIZE_MAX = 1 << 20  # bytes
TIMEOUT_MIN = 300  # seconds
TIMEOUT_DELTA = 30  # jitter

# "Anime/Manga", "Wallpapers/General", "High Resolution Images"
CATEGORY_W, CATEGORY_WG, CATEGORY_HR = range(1, 4)
PURITY_SFW, PURITY_SKETCHY, PURITY_NSFW, PURITY_DEFAULT = 4, 2, 1, 0
DAY_IN_SECONDS = 86400


UrlRetreiveResult = namedtuple('UrlRetreiveResult', "path headers")


def set_background(image_path, check_exist=True):
    """Change desktop background to image pointed by `image_path`.

    """
    if check_exist:  # make sure we can read it (at this time)
        with open(image_path, 'rb') as f:
            f.read(1)

    # prepare uri
    path = os.path.abspath(image_path)
    if isinstance(path, unicode):  # quote() doesn't like unicode
        path = path.encode('utf-8')
    uri = 'file://' + urllib.quote(path)

    # change background
    bg_setting = Settings.new('org.gnome.desktop.background')
    bg_setting.set_string('picture-uri', uri)
    bg_setting.apply()


def url2filename(url):
    """Return basename corresponding to url.

    >>> url2filename('http://example.com/path/to/file?opt=1')
    'file'
    """
    urlpath = urlparse.urlsplit(url).path  # pylint: disable=E1103
    basename = posixpath.basename(urllib.unquote(urlpath))
    if os.path.basename(basename) != basename:
        raise ValueError  # refuse 'dir%5Cbasename.ext' on Windows
    return basename


def download(url, dirpath, extensions=True, filename=None):
    """Download url to dirpath.

    Use basename of the url path as a filename.
    Create destination directory if necessary.

    Use `extensions` to require the file to have an extension (True), or to
    have one of the extensions in a given sequence.

    Return (path, headers) on success.
    Don't retrieve url if path exists (headers are None in this case).
    """
    if not os.path.isdir(dirpath):
        os.makedirs(dirpath)
        logging.info('created directory %s', dirpath)

    # get filename from the url
    filename = url2filename(url) if filename is None else filename
    if os.path.basename(filename) != filename:
        logging.critical('filename must not have path separator in it "%s"',
                         filename)
        return

    if extensions:
        # require the file to have an extension
        root, ext = os.path.splitext(filename)
        if root and len(ext) > 1:
            # require the extension to be in the list
            try:
                it = iter(extensions)
            except TypeError:
                pass
            else:
                if ext not in it:
                    logging.warn(("file extension is not in the list"
                                  " url=%s"
                                  " extensions=%s"),
                                 url, extensions)
                    return
        else:
            logging.warn("file has no extension url=%s", url)
            return

    # download file
    path = os.path.join(dirpath, filename)
    logging.info("%s\n%s", url, path)
    if os.path.exists(path):  # don't retrieve if path exists
        logging.info('path exists')
        return UrlRetreiveResult(path, None)
    try:
        return UrlRetreiveResult(*urllib.urlretrieve(url, path,
                                                     _print_download_status))
    except IOError:
        logging.warn('failed to download {url} -> {path}'.format(
            url=url, path=path))


def _print_download_status(block_count, block_size, total_size):
    logging.debug('%10s bytes of %s', block_count * block_size, total_size)


def min_time_between_calls(min_delay):
    """Enforce minimum time delay between calls."""
    def decorator(func):
        lastcall = [None]  # emulate nonlocal keyword

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            if lastcall[0] is not None:
                delay = time.time() - lastcall[0]
                if delay < min_delay:
                    _sleep(min_delay - delay)
            lastcall[0] = time.time()
            return func(*args, **kwargs)
        return wrapper
    return decorator


@min_time_between_calls(5)
def _makesoup(url):
    try:
        logging.info(vars(url) if isinstance(url, urllib2.Request) else url)
        page = urllib2.urlopen(url)
        soup = BeautifulSoup(page.read(HTMLPAGE_SIZE_MAX))
        return soup
    except (IOError, OSError) as e:
        logging.warn('failed to return soup for %s, error: %s',
                     getattr(url, 'get_full_url', lambda: url)(), e)


class WallbaseImages:
    """Given parameters it provides image urls to download."""

    def __init__(self,
                 categories=None,  # default; sequence of CATEGORY_*
                 resolution_exactly=True,  # False means 'at least'
                 resolution=None,  # all; (width, height)
                 aspect_ratios=None,  # all; sequence eg, [(5,4),(16,9)]
                 purity=PURITY_DEFAULT,  # combine with |
                 thumbs_per_page=None,  # default; an integer
                 ):
        """See usage below."""
        self.categories = categories
        self.resolution_exactly = resolution_exactly
        self.resolution = resolution
        self.aspect_ratios = aspect_ratios
        self.purity = purity
        self.thumbs_per_page = thumbs_per_page

    def _as_request(self):
        """Create a urllib2.Request() using given parameters."""
        # make url
        if self.categories is not None:
            categories = "".join(str(n) for n in (2, 1, 3)
                                 if n in self.categories)
        else:  # default
            categories = "0"

        if self.resolution_exactly:
            at_least_or_exactly_resolution = "eqeq"
        else:
            at_least_or_exactly_resolution = "gteq"

        if self.resolution is not None:
            resolution = "{width:d}x{height:d}".format(
                width=self.resolution[0], height=self.resolution[1])
        else:
            resolution = "0x0"

        if self.aspect_ratios is not None:
            aspect_ratios = "+".join("%.2f" % (w / float(h),)
                                     for w, h in self.aspect_ratios)
        else:  # default
            aspect_ratios = "0"

        purity = "{0:03b}".format(self.purity)
        thumbs = 20 if self.thumbs_per_page is None else self.thumbs_per_page
        url = ("http://wallbase.cc/random/"
               "{categories}/"
               "{at_least_or_exactly_resolution}/{resolution}/"
               "{aspect_ratios}/"
               "{purity}/{thumbs:d}").format(**locals())
        logging.info(url)
        # make post data
        data = urllib.urlencode(dict(query='', board=categories, nsfw=purity,
                                     res=resolution,
                                     res_opt=at_least_or_exactly_resolution,
                                     aspect=aspect_ratios,
                                     thpp=thumbs))
        req = urllib2.Request(url, data)
        return req

    def __iter__(self):
        """Yield background image urls."""
        # find links to bigwall pages
        # css-like: #thumbs div[class="thumb"] \
        #      a[class~="thlink" and href^="http://"]
        soup = _makesoup(self._as_request())
        if not soup:
            logging.warn("can't retrieve the main page")
            return
        thumbs_soup = soup.find(id="thumbs")
        for thumb in thumbs_soup.find_all('div', {'class': "thumb"}):
            bigwall_a = thumb.find('a', {'class': "thlink",
                                         'href': re.compile(r"^http://")})
            if bigwall_a is None:
                logging.warn("can't find thlink link")
                continue  # try the next thumb

            # find image url on the bigwall page
            # css-like: #bigwall > img[alt and src^="http://"]
            bigwall_soup = _makesoup(bigwall_a['href'])
            if bigwall_soup is not None:
                bigwall = bigwall_soup.find(id='bigwall')
                if bigwall is not None:
                    img = bigwall.find('img',
                                       src=re.compile(r"(?i)^http://.*\.jpg$"),
                                       alt=True)
                    if img is not None:
                        url = img['src']
                        filename = url2filename(url)
                        if filename.lower().endswith('.jpg'):
                            yield url, filename  # successfully found image url
                        else:
                            logging.warn('suspicious url "%s"', url)
                        continue
            logging.warn("can't parse bigwall page")


def main():
    level = logging.INFO
    if '-d' in sys.argv:
        sys.argv.remove('-d')
        level = logging.DEBUG
    # configure logging
    logging.basicConfig(format='%(levelname)s: %(asctime)s %(message)s',
                        level=level, datefmt='%Y-%m-%d %H:%M:%S %Z')

    if len(sys.argv) > 1:
        backgrounds_dir = sys.argv[1]
    else:
        backgrounds_dir = DEFAULT_IMAGE_DIR

    # infinite loop: Press Ctrl+C to interrupt it
    #NOTE: here's some arbitrary logic: modify for your needs e.g., break
    # after the first image found
    timeout = TIMEOUT_MIN  # seconds
    for i in itertools.cycle(xrange(timeout, DAY_IN_SECONDS)):
        found = False
        try:
            for url, filename in WallbaseImages(
                    categories=[CATEGORY_WG, CATEGORY_HR, CATEGORY_W],
                    purity=PURITY_SFW,
                    thumbs_per_page=60):
                res = download(url, backgrounds_dir, extensions=('.jpg',),
                               filename=filename)
                if res and res.path:
                    found = True
                    set_background(res.path)
                # don't hammer the site
                timeout = max(TIMEOUT_MIN, i % DAY_IN_SECONDS)
                _sleep(random.randint(timeout, timeout + TIMEOUT_DELTA))
        except Exception:  # pylint: disable=W0703
            logging.exception('unexpected error')
            _sleep(timeout)
        else:
            if not found:
                logging.error('failed to retrieve any images')
                _sleep(timeout)
        timeout = (timeout * 2) % DAY_IN_SECONDS


def _sleep(timeout):
    """Add logging to time.sleep() call."""
    logging.debug('sleep for %s seconds', timeout)
    time.sleep(timeout)


main()
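
Assuming it is saved as, say, wallbase_bg.py (the file name is hypothetical), the script takes an optional target directory and a -d flag for debug logging, falls back to ~/Pictures/backgrounds otherwise, and loops until interrupted with Ctrl+C:

python wallbase_bg.py ~/Pictures/backgrounds -d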
answered 2012-08-30T23:21:01.813

I tried to implement a Python script that used the PIL library to write text onto an image, and then used the Gio class to update Gnome's background "picture-uri" to point at that image. The script ping-ponged between two images, always modifying the one not currently in use and then updating the setting to try to "switch" to it. I did this to avoid flicker, since modifying the current background makes it drop out momentarily. While calling the script directly from a shell I almost never saw a problem, but from a cronjob it simply would not update on the pong. I used both sync and apply, and waited several minutes before trying to switch images. No luck. I tried cron as the user (su -c "cmd" user), and that didn't work either.

I finally gave up on the ping-pong approach when I noticed that Gnome detects changes to the background file and refreshes it. So I dropped the ping-pong scheme, switched to writing to a temporary file, and used the shutil library to copy it over the current background. Works like a charm.
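
A minimal sketch of that final approach, assuming the background setting already points at a known file (the paths and the drawn text are placeholders, and PIL is used only because the answer mentions it):

import os
import shutil
import tempfile

from PIL import Image, ImageDraw

# placeholder: the file the background "picture-uri" already points at
CURRENT_BG = os.path.expanduser("~/Pictures/current_background.jpg")

# render the updated image into a temporary file first
tmp_fd, tmp_path = tempfile.mkstemp(suffix=".jpg")
os.close(tmp_fd)
img = Image.open(CURRENT_BG)
ImageDraw.Draw(img).text((10, 10), "updated text")
img.save(tmp_path)

# overwrite the watched file in place; Gnome notices the change and refreshes
shutil.copy(tmp_path, CURRENT_BG)
os.remove(tmp_path)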

answered 2012-09-03T22:41:14.527