Skip to content

Commit

Permalink
Update alfred-workflow.
Browse files Browse the repository at this point in the history
  • Loading branch information
Dozer committed Apr 2, 2017
1 parent f66aabd commit a2bd25c
Show file tree
Hide file tree
Showing 6 changed files with 143 additions and 24 deletions.
2 changes: 1 addition & 1 deletion hasher.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
from model.sha import SHA
from workflow import Workflow, ICON_INFO

__version__ = "1.2.2"
__version__ = "1.2.3"


class Hasher:
Expand Down
2 changes: 2 additions & 0 deletions info.plist
Original file line number Diff line number Diff line change
Expand Up @@ -155,6 +155,8 @@
<real>10</real>
</dict>
</dict>
<key>version</key>
<string>1.2.3</string>
<key>webaddress</key>
<string></string>
</dict>
Expand Down
38 changes: 18 additions & 20 deletions workflow/background.py
Original file line number Diff line number Diff line change
Expand Up @@ -108,33 +108,31 @@ def _background(stdin='/dev/null', stdout='/dev/null',
:type stderr: filepath
"""
def _fork_and_exit_parent(errmsg):
    """Fork this process and terminate the parent half.

    After the call, only the child process is still running.

    :param errmsg: message logged if the fork fails
    :type errmsg: ``unicode``
    """
    try:
        pid = os.fork()
        if pid > 0:
            # We are the parent: exit immediately so the child
            # detaches. ``os._exit`` avoids running atexit handlers
            # twice.
            os._exit(0)
    except OSError as err:
        wf().logger.critical('%s: (%d) %s', errmsg, err.errno,
                             err.strerror)
        # Bare ``raise`` re-raises with the original traceback;
        # ``raise err`` would restart the traceback from this frame.
        raise

# Do first fork.
try:
pid = os.fork()
if pid > 0:
sys.exit(0) # Exit first parent.
except OSError as e:
wf().logger.critical("fork #1 failed: ({0:d}) {1}".format(
e.errno, e.strerror))
sys.exit(1)
_fork_and_exit_parent('fork #1 failed')

# Decouple from parent environment.
os.chdir(wf().workflowdir)
os.umask(0)
os.setsid()

# Do second fork.
try:
pid = os.fork()
if pid > 0:
sys.exit(0) # Exit second parent.
except OSError as e:
wf().logger.critical("fork #2 failed: ({0:d}) {1}".format(
e.errno, e.strerror))
sys.exit(1)
_fork_and_exit_parent('fork #2 failed')

# Now I am a daemon!
# Redirect standard file descriptors.
si = file(stdin, 'r', 0)
so = file(stdout, 'a+', 0)
se = file(stderr, 'a+', 0)
si = open(stdin, 'r', 0)
so = open(stdout, 'a+', 0)
se = open(stderr, 'a+', 0)
if hasattr(sys.stdin, 'fileno'):
os.dup2(si.fileno(), sys.stdin.fileno())
if hasattr(sys.stdout, 'fileno'):
Expand Down
2 changes: 1 addition & 1 deletion workflow/version
Original file line number Diff line number Diff line change
@@ -1 +1 @@
1.24
1.25
36 changes: 34 additions & 2 deletions workflow/workflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@

from __future__ import print_function, unicode_literals

import atexit
import binascii
from contextlib import contextmanager
import cPickle
Expand Down Expand Up @@ -804,6 +805,7 @@ def __init__(self, protected_path, timeout=0, delay=0.05):
self.timeout = timeout
self.delay = delay
self._locked = False
atexit.register(self.release)

@property
def locked(self):
Expand All @@ -817,11 +819,14 @@ def acquire(self, blocking=True):
``False``.
Otherwise, check every `self.delay` seconds until it acquires
lock or exceeds `self.timeout` and raises an exception.
lock or exceeds `self.timeout` and raises an `~AcquisitionError`.
"""
start = time.time()
while True:

self._validate_lockfile()

try:
fd = os.open(self.lockfile, os.O_CREAT | os.O_EXCL | os.O_RDWR)
with os.fdopen(fd, 'w') as fd:
Expand All @@ -830,6 +835,7 @@ def acquire(self, blocking=True):
except OSError as err:
if err.errno != errno.EEXIST: # pragma: no cover
raise

if self.timeout and (time.time() - start) >= self.timeout:
raise AcquisitionError('Lock acquisition timed out.')
if not blocking:
Expand All @@ -839,10 +845,36 @@ def acquire(self, blocking=True):
self._locked = True
return True

def _validate_lockfile(self):
    """Remove the lockfile if it is stale or malformed.

    A lockfile is stale when it contains something that is not an
    integer PID, or the PID of a process that no longer exists.
    A missing or unreadable lockfile is simply ignored.
    """
    try:
        with open(self.lockfile) as fp:
            contents = fp.read()
    except Exception:
        # Nothing to validate — most likely the file doesn't exist.
        return

    try:
        pid = int(contents)
    except ValueError:
        # Garbage in the lockfile; delete it.
        return self.release()

    from background import _process_exists
    if not _process_exists(pid):
        # Owning process is gone, so the lock is stale.
        self.release()

def release(self):
    """Release the lock by deleting `self.lockfile`.

    Safe to call when the lockfile is already gone (this method is
    registered with ``atexit`` and may run after an explicit release),
    so a missing file is silently ignored.
    """
    self._locked = False
    try:
        os.unlink(self.lockfile)
    except (OSError, IOError) as err:  # pragma: no cover
        # ENOENT (named constant, not magic number 2) means the file
        # was already removed — not an error here.
        if err.errno != errno.ENOENT:
            # Bare ``raise`` keeps the original traceback.
            raise

def __enter__(self):
"""Acquire lock."""
Expand Down
87 changes: 87 additions & 0 deletions workflow/workflow3.py
Original file line number Diff line number Diff line change
Expand Up @@ -342,6 +342,7 @@ def __init__(self, **kwargs):
Workflow.__init__(self, **kwargs)
self.variables = {}
self._rerun = 0
self._session_id = None

@property
def _default_cachedir(self):
Expand Down Expand Up @@ -373,6 +374,28 @@ def rerun(self, seconds):
"""
self._rerun = seconds

@property
def session_id(self):
    """A unique session ID every time the user uses the workflow.

    .. versionadded:: 1.25

    The session ID persists while the user is using this workflow.
    It expires when the user runs a different workflow or closes
    Alfred.
    """
    # Already resolved for this run? Reuse it.
    if self._session_id:
        return self._session_id

    # Alfred passes the ID back to us via an environment variable.
    sid = os.getenv('_WF_SESSION_ID')
    if not sid:
        # First call of this session: mint a new ID and export it
        # as a workflow variable so subsequent runs inherit it.
        from uuid import uuid4
        sid = uuid4().hex
        self.setvar('_WF_SESSION_ID', sid)

    self._session_id = sid
    return self._session_id

def setvar(self, name, value):
"""Set a "global" workflow variable.
Expand Down Expand Up @@ -421,6 +444,70 @@ def add_item(self, title, subtitle='', arg=None, autocomplete=None,
self._items.append(item)
return item

def _mk_session_name(self, name):
"""New cache name/key based on session ID."""
return '_wfsess-{0}-{1}'.format(self.session_id, name)

def cache_data(self, name, data, session=False):
    """Save ``data`` to the cache under ``name``.

    .. versionadded:: 1.25

    Args:
        name (str): Cache key
        data (object): Data to cache
        session (bool, optional): Whether to scope the cache
            to the current session.

    If ``session`` is ``True``, ``name`` is prefixed with
    :attr:`session_id`; otherwise this delegates unchanged to
    :meth:`~workflow.workflow.Workflow.cache_data`.
    """
    key = self._mk_session_name(name) if session else name
    return super(Workflow3, self).cache_data(key, data)

def cached_data(self, name, data_func=None, max_age=60, session=False):
    """Return cached data, refreshing it with ``data_func`` if stale.

    .. versionadded:: 1.25

    Args:
        name (str): Cache key
        data_func (callable): Callable that returns fresh data. It
            is called if the cache has expired or doesn't exist.
        max_age (int): Maximum allowable age of cache in seconds.
        session (bool, optional): Whether to scope the cache
            to the current session.

    If ``session`` is ``True``, ``name`` is prefixed with
    :attr:`session_id`; otherwise this delegates unchanged to
    :meth:`~workflow.workflow.Workflow.cached_data`.
    """
    key = self._mk_session_name(name) if session else name
    return super(Workflow3, self).cached_data(key, data_func, max_age)

def clear_session_cache(self):
    """Remove *all* session data from the cache.

    .. versionadded:: 1.25

    Deletes every cache file whose name carries the session prefix
    added by :meth:`_mk_session_name`.
    """
    self.clear_cache(lambda filename: filename.startswith('_wfsess-'))

@property
def obj(self):
"""Feedback formatted for JSON serialization.
Expand Down

0 comments on commit a2bd25c

Please sign in to comment.