Fix problem with occasional stalls in `url-retrieve-synchronously'

* lisp/url/url.el (url-retrieve-synchronously): Use
`accept-process-output' on a null process.  That is the safer, more
conventional way of achieving a non-blocking `sleep-for' (bug#49897).

Also rewrite for greater readability.
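
To illustrate the idiom the change adopts: calling `accept-process-output' with a nil PROCESS argument waits for output from any process for at most the given number of seconds, and it returns as soon as that output has been handled, so the wait ends early when network data arrives instead of blocking for the full interval the way `sleep-for' does.  A minimal sketch of the resulting polling loop (the function name `my-url-fetch-quietly' and the `done' flag are illustrative, not part of the patch):

  ;; Sketch only: poll until url-retrieve's callback fires, sleeping in
  ;; 50 ms slices that end early whenever process output arrives.
  (require 'url)
  (defun my-url-fetch-quietly (url)
    (let (done)
      (url-retrieve url (lambda (&rest _) (setq done (current-buffer))))
      (while (not done)
        ;; A nil PROCESS means "any process"; returns once output is
        ;; handled, or after 0.05 seconds if nothing arrives.
        (accept-process-output nil 0.05))
      done))

The old loop below instead called (accept-process-output proc 1), tying each wait to one specific process and a full one-second interval.
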
Author:     dick r. chiang
Date:       2021-08-06 13:24:53 +02:00
Committer:  Lars Ingebrigtsen
Commit:     93e1248c20 (parent b17fd982a3)

@@ -235,85 +235,55 @@ If INHIBIT-COOKIES is non-nil, refuse to store cookies. If
TIMEOUT is passed, it should be a number that says (in seconds)
how long to wait for a response before giving up."
  (url-do-setup)
  (let ((retrieval-done nil)
        (start-time (current-time))
        (url-asynchronous nil)
        (asynch-buffer nil)
        (timed-out nil))
    (setq asynch-buffer
          (url-retrieve url (lambda (&rest ignored)
                              (url-debug 'retrieval "Synchronous fetching done (%S)" (current-buffer))
                              (setq retrieval-done t
                                    asynch-buffer (current-buffer)))
                        nil silent inhibit-cookies))
    (if (null asynch-buffer)
        ;; We do not need to do anything, it was a mailto or something
        ;; similar that takes processing completely outside of the URL
        ;; package.
        nil
      (let ((proc (get-buffer-process asynch-buffer)))
        ;; If the access method was synchronous, `retrieval-done' should
        ;; hopefully already be set to t. If it is nil, and `proc' is also
        ;; nil, it implies that the async process is not running in
        ;; asynch-buffer. This happens e.g. for FTP files. In such a case
        ;; url-file.el should probably set something like a `url-process'
        ;; buffer-local variable so we can find the exact process that we
        ;; should be waiting for. In the mean time, we'll just wait for any
        ;; process output.
        (while (and (not retrieval-done)
                    (or (not timeout)
                        (not (setq timed-out
                                   (time-less-p timeout
                                                (time-since start-time))))))
          (url-debug 'retrieval
                     "Spinning in url-retrieve-synchronously: %S (%S)"
                     retrieval-done asynch-buffer)
          (if (buffer-local-value 'url-redirect-buffer asynch-buffer)
              (setq proc (get-buffer-process
                          (setq asynch-buffer
                                (buffer-local-value 'url-redirect-buffer
                                                    asynch-buffer))))
            (if (and proc (memq (process-status proc)
                                '(closed exit signal failed))
                     ;; Make sure another process hasn't been started.
                     (eq proc (or (get-buffer-process asynch-buffer) proc)))
                ;; FIXME: It's not clear whether url-retrieve's callback is
                ;; guaranteed to be called or not. It seems that url-http
                ;; decides sometimes consciously not to call it, so it's not
                ;; clear that it's a bug, but even then we need to decide how
                ;; url-http can then warn us that the download has completed.
                ;; In the mean time, we use this here workaround.
                ;; XXX: The callback must always be called. Any
                ;; exception is a bug that should be fixed, not worked
                ;; around.
                (progn ;; Call delete-process so we run any sentinel now.
                  (delete-process proc)
                  (setq retrieval-done t)))
            ;; We used to use `sit-for' here, but in some cases it wouldn't
            ;; work because apparently pending keyboard input would always
            ;; interrupt it before it got a chance to handle process input.
            ;; `sleep-for' was tried but it lead to other forms of
            ;; hanging. --Stef
            (unless (or (with-local-quit
                          (accept-process-output proc 1))
                        (null proc))
              ;; accept-process-output returned nil, maybe because the process
              ;; exited (and may have been replaced with another). If we got
              ;; a quit, just stop.
              (when quit-flag
                (delete-process proc))
              (setq proc (and (not quit-flag)
                              (get-buffer-process asynch-buffer))))))
        ;; On timeouts, make sure we kill any pending processes.
        ;; There may be more than one if we had a redirect.
        (when timed-out
          (when (process-live-p proc)
            (delete-process proc))
          (when-let ((aproc (get-buffer-process asynch-buffer)))
            (when (process-live-p aproc)
              (delete-process aproc))))))
    asynch-buffer))
  (let* (url-asynchronous
         data-buffer
         (callback (lambda (&rest _args)
                     (setq data-buffer (current-buffer))
                     (url-debug 'retrieval
                                "Synchronous fetching done (%S)"
                                data-buffer)))
         (start-time (current-time))
         (proc-buffer (url-retrieve url callback nil silent
                                    inhibit-cookies)))
    (if (not proc-buffer)
        (url-debug 'retrieval "Synchronous fetching unnecessary %s" url)
      (unwind-protect
          (catch 'done
            (while (not data-buffer)
              (when (and timeout (time-less-p timeout
                                              (time-since start-time)))
                (url-debug 'retrieval "Timed out %s (after %ss)" url
                           (float-time (time-since start-time)))
                (throw 'done 'timeout))
              (url-debug 'retrieval
                         "Spinning in url-retrieve-synchronously: nil (%S)"
                         proc-buffer)
              (when-let ((redirect-buffer
                          (buffer-local-value 'url-redirect-buffer
                                              proc-buffer)))
                (unless (eq redirect-buffer proc-buffer)
                  (url-debug
                   'retrieval "Redirect in url-retrieve-synchronously: %S -> %S"
                   proc-buffer redirect-buffer)
                  (let (kill-buffer-query-functions)
                    (kill-buffer proc-buffer))
                  ;; Accommodate hack in commit 55d1d8b.
                  (setq proc-buffer redirect-buffer)))
              (when-let ((proc (get-buffer-process proc-buffer)))
                (when (memq (process-status proc)
                            '(closed exit signal failed))
                  ;; Process sentinel vagaries occasionally cause
                  ;; url-retrieve to fail calling callback.
                  (unless data-buffer
                    (url-debug 'retrieval "Dead process %s" url)
                    (throw 'done 'exception))))
              ;; Querying over consumer internet in the US takes 100
              ;; ms, so split the difference.
              (accept-process-output nil 0.05)))
        (unless (eq data-buffer proc-buffer)
          (let (kill-buffer-query-functions)
            (kill-buffer proc-buffer)))))
    data-buffer))
;; url-mm-callback called from url-mm, which requires mm-decode.
(declare-function mm-dissect-buffer "mm-decode"