Expire URL items from the on-disk cache once in a while

* url.el (url-retrieve-number-of-calls): New variable.
(url-retrieve-internal): Use it to expire the cache once in a
while.

* url-cache.el (url-cache-prune-cache): New function.
This commit is contained in:
Lars Ingebrigtsen 2012-02-06 22:06:15 +01:00
parent c1f1086866
commit 1968bb1b5c
3 changed files with 39 additions and 0 deletions

View file

@@ -1,5 +1,11 @@
2012-02-06 Lars Ingebrigtsen <larsi@gnus.org>
* url-cache.el (url-cache-prune-cache): New function.
* url.el (url-retrieve-number-of-calls): New variable.
(url-retrieve-internal): Use it to expire the cache once in a
while.
* url-queue.el (url-queue-setup-runners): New function that uses
`run-with-idle-timer' for extra asynchronicity.
(url-queue-remove-jobs-from-host): New function.

View file

@@ -209,6 +209,32 @@ If `url-standalone-mode' is non-nil, cached items never expire.
(seconds-to-time (or expire-time url-cache-expire-time)))
(current-time))))))
(defun url-cache-prune-cache (&optional directory)
  "Remove all expired files from the cache in DIRECTORY.
DIRECTORY defaults to `url-cache-directory'.
`url-cache-expire-time' says how old a file has to be to be
considered \"expired\".
Return t if DIRECTORY itself was deleted (because every entry in
it had expired), and nil otherwise."
  ;; Default the directory here instead of inside `directory-files',
  ;; so the `delete-directory' call below never receives nil when
  ;; this function is called with no argument.
  (setq directory (or directory url-cache-directory))
  ;; The cache directory may not have been created yet; there is
  ;; nothing to prune in that case.
  (when (file-directory-p directory)
    (let ((current-time (current-time))
	  (total-files 0)
	  (deleted-files 0))
      (dolist (file (directory-files directory t))
	(unless (member (file-name-nondirectory file) '("." ".."))
	  (setq total-files (1+ total-files))
	  (cond
	   ;; Recurse into subdirectories; a t return means the
	   ;; subdirectory was emptied and removed, so count it as
	   ;; a deleted entry.
	   ((file-directory-p file)
	    (when (url-cache-prune-cache file)
	      (setq deleted-files (1+ deleted-files))))
	   ;; Delete regular files whose modification time (slot 5 of
	   ;; `file-attributes') is more than `url-cache-expire-time'
	   ;; seconds in the past.
	   ((time-less-p
	     (time-add
	      (nth 5 (file-attributes file))
	      (seconds-to-time url-cache-expire-time))
	     current-time)
	    (delete-file file)
	    (setq deleted-files (1+ deleted-files))))))
      ;; If every entry was removed (including the empty-directory
      ;; case, where both counters are zero), remove the directory
      ;; itself and report that to the caller.
      (if (< deleted-files total-files)
	  nil
	(delete-directory directory)
	t))))
(provide 'url-cache)
;;; url-cache.el ends here

View file

@@ -119,6 +119,9 @@ Sometimes while retrieving a URL, the URL library needs to use another buffer
than the one returned initially by `url-retrieve'. In this case, it sets this
variable in the original buffer as a forwarding pointer.")
(defvar url-retrieve-number-of-calls 0
  "How many retrieval requests have been made in this session.
Used to trigger `url-cache-prune-cache' once in a while (every
1000th call) from the retrieval machinery.")
(autoload 'url-cache-prune-cache "url-cache")
;;;###autoload
(defun url-retrieve (url callback &optional cbargs silent)
"Retrieve URL asynchronously and call CALLBACK with CBARGS when finished.
@@ -174,6 +177,10 @@ If SILENT, don't message progress reports and the like.
(unless (url-type url)
(error "Bad url: %s" (url-recreate-url url)))
(setf (url-silent url) silent)
;; Once in a while, remove old entries from the URL cache.
(when (zerop (% url-retrieve-number-of-calls 1000))
(url-cache-prune-cache))
(setq url-retrieve-number-of-calls (1+ url-retrieve-number-of-calls))
(let ((loader (url-scheme-get-property (url-type url) 'loader))
(url-using-proxy (if (url-host url)
(url-find-proxy-for-url url (url-host url))))