;;; ein-query.el --- jQuery-like interface on top of url-retrieve

;; Copyright (C) 2012- Takafumi Arakaki

;; Author: Takafumi Arakaki <aka.tkf at gmail.com>

;; This file is NOT part of GNU Emacs.

;; ein-query.el is free software: you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation, either version 3 of the License, or
;; (at your option) any later version.

;; ein-query.el is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
;; GNU General Public License for more details.

;; You should have received a copy of the GNU General Public License
;; along with ein-query.el.  If not, see <http://www.gnu.org/licenses/>.

;;; Commentary:

;;

;;; Code:

(eval-when-compile (require 'cl))
(require 'request)
(require 'request-deferred)
(require 'url)

(require 'ein-core)
(require 'ein-log)

;;; Utils

(defun ein:safe-funcall-packed (packed &rest args)
  "Call packed callback PACKED with ARGS, logging instead of raising errors."
  (when packed
    (ein:log-ignore-errors (apply #'ein:funcall-packed packed args))))
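
;; A sketch of the "packed" callback convention (see `ein:funcall-packed'
;; in ein-core.el): PACKED is a list (FUNC ARG ...), and any ARGS supplied
;; at call time are appended after the packed ones.  Errors are logged
;; rather than signaled:
;;
;; (ein:safe-funcall-packed (list #'message "query %s finished: %S")
;;                          "contents" 'ok)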

;;; Variables

(defcustom ein:query-timeout
  (if (eq request-backend 'url-retrieve) 1000 nil)
  "Default query timeout for HTTP access, in milliseconds.

Setting this to `nil' means no timeout.
If you have the ``curl`` command line program, this is automatically
set to `nil', as ``curl`` is more reliable than `url-retrieve' and
therefore needs no workaround (see below).

If you start the same operation before the timeout expires, the old
operation will be canceled \(see also `ein:query-singleton-ajax').

.. note:: This value exists because it looks like `url-retrieve'
   occasionally fails to finish \(start?) querying.  The timeout is
   used to let the user notice that their operation is not finished.
   It also prevents opening a lot of useless process buffers.
   You will see them when closing Emacs if there is no timeout.

   If you know how to fix the problem with `url-retrieve', please
   let me know or send a pull request at github!

   \(Related bug report in Emacs bug tracker:
   http://debbugs.gnu.org/cgi/bugreport.cgi?bug=11469)"
  :type '(choice (integer :tag "Timeout [ms]" 5000)
                 (const :tag "No timeout" nil))
  :group 'ein)
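
;; A sketch of a user customization (the value is in milliseconds):
;; allow slow servers ten seconds before the `url-retrieve' timeout
;; workaround fires.
;;
;; (setq ein:query-timeout 10000)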

;;; Jupyterhub

(defvar *ein:jupyterhub-servers* (make-hash-table :test #'equal))

(defstruct ein:$jh-conn
  "Data representing a connection to a jupyterhub server."
  url
  version
  user
  token)

(defstruct ein:$jh-user
  "A jupyterhub user, per https://jupyterhub.readthedocs.io/en/latest/_static/rest-api/index.html#/definitions/User."
  name
  admin
  groups
  server
  pending
  last-activity)

(defun ein:get-jh-conn (url)
  "Return the jupyterhub connection registered for URL, if any."
  (gethash url *ein:jupyterhub-servers*))

(defun ein:reset-jh-servers ()
  "Forget all registered jupyterhub servers."
  (setq *ein:jupyterhub-servers* (make-hash-table :test #'equal)))

(defun ein:jupyterhub-url-p (url)
  "Does URL reference a jupyterhub server?
If so, return the connection structure representing the server."
  (let ((parsed (url-generic-parse-url url)))
    (or (gethash (format "http://%s:%s" (url-host parsed) (url-port parsed))
                 *ein:jupyterhub-servers*)
        (gethash (format "https://%s:%s" (url-host parsed) (url-port parsed))
                 *ein:jupyterhub-servers*))))
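
;; A sketch with hypothetical values; registration normally happens in
;; EIN's jupyterhub login code, but a connection can be recorded and
;; looked up by hand:
;;
;; (puthash "http://localhost:8000"
;;          (make-ein:$jh-conn :url "http://localhost:8000" :token "abc123")
;;          *ein:jupyterhub-servers*)
;; (ein:jupyterhub-url-p "http://localhost:8000/user/jane/api/status")
;;   ;=> the `ein:$jh-conn' struct stored above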

(defun ein:jupyterhub-correct-query-url-maybe (url-or-port)
  "Rewrite URL-OR-PORT to go through the jupyterhub user's server.
If URL-OR-PORT does not reference a known jupyterhub server, return
it unchanged."
  (let* ((parsed-url (url-generic-parse-url url-or-port))
         (hostport (format "http://%s:%s" (url-host parsed-url) (url-port parsed-url)))
         (command (url-filename parsed-url)))
    (ein:aif (ein:jupyterhub-url-p hostport)
        (let ((user-server-path (ein:$jh-user-server (ein:$jh-conn-user it))))
          (ein:url hostport
                   user-server-path
                   command))
      url-or-port)))
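
;; A sketch of the rewrite (paths hypothetical): if the hub connection's
;; user struct carries the server path "/user/jane", a bare API url is
;; prefixed with it:
;;
;; (ein:jupyterhub-correct-query-url-maybe "http://localhost:8000/api/kernels")
;;   ;=> "http://localhost:8000/user/jane/api/kernels"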

;;; Functions

(defvar ein:query-running-process-table (make-hash-table :test 'equal)
  "Map a query KEY to the `request-response' object of the query in flight.")

(defun ein:query-prepare-header (url settings &optional securep)
  "Ensure that REST calls to the jupyter server have the correct _xsrf argument.
When URL belongs to a known jupyterhub server, also add the Referer
and Authorization headers the hub expects."
  (let* ((parsed-url (url-generic-parse-url url))
         (cookies (request-cookie-alist (url-host parsed-url)
                                        "/" securep)))
    (ein:aif (assoc-string "_xsrf" cookies)
        (setq settings (plist-put settings :headers
                                  (append (plist-get settings :headers)
                                          (list (cons "X-XSRFTOKEN" (cdr it)))))))
    (ein:aif (ein:jupyterhub-url-p (format "http://%s:%s"
                                           (url-host parsed-url)
                                           (url-port parsed-url)))
        (progn
          (unless (string-equal (ein:$jh-conn-url it)
                                (ein:url (ein:$jh-conn-url it) "hub/login"))
            (setq settings (plist-put settings :headers
                                      (append (plist-get settings :headers)
                                              (list (cons "Referer"
                                                          (ein:url (ein:$jh-conn-url it)
                                                                   "hub/login")))))))
          (when (ein:$jh-conn-token it)
            (setq settings (plist-put settings :headers
                                      (append (plist-get settings :headers)
                                              (list (cons "Authorization"
                                                          (format "token %s"
                                                                  (ein:$jh-conn-token it))))))))))
    settings))
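
;; A sketch (cookie value hypothetical): with an "_xsrf" cookie already
;; stored by `request' for the host, the settings plist comes back with
;; the matching header appended:
;;
;; (ein:query-prepare-header "http://localhost:8888/api/contents"
;;                           '(:type "GET"))
;;   ;=> (:type "GET" :headers (("X-XSRFTOKEN" . "<cookie value>")))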

(defcustom ein:max-simultaneous-queries 100
  "Limit the number of simultaneous queries to the Jupyter server.
If too many calls to `request' are made at once, Emacs may complain
and raise a 'Too Many Files' error.  Setting this variable to a
reasonable value avoids that situation."
  :group 'ein
  :type 'integer)

(defun* ein:query-singleton-ajax (key url &rest settings
                                      &key
                                      (timeout ein:query-timeout)
                                      &allow-other-keys)
  "Call `request' with URL and SETTINGS, tracking the response under KEY.
KEY is compared by `equal'.  If an unfinished request is already
registered under KEY, log the race \(aborting the old request is
disabled because it clobbered cookie jars)."
  (with-local-quit
    (ein:query-gc-running-process-table)
    (when timeout
      (setq settings (plist-put settings :timeout (/ timeout 1000.0))))
    (when (> (hash-table-count ein:query-running-process-table)
             ein:max-simultaneous-queries)
      (loop until (< (hash-table-count ein:query-running-process-table)
                     ein:max-simultaneous-queries)
            do (progn
                 (ein:query-gc-running-process-table)
                 (ein:log 'debug "Stuck waiting for %s processes to complete."
                          (hash-table-count ein:query-running-process-table))
                 (sleep-for 5))))
    (ein:aif (gethash key ein:query-running-process-table)
        (unless (request-response-done-p it)
          ;; Aborting here seems to result in clobbered cookie jars:
          ;; (request-abort it) ; this would also run callbacks
          (ein:log 'info "Race! %s %s" key (request-response-data it))))
    (let ((response (apply #'request
                           (url-encode-url (ein:jupyterhub-correct-query-url-maybe url))
                           (ein:query-prepare-header url settings))))
      (puthash key response ein:query-running-process-table)
      response)))
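
;; A sketch of a caller (URL and callback are hypothetical).  Re-issuing
;; a query under the same KEY while one is still in flight is detected
;; and logged rather than piling up:
;;
;; (ein:query-singleton-ajax
;;  (list 'content-list "http://localhost:8888")
;;  "http://localhost:8888/api/contents"
;;  :type "GET"
;;  :parser #'json-read
;;  :success (cl-function
;;            (lambda (&key data &allow-other-keys)
;;              (message "Got: %S" data))))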

(defun* ein:query-deferred (url &rest settings
                                &key
                                (timeout ein:query-timeout)
                                &allow-other-keys)
  "Call `request-deferred' with URL and SETTINGS and return a deferred object.
TIMEOUT is in milliseconds, as in `ein:query-singleton-ajax'."
  ;; Apply the default timeout the same way as `ein:query-singleton-ajax'
  ;; (`request' expects seconds).
  (when timeout
    (setq settings (plist-put settings :timeout (/ timeout 1000.0))))
  (apply #'request-deferred (url-encode-url url)
         (ein:query-prepare-header url settings)))
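
;; A sketch with deferred chaining (URL hypothetical); the deferred
;; object yields the `request-response' once the query settles:
;;
;; (deferred:$
;;   (ein:query-deferred "http://localhost:8888/api/sessions"
;;                       :type "GET" :parser #'json-read)
;;   (deferred:nextc it
;;     (lambda (response)
;;       (message "HTTP %s" (request-response-status-code response)))))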

(defun ein:query-gc-running-process-table ()
  "Garbage collect completed queries in `ein:query-running-process-table'."
  (maphash
   (lambda (key response)
     (when (request-response-done-p response)
       (remhash key ein:query-running-process-table)))
   ein:query-running-process-table))

(defun ein:get-response-redirect (response)
  "Determine if the query has been redirected.
If so, return the URL the request was redirected to."
  (when (request-response-history response)
    (let ((url (url-generic-parse-url (format "%s" (request-response-url response)))))
      (format "%s://%s:%s"
              (url-type url)
              (url-host url)
              (url-port url)))))
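
;; A sketch: after `request' follows a redirect, only the final
;; scheme://host:port is recovered, e.g.
;;
;; (ein:get-response-redirect response) ;=> "https://localhost:8888"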

;;; Cookie

(defalias 'ein:query-get-cookie 'request-cookie-string)

(provide 'ein-query)
;;; ein-query.el ends here