;;; Guile Knots
;;; Copyright © 2026 Christopher Baines <mail@cbaines.net>
;;;
;;; This file is part of Guile Knots.
;;;
;;; Guile Knots is free software; you can redistribute it and/or
;;; modify it under the terms of the GNU General Public License as
;;; published by the Free Software Foundation; either version 3 of the
;;; License, or (at your option) any later version.
;;;
;;; Guile Knots is distributed in the hope that it will be useful,
;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
;;; General Public License for more details.
;;;
;;; You should have received a copy of the GNU General Public License
;;; along with Guile Knots.  If not, see
;;; <http://www.gnu.org/licenses/>.

(define-module (knots web)
|
|
#:use-module (srfi srfi-1)
|
|
#:use-module (srfi srfi-71)
|
|
#:use-module (ice-9 match)
|
|
#:use-module (ice-9 exceptions)
|
|
#:use-module (web uri)
|
|
#:use-module (web request)
|
|
#:use-module (web response)
|
|
#:use-module (knots)
|
|
#:use-module (knots non-blocking)
|
|
#:use-module (knots resource-pool)
|
|
#:export (make-connection-cache
|
|
call-with-connection-cache
|
|
call-with-cached-connection
|
|
http-fold-requests))
|
|
|
|
(define* (make-connection-cache uri
|
|
max-cached-connections
|
|
#:key (verify-certificate? #t))
|
|
"Create a resource pool of up to MAX-CACHED-CONNECTIONS
|
|
to URI."
|
|
(make-resource-pool
|
|
(lambda ()
|
|
;; Open the socket in a temporary thread so that the blocking
|
|
;; connection attempt does not stall the fiber scheduler.
|
|
(call-with-temporary-thread
|
|
(lambda ()
|
|
(non-blocking-open-socket-for-uri
|
|
uri
|
|
#:verify-certificate? verify-certificate?))))
|
|
max-cached-connections
|
|
#:destructor close-port))
|
|
|
|
(define* (call-with-connection-cache uri
|
|
max-cached-connections
|
|
proc
|
|
#:key (verify-certificate? #t))
|
|
"Create a connection cache for URI with up to MAX-CACHED-CONNECTIONS,
|
|
call @code{(proc cache)}, then destroy the cache and return
|
|
the values returned by PROC."
|
|
(let ((cache (make-connection-cache
|
|
uri
|
|
max-cached-connections
|
|
#:verify-certificate? verify-certificate?)))
|
|
(call-with-values
|
|
(lambda ()
|
|
(proc cache))
|
|
(lambda vals
|
|
(destroy-resource-pool cache)
|
|
(apply values vals)))))
|
|
|
|
(define* (call-with-cached-connection
|
|
cache proc
|
|
#:key (close-connection-on-exception? #t))
|
|
"Check out a connection port from CACHE and call @code{(proc port)},
|
|
returning the result. The port is returned to the cache when PROC
|
|
returns, or closed on exception if CLOSE-CONNECTION-ON-EXCEPTION? is
|
|
true (the default)."
|
|
(with-exception-handler
|
|
(lambda (exn)
|
|
(if (resource-pool-destroy-resource-exception? exn)
|
|
(call-with-cached-connection
|
|
cache
|
|
proc
|
|
#:close-connection-on-exception?
|
|
close-connection-on-exception?)
|
|
(raise-exception exn)))
|
|
(lambda ()
|
|
(with-exception-handler
|
|
(lambda (exn)
|
|
(let ((stack
|
|
(match (fluid-ref %stacks)
|
|
((stack-tag . prompt-tag)
|
|
(make-stack #t
|
|
0 prompt-tag
|
|
0 (and prompt-tag 1)))
|
|
(_
|
|
(make-stack #t)))))
|
|
(raise-exception
|
|
(make-exception
|
|
exn
|
|
(make-knots-exception stack)))))
|
|
(lambda ()
|
|
(call-with-resource-from-pool cache
|
|
(lambda (port)
|
|
(when (port-closed? port)
|
|
(raise-exception
|
|
(make-resource-pool-destroy-resource-exception)))
|
|
(proc port))
|
|
#:destroy-resource-on-exception? close-connection-on-exception?))))
|
|
#:unwind? #t))
|
|
|
|
(define* (http-fold-requests connection-cache proc seed requests
|
|
#:key
|
|
(batch-size 1000))
|
|
"Fold PROC over HTTP request/response pairs using CONNECTION-CACHE
|
|
for connections. PROC is called as
|
|
@code{(proc request response body-port accumulator)} and its return
|
|
value becomes the new accumulator. Requests are sent in batches of
|
|
up to BATCH-SIZE before responses are read (HTTP pipelining).
|
|
|
|
When the server closes the connection mid-batch the remaining requests
|
|
are retried on a fresh connection from the cache."
|
|
|
|
(define &send-error
|
|
(make-exception-type '&send-error &exception '()))
|
|
(define make-send-error
|
|
(record-constructor &send-error))
|
|
(define send-error?
|
|
(exception-predicate &send-error))
|
|
|
|
(define (read-responses port batch result)
|
|
(let loop ((request (car batch))
|
|
(remaining-requests (cdr batch))
|
|
(result result))
|
|
(let ((response
|
|
(with-exception-handler
|
|
(lambda (exn)
|
|
(close-port port)
|
|
#f)
|
|
(lambda ()
|
|
(read-response port))
|
|
#:unwind? #t)))
|
|
(if (not response)
|
|
(values (cons request remaining-requests) result)
|
|
(let* ((body (response-body-port response))
|
|
(new-result (proc request response body result)))
|
|
(if (memq 'close (response-connection response))
|
|
(begin
|
|
(close-port port)
|
|
(values remaining-requests new-result))
|
|
(if (null? remaining-requests)
|
|
(values '() new-result)
|
|
(loop (car remaining-requests)
|
|
(cdr remaining-requests)
|
|
new-result))))))))
|
|
|
|
;; Send up to BATCH-SIZE requests then hand off to read-responses.
|
|
;; If writing fails the connection has dropped; raise &send-error so the
|
|
;; outer loop retries all remaining requests on a fresh connection.
|
|
(define (send-batch port batch)
|
|
(with-exception-handler
|
|
(lambda (exn)
|
|
(close-port port)
|
|
(raise-exception (make-send-error)))
|
|
(lambda ()
|
|
(for-each (lambda (req)
|
|
(write-request req port))
|
|
batch)
|
|
(force-output port))
|
|
#:unwind? #t))
|
|
|
|
(let loop ((remaining-requests requests)
|
|
(result seed))
|
|
(if (null? remaining-requests)
|
|
result
|
|
(let ((next-remaining-requests
|
|
next-result
|
|
(with-exception-handler
|
|
(lambda (exn)
|
|
(if (or (send-error? exn)
|
|
(resource-pool-destroy-resource-exception? exn))
|
|
(values remaining-requests result)
|
|
(raise-exception exn)))
|
|
(lambda ()
|
|
(call-with-resource-from-pool connection-cache
|
|
(lambda (port)
|
|
(if (port-closed? port)
|
|
(raise-exception
|
|
(make-resource-pool-destroy-resource-exception))
|
|
(let ((batch
|
|
pending
|
|
(split-at
|
|
remaining-requests
|
|
(min batch-size (length
|
|
remaining-requests)))))
|
|
(send-batch port batch)
|
|
(let ((remaining-requests
|
|
next-result
|
|
(read-responses port batch result)))
|
|
(values (append remaining-requests pending)
|
|
next-result)))))
|
|
#:destroy-resource-on-exception? #t))
|
|
#:unwind? #t)))
|
|
(loop next-remaining-requests next-result)))))
|