TCP transport of large byte arrays with Aleph

I am trying to build an RPC server based on aleph. It passes all the tests, but when the byte array being sent or received becomes large, the bytes seem to get corrupted.
For example, I tried to send a byte array of length 2936, but I only received 1024 bytes at the server.
I followed the example and made my own modifications for nippy. The encoding and decoding are done by the handlers themselves.
;; assumes the usual requires, e.g.
;; [manifold.stream :as s] [manifold.deferred :as d] [aleph.tcp :as tcp]
(defn wrap-duplex-stream
  [s]
  (let [out (s/stream)]
    (s/connect out s)
    (s/splice out s)))

(defn client
  [host port]
  (d/chain (tcp/client {:host host, :port port})
           #(wrap-duplex-stream %)))

(defn start-server
  [handler port]
  (tcp/start-server
    (fn [s info]
      (handler (wrap-duplex-stream s) info))
    {:port port}))
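The symptom makes sense once you remember TCP is a byte stream: with only the raw duplex stream above, each take! yields whatever chunk the transport happened to deliver, not one logical message, so a 2936-byte array can show up as a 1024-byte first chunk. A rough REPL sketch of that behaviour (the echo handler and port are only illustrative, and the chunk sizes are not guaranteed):
(def echo-server
  (start-server (fn [s info] (s/connect s s)) 10000))

(let [c @(client "localhost" 10000)]
  @(s/put! c (byte-array 2936))
  (alength @(s/take! c)))   ;; may be 1024, 2936, or some other partial size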

I finally made my own codec for byte arrays.
It is simple, but figuring out how to use it with gloss and aleph was time consuming.
;; assumes requires/imports along these lines:
;;   [gloss.core :as gloss] [gloss.io :as io] [taoensso.nippy :as nippy]
;;   [gloss.core.protocols :refer [Reader Writer]]
;;   [gloss.data.bytes :refer [dup-bytes]] [byte-streams]
;;   (:import [java.nio ByteBuffer])
(defn buffer->byte-array [buf-seq]
  (byte-streams/to-byte-array buf-seq))

(defn bytes-codec []
  (reify
    Reader
    (read-bytes [this buf-seq]
      (let [buf-seq  (dup-bytes buf-seq)
            byte-arr (buffer->byte-array buf-seq)]
        [true byte-arr nil]))
    Writer
    (sizeof [x]
      nil)
    (write-bytes [x y byte-arr]
      [(ByteBuffer/wrap byte-arr)])))

(def protocol
  (gloss/compile-frame
    (gloss/finite-frame
      :uint32
      (bytes-codec))
    #(nippy/freeze %)
    #(nippy/thaw %)))
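As far as I understand gloss.io, a quick round trip with this protocol shows any nippy-serializable value being framed with a 4-byte length prefix (the map below is just an example):
;; encode prepends the uint32 length of the nippy payload;
;; decode strips it and thaws the bytes back into the original value.
(def encoded (io/encode protocol {:user "alice" :payload (vec (range 1000))}))
(io/decode protocol encoded)
;; => {:user "alice", :payload [0 1 2 ...]}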
(defn wrap-duplex-stream
  [s]
  (let [out (s/stream)]
    (s/connect
      (s/map #(io/encode protocol %) out)
      s)
    (s/splice
      out
      (io/decode-stream s protocol))))

(defn client
  [host port]
  (d/chain (tcp/client {:host host, :port port})
           #(wrap-duplex-stream %)))

(defn start-server
  [handler port]
  (tcp/start-server
    (fn [s info]
      (handler (wrap-duplex-stream s) info))
    {:port port}))
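For completeness, a minimal usage sketch of the framed version (the echo handler, host, and port are made up for illustration): any value nippy can freeze now arrives as a single message, regardless of size.
(def server (start-server (fn [s info] (s/connect s s)) 10000))

(let [c @(client "localhost" 10000)]
  @(s/put! c {:id 1 :payload (vec (range 3000))})
  @(s/take! c))
;; => {:id 1, :payload [0 1 2 ...]}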

Related

Size of uploaded video is 0 on the server, even though the video has a size

I have the following re-frame event handlers with which I’m trying to upload a video to the server:
(reg-event-fx
 :upload-shot-video-server
 (fn [coeffects [_ blob]]
   (let [body (js/FormData.)]
     (.append body "video" blob)
     {:http-xhrio {:method          :post
                   :uri             (str "http://d18a6571c2e5.ngrok.io" "/api/upload-shot-video")
                   :body            body
                   :on-success      [:upload-success]
                   :on-failure      [:upload-error]
                   :response-format (edn/edn-response-format)}})))
(reg-event-fx
 :upload-shot-video
 (fn [coeffects _]
   (prn "uploading video")
   (let [response (js/fetch (-> coeffects :db :shot-video-uri))]
     (try
       (go
         (let [blob (<p! (. (<p! response) blob))]
           (js/console.log "blob is " blob)
           (js/console.log "size of blob is " (.-size blob))
           (dispatch [:upload-shot-video-server blob])))
       (catch js/Error e (js/console.log "Error is " e)))
     {})))
I have a handler on the server to take the input stream and save it as a file:
(defn upload-shot-video [req]
  (prn "uploading video")
  (prn "video is! " (-> req :params))
  (prn "video is " (-> req :body))
  (clojure.java.io/copy (-> req :body) (clojure.java.io/file "./resources/public/video.mov"))
  (let [filename (str (rand-str 100) ".mov")]
    (s3/put-object
      :bucket-name "humboi-videos"
      :key filename
      :file "./resources/public/video.mov"
      :access-control-list {:grant-permission ["AllUsers" "Read"]})
    (db/add-video {:name (-> req :params :name)
                   :uri  (str "https://humboi-videos.s3-us-west-1.amazonaws.com/" filename)}))
  (r/response {:res "okay!"}))
However, the video that's being saved as a file is 0 bytes, even though the video blob has a non-zero size.
How can I fix this?
Could it be your server refusing large uploads? I was using org.httpkit.server, and it would silently refuse POSTs with files above 8 MB. I solved it like this:
(server/run-server app {:port your-port-number :max-body 128000000}) ;; 128 MB

How do I catch the socket error in the TCP server of cl-async package?

How do I catch the unexpected disconnection of the TCP socket with a cl-async tcp-server? For example, the server below cannot handle the situation where the client forcefully closes the telnet terminal. I guess I should add the handler after the :event-cb keyword, but I don't know how to combine it with as:signal-handler.
(require 'cl-async)
(require 'babel)

(defun my-tcp-server ()
  (format t "Starting server.~%")
  (as:tcp-server
   nil 8888 ; nil means 0.0.0.0 to listen for any address
   ;; read-cb
   (lambda (socket data)
     (let ((data-str ; stores the received data as a utf8 string
             (handler-case (babel:octets-to-string data :encoding :utf-8)
               (babel-encodings:invalid-utf8-continuation-byte (err)
                 (declare (ignore err))
                 (format nil "^#~%")))))
       ;; exits if "bye" was received
       (cond ((equal "bye" (string-right-trim '(#\Return #\Newline) data-str))
              (as:close-socket socket)
              (format t "Client disconnected.~%"))
             (t (format t "~a" data-str) ; echo on the server side
                (as:write-socket-data socket "Send to server > ")))))
   ;; handle SIGINT
   :event-cb (as:signal-handler 2 (lambda (sig)
                                    (declare (ignore sig))
                                    (as:free-signal-handler 2)
                                    (as:exit-event-loop)))
   :connect-cb (lambda (socket)
                 (format t "Client connected.~%")
                 (as:write-socket-data socket "Send to server > "))))

(as:start-event-loop #'my-tcp-server)

ClojureScript file preloader - function or pattern to emulate promise?

I'm trying to create a file preloader within ClojureScript. My idea was a pattern like this:
(def urls (atom []))
(def loaded-resources (atom []))
(def all-resources (promise))

(defn loading-callback []
  (if (= (count @urls) (count @loaded-resources))
    (deliver all-resources @loaded-resources)))
;; fill urls array
;; start ajax-loading with loading-callback on success
That way my main function could carry on until it needed the resources and then wait for them, which works well in Clojure.
Unfortunately, promises don't exist in ClojureScript, so how can I work around that? There's promesa, which brings promises to CLJS on top of core.async channels, but it only offers future-like promises that wait for a single function to execute, which won't suffice for my needs (at least not in the way I was thinking about it yesterday...).
Any suggestions to solve this issue? Maybe use a completely different pattern? I want to keep the code as simple as possible to convince people in my team to try out CLJ/S.
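For reference, a minimal sketch of the blocking JVM-Clojure version of the pattern described above; load-resource! is a hypothetical stand-in for whatever ajax call is used:
;; JVM Clojure only: promise/deliver/deref are not available in ClojureScript.
(def urls (atom ["a.png" "b.png" "c.png"]))
(def loaded-resources (atom []))
(def all-resources (promise))

(defn loading-callback [data]
  (swap! loaded-resources conj data)
  (when (= (count @urls) (count @loaded-resources))
    (deliver all-resources @loaded-resources)))

;; (doseq [url @urls] (load-resource! url loading-callback)) ;; hypothetical loader

(defn main []
  (do-terrific-stuff @all-resources)) ;; deref blocks until delivered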
EDIT:
After Alan's second idea:
(def urls (atom []))
(def loaded-resources (atom []))

(defn loading-callback [data]
  (swap! loaded-resources conj data))

(defn load! [post-loading-fn]
  (add-watch loaded-resources :watch-loading
             (fn [_ _ _ cur]
               (if (= (count cur) (count @urls)) (post-loading-fn))))
  ;; init ajax loading
  )

(defn init []
  ;; fill urls array
  (load! main))

(defn main []
  (do-terrific-stuff @loaded-resources))
Meanwhile I had tried to use core.async:
(def urls (atom []))
(def loaded-resources (atom []))
(def resource-chan (chan))

(defn loading-callback [data]
  (go (>! resource-chan data)))

;; fill url array from main

(defn load! []
  ;; init ajax loading
  (go-loop []
    (when-not (= (count @loaded-resources) (count @urls))
      (swap! loaded-resources conj (<! resource-chan))
      (recur))))
Not sure which version is better.
I can think of 2 approaches.
1. Change all-resources to another atom, initialized to nil. Poll it 2x-5x per second until it is not nil and has the "delivered" result.
2. Use add-watch to register a callback function to execute when the value changes. This takes the place of blocking until the value is delivered. It is described here: http://clojuredocs.org/clojure.core/add-watch
They show a good example:
(def a (atom {}))

(add-watch a :watcher
           (fn [key atom old-state new-state]
             (prn "-- Atom Changed --")
             (prn "key" key)
             (prn "atom" atom)
             (prn "old-state" old-state)
             (prn "new-state" new-state)))

(reset! a {:foo "bar"})
;; "-- Atom Changed --"
;; "key" :watcher
;; "atom" #<Atom#4b020acf: {:foo "bar"}>
;; "old-state" {}
;; "new-state" {:foo "bar"}
;; {:foo "bar"}
Assuming your load-resource function returns a channel (as cljs-http/get does):
In clj, all you need to do is hold on to the channels to do a "wait-all".
(let [cs  (doall (map load-resource urls)) ;; initiate the get
      ...                                  ;; other initialisation
      res (map <!! cs)]                    ;; wait-all for the resources
  (do-other-things res))
In cljs, you can accumulate the responses before you continue:
(go
  (let [res (atom [])]
    (doseq [c cs]
      (swap! res conj (<! c)))
    (do-other-things @res)))
JavaScript is a single-threaded environment, so there is no blocking wait.
If you wish to request multiple resources and continue only once they have all been served, I recommend using core.async, and especially pipeline-async. It has a knob to fine-tune the parallelism of the asynchronous requests. Here is idiomatic ClojureScript code to achieve what you want:
(ns example.core
  (:require [cljs.core.async :refer [chan take! put! close! pipeline-async]
             :as async]))

(defn load-resources [urls on-resources]
  (let [urls-ch      (chan (count urls))
        resources-ch (chan)]
    ;; Create pipeline:
    (pipeline-async 10 ;; have at most 10 requests in flight at
                       ;; the same time, fine-tune as desired
                    resources-ch
                    (fn [url done-ch]
                      ;; Pseudo code:
                      (request-resource
                       url
                       (fn [loaded-resource]
                         (put! done-ch loaded-resource)
                         ;; pipeline-async expects the result channel
                         ;; to be closed when this request is done:
                         (close! done-ch))))
                    urls-ch)
    ;; Eagerly aggregate results until resources-ch closes, then call back:
    (take! (async/into [] resources-ch) on-resources)
    ;; Start the party by putting all urls onto urls-ch
    ;; and then close it:
    (async/onto-chan urls-ch urls)))
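A usage sketch (the URLs and callback body are illustrative, and request-resource above is still pseudo code):
(load-resources ["/img/a.png" "/img/b.png" "/img/c.png"]
                (fn [resources]
                  ;; called once, with all loaded resources in a vector
                  (js/console.log "loaded" (count resources) "resources")))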

Clojure - core.async interface for apache kafka

I am using clj-kafka, and I am trying to make a core.async interface to it in the REPL.
I am getting some messages, but my structure feels wrong: I either cannot stop receiving messages, or have to launch the go block again to receive more messages.
Here is my attempt:
(defn consume [topic]
  (let [consume-chan (chan)]
    (with-resource [c (consumer config)]
      shutdown
      (go (doseq [m (messages c "test")]
            (>! consume-chan m)))) ;; should I check the return value?
    consume-chan)) ;; is this the right place to return a channel?
(def consume-chan (consume "test"))
;;(close! consume-chan)

(go (>! consume-chan "hi")) ;; manual test, but I have some messages in Kafka already

(def cons-ch
  (go
    (with-resource [c (consumer config)]
      shutdown
      (doseq [m (messages c "test")]
        (>! consume-chan m))))) ;; should I check something here?
;;(close! cons-ch)
(def go-ch
  (go-loop []
    (if-let [km (<! consume-chan)]
      (do (println "Got a value in this loop:" km)
          (recur))
      (do (println "Stop recurring - channel closed")))))
;;(close! go-ch)
How do I consume a lazy sequence of messages with a core.async interface?
Here's what I would do:
>! returns false and <! returns nil when the channel is closed, so make sure that the loop exits when this happens - that way you can easily end the loop from the outside by closing the channel.
Use a try/catch to check for exceptions inside the go block, and make any exception the return value so that they don't get lost.
Check for exceptions on read values, to catch anything from inside the channel.
The go blocks return a channel, and the return value of the code inside the block (like the exceptions from above) will be put on the channel. Check these channels for exceptions, possibly to rethrow.
You can now write to a channel like this:
;; assumes a require like [clojure.core.async :as a]
(defn write-seq-to-channel
  [channel values-seq]
  (a/go
    (try
      (loop [values values-seq]
        (when (seq values)
          (when (a/>! channel (first values))
            (recur (rest values)))))
      (catch Throwable e
        e))))
and you read like this:
(defn read-from-channel-and-print
  [channel]
  (a/go
    (try
      (loop []
        (let [value (a/<! channel)]
          (when value
            (when (instance? Throwable value)
              (throw value))
            (println "Value read:" value)
            (recur))))
      (catch Throwable e
        e))))
You will now have two channels, so use something like alts! or alts!! to check for your loops exiting. Close the channel when you are done.
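Putting the pieces together for the Kafka case might look roughly like this; a sketch that reuses consumer, config, and messages from the question, with an arbitrary buffer size:
(with-resource [c (consumer config)]
  shutdown
  (let [ch     (a/chan 16)
        writer (write-seq-to-channel ch (messages c "test"))
        reader (read-from-channel-and-print ch)]
    ;; wait until either loop finishes (or returns an exception), then clean up
    (let [[v _] (a/alts!! [writer reader])]
      (when (instance? Throwable v)
        (println "Loop failed:" v)))
    (a/close! ch)))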

Using http-kit long polling with core.async channels

I have a long-running process that returns a core.async channel with the result on it when the process has finished.
Now I'd like to return that result using long-polling with http-kit.
Unfortunately I'm a bit confused about what the right way to do this is.
Currently I have a handler (hooked up to a route) that initiates the processing call and sends the result when done:
(defn handler
  [request]
  (let [c (process request)] ;; long running process that returns a channel
    (http/with-channel request channel
      (http/send! channel {:status 200
                           :body   (<!! (go (<! c)))})
      (http/on-close channel (fn [_] (async/close! c))))))
It works, but I'm unsure if this is the right way.
EDIT: since <!! is blocking, I'm now trying a non-blocking variant in a go-loop:
(defn handler
  [request]
  (let [c (process request)]
    (http/with-channel request channel
      (async/go-loop [v (<! c)]
        (http/send! channel {:status 200
                             :body   v}))
      (http/on-close channel (fn [_] (async/close! c))))))
Why not send on the channel in the go block?
(http/with-channel request channel
  (go (http/send! channel (<! c))))
<!! is blocking, so there is no real advantage in your code over just calling <!! on c directly in the handler:
(defn handler
  [request]
  (let [c (process request)] ;; long running process that returns a channel
    {:status 200
     :body   (<!! c)}))
Edit in response to question update: The updated code works - this is a fully functioning namespace which works for me:
(ns async-resp.core
  (:require [org.httpkit.server :as http]
            [clojure.core.async :as async :refer (<! >! go chan go-loop close! timeout)]))

(defn process
  [_]
  (let [ch (chan)]
    (go
      (<! (timeout 5000))
      (>! ch "TEST"))
    ch))

(defn test-handler
  [request]
  (let [c (process request)]
    (http/with-channel request channel
      (go-loop [v (<! c)]
        (http/send! channel {:status 200
                             :body   v}))
      (http/on-close channel (fn [_] (close! c))))))

(defn run
  []
  (http/run-server test-handler {}))
At the time of writing, though, I had to manually add the tools.analyzer.jvm dependency to project.clj, as I got compilation failures using core.async as-is. Check that you're running the latest core.async and analyzer.
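If it helps, a quick way to exercise the namespace above from the REPL (assuming http-kit's default port of 8090; the stop-server! name is just illustrative):
;; run-server returns a function that stops the server when called.
(def stop-server! (run))
;; curl http://localhost:8090/   ;; responds with "TEST" after ~5 seconds
;; (stop-server!)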
