Add snapshot button

This commit is contained in:
heyarne 2020-05-03 13:07:14 +02:00
commit 0043cb6a14
3 changed files with 87 additions and 44 deletions

View file

@ -21,4 +21,4 @@
(dom/render [views/app {:on-faces-detected draw-results}]
(.querySelector js/document "#app")))
(defonce initialize (init))
(defonce app (init))

View file

@ -34,45 +34,76 @@
(resolve result)))))))
;; this function will be repeatedly called on the video stream to detect faces
;; Holds the id returned by the most recent js/requestAnimationFrame call
;; driving the face-detection loop, so the loop can be cancelled later
;; via js/cancelAnimationFrame (e.g. when pausing or unmounting).
(def request-id (atom nil))
(defn detect-faces [model video on-faces-detected]
(-> (.estimateFaces model video)
(.then (fn [predictions]
(on-faces-detected video predictions))))
(js/requestAnimationFrame #(detect-faces model video on-faces-detected)))
(reset! request-id (js/requestAnimationFrame #(detect-faces model video on-faces-detected))))
(defn webcam-facemesh [{:keys [on-webcam-rejected
on-faces-detected]}]
(r/create-class
{:display-name "webcam-facemesh"
on-faces-detected
halt?]}]
(let [state (atom {:video nil
:model nil
:ctx nil})]
(r/create-class
{:display-name "webcam-facemesh"
:reagent-render
(fn [_ _]
[:div.capture-container
[:canvas#result]
[:video#capture]])
:reagent-render
(fn [_ _]
[:div.capture-container
[:canvas#result]
[:video#capture]])
:component-did-mount
(fn [this]
;; this function does the following
;; - set up the tensorflow model
;; - request access to the user's webcam
;; - continuously detect faces in the webcam feed
(let [container (dom/dom-node this)
video (.querySelector container "#capture")
canvas (.querySelector container "#result")
ctx (.getContext canvas "2d")
model (promisify init-model)
stream (promisify init-webcam)]
(-> (js/Promise.all #js [model stream])
(.then (fn [[model stream]]
(println "model and stream initialized")
(js/console.log model stream)
(set! (.-srcObject video) stream)
(set! (.-onloadedmetadata video)
(fn [_]
(.play video)
(set! (.-width canvas) (.-videoWidth video))
(set! (.-height canvas) (.-videoHeight video))
;; detect-faces will continuously be called via requestAnimationFrame
;; on-faces-detected receives the canvas context as first param and detected predictions as second
(detect-faces model video (partial (:on-faces-detected (r/props this)) ctx)))))))))}))
:component-did-update
(fn [this]
;; this function makes sure to pause the video stream and face prediction
;; when the :halt? property is given
(let [{:keys [video model ctx]} @state]
(if (:halt? (r/props this))
(do
(println "Pausing video capture")
(.pause video)
(js/cancelAnimationFrame @request-id))
(do
(println "Resuming video capture")
(.play video)
(detect-faces model video (partial (:on-faces-detected (r/props this)) ctx))))))
:component-did-mount
(fn [this]
;; this function does the following
;; - set up the tensorflow model
;; - request access to the user's webcam
;; - continuously detect faces in the webcam feed
(let [container (dom/dom-node this)
video (.querySelector container "#capture")
canvas (.querySelector container "#result")
ctx (.getContext canvas "2d")
model (promisify init-model)
stream (promisify init-webcam)]
(-> (js/Promise.all #js [model stream])
(.then (fn [[model stream]]
(println "model and stream initialized")
(js/console.log model stream)
(set! (.-srcObject video) stream)
(set! (.-onloadedmetadata video)
(fn [_]
(.play video)
(set! (.-width canvas) (.-videoWidth video))
(set! (.-height canvas) (.-videoHeight video))
;; detect-faces will continuously be called via requestAnimationFrame
;; on-faces-detected receives the canvas context as first param and detected predictions as second
(detect-faces model video (partial (:on-faces-detected (r/props this)) ctx))))
;; make the initialized variables available for
;; component-did-update
(swap! state assoc
:video video
:model model
:ctx ctx))))))
:component-will-unmount
(fn [this]
(js/cancelAnimationFrame @request-id))})))

View file

@ -10,9 +10,8 @@
(def events
{:welcome/continue (fn [db _]
(assoc db :status :running))
:running/capture (fn [db _]
(assoc db :current-capture {:img nil
:predictions nil}))})
:running/snapshot (fn [db result]
(assoc-in db [:snapshots :current] result))})
(defn dispatch [[event data]]
(when-let [handler (events event)]
@ -30,26 +29,39 @@ Seit der globalen Covid19-Pandemie sind wir alle dazu gezwungen, auf physischen
[:p "Ich würde mich freuen, wenn du mir dabei hilfst. Folge dazu einfach den Anweisungen. Das Ergebnis wird hoffentlich eine schöne Sammlung von Webcambildern und 3D-Modellen eurer Köpfe" [:sup "1"] "."]
[:button {:on-click #(dispatch [:welcome/continue])} "Weiter"]])
(defn running [{on-faces-detected :on-faces-detected}]
(defn video-snapshot
  "Grabs the current frame of `video-elem` by drawing it onto an
  off-screen canvas sized to the video's intrinsic dimensions and
  returns the frame as an ImageData object."
  [video-elem]
  (let [width  (.-videoWidth video-elem)
        height (.-videoHeight video-elem)
        canvas (js/document.createElement "canvas")
        ctx    (.getContext canvas "2d")]
    (set! (.-width canvas) width)
    (set! (.-height canvas) height)
    (.drawImage ctx video-elem 0 0 width height)
    (.getImageData ctx 0 0 width height)))
(defn running [{:keys [on-faces-detected halt?]}]
(let [result (atom {:video nil
:predictions nil})]
(fn []
(fn [{:keys [on-faces-detected halt?]}]
[:div.container
[:p "Tippe auf den Button um mir das untere Bild zu schicken."]
[webcam-facemesh {:on-faces-detected (fn [ctx video predictions]
(swap! result assoc
:video video
:predictions predictions)
(on-faces-detected ctx video predictions))}]
[:button {:on-click #(dispatch [:running/capture @result])} "Tip"]])))
(on-faces-detected ctx video predictions))
:halt? halt?}]
[:button {:on-click #(dispatch [:running/snapshot (update @result :video video-snapshot)])} "Cheese"]])))
(defn app [{:keys [on-faces-detected]}]
(let [status (:status @state)]
(let [state @state
status (:status state)
halt? (some? (get-in state [:snapshots :current]))]
[:<>
[welcome-message {:hidden? (not= :welcome-message status)}]
(case status
:permission-rejected [:div "Sad :("]
:running [running {:on-faces-detected on-faces-detected}]
:running [running {:on-faces-detected on-faces-detected
:halt? halt?}]
;; default
nil)]))